// liblingo/package/management.rs

1use colored::Colorize;
2use log::error;
3use versions::{Requirement, Versioning};
4
5use crate::util::sha1dir;
6use crate::{GitCloneAndCheckoutCap, GitUrl};
7use std::collections::HashMap;
8use std::fs;
9use std::fs::File;
10use std::io::Write;
11use std::path::{Path, PathBuf};
12use std::str::FromStr;
13use url::{ParseError, Url};
14
15use crate::package::lock::{PackageLockSource, PackageLockSourceType};
16use crate::package::{
17    lock::DependencyLock,
18    target_properties::LibraryTargetProperties,
19    tree::{DependencyTreeNode, GitLock, PackageDetails, ProjectSource},
20    ConfigFile, LIBRARY_DIRECTORY,
21};
22use crate::util::errors::LingoError;
23
/// Resolves, fetches and locks the dependencies of a project.
///
/// Built by [`DependencyManager::from_dependencies`], which either restores
/// state from an existing `Lingo.lock` file or pulls everything fresh.
#[derive(Default)]
pub struct DependencyManager {
    /// queue of packages that need processing
    pulling_queue: Vec<(String, PackageDetails)>,
    /// the flattened dependency tree with the packages selected from the dependency tree
    lock: DependencyLock,
}
31
/// Recursively copies every file and directory from `src` into `dst`,
/// creating `dst` (and any missing parents) first.
pub fn copy_dir_all(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> std::io::Result<()> {
    let destination = dst.as_ref();
    fs::create_dir_all(destination)?;

    for entry in fs::read_dir(src.as_ref())? {
        let entry = entry?;
        let target = destination.join(entry.file_name());
        if entry.file_type()?.is_dir() {
            // descend into sub-directories
            copy_dir_all(entry.path(), target)?;
        } else {
            // plain file: copy the bytes across
            fs::copy(entry.path(), target)?;
        }
    }
    Ok(())
}
47
48impl TryFrom<&PackageLockSource> for PackageDetails {
49    type Error = ParseError;
50
51    fn try_from(value: &PackageLockSource) -> Result<Self, Self::Error> {
52        let url = &value.uri;
53        Ok(PackageDetails {
54            version: Default::default(),
55            mutual_exclusive: match value.source_type {
56                PackageLockSourceType::REGISTRY => {
57                    todo!()
58                }
59                PackageLockSourceType::GIT => ProjectSource::Git(Url::from_str(url)?),
60                PackageLockSourceType::TARBALL => ProjectSource::TarBall(Url::from_str(url)?),
61                PackageLockSourceType::PATH => ProjectSource::Path(PathBuf::from(url)),
62            },
63            git_tag: value.rev.clone().map(GitLock::Rev),
64            git_rev: value.rev.clone(),
65        })
66    }
67}
68
69impl PackageDetails {
70    /// this function fetches the specified location and places it at the given location
71    pub fn fetch(
72        &mut self,
73        library_path: &PathBuf,
74        clone: &GitCloneAndCheckoutCap,
75    ) -> anyhow::Result<()> {
76        match &self.mutual_exclusive {
77            ProjectSource::Path(path_buf) => {
78                let src = fs::canonicalize(path_buf)?;
79                let dst = fs::canonicalize(library_path)?;
80                Ok(copy_dir_all(src, dst)?)
81            }
82            ProjectSource::Git(git_url) => {
83                self.git_rev = clone(
84                    GitUrl::from(git_url.as_str()),
85                    library_path,
86                    self.git_tag.clone(),
87                )?;
88                Ok(())
89            }
90            _ => Ok(()),
91        }
92    }
93}
94
95impl DependencyManager {
96    pub fn from_dependencies(
97        dependencies: Vec<(String, PackageDetails)>,
98        target_path: &Path,
99        git_clone_and_checkout_cap: &GitCloneAndCheckoutCap,
100    ) -> anyhow::Result<DependencyManager> {
101        // create library folder
102        let library_path = target_path.join(LIBRARY_DIRECTORY);
103        fs::create_dir_all(&library_path)?;
104
105        let mut manager;
106        let mut lock: DependencyLock;
107        let lock_file = target_path.join("../Lingo.lock");
108
109        // checks if a Lingo.lock file exists
110        if lock_file.exists() {
111            // reads and parses Lockfile
112            lock = toml::from_str::<DependencyLock>(&fs::read_to_string(lock_file)?)
113                .expect("cannot parse lock");
114
115            // if a lock file is present it will load the dependencies from it and checks
116            // integrity of the build directory
117            if let Ok(()) = lock.init(&target_path.join("lfc_include"), git_clone_and_checkout_cap)
118            {
119                return Ok(DependencyManager {
120                    pulling_queue: vec![],
121                    lock,
122                });
123            }
124        }
125
126        // creates a new dependency manager object
127        manager = DependencyManager::default();
128
129        // starts recursively pulling dependencies
130        let root_nodes = manager.pull(
131            dependencies.clone(),
132            target_path,
133            git_clone_and_checkout_cap,
134        )?;
135
136        // flattens the dependency tree and makes the package selection
137        let selection = DependencyManager::flatten(root_nodes.clone())?;
138
139        // creates a lock file struct from the selected packages
140        lock = DependencyLock::create(selection);
141
142        // writes the lock file down
143        let mut lock_file = File::create(target_path.join("../Lingo.lock"))?;
144
145        println!("{:?}", lock.dependencies);
146        let serialized_toml = toml::to_string(&lock).expect("cannot generate toml");
147
148        lock_file.write_all(serialized_toml.as_ref())?;
149
150        // moves the selected packages into the include folder
151        let include_folder = target_path.join("lfc_include");
152        lock.create_library_folder(&library_path, &include_folder)
153            .expect("creating lock folder failed");
154
155        // saves the lockfile with the dependency manager
156        manager.lock = lock;
157
158        Ok(manager)
159    }
160
161    pub fn pull(
162        &mut self,
163        mut dependencies: Vec<(String, PackageDetails)>,
164        root_path: &Path,
165        git_clone_and_checkout_cap: &GitCloneAndCheckoutCap,
166    ) -> anyhow::Result<Vec<DependencyTreeNode>> {
167        let mut sub_dependencies = vec![];
168        self.pulling_queue.append(&mut dependencies);
169        let sub_dependency_path = root_path.join("libraries");
170        //fs::remove_dir_all(&sub_dependency_path)?;
171        fs::create_dir_all(&sub_dependency_path)?;
172
173        while !self.pulling_queue.is_empty() {
174            if let Some((package_name, package_details)) = self.pulling_queue.pop() {
175                print!("{} {} ...", "Cloning".green().bold(), package_name);
176                let node = match self.non_recursive_fetching(
177                    &package_name,
178                    package_details,
179                    &sub_dependency_path,
180                    git_clone_and_checkout_cap,
181                ) {
182                    Ok(value) => value,
183                    Err(e) => {
184                        return Err(e);
185                    }
186                };
187
188                sub_dependencies.push(node);
189            } else {
190                break;
191            }
192        }
193
194        //dependencies
195        Ok(sub_dependencies)
196    }
197
198    pub(crate) fn non_recursive_fetching(
199        &mut self,
200        name: &str,
201        mut package: PackageDetails,
202        base_path: &Path,
203        git_clone_and_checkout_cap: &GitCloneAndCheckoutCap,
204    ) -> anyhow::Result<DependencyTreeNode> {
205        // creating the directory where the library will be housed
206        let library_path = base_path; //.join("libs");
207                                      // place where to drop the source
208        let temporary_path = library_path.join("temporary");
209        let _ = fs::remove_dir_all(&temporary_path);
210        let _ = fs::create_dir_all(&temporary_path);
211
212        // directory where the dependencies will be dropped
213
214        // creating the necessary directories
215        fs::create_dir_all(library_path)?;
216        fs::create_dir_all(&temporary_path)?;
217
218        // cloning the specified package
219        package.fetch(&temporary_path, git_clone_and_checkout_cap)?;
220
221        let hash = sha1dir::checksum_current_dir(&temporary_path, false);
222        let include_path = library_path.join(hash.to_string());
223
224        let lingo_toml_text = fs::read_to_string(temporary_path.clone().join("Lingo.toml"))?;
225        let read_toml = toml::from_str::<ConfigFile>(&lingo_toml_text)?.to_config(&temporary_path);
226
227        println!(" {}", read_toml.package.version);
228
229        let config = match read_toml.library {
230            Some(value) => value,
231            None => {
232                // error we expected a library here
233                return Err(
234                    LingoError::NoLibraryInLingoToml(library_path.display().to_string()).into(),
235                );
236            }
237        };
238
239        if !package.version.matches(&read_toml.package.version) {
240            error!("version mismatch between specified location and requested version requirement");
241            return Err(LingoError::LingoVersionMismatch(format!(
242                "requested version {} got version {}",
243                package.version, read_toml.package.version
244            ))
245            .into());
246        }
247
248        let dependencies = vec![];
249
250        for dep in read_toml.dependencies {
251            self.pulling_queue.push(dep);
252        }
253
254        fs::create_dir_all(&include_path)?;
255        copy_dir_all(&temporary_path, &include_path)?;
256
257        Ok(DependencyTreeNode {
258            name: name.to_string(),
259            package: package.clone(),
260            location: include_path.clone(),
261            include_path: config.location.clone(),
262            dependencies: dependencies.clone(),
263            hash: hash.to_string(),
264            version: read_toml.package.version.clone(),
265            properties: config.properties,
266        })
267    }
268
269    fn flatten(root_nodes: Vec<DependencyTreeNode>) -> anyhow::Result<Vec<DependencyTreeNode>> {
270        // implementation idea:
271        // 1.   we collect all the version requirements for packages => are the different
272        //      constraints satisfiable ?
273        // 2.   we collect all the different sources
274        // 3.   finding the set of sources that satisfies the set of version constraints
275        // 4.   pick the newest version from that set
276        // TODO: later we can probably do this in one pass
277
278        let mut constraints = HashMap::<&String, Vec<Requirement>>::new();
279        let mut sources = HashMap::<&String, Vec<&DependencyTreeNode>>::new();
280
281        // this basically flattens the
282        let mut nodes = Vec::new();
283        for node in root_nodes {
284            let mut children = node.aggregate();
285            nodes.append(&mut children);
286        }
287
288        for node in &nodes {
289            let constraint = &node.package.version;
290
291            constraints
292                .entry(&node.name)
293                .and_modify(|value| {
294                    value.push(constraint.clone());
295                })
296                .or_insert(vec![constraint.clone()]);
297
298            sources
299                .entry(&node.name)
300                .and_modify(move |value| {
301                    value.push(node);
302                })
303                .or_insert(vec![&node]);
304        }
305
306        let merged: Vec<(&String, Vec<Requirement>, Vec<&DependencyTreeNode>)> = constraints
307            .into_iter()
308            .filter_map(move |(key, requirements)| {
309                sources
310                    .get_mut(&key)
311                    .map(move |location| (key, requirements, location.clone()))
312            })
313            .collect();
314
315        let mut selection = Vec::new();
316
317        for (_, requirements, location) in merged {
318            //TODO: replace this in the future by first merging all the requirements
319            // (determine upper and lower bound)
320
321            let mut filtered_results: Vec<&DependencyTreeNode> = location
322                .into_iter()
323                .filter(|location| {
324                    let filter = |version: &Versioning| {
325                        for requirement in &requirements {
326                            if !requirement.matches(version) {
327                                return false;
328                            }
329                        }
330                        true
331                    };
332
333                    filter(&location.version)
334                })
335                .collect();
336
337            if filtered_results.is_empty() {
338                error!("no viable package was found that fulfills all the requirements");
339            }
340
341            filtered_results.sort_by_key(|value| value.version.clone());
342
343            let package = filtered_results
344                .last()
345                .expect("There should be at least one viable package remaining!");
346
347            selection.push((*package).clone());
348        }
349
350        Ok(selection)
351    }
352
    /// Aggregates the target properties of all locked dependencies into a
    /// single [`LibraryTargetProperties`], delegating to the lock file state.
    pub fn get_target_properties(&self) -> anyhow::Result<LibraryTargetProperties> {
        self.lock.aggregate_target_properties()
    }
356}