use crate::{
lock::Lock,
manifest::{
BuildProfile, ConfigTimeConstant, Dependency, ManifestFile, MemberManifestFiles,
PackageManifest, PackageManifestFile,
},
CORE, PRELUDE, STD,
};
use anyhow::{anyhow, bail, Context, Error, Result};
use forc_util::{
default_output_directory, find_file_name, git_checkouts_directory, kebab_to_snake_case,
print_on_failure, print_on_success, user_forc_directory,
};
use fuel_abi_types::program_abi;
use petgraph::{
self,
visit::{Bfs, Dfs, EdgeRef, Walker},
Directed, Direction,
};
use serde::{Deserialize, Serialize};
use std::{
collections::{hash_map, BTreeMap, BTreeSet, HashMap, HashSet},
fmt,
fs::{self, File},
hash::{Hash, Hasher},
path::{Path, PathBuf},
str::FromStr,
};
use sway_core::{
asm_generation::ProgramABI,
decl_engine::DeclEngine,
fuel_prelude::{
fuel_crypto,
fuel_tx::{self, Contract, ContractId, StorageSlot},
},
language::{
lexed::LexedProgram,
parsed::{ParseProgram, TreeType},
ty,
},
semantic_analysis::namespace,
source_map::SourceMap,
BuildTarget, CompileResult, CompiledBytecode, Engines, FinalizedEntry, TypeEngine,
};
use sway_error::error::CompileError;
use sway_types::Ident;
use sway_utils::constants;
use tracing::{info, warn};
use url::Url;
type GraphIx = u32;
type Node = Pinned;
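/// An edge in the package graph.
///
/// `name` is the dependency name as declared in the parent's manifest, which
/// may differ from the dependency package's own name when `package = "..."`
/// is used in the declaration.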
#[derive(PartialEq, Eq, Clone, Debug)]
pub struct Edge {
pub name: String,
pub kind: DepKind,
}
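/// The kind of a dependency edge: a library import, or a contract dependency
/// deployed with the given salt.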
#[derive(PartialEq, Eq, Clone, Debug)]
pub enum DepKind {
Library,
Contract { salt: fuel_tx::Salt },
}
impl fmt::Display for DepKind {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
DepKind::Library => write!(f, "library"),
DepKind::Contract { .. } => write!(f, "contract"),
}
}
}
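/// The package dependency graph: nodes are pinned packages and edges are the
/// dependency declarations between them.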
pub type Graph = petgraph::stable_graph::StableGraph<Node, Edge, Directed, GraphIx>;
pub type EdgeIx = petgraph::graph::EdgeIndex<GraphIx>;
pub type NodeIx = petgraph::graph::NodeIndex<GraphIx>;
pub type ManifestMap = HashMap<PinnedId, PackageManifestFile>;
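/// A unique identifier for a pinned package, produced by hashing the package
/// name together with its pinned source. Displayed as a 16-digit hex string.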
#[derive(Copy, Clone, Debug, Eq, Hash, PartialEq, Deserialize, Serialize)]
pub struct PinnedId(u64);
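/// The result of successfully compiling a single package: bytecode, ABI,
/// storage slots, entry points and related metadata.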
#[derive(Debug, Clone)]
pub struct BuiltPackage {
pub build_target: BuildTarget,
pub json_abi_program: ProgramABI,
pub storage_slots: Vec<StorageSlot>,
pub bytecode: Vec<u8>,
pub entries: Vec<FinalizedEntry>,
pub tree_type: TreeType,
source_map: SourceMap,
pub pkg_name: String,
pub decl_engine: DeclEngine,
pub manifest_file: PackageManifestFile,
}
pub type BuiltWorkspace = HashMap<String, BuiltPackage>;
#[derive(Debug)]
pub enum Built {
Package(Box<BuiltPackage>),
Workspace(BuiltWorkspace),
}
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd, Deserialize, Serialize)]
pub struct Pkg {
pub name: String,
pub source: Source,
}
#[derive(Clone, Debug, Eq, Hash, PartialEq, Deserialize, Serialize)]
pub struct Pinned {
pub name: String,
pub source: SourcePinned,
}
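/// The source of a package before pinning: a workspace member, a git
/// repository, a local path, or a registry (not yet supported).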
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd, Deserialize, Serialize)]
pub enum Source {
Member(PathBuf),
Git(SourceGit),
Path(PathBuf),
Registry(SourceRegistry),
}
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd, Deserialize, Serialize)]
pub struct SourceGit {
pub repo: Url,
pub reference: GitReference,
}
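/// A reference used to select a commit within a git repository: a branch, a
/// tag, an explicit revision, or the remote's default branch.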
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd, Deserialize, Serialize)]
pub enum GitReference {
Branch(String),
Tag(String),
Rev(String),
DefaultBranch,
}
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd, Deserialize, Serialize)]
pub struct SourceRegistry {
pub version: semver::Version,
}
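/// A git source pinned to a specific commit.
///
/// Displayed and parsed in the form `git+<repo-url>?<reference>#<commit-hash>`,
/// e.g. (hypothetical URL) `git+https://github.com/org/repo?tag=v0.1.0#<40-hex-hash>`.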
#[derive(Clone, Debug, Eq, Hash, PartialEq, Deserialize, Serialize)]
pub struct SourceGitPinned {
pub source: SourceGit,
pub commit_hash: String,
}
#[derive(Clone, Debug, Eq, Hash, PartialEq, Deserialize, Serialize)]
pub struct SourcePathPinned {
pub path_root: PinnedId,
}
#[derive(Clone, Debug, Eq, Hash, PartialEq, Deserialize, Serialize)]
pub struct SourceRegistryPinned {
pub source: SourceRegistry,
pub version: semver::Version,
}
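/// The pinned counterpart of [`Source`], carrying enough information to
/// retrieve exactly the same source again.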
#[derive(Clone, Debug, Eq, Hash, PartialEq, Deserialize, Serialize)]
pub enum SourcePinned {
Member,
Git(SourceGitPinned),
Path(SourcePathPinned),
Registry(SourceRegistryPinned),
}
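/// A plan for building one or more packages: the pinned dependency graph, a
/// map from pinned IDs to their manifests, and the topological order in which
/// the packages must be compiled.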
#[derive(Clone, Debug)]
pub struct BuildPlan {
graph: Graph,
manifest_map: ManifestMap,
compilation_order: Vec<NodeIx>,
}
#[derive(Clone, Debug)]
pub struct PinnedIdParseError;
#[derive(Clone, Debug)]
pub struct SourcePathPinnedParseError;
#[derive(Clone, Debug)]
pub enum SourceGitPinnedParseError {
Prefix,
Url,
Reference,
CommitHash,
}
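/// The HEAD of a fetched checkout as a `(commit-hash, commit-time-in-seconds)` pair.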
type HeadWithTime = (String, i64);
#[derive(Serialize, Deserialize)]
pub struct GitSourceIndex {
pub git_reference: GitReference,
pub head_with_time: HeadWithTime,
}
#[derive(Default, Clone)]
pub struct PkgOpts {
pub path: Option<String>,
pub offline: bool,
pub terse: bool,
pub locked: bool,
pub output_directory: Option<String>,
}
#[derive(Default, Clone)]
pub struct PrintOpts {
pub ast: bool,
pub dca_graph: bool,
pub finalized_asm: bool,
pub intermediate_asm: bool,
pub ir: bool,
}
#[derive(Default, Clone)]
pub struct MinifyOpts {
pub json_abi: bool,
pub json_storage_slots: bool,
}
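/// The name of a config-time constant.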
type ConstName = String;
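/// Config-time constants to inject into the given packages at build time.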
type ConstInjectionMap = HashMap<Pinned, Vec<(ConstName, ConfigTimeConstant)>>;
#[derive(Default)]
pub struct BuildOpts {
pub pkg: PkgOpts,
pub print: PrintOpts,
pub minify: MinifyOpts,
pub binary_outfile: Option<String>,
pub debug_outfile: Option<String>,
pub build_target: BuildTarget,
pub build_profile: Option<String>,
pub release: bool,
pub time_phases: bool,
pub tests: bool,
pub inject_map: ConstInjectionMap,
}
impl GitSourceIndex {
pub fn new(time: i64, git_reference: GitReference, commit_hash: String) -> GitSourceIndex {
GitSourceIndex {
git_reference,
head_with_time: (commit_hash, time),
}
}
}
impl Edge {
pub fn new(name: String, kind: DepKind) -> Edge {
Edge { name, kind }
}
}
impl BuiltPackage {
pub fn write_bytecode(&self, path: &Path) -> Result<()> {
fs::write(path, &self.bytecode)?;
Ok(())
}
pub fn write_debug_info(&self, path: &Path) -> Result<()> {
        let source_map_json =
            serde_json::to_vec(&self.source_map).context("JSON serialization failed")?;
fs::write(path, source_map_json)?;
Ok(())
}
pub fn write_output(
&self,
minify: MinifyOpts,
pkg_name: &str,
output_dir: &Path,
) -> Result<()> {
if !output_dir.exists() {
fs::create_dir_all(output_dir)?;
}
let bin_path = output_dir.join(pkg_name).with_extension("bin");
self.write_bytecode(&bin_path)?;
let json_abi_program_stem = format!("{}-abi", pkg_name);
let json_abi_program_path = output_dir
.join(json_abi_program_stem)
.with_extension("json");
match &self.json_abi_program {
ProgramABI::Fuel(json_abi_program) => {
if !json_abi_program.functions.is_empty() {
let file = File::create(json_abi_program_path)?;
let res = if minify.json_abi {
serde_json::to_writer(&file, &json_abi_program)
} else {
serde_json::to_writer_pretty(&file, &json_abi_program)
};
res?
}
}
ProgramABI::Evm(json_abi_program) => {
if !json_abi_program.is_empty() {
let file = File::create(json_abi_program_path)?;
let res = if minify.json_abi {
serde_json::to_writer(&file, &json_abi_program)
} else {
serde_json::to_writer_pretty(&file, &json_abi_program)
};
res?
}
}
}
info!(" Bytecode size is {} bytes.", self.bytecode.len());
match self.tree_type {
TreeType::Contract => {
let json_storage_slots_stem = format!("{}-storage_slots", pkg_name);
let json_storage_slots_path = output_dir
.join(json_storage_slots_stem)
.with_extension("json");
let storage_slots_file = File::create(json_storage_slots_path)?;
let res = if minify.json_storage_slots {
serde_json::to_writer(&storage_slots_file, &self.storage_slots)
} else {
serde_json::to_writer_pretty(&storage_slots_file, &self.storage_slots)
};
res?;
}
TreeType::Predicate => {
let root = format!("0x{}", Contract::root_from_code(&self.bytecode));
let root_file_name = format!("{}{}", &pkg_name, SWAY_BIN_ROOT_SUFFIX);
let root_path = output_dir.join(root_file_name);
fs::write(root_path, &root)?;
info!(" Predicate root: {}", root);
}
TreeType::Script => {
let bytecode_hash = format!("0x{}", fuel_crypto::Hasher::hash(&self.bytecode));
let hash_file_name = format!("{}{}", &pkg_name, SWAY_BIN_HASH_SUFFIX);
let hash_path = output_dir.join(hash_file_name);
fs::write(hash_path, &bytecode_hash)?;
info!(" Script bytecode hash: {}", bytecode_hash);
}
_ => (),
}
Ok(())
}
}
impl Built {
pub fn into_members(self) -> Result<HashMap<String, BuiltPackage>> {
match self {
Built::Package(built_pkg) => {
Ok(std::iter::once((built_pkg.pkg_name.clone(), *built_pkg)).collect())
}
Built::Workspace(built_workspace) => Ok(built_workspace),
}
}
pub fn expect_pkg(self) -> Result<BuiltPackage> {
match self {
Built::Package(built_pkg) => Ok(*built_pkg),
Built::Workspace(_) => bail!("expected `Built` to be `Built::Package`"),
}
}
}
const DEFAULT_REMOTE_NAME: &str = "origin";
#[derive(Clone, Debug)]
pub struct SourcePinnedParseError;
pub type DependencyName = String;
impl BuildPlan {
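    /// Create a new build plan for the project from the given build options.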
pub fn from_build_opts(build_options: &BuildOpts) -> Result<Self> {
let path = &build_options.pkg.path;
let manifest_dir = if let Some(ref path) = path {
PathBuf::from(path)
} else {
std::env::current_dir()?
};
let manifest_file = ManifestFile::from_dir(&manifest_dir)?;
let member_manifests = manifest_file.member_manifests()?;
if member_manifests.is_empty() {
bail!("No member found to build")
}
let lock_path = manifest_file.lock_path()?;
Self::from_lock_and_manifests(
&lock_path,
&member_manifests,
build_options.pkg.locked,
build_options.pkg.offline,
)
}
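    /// Create a new build plan from the member manifests alone, fetching and
    /// pinning all dependencies and validating the resulting graph.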
pub fn from_manifests(manifests: &MemberManifestFiles, offline: bool) -> Result<Self> {
validate_version(manifests)?;
let mut graph = Graph::default();
let mut manifest_map = ManifestMap::default();
fetch_graph(manifests, offline, &mut graph, &mut manifest_map)?;
validate_graph(&graph, manifests)?;
let compilation_order = compilation_order(&graph)?;
Ok(Self {
graph,
manifest_map,
compilation_order,
})
}
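    /// Create a build plan that reuses the plan recorded in the lock file at
    /// `lock_path` where possible. Invalid or missing entries are re-fetched
    /// and the lock file is rewritten when it changes; if `locked` is `true`,
    /// any required change to the lock file is an error instead.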
pub fn from_lock_and_manifests(
lock_path: &Path,
manifests: &MemberManifestFiles,
locked: bool,
offline: bool,
) -> Result<Self> {
validate_version(manifests)?;
let mut new_lock_cause = None;
let lock = Lock::from_path(lock_path).unwrap_or_else(|e| {
new_lock_cause = if e.to_string().contains("No such file or directory") {
Some(anyhow!("lock file did not exist"))
} else {
Some(e)
};
Lock::default()
});
let mut graph = lock.to_graph().unwrap_or_else(|e| {
new_lock_cause = Some(anyhow!("Invalid lock: {}", e));
Graph::default()
});
let invalid_deps = validate_graph(&graph, manifests)?;
let members: HashSet<String> = manifests
.iter()
.map(|(member_name, _)| member_name.clone())
.collect();
remove_deps(&mut graph, &members, &invalid_deps);
let mut manifest_map = graph_to_manifest_map(manifests, &graph)?;
let _added = fetch_graph(manifests, offline, &mut graph, &mut manifest_map)?;
let compilation_order = compilation_order(&graph)?;
let plan = Self {
graph,
manifest_map,
compilation_order,
};
let new_lock = Lock::from_graph(plan.graph());
let lock_diff = new_lock.diff(&lock);
if !lock_diff.removed.is_empty() || !lock_diff.added.is_empty() {
new_lock_cause.get_or_insert(anyhow!("lock file did not match manifest"));
}
if let Some(cause) = new_lock_cause {
if locked {
bail!(
"The lock file {} needs to be updated (Cause: {}) \
but --locked was passed to prevent this.",
lock_path.to_string_lossy(),
cause,
);
}
info!(" Creating a new `Forc.lock` file. (Cause: {})", cause);
let member_names = manifests
.iter()
.map(|(_, manifest)| manifest.project.name.clone())
.collect();
crate::lock::print_diff(&member_names, &lock_diff);
let string = toml::ser::to_string_pretty(&new_lock)
.map_err(|e| anyhow!("failed to serialize lock file: {}", e))?;
fs::write(lock_path, string)
.map_err(|e| anyhow!("failed to write lock file: {}", e))?;
info!(" Created new lock file at {}", lock_path.display());
}
Ok(plan)
}
pub fn member_nodes(&self) -> impl Iterator<Item = NodeIx> + '_ {
self.compilation_order()
.iter()
.cloned()
.filter(|&n| self.graph[n].source == SourcePinned::Member)
}
pub fn member_pinned_pkgs(&self) -> impl Iterator<Item = Pinned> + '_ {
let graph = self.graph();
self.member_nodes().map(|node| &graph[node]).cloned()
}
pub fn graph(&self) -> &Graph {
&self.graph
}
pub fn manifest_map(&self) -> &ManifestMap {
&self.manifest_map
}
pub fn compilation_order(&self) -> &[NodeIx] {
&self.compilation_order
}
pub fn find_member_index(&self, member_name: &str) -> Option<NodeIx> {
self.member_nodes()
.find(|node_ix| self.graph[*node_ix].name == member_name)
}
pub fn node_deps(&self, n: NodeIx) -> impl '_ + Iterator<Item = NodeIx> {
let bfs = Bfs::new(&self.graph, n);
bfs.iter(&self.graph)
}
pub fn build_profiles(&self) -> impl '_ + Iterator<Item = (String, BuildProfile)> {
let manifest_map = &self.manifest_map;
let graph = &self.graph;
self.member_nodes().flat_map(|member_node| {
manifest_map[&graph[member_node].id()]
.build_profiles()
.map(|(n, p)| (n.clone(), p.clone()))
})
}
}
fn potential_proj_nodes<'a>(g: &'a Graph, proj_name: &'a str) -> impl 'a + Iterator<Item = NodeIx> {
member_nodes(g).filter(move |&n| g[n].name == proj_name)
}
fn find_proj_node(graph: &Graph, proj_name: &str) -> Result<NodeIx> {
let mut potentials = potential_proj_nodes(graph, proj_name);
let proj_node = potentials
.next()
.ok_or_else(|| anyhow!("graph contains no project node"))?;
match potentials.next() {
None => Ok(proj_node),
Some(_) => Err(anyhow!("graph contains more than one project node")),
}
}
fn validate_version(member_manifests: &MemberManifestFiles) -> Result<()> {
for member_pkg_manifest in member_manifests.values() {
validate_pkg_version(member_pkg_manifest)?;
}
Ok(())
}
fn validate_pkg_version(pkg_manifest: &PackageManifestFile) -> Result<()> {
match &pkg_manifest.project.forc_version {
Some(min_forc_version) => {
let crate_version = env!("CARGO_PKG_VERSION");
let toolchain_version = semver::Version::parse(crate_version)?;
if toolchain_version < *min_forc_version {
bail!(
"{:?} requires forc version {} but current forc version is {}\nUpdate the toolchain by following: https://fuellabs.github.io/sway/v{}/introduction/installation.html",
pkg_manifest.project.name,
min_forc_version,
crate_version,
crate_version
);
}
}
None => {}
};
Ok(())
}
fn member_nodes(g: &Graph) -> impl Iterator<Item = NodeIx> + '_ {
g.node_indices()
.filter(|&n| g[n].source == SourcePinned::Member)
}
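/// Validate the graph against the member manifests, returning the set of edges
/// that no longer match their manifest entries and must be removed.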
fn validate_graph(graph: &Graph, manifests: &MemberManifestFiles) -> Result<BTreeSet<EdgeIx>> {
let mut member_pkgs: HashMap<&String, &PackageManifestFile> = manifests.iter().collect();
let member_nodes: Vec<_> = member_nodes(graph)
.filter_map(|n| member_pkgs.remove(&graph[n].name).map(|pkg| (n, pkg)))
.collect();
if member_nodes.is_empty() {
return Ok(graph.edge_indices().collect());
}
let mut visited = HashSet::new();
let edges = member_nodes
.into_iter()
.flat_map(move |(n, _)| validate_deps(graph, n, manifests, &mut visited))
.collect();
Ok(edges)
}
fn validate_deps(
graph: &Graph,
node: NodeIx,
manifests: &MemberManifestFiles,
visited: &mut HashSet<NodeIx>,
) -> BTreeSet<EdgeIx> {
let mut remove = BTreeSet::default();
for edge in graph.edges_directed(node, Direction::Outgoing) {
let dep_name = edge.weight();
let dep_node = edge.target();
match validate_dep(graph, manifests, dep_name, dep_node) {
Err(_) => {
remove.insert(edge.id());
}
Ok(_) => {
if visited.insert(dep_node) {
let rm = validate_deps(graph, dep_node, manifests, visited);
remove.extend(rm);
}
continue;
}
}
}
remove
}
fn validate_dep(
graph: &Graph,
manifests: &MemberManifestFiles,
dep_edge: &Edge,
dep_node: NodeIx,
) -> Result<PackageManifestFile> {
let dep_name = &dep_edge.name;
let node_manifest = manifests
.get(dep_name)
.ok_or_else(|| anyhow!("Couldn't find manifest file for {}", dep_name))?;
let dep_path = dep_path(graph, node_manifest, dep_node, manifests).map_err(|e| {
anyhow!(
"failed to construct path for dependency {:?}: {}",
dep_name,
e
)
})?;
let dep_manifest = PackageManifestFile::from_dir(&dep_path)?;
let dep_entry = node_manifest
.dep(dep_name)
.ok_or_else(|| anyhow!("no entry in parent manifest"))?;
let dep_source = dep_to_source_patched(node_manifest, dep_name, dep_entry, manifests)?;
let dep_pkg = graph[dep_node].unpinned(&dep_path);
if dep_pkg.source != dep_source {
bail!("dependency node's source does not match manifest entry");
}
validate_dep_manifest(&graph[dep_node], &dep_manifest, dep_edge)?;
Ok(dep_manifest)
}
fn validate_dep_manifest(
dep: &Pinned,
dep_manifest: &PackageManifestFile,
dep_edge: &Edge,
) -> Result<()> {
let dep_program_type = dep_manifest.program_type()?;
match (&dep_program_type, &dep_edge.kind) {
(TreeType::Contract, DepKind::Contract { salt: _ })
| (TreeType::Library { .. }, DepKind::Library) => {}
_ => bail!(
"\"{}\" is declared as a {} dependency, but is actually a {}",
dep.name,
dep_edge.kind,
dep_program_type
),
}
if dep.name != dep_manifest.project.name {
bail!(
"dependency name {:?} must match the manifest project name {:?} \
unless `package = {:?}` is specified in the dependency declaration",
dep.name,
dep_manifest.project.name,
dep_manifest.project.name,
);
}
validate_pkg_version(dep_manifest)?;
Ok(())
}
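/// Resolve the directory containing the source of the given dependency node,
/// whether it lives in a git checkout, at a local path, or in the workspace.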
fn dep_path(
graph: &Graph,
node_manifest: &PackageManifestFile,
dep_node: NodeIx,
manifests: &MemberManifestFiles,
) -> Result<PathBuf> {
let dep = &graph[dep_node];
let dep_name = &dep.name;
match &dep.source {
SourcePinned::Git(git) => {
let repo_path = git_commit_path(&dep.name, &git.source.repo, &git.commit_hash);
let lock = path_lock(&repo_path)?;
let _guard = lock.read()?;
find_dir_within(&repo_path, &dep.name).ok_or_else(|| {
anyhow!(
"failed to find package `{}` in {}",
dep.name,
git
)
})
}
SourcePinned::Path(src) => {
validate_path_root(graph, dep_node, src.path_root)?;
if let Some(path) = node_manifest.dep_path(dep_name) {
if path.exists() {
return Ok(path);
}
}
for (_, patch_map) in node_manifest.patches() {
if let Some(Dependency::Detailed(details)) = patch_map.get(dep_name) {
if let Some(ref rel_path) = details.path {
if let Ok(path) = node_manifest.dir().join(rel_path).canonicalize() {
if path.exists() {
return Ok(path);
}
}
}
}
}
bail!(
"no dependency or patch with name {:?} in manifest of {:?}",
dep_name,
node_manifest.project.name
)
}
SourcePinned::Registry(_reg) => unreachable!("registry dependencies not yet supported"),
SourcePinned::Member => {
manifests
.values()
.find(|manifest| manifest.project.name == *dep_name)
.map(|manifest| manifest.path().to_path_buf())
.ok_or_else(|| anyhow!("cannot find dependency in the workspace"))
}
}
}
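/// Remove the given invalid edges from the graph, then prune any non-member
/// nodes left without a parent.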
fn remove_deps(
graph: &mut Graph,
member_names: &HashSet<String>,
edges_to_remove: &BTreeSet<EdgeIx>,
) {
let member_nodes: HashSet<_> = member_nodes(graph)
.filter(|&n| member_names.contains(&graph[n].name))
.collect();
let node_removal_order = match petgraph::algo::toposort(&*graph, None) {
Ok(nodes) => nodes,
Err(_) => {
graph.clear();
return;
}
};
for &edge in edges_to_remove {
graph.remove_edge(edge);
}
let nodes = node_removal_order.into_iter();
for node in nodes {
if !has_parent(graph, node) && !member_nodes.contains(&node) {
graph.remove_node(node);
}
}
}
fn has_parent(graph: &Graph, node: NodeIx) -> bool {
graph
.edges_directed(node, Direction::Incoming)
.next()
.is_some()
}
impl GitReference {
pub fn resolve(&self, repo: &git2::Repository) -> Result<git2::Oid> {
fn resolve_tag(repo: &git2::Repository, tag: &str) -> Result<git2::Oid> {
let refname = format!("refs/remotes/{}/tags/{}", DEFAULT_REMOTE_NAME, tag);
let id = repo.refname_to_id(&refname)?;
let obj = repo.find_object(id, None)?;
let obj = obj.peel(git2::ObjectType::Commit)?;
Ok(obj.id())
}
fn resolve_branch(repo: &git2::Repository, branch: &str) -> Result<git2::Oid> {
let name = format!("{}/{}", DEFAULT_REMOTE_NAME, branch);
let b = repo
.find_branch(&name, git2::BranchType::Remote)
.with_context(|| format!("failed to find branch `{}`", branch))?;
b.get()
.target()
.ok_or_else(|| anyhow::format_err!("branch `{}` did not have a target", branch))
}
fn resolve_default_branch(repo: &git2::Repository) -> Result<git2::Oid> {
let head_id =
repo.refname_to_id(&format!("refs/remotes/{}/HEAD", DEFAULT_REMOTE_NAME))?;
let head = repo.find_object(head_id, None)?;
Ok(head.peel(git2::ObjectType::Commit)?.id())
}
fn resolve_rev(repo: &git2::Repository, rev: &str) -> Result<git2::Oid> {
let obj = repo.revparse_single(rev)?;
match obj.as_tag() {
Some(tag) => Ok(tag.target_id()),
None => Ok(obj.id()),
}
}
match self {
GitReference::Tag(s) => {
resolve_tag(repo, s).with_context(|| format!("failed to find tag `{}`", s))
}
GitReference::Branch(s) => resolve_branch(repo, s),
GitReference::DefaultBranch => resolve_default_branch(repo),
GitReference::Rev(s) => resolve_rev(repo, s),
}
}
}
impl Pinned {
pub fn id(&self) -> PinnedId {
PinnedId::new(&self.name, &self.source)
}
pub fn unpinned(&self, path: &Path) -> Pkg {
let source = match &self.source {
SourcePinned::Member => Source::Member(path.to_owned()),
SourcePinned::Git(git) => Source::Git(git.source.clone()),
SourcePinned::Path(_) => Source::Path(path.to_owned()),
SourcePinned::Registry(reg) => Source::Registry(reg.source.clone()),
};
let name = self.name.clone();
Pkg { name, source }
}
}
impl PinnedId {
pub fn new(name: &str, source: &SourcePinned) -> Self {
let mut hasher = hash_map::DefaultHasher::default();
name.hash(&mut hasher);
source.hash(&mut hasher);
Self(hasher.finish())
}
}
impl SourcePathPinned {
pub const PREFIX: &'static str = "path";
}
impl SourceGitPinned {
pub const PREFIX: &'static str = "git";
}
impl fmt::Display for PinnedId {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{:016X}", self.0)
}
}
impl fmt::Display for SourcePathPinned {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}+from-root-{}", Self::PREFIX, self.path_root)
}
}
impl fmt::Display for SourceGitPinned {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(
f,
"{}+{}?{}#{}",
Self::PREFIX,
self.source.repo,
self.source.reference,
self.commit_hash
)
}
}
impl fmt::Display for GitReference {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
GitReference::Branch(ref s) => write!(f, "branch={}", s),
GitReference::Tag(ref s) => write!(f, "tag={}", s),
GitReference::Rev(ref _s) => write!(f, "rev"),
GitReference::DefaultBranch => write!(f, "default-branch"),
}
}
}
impl fmt::Display for SourcePinned {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
SourcePinned::Member => write!(f, "member"),
SourcePinned::Path(src) => src.fmt(f),
SourcePinned::Git(src) => src.fmt(f),
SourcePinned::Registry(_reg) => unimplemented!("pkg registries not yet implemented"),
}
}
}
impl FromStr for PinnedId {
type Err = PinnedIdParseError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
Ok(Self(
u64::from_str_radix(s, 16).map_err(|_| PinnedIdParseError)?,
))
}
}
impl FromStr for SourcePathPinned {
type Err = SourcePathPinnedParseError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let s = s.trim();
let prefix_plus = format!("{}+", Self::PREFIX);
        if !s.starts_with(&prefix_plus) {
            return Err(SourcePathPinnedParseError);
        }
let s = &s[prefix_plus.len()..];
let path_root = s
.split("from-root-")
.nth(1)
.ok_or(SourcePathPinnedParseError)?
.parse()
.map_err(|_| SourcePathPinnedParseError)?;
Ok(Self { path_root })
}
}
impl FromStr for SourceGitPinned {
type Err = SourceGitPinnedParseError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let s = s.trim();
let prefix_plus = format!("{}+", Self::PREFIX);
        if !s.starts_with(&prefix_plus) {
            return Err(SourceGitPinnedParseError::Prefix);
        }
let s = &s[prefix_plus.len()..];
let repo_str = s.split('?').next().ok_or(SourceGitPinnedParseError::Url)?;
let repo = Url::parse(repo_str).map_err(|_| SourceGitPinnedParseError::Url)?;
let s = &s[repo_str.len() + "?".len()..];
let mut s_iter = s.split('#');
let reference = s_iter.next().ok_or(SourceGitPinnedParseError::Reference)?;
let commit_hash = s_iter
.next()
.ok_or(SourceGitPinnedParseError::CommitHash)?
.to_string();
validate_git_commit_hash(&commit_hash)
.map_err(|_| SourceGitPinnedParseError::CommitHash)?;
const BRANCH: &str = "branch=";
const TAG: &str = "tag=";
        let reference = if let Some(branch) = reference.strip_prefix(BRANCH) {
            GitReference::Branch(branch.to_string())
        } else if let Some(tag) = reference.strip_prefix(TAG) {
            GitReference::Tag(tag.to_string())
} else if reference == "rev" {
GitReference::Rev(commit_hash.to_string())
} else if reference == "default-branch" {
GitReference::DefaultBranch
} else {
return Err(SourceGitPinnedParseError::Reference);
};
let source = SourceGit { repo, reference };
Ok(Self {
source,
commit_hash,
})
}
}
impl FromStr for SourcePinned {
type Err = SourcePinnedParseError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let source = if s == "root" || s == "member" {
SourcePinned::Member
} else if let Ok(src) = SourcePathPinned::from_str(s) {
SourcePinned::Path(src)
} else if let Ok(src) = SourceGitPinned::from_str(s) {
SourcePinned::Git(src)
} else {
return Err(SourcePinnedParseError);
};
Ok(source)
}
}
fn validate_git_commit_hash(commit_hash: &str) -> Result<()> {
const LEN: usize = 40;
if commit_hash.len() != LEN {
bail!(
"invalid hash length: expected {}, found {}",
LEN,
commit_hash.len()
);
}
if !commit_hash.chars().all(|c| c.is_ascii_alphanumeric()) {
bail!("hash contains one or more non-ascii-alphanumeric characters");
}
Ok(())
}
impl Default for GitReference {
fn default() -> Self {
Self::DefaultBranch
}
}
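/// Compute the order in which packages must be compiled (dependencies before
/// dependents), producing a readable description of any dependency cycle found.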
pub fn compilation_order(graph: &Graph) -> Result<Vec<NodeIx>> {
let rev_pkg_graph = petgraph::visit::Reversed(&graph);
petgraph::algo::toposort(rev_pkg_graph, None).map_err(|_| {
let scc = petgraph::algo::kosaraju_scc(&graph);
let mut path = String::new();
scc.iter()
.filter(|path| path.len() > 1)
.for_each(|cyclic_path| {
let starting_node = &graph[*cyclic_path.last().unwrap()];
path.push_str(&starting_node.name);
path.push_str(" -> ");
for (node_index, node) in cyclic_path.iter().enumerate() {
path.push_str(&graph[*node].name);
if node_index != cyclic_path.len() - 1 {
path.push_str(" -> ");
}
}
path.push('\n');
});
anyhow!("dependency cycle detected: {}", path)
})
}
fn graph_to_manifest_map(manifests: &MemberManifestFiles, graph: &Graph) -> Result<ManifestMap> {
let mut manifest_map = HashMap::new();
for pkg_manifest in manifests.values() {
let pkg_name = &pkg_manifest.project.name;
manifest_map.extend(pkg_graph_to_manifest_map(manifests, pkg_name, graph)?);
}
Ok(manifest_map)
}
fn pkg_graph_to_manifest_map(
manifests: &MemberManifestFiles,
pkg_name: &str,
graph: &Graph,
) -> Result<ManifestMap> {
let proj_manifest = manifests
.get(pkg_name)
.ok_or_else(|| anyhow!("Cannot find manifest for {}", pkg_name))?;
let mut manifest_map = ManifestMap::new();
let proj_node = match find_proj_node(graph, &proj_manifest.project.name) {
Ok(node) => node,
Err(_) => return Ok(manifest_map),
};
let proj_id = graph[proj_node].id();
manifest_map.insert(proj_id, proj_manifest.clone());
let mut bfs = Bfs::new(graph, proj_node);
bfs.next(graph);
while let Some(dep_node) = bfs.next(graph) {
let (parent_manifest, dep_name) = graph
.edges_directed(dep_node, Direction::Incoming)
.filter_map(|edge| {
let parent_node = edge.source();
let dep_name = &edge.weight().name;
let parent = &graph[parent_node];
let parent_manifest = manifest_map.get(&parent.id())?;
Some((parent_manifest, dep_name))
})
.next()
.ok_or_else(|| anyhow!("more than one root package detected in graph"))?;
let dep_path = dep_path(graph, parent_manifest, dep_node, manifests).map_err(|e| {
anyhow!(
"failed to construct path for dependency {:?}: {}",
dep_name,
e
)
})?;
let dep_manifest = PackageManifestFile::from_dir(&dep_path)?;
let dep = &graph[dep_node];
manifest_map.insert(dep.id(), dep_manifest);
}
Ok(manifest_map)
}
fn validate_path_root(graph: &Graph, path_dep: NodeIx, path_root: PinnedId) -> Result<()> {
let path_root_node = find_path_root(graph, path_dep)?;
if graph[path_root_node].id() != path_root {
bail!(
"invalid `path_root` for path dependency package {:?}",
&graph[path_dep].name
)
}
Ok(())
}
fn find_path_root(graph: &Graph, mut node: NodeIx) -> Result<NodeIx> {
loop {
let pkg = &graph[node];
match &pkg.source {
SourcePinned::Path(src) => {
let parent = graph
.edges_directed(node, Direction::Incoming)
.next()
.map(|edge| edge.source())
.ok_or_else(|| {
anyhow!(
"Failed to find path root: `path` dependency \"{}\" has no parent",
src
)
})?;
node = parent;
}
SourcePinned::Git(_) | SourcePinned::Registry(_) | SourcePinned::Member => {
return Ok(node);
}
}
}
}
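/// A unique identifier for a fetch pass, produced by hashing the fetch path
/// and a timestamp. Used to name temporary git checkout directories.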
pub fn fetch_id(path: &Path, timestamp: std::time::Instant) -> u64 {
let mut hasher = hash_map::DefaultHasher::new();
path.hash(&mut hasher);
timestamp.hash(&mut hasher);
hasher.finish()
}
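/// Fetch any dependencies of the member packages that are missing from the
/// graph, returning the set of newly added nodes.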
fn fetch_graph(
member_manifests: &MemberManifestFiles,
offline: bool,
graph: &mut Graph,
manifest_map: &mut ManifestMap,
) -> Result<HashSet<NodeIx>> {
let mut added_nodes = HashSet::default();
for member_pkg_manifest in member_manifests.values() {
added_nodes.extend(&fetch_pkg_graph(
member_pkg_manifest,
offline,
graph,
manifest_map,
member_manifests,
)?);
}
validate_contract_deps(graph)?;
Ok(added_nodes)
}
fn fetch_pkg_graph(
proj_manifest: &PackageManifestFile,
offline: bool,
graph: &mut Graph,
manifest_map: &mut ManifestMap,
member_manifests: &MemberManifestFiles,
) -> Result<HashSet<NodeIx>> {
let proj_node = match find_proj_node(graph, &proj_manifest.project.name) {
Ok(proj_node) => proj_node,
Err(_) => {
let name = proj_manifest.project.name.clone();
let source = SourcePinned::Member;
let pkg = Pinned { name, source };
let pkg_id = pkg.id();
manifest_map.insert(pkg_id, proj_manifest.clone());
graph.add_node(pkg)
}
};
let fetch_ts = std::time::Instant::now();
let fetch_id = fetch_id(proj_manifest.dir(), fetch_ts);
let path_root = graph[proj_node].id();
let mut fetched = graph
.node_indices()
.map(|n| {
let pinned = &graph[n];
let manifest = &manifest_map[&pinned.id()];
let pkg = pinned.unpinned(manifest.dir());
(pkg, n)
})
.collect();
let mut visited = HashSet::default();
fetch_deps(
fetch_id,
offline,
proj_node,
path_root,
graph,
manifest_map,
&mut fetched,
&mut visited,
member_manifests,
)
}
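/// Pin and fetch the direct dependencies of the given node, then recurse into
/// each dependency that has not been visited yet, returning all added nodes.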
#[allow(clippy::too_many_arguments)]
fn fetch_deps(
fetch_id: u64,
offline: bool,
node: NodeIx,
path_root: PinnedId,
graph: &mut Graph,
manifest_map: &mut ManifestMap,
fetched: &mut HashMap<Pkg, NodeIx>,
visited: &mut HashSet<NodeIx>,
member_manifests: &MemberManifestFiles,
) -> Result<HashSet<NodeIx>> {
let mut added = HashSet::default();
let parent_id = graph[node].id();
let package_manifest = &manifest_map[&parent_id];
let deps: Vec<(String, Dependency, DepKind)> = package_manifest
.contract_deps()
.map(|(n, d)| {
(
n.clone(),
d.dependency.clone(),
DepKind::Contract { salt: d.salt },
)
})
.chain(
package_manifest
.deps()
.map(|(n, d)| (n.clone(), d.clone(), DepKind::Library)),
)
.collect();
for (dep_name, dep, dep_kind) in deps {
let name = dep.package().unwrap_or(&dep_name).to_string();
let parent_manifest = &manifest_map[&parent_id];
let source = dep_to_source_patched(parent_manifest, &name, &dep, member_manifests)
.context("Failed to source dependency")?;
let dep_pkg = Pkg { name, source };
let dep_node = match fetched.entry(dep_pkg) {
hash_map::Entry::Occupied(entry) => *entry.get(),
hash_map::Entry::Vacant(entry) => {
let dep_pinned = pin_pkg(fetch_id, path_root, entry.key(), manifest_map, offline)?;
let dep_node = graph.add_node(dep_pinned);
added.insert(dep_node);
*entry.insert(dep_node)
}
};
let dep_edge = Edge::new(dep_name.to_string(), dep_kind.clone());
graph.update_edge(node, dep_node, dep_edge.clone());
if !visited.insert(dep_node) {
continue;
}
let dep_pinned = &graph[dep_node];
let dep_pkg_id = dep_pinned.id();
validate_dep_manifest(dep_pinned, &manifest_map[&dep_pkg_id], &dep_edge).map_err(|e| {
let parent = &graph[node];
anyhow!(
"dependency of {:?} named {:?} is invalid: {}",
parent.name,
dep_name,
e
)
})?;
let path_root = match dep_pinned.source {
SourcePinned::Member | SourcePinned::Git(_) | SourcePinned::Registry(_) => dep_pkg_id,
SourcePinned::Path(_) => path_root,
};
added.extend(fetch_deps(
fetch_id,
offline,
dep_node,
path_root,
graph,
manifest_map,
fetched,
visited,
member_manifests,
)?);
}
Ok(added)
}
fn git_repo_dir_name(name: &str, repo: &Url) -> String {
let repo_url_hash = hash_url(repo);
format!("{}-{:x}", name, repo_url_hash)
}
fn hash_url(url: &Url) -> u64 {
let mut hasher = hash_map::DefaultHasher::new();
url.hash(&mut hasher);
hasher.finish()
}
fn tmp_git_repo_dir(fetch_id: u64, name: &str, repo: &Url) -> PathBuf {
let repo_dir_name = format!("{:x}-{}", fetch_id, git_repo_dir_name(name, repo));
git_checkouts_directory().join("tmp").join(repo_dir_name)
}
fn git_ref_to_refspecs(reference: &GitReference) -> (Vec<String>, bool) {
let mut refspecs = vec![];
let mut tags = false;
match reference {
GitReference::Branch(s) => {
refspecs.push(format!(
"+refs/heads/{1}:refs/remotes/{0}/{1}",
DEFAULT_REMOTE_NAME, s
));
}
GitReference::Tag(s) => {
refspecs.push(format!(
"+refs/tags/{1}:refs/remotes/{0}/tags/{1}",
DEFAULT_REMOTE_NAME, s
));
}
GitReference::Rev(s) => {
if s.starts_with("refs/") {
refspecs.push(format!("+{0}:{0}", s));
} else {
refspecs.push(format!(
"+refs/heads/*:refs/remotes/{}/*",
DEFAULT_REMOTE_NAME
));
refspecs.push(format!("+HEAD:refs/remotes/{}/HEAD", DEFAULT_REMOTE_NAME));
tags = true;
}
}
GitReference::DefaultBranch => {
refspecs.push(format!("+HEAD:refs/remotes/{}/HEAD", DEFAULT_REMOTE_NAME));
}
}
(refspecs, tags)
}
fn with_tmp_git_repo<F, O>(fetch_id: u64, name: &str, source: &SourceGit, f: F) -> Result<O>
where
F: FnOnce(git2::Repository) -> Result<O>,
{
let repo_dir = tmp_git_repo_dir(fetch_id, name, &source.repo);
if repo_dir.exists() {
let _ = std::fs::remove_dir_all(&repo_dir);
}
let repo = git2::Repository::init(&repo_dir)
.map_err(|e| anyhow!("failed to init repo at \"{}\": {}", repo_dir.display(), e))?;
let (refspecs, tags) = git_ref_to_refspecs(&source.reference);
let mut fetch_opts = git2::FetchOptions::new();
if tags {
fetch_opts.download_tags(git2::AutotagOption::All);
}
repo.remote_anonymous(source.repo.as_str())?
.fetch(&refspecs, Some(&mut fetch_opts), None)
.with_context(|| {
format!(
"failed to fetch `{}`. Check your connection or run in `--offline` mode",
&source.repo
)
})?;
let output = f(repo)?;
let _ = std::fs::remove_dir_all(&repo_dir);
Ok(output)
}
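/// Pin the given git source by resolving its reference to a commit hash via a
/// temporary fetch of the repository.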
pub fn pin_git(fetch_id: u64, name: &str, source: SourceGit) -> Result<SourceGitPinned> {
let commit_hash = with_tmp_git_repo(fetch_id, name, &source, |repo| {
let commit_id = source
.reference
.resolve(&repo)
.with_context(|| "failed to resolve reference".to_string())?;
Ok(format!("{}", commit_id))
})?;
Ok(SourceGitPinned {
source,
commit_hash,
})
}
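/// Pin the given package, fetching its source if it is not already available
/// locally, and record its manifest in `manifest_map`.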
fn pin_pkg(
fetch_id: u64,
path_root: PinnedId,
pkg: &Pkg,
manifest_map: &mut ManifestMap,
offline: bool,
) -> Result<Pinned> {
let name = pkg.name.clone();
let pinned = match &pkg.source {
Source::Member(path) => {
let source = SourcePinned::Member;
let pinned = Pinned { name, source };
let id = pinned.id();
let manifest = PackageManifestFile::from_dir(path)?;
manifest_map.insert(id, manifest);
pinned
}
Source::Path(path) => {
let path_pinned = SourcePathPinned { path_root };
let source = SourcePinned::Path(path_pinned);
let pinned = Pinned { name, source };
let id = pinned.id();
let manifest = PackageManifestFile::from_dir(path)?;
manifest_map.insert(id, manifest);
pinned
}
Source::Git(ref git_source) => {
let (pinned_git, repo_path) = if offline {
let (local_path, commit_hash) = search_git_source_locally(&name, git_source)?
.ok_or_else(|| {
anyhow!(
"Unable to fetch pkg {:?} from {:?} in offline mode",
name,
git_source.repo
)
})?;
let pinned_git = SourceGitPinned {
source: git_source.clone(),
commit_hash,
};
(pinned_git, local_path)
} else if let GitReference::DefaultBranch | GitReference::Branch(_) =
git_source.reference
{
let pinned_git = pin_git(fetch_id, &name, git_source.clone())?;
let repo_path =
git_commit_path(&name, &pinned_git.source.repo, &pinned_git.commit_hash);
(pinned_git, repo_path)
} else {
match search_git_source_locally(&name, git_source) {
Ok(Some((local_path, commit_hash))) => {
let pinned_git = SourceGitPinned {
source: git_source.clone(),
commit_hash,
};
(pinned_git, local_path)
}
_ => {
let pinned_git = pin_git(fetch_id, &name, git_source.clone())?;
let repo_path = git_commit_path(
&name,
&pinned_git.source.repo,
&pinned_git.commit_hash,
);
(pinned_git, repo_path)
}
}
};
let source = SourcePinned::Git(pinned_git.clone());
let pinned = Pinned { name, source };
let id = pinned.id();
if let hash_map::Entry::Vacant(entry) = manifest_map.entry(id) {
let mut lock = path_lock(&repo_path)?;
{
let _guard = lock.write()?;
if !repo_path.exists() {
info!(" Fetching {}", pinned_git.to_string());
fetch_git(fetch_id, &pinned.name, &pinned_git)?;
}
}
let path = {
let _guard = lock.read()?;
find_dir_within(&repo_path, &pinned.name).ok_or_else(|| {
anyhow!(
"failed to find package `{}` in {}",
pinned.name,
pinned_git
)
})?
};
let manifest = PackageManifestFile::from_dir(&path)?;
entry.insert(manifest);
}
pinned
}
Source::Registry(ref _source) => {
if offline {
bail!("Unable to fetch pkg {:?} in offline mode", name);
}
bail!("registry dependencies are not yet supported");
}
};
Ok(pinned)
}
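/// The path of the advisory lock file used to guard access to the checkout at
/// the given path, stored under the user's forc directory.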
fn fd_lock_path(path: &Path) -> PathBuf {
const LOCKS_DIR_NAME: &str = ".locks";
const LOCK_EXT: &str = "forc-lock";
let mut hasher = hash_map::DefaultHasher::default();
path.hash(&mut hasher);
let hash = hasher.finish();
let file_name = match path.file_stem().and_then(|s| s.to_str()) {
None => format!("{:X}", hash),
Some(stem) => format!("{:X}-{}", hash, stem),
};
user_forc_directory()
.join(LOCKS_DIR_NAME)
.join(file_name)
.with_extension(LOCK_EXT)
}
fn path_lock(path: &Path) -> Result<fd_lock::RwLock<File>> {
let lock_path = fd_lock_path(path);
let lock_dir = lock_path
.parent()
.expect("lock path has no parent directory");
std::fs::create_dir_all(lock_dir).context("failed to create forc advisory lock directory")?;
let lock_file = File::create(&lock_path).context("failed to create advisory lock file")?;
Ok(fd_lock::RwLock::new(lock_file))
}
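/// The checkout directory for the given package name, repository URL and
/// commit hash.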
pub fn git_commit_path(name: &str, repo: &Url, commit_hash: &str) -> PathBuf {
let repo_dir_name = git_repo_dir_name(name, repo);
git_checkouts_directory()
.join(repo_dir_name)
.join(commit_hash)
}
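/// Fetch the repository and check out the pinned commit into the checkouts
/// directory, writing a `.forc_index` file that records the git reference and
/// the HEAD commit time for later offline lookup.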
pub fn fetch_git(fetch_id: u64, name: &str, pinned: &SourceGitPinned) -> Result<PathBuf> {
let path = git_commit_path(name, &pinned.source.repo, &pinned.commit_hash);
with_tmp_git_repo(fetch_id, name, &pinned.source, |repo| {
let id = git2::Oid::from_str(&pinned.commit_hash)?;
repo.set_head_detached(id)?;
if path.exists() {
let _ = fs::remove_dir_all(&path);
}
fs::create_dir_all(&path)?;
let mut checkout = git2::build::CheckoutBuilder::new();
checkout.force().target_dir(&path);
repo.checkout_head(Some(&mut checkout))?;
let current_head = repo.revparse_single("HEAD")?;
let head_commit = current_head
.as_commit()
.ok_or_else(|| anyhow!("Cannot get commit from {}", current_head.id().to_string()))?;
let head_time = head_commit.time().seconds();
let source_index = GitSourceIndex::new(
head_time,
pinned.source.reference.clone(),
pinned.commit_hash.clone(),
);
fs::write(
path.join(".forc_index"),
serde_json::to_string(&source_index)?,
)?;
Ok(())
})?;
Ok(path)
}
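/// Search the local checkouts directory for an existing checkout matching the
/// given git source. For branch references, the checkout with the newest HEAD
/// commit time is preferred.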
fn search_git_source_locally(
name: &str,
git_source: &SourceGit,
) -> Result<Option<(PathBuf, String)>> {
let checkouts_dir = git_checkouts_directory();
match &git_source.reference {
GitReference::Branch(branch) => {
let repos_from_branch = collect_local_repos_with_branch(checkouts_dir, name, branch)?;
let newest_branch_repo = repos_from_branch
.into_iter()
.max_by_key(|&(_, (_, time))| time)
.map(|(repo_path, (hash, _))| (repo_path, hash));
Ok(newest_branch_repo)
}
_ => find_exact_local_repo_with_reference(checkouts_dir, name, &git_source.reference),
}
}
fn collect_local_repos_with_branch(
checkouts_dir: PathBuf,
package_name: &str,
branch_name: &str,
) -> Result<Vec<(PathBuf, HeadWithTime)>> {
let mut list_of_repos = Vec::new();
with_search_checkouts(checkouts_dir, package_name, |repo_index, repo_dir_path| {
if let GitReference::Branch(branch) = repo_index.git_reference {
if branch == branch_name {
list_of_repos.push((repo_dir_path, repo_index.head_with_time));
}
}
Ok(())
})?;
Ok(list_of_repos)
}
fn find_exact_local_repo_with_reference(
checkouts_dir: PathBuf,
package_name: &str,
git_reference: &GitReference,
) -> Result<Option<(PathBuf, String)>> {
let mut found_local_repo = None;
if let GitReference::Tag(tag) = git_reference {
found_local_repo = find_repo_with_tag(tag, package_name, checkouts_dir)?;
} else if let GitReference::Rev(rev) = git_reference {
found_local_repo = find_repo_with_rev(rev, package_name, checkouts_dir)?;
}
Ok(found_local_repo)
}
fn find_repo_with_tag(
tag: &str,
package_name: &str,
checkouts_dir: PathBuf,
) -> Result<Option<(PathBuf, String)>> {
let mut found_local_repo = None;
with_search_checkouts(checkouts_dir, package_name, |repo_index, repo_dir_path| {
let current_head = repo_index.head_with_time.0;
if let GitReference::Tag(curr_repo_tag) = repo_index.git_reference {
if curr_repo_tag == tag {
found_local_repo = Some((repo_dir_path, current_head))
}
}
Ok(())
})?;
Ok(found_local_repo)
}
fn find_repo_with_rev(
rev: &str,
package_name: &str,
checkouts_dir: PathBuf,
) -> Result<Option<(PathBuf, String)>> {
let mut found_local_repo = None;
with_search_checkouts(checkouts_dir, package_name, |repo_index, repo_dir_path| {
let current_head = repo_index.head_with_time.0;
if let GitReference::Rev(curr_repo_rev) = repo_index.git_reference {
if curr_repo_rev == rev {
found_local_repo = Some((repo_dir_path, current_head));
}
}
Ok(())
})?;
Ok(found_local_repo)
}
fn with_search_checkouts<F>(checkouts_dir: PathBuf, package_name: &str, mut f: F) -> Result<()>
where
F: FnMut(GitSourceIndex, PathBuf) -> Result<()>,
{
for entry in fs::read_dir(checkouts_dir)? {
let entry = entry?;
let folder_name = entry
.file_name()
.into_string()
.map_err(|_| anyhow!("invalid folder name"))?;
if folder_name.starts_with(package_name) {
for repo_dir in fs::read_dir(entry.path())? {
let repo_dir = repo_dir
.map_err(|e| anyhow!("Cannot find local repo at checkouts dir {}", e))?;
if repo_dir.file_type()?.is_dir() {
let repo_dir_path = repo_dir.path();
if let Ok(index_file) = fs::read_to_string(repo_dir_path.join(".forc_index")) {
let index = serde_json::from_str(&index_file)?;
f(index, repo_dir_path)?;
}
}
}
}
}
Ok(())
}
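/// Convert a manifest dependency declaration into a `Source`, resolving
/// relative paths against `pkg_path` and classifying paths that point at
/// workspace members.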
fn dep_to_source(
pkg_path: &Path,
dep: &Dependency,
member_manifests: &MemberManifestFiles,
) -> Result<Source> {
let source = match dep {
Dependency::Simple(ref ver_str) => {
bail!(
"Unsupported dependency declaration in \"{}\": `{}` - \
currently only `git` and `path` dependencies are supported",
pkg_path.display(),
ver_str
)
}
Dependency::Detailed(ref det) => match (&det.path, &det.version, &det.git) {
(Some(relative_path), _, _) => {
let path = pkg_path.join(relative_path);
let canonical_path = path.canonicalize().map_err(|e| {
anyhow!("Failed to canonicalize dependency path {:?}: {}", path, e)
})?;
if member_manifests
.values()
.any(|pkg_manifest| pkg_manifest.dir() == canonical_path)
{
Source::Member(canonical_path)
} else {
Source::Path(canonical_path)
}
}
(_, _, Some(repo)) => {
let reference = match (&det.branch, &det.tag, &det.rev) {
(Some(branch), None, None) => GitReference::Branch(branch.clone()),
(None, Some(tag), None) => GitReference::Tag(tag.clone()),
(None, None, Some(rev)) => GitReference::Rev(rev.clone()),
(None, None, None) => GitReference::DefaultBranch,
_ => bail!(
"git dependencies support at most one reference: \
either `branch`, `tag` or `rev`"
),
};
let repo = Url::parse(repo)?;
let source = SourceGit { repo, reference };
Source::Git(source)
}
_ => {
bail!("unsupported set of fields for dependency: {:?}", dep);
}
},
};
Ok(source)
}
fn dep_source_patch<'manifest>(
manifest: &'manifest PackageManifestFile,
dep_name: &str,
dep_source: &Source,
) -> Option<&'manifest Dependency> {
if let Source::Git(git) = dep_source {
if let Some(patches) = manifest.patch(git.repo.as_str()) {
if let Some(patch) = patches.get(dep_name) {
return Some(patch);
}
}
}
None
}
fn apply_patch(
manifest: &PackageManifestFile,
dep_name: &str,
dep_source: &Source,
member_manifests: &MemberManifestFiles,
) -> Result<Source> {
match dep_source_patch(manifest, dep_name, dep_source) {
Some(patch) => dep_to_source(manifest.dir(), patch, member_manifests),
None => Ok(dep_source.clone()),
}
}
fn dep_to_source_patched(
manifest: &PackageManifestFile,
dep_name: &str,
dep: &Dependency,
member_manifests: &MemberManifestFiles,
) -> Result<Source> {
let unpatched = dep_to_source(manifest.dir(), dep, member_manifests)?;
apply_patch(manifest, dep_name, &unpatched, member_manifests)
}
pub fn sway_build_config(
manifest_dir: &Path,
entry_path: &Path,
build_target: BuildTarget,
build_profile: &BuildProfile,
) -> Result<sway_core::BuildConfig> {
let file_name = find_file_name(manifest_dir, entry_path)?;
let build_config = sway_core::BuildConfig::root_from_file_name_and_manifest_path(
file_name.to_path_buf(),
manifest_dir.to_path_buf(),
build_target,
)
.print_dca_graph(build_profile.print_dca_graph)
.print_finalized_asm(build_profile.print_finalized_asm)
.print_intermediate_asm(build_profile.print_intermediate_asm)
.print_ir(build_profile.print_ir)
.include_tests(build_profile.include_tests);
Ok(build_config)
}
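/// Construct the initial namespace for compiling the given node, inserting a
/// submodule for each dependency. Contract dependencies are represented by an
/// injected `CONTRACT_ID` constant computed from the already-compiled
/// contract (or a default ID when it has not been compiled yet).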
pub fn dependency_namespace(
lib_namespace_map: &HashMap<NodeIx, namespace::Module>,
compiled_contract_deps: &HashMap<NodeIx, BuiltPackage>,
graph: &Graph,
node: NodeIx,
constants: BTreeMap<String, ConfigTimeConstant>,
engines: Engines<'_>,
) -> Result<namespace::Module, vec1::Vec1<CompileError>> {
let mut namespace = namespace::Module::default_with_constants(engines, constants)?;
let node_idx = &graph[node];
namespace.name = Some(Ident::new_no_span(Box::leak(
node_idx.name.clone().into_boxed_str(),
)));
let mut core_added = false;
for edge in graph.edges_directed(node, Direction::Outgoing) {
let dep_node = edge.target();
let dep_name = kebab_to_snake_case(&edge.weight().name);
let dep_edge = edge.weight();
let dep_namespace = match dep_edge.kind {
DepKind::Library => lib_namespace_map
.get(&dep_node)
.cloned()
.expect("no namespace module"),
DepKind::Contract { salt } => {
let mut constants = BTreeMap::default();
let compiled_dep = compiled_contract_deps.get(&dep_node);
let dep_contract_id = match compiled_dep {
Some(dep_contract_compiled) => contract_id(dep_contract_compiled, &salt),
None => ContractId::default(),
};
let contract_dep_constant_name = "CONTRACT_ID";
let contract_id_value = format!("0x{dep_contract_id}");
let contract_id_constant = ConfigTimeConstant {
r#type: "b256".to_string(),
value: contract_id_value,
public: true,
};
constants.insert(contract_dep_constant_name.to_string(), contract_id_constant);
namespace::Module::default_with_constants(engines, constants)?
}
};
namespace.insert_submodule(dep_name, dep_namespace);
let dep = &graph[dep_node];
if dep.name == CORE {
core_added = true;
}
}
if !core_added {
if let Some(core_node) = find_core_dep(graph, node) {
let core_namespace = &lib_namespace_map[&core_node];
namespace.insert_submodule(CORE.to_string(), core_namespace.clone());
}
}
namespace.star_import_with_reexports(&[CORE, PRELUDE].map(Ident::new_no_span), &[], engines);
if has_std_dep(graph, node) {
namespace.star_import_with_reexports(&[STD, PRELUDE].map(Ident::new_no_span), &[], engines);
}
Ok(namespace)
}
fn has_std_dep(graph: &Graph, node: NodeIx) -> bool {
let pkg = &graph[node];
if pkg.name == STD {
return false;
}
graph.edges_directed(node, Direction::Outgoing).any(|edge| {
let dep_node = edge.target();
let dep = &graph[dep_node];
matches!(&dep.name[..], STD)
})
}
fn find_core_dep(graph: &Graph, node: NodeIx) -> Option<NodeIx> {
let pkg = &graph[node];
if pkg.name == CORE {
return None;
}
let mut maybe_std = None;
for edge in graph.edges_directed(node, Direction::Outgoing) {
let dep_node = edge.target();
let dep = &graph[dep_node];
match &dep.name[..] {
CORE => return Some(dep_node),
STD => maybe_std = Some(dep_node),
_ => {}
}
}
if let Some(std) = maybe_std {
return find_core_dep(graph, std);
}
for dep_node in Dfs::new(graph, node).iter(graph) {
let dep = &graph[dep_node];
if dep.name == CORE {
return Some(dep_node);
}
}
None
}
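/// Compile the package's entry point to a typed AST using the given namespace.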
pub fn compile_ast(
engines: Engines<'_>,
manifest: &PackageManifestFile,
build_target: BuildTarget,
build_profile: &BuildProfile,
namespace: namespace::Module,
) -> Result<CompileResult<ty::TyProgram>> {
let source = manifest.entry_string()?;
let sway_build_config = sway_build_config(
manifest.dir(),
&manifest.entry_path(),
build_target,
build_profile,
)?;
let ast_res = sway_core::compile_to_ast(engines, source, namespace, Some(&sway_build_config));
Ok(ast_res)
}
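/// Compile the given pinned package through the full pipeline (typed AST, ASM,
/// bytecode), returning the built package along with its root namespace.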
pub fn compile(
pkg: &Pinned,
manifest: &PackageManifestFile,
build_target: BuildTarget,
build_profile: &BuildProfile,
namespace: namespace::Module,
engines: Engines<'_>,
source_map: &mut SourceMap,
) -> Result<(BuiltPackage, namespace::Root)> {
macro_rules! time_expr {
($description:expr, $expression:expr) => {{
if build_profile.time_phases {
let expr_start = std::time::Instant::now();
let output = { $expression };
println!(
" Time elapsed to {}: {:?}",
$description,
expr_start.elapsed()
);
output
} else {
$expression
}
}};
}
let entry_path = manifest.entry_path();
let sway_build_config = time_expr!(
"produce `sway_core::BuildConfig`",
sway_build_config(manifest.dir(), &entry_path, build_target, build_profile)?
);
let terse_mode = build_profile.terse;
let fail = |warnings, errors| {
print_on_failure(terse_mode, warnings, errors);
bail!("Failed to compile {}", pkg.name);
};
let ast_res = time_expr!(
"compile to ast",
compile_ast(engines, manifest, build_target, build_profile, namespace)?
);
let typed_program = match ast_res.value.as_ref() {
None => return fail(&ast_res.warnings, &ast_res.errors),
Some(typed_program) => typed_program,
};
if build_profile.print_ast {
tracing::info!("{:#?}", typed_program);
}
let storage_slots = typed_program.storage_slots.clone();
let tree_type = typed_program.kind.tree_type();
let namespace = typed_program.root.namespace.clone().into();
if !ast_res.errors.is_empty() {
return fail(&ast_res.warnings, &ast_res.errors);
}
let asm_res = time_expr!(
"compile ast to asm",
sway_core::ast_to_asm(engines, &ast_res, &sway_build_config)
);
let mut json_abi_program = match build_target {
BuildTarget::Fuel => {
let mut types = vec![];
ProgramABI::Fuel(time_expr!(
"generate JSON ABI program",
typed_program.generate_json_abi_program(engines.te(), &mut types)
))
}
BuildTarget::EVM => match &asm_res.value {
Some(ref v) => v.0.abi.as_ref().unwrap().clone(),
None => todo!(),
},
};
let entries = asm_res
.value
.as_ref()
.map(|asm| asm.0.entries.clone())
.unwrap_or_default();
let bc_res = time_expr!(
"compile asm to bytecode",
sway_core::asm_to_bytecode(asm_res, source_map)
);
match bc_res.value {
Some(CompiledBytecode {
bytecode: bytes,
config_const_offsets: config_offsets,
}) if bc_res.errors.is_empty() => {
print_on_success(terse_mode, &pkg.name, &bc_res.warnings, &tree_type);
if let ProgramABI::Fuel(ref mut json_abi_program) = json_abi_program {
for (config, offset) in config_offsets {
if let Some(ref mut configurables) = json_abi_program.configurables {
if let Some(idx) = configurables.iter().position(|c| c.name == config) {
configurables[idx].offset = offset
}
}
}
}
let bytecode = bytes;
let built_package = BuiltPackage {
build_target,
json_abi_program,
storage_slots,
bytecode,
tree_type,
entries,
source_map: source_map.to_owned(),
pkg_name: pkg.name.clone(),
decl_engine: engines.de().clone(),
manifest_file: manifest.clone(),
};
Ok((built_package, namespace))
}
_ => fail(&bc_res.warnings, &bc_res.errors),
}
}
pub const SWAY_BIN_HASH_SUFFIX: &str = "-bin-hash";
pub const SWAY_BIN_ROOT_SUFFIX: &str = "-bin-root";
fn build_profile_from_opts(
build_profiles: &HashMap<String, BuildProfile>,
build_options: &BuildOpts,
) -> Result<(String, BuildProfile)> {
let BuildOpts {
pkg,
print,
build_profile,
release,
time_phases,
tests,
..
} = build_options;
let mut selected_build_profile = BuildProfile::DEBUG;
match &build_profile {
Some(build_profile) => {
if *release {
warn!(
"You specified both {} and 'release' profiles. Using the 'release' profile",
build_profile
);
selected_build_profile = BuildProfile::RELEASE;
} else {
selected_build_profile = build_profile;
}
}
None => {
if *release {
selected_build_profile = BuildProfile::RELEASE;
}
}
}
let mut profile = build_profiles
.get(selected_build_profile)
.cloned()
.unwrap_or_else(|| {
warn!(
"provided profile option {} is not present in the manifest file. \
Using default profile.",
selected_build_profile
);
Default::default()
});
profile.print_ast |= print.ast;
profile.print_dca_graph |= print.dca_graph;
profile.print_ir |= print.ir;
profile.print_finalized_asm |= print.finalized_asm;
profile.print_intermediate_asm |= print.intermediate_asm;
profile.terse |= pkg.terse;
profile.time_phases |= time_phases;
profile.include_tests |= tests;
Ok((selected_build_profile.to_string(), profile))
}
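/// Build the package or workspace at the configured path (defaulting to the
/// current directory) with the given options, writing all outputs to disk.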
pub fn build_with_options(build_options: BuildOpts) -> Result<Built> {
let BuildOpts {
minify,
binary_outfile,
debug_outfile,
pkg,
inject_map,
build_target,
..
} = &build_options;
let current_dir = std::env::current_dir()?;
let path = &build_options
.pkg
.path
.as_ref()
.map(PathBuf::from)
.unwrap_or(current_dir);
let build_plan = BuildPlan::from_build_opts(&build_options)?;
let graph = build_plan.graph();
let manifest_map = build_plan.manifest_map();
let curr_manifest = manifest_map
.values()
.find(|&pkg_manifest| pkg_manifest.dir() == path);
let build_profiles: HashMap<String, BuildProfile> = build_plan.build_profiles().collect();
let (profile_name, build_profile) = build_profile_from_opts(&build_profiles, &build_options)?;
let outputs = match curr_manifest {
Some(pkg_manifest) => std::iter::once(
build_plan
.find_member_index(&pkg_manifest.project.name)
.ok_or_else(|| anyhow!("Cannot found project node in the graph"))?,
)
.collect(),
None => build_plan.member_nodes().collect(),
};
let mut built_workspace = HashMap::new();
let built_packages = build(
&build_plan,
*build_target,
&build_profile,
&outputs,
inject_map,
)?;
let output_dir = pkg.output_directory.as_ref().map(PathBuf::from);
for (node_ix, built_package) in built_packages.into_iter() {
let pinned = &graph[node_ix];
let pkg_manifest = manifest_map
.get(&pinned.id())
.ok_or_else(|| anyhow!("Couldn't find member manifest for {}", pinned.name))?;
let output_dir = output_dir
.clone()
.unwrap_or_else(|| default_output_directory(pkg_manifest.dir()).join(&profile_name));
if let Some(outfile) = &binary_outfile {
built_package.write_bytecode(outfile.as_ref())?;
}
if let Some(outfile) = &debug_outfile {
built_package.write_debug_info(outfile.as_ref())?;
}
built_package.write_output(minify.clone(), &pkg_manifest.project.name, &output_dir)?;
built_workspace.insert(pinned.name.clone(), built_package);
}
match curr_manifest {
Some(pkg_manifest) => {
let built_pkg = built_workspace
.remove(&pkg_manifest.project.name)
.expect("package didn't exist in workspace");
Ok(Built::Package(Box::new(built_pkg)))
}
None => Ok(Built::Workspace(built_workspace)),
}
}
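/// Compute the contract ID from the package's bytecode root, the given salt,
/// and the initial state root derived from its storage slots.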
fn contract_id(built_package: &BuiltPackage, salt: &fuel_tx::Salt) -> ContractId {
let contract = Contract::from(built_package.bytecode.clone());
let mut storage_slots = built_package.storage_slots.clone();
storage_slots.sort();
let state_root = Contract::initial_state_root(storage_slots.iter());
contract.id(salt, &contract.root(), &state_root)
}
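/// Ensure that each contract dependency is declared with at most one distinct
/// salt across the whole graph.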
fn validate_contract_deps(graph: &Graph) -> Result<()> {
for node in graph.node_indices() {
let pkg = &graph[node];
let name = pkg.name.clone();
let salt_declarations: HashSet<fuel_tx::Salt> = graph
.edges_directed(node, Direction::Incoming)
.filter_map(|e| match e.weight().kind {
DepKind::Library => None,
DepKind::Contract { salt } => Some(salt),
})
.collect();
if salt_declarations.len() > 1 {
bail!(
"There are conflicting salt declarations for contract dependency named: {}\nDeclared salts: {:?}",
name,
salt_declarations,
)
}
}
Ok(())
}
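/// Compile everything required by `outputs` in topological order, threading
/// library namespaces and compiled contract dependencies through to their
/// dependents, and return the built packages requested in `outputs`.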
pub fn build(
plan: &BuildPlan,
target: BuildTarget,
profile: &BuildProfile,
outputs: &HashSet<NodeIx>,
inject_map: &ConstInjectionMap,
) -> anyhow::Result<Vec<(NodeIx, BuiltPackage)>> {
let mut built_packages = Vec::new();
let required: HashSet<NodeIx> = outputs
.iter()
.flat_map(|output_node| plan.node_deps(*output_node))
.collect();
let type_engine = TypeEngine::default();
let decl_engine = DeclEngine::default();
let engines = Engines::new(&type_engine, &decl_engine);
let mut lib_namespace_map = Default::default();
let mut compiled_contract_deps = HashMap::new();
for &node in plan
.compilation_order
.iter()
.filter(|node| required.contains(node))
{
let mut source_map = SourceMap::new();
let pkg = &plan.graph()[node];
let manifest = &plan.manifest_map()[&pkg.id()];
let constants = if let Some(injected_ctc) = inject_map.get(pkg) {
let mut constants = manifest.config_time_constants();
constants.extend(
injected_ctc
.iter()
.map(|(name, ctc)| (name.clone(), ctc.clone())),
);
constants
} else {
manifest.config_time_constants()
};
let dep_namespace = match dependency_namespace(
&lib_namespace_map,
&compiled_contract_deps,
&plan.graph,
node,
constants,
engines,
) {
Ok(o) => o,
Err(errs) => {
print_on_failure(profile.terse, &[], &errs);
bail!("Failed to compile {}", pkg.name);
}
};
let res = compile(
pkg,
manifest,
target,
profile,
dep_namespace,
engines,
&mut source_map,
)?;
let (mut built_package, namespace) = res;
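// If any dependent declares this package as a contract dependency, cache the
// built package so it is available when constructing dependents' namespaces.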
if plan
.graph()
.edges_directed(node, Direction::Incoming)
.any(|e| matches!(e.weight().kind, DepKind::Contract { .. }))
{
compiled_contract_deps.insert(node, built_package.clone());
}
if let TreeType::Library { .. } = built_package.tree_type {
lib_namespace_map.insert(node, namespace.into());
}
source_map.insert_dependency(manifest.dir());
if let ProgramABI::Fuel(ref mut json_abi_program) = built_package.json_abi_program {
standardize_json_abi_types(json_abi_program);
}
if outputs.contains(&node) {
built_packages.push((node, built_package));
}
}
Ok(built_packages)
}
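/// Deduplicates identical type declarations in the JSON ABI until a fixed
/// point is reached, then sorts the remaining declarations by type field and
/// reassigns consecutive type IDs, updating all references accordingly.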
fn standardize_json_abi_types(json_abi_program: &mut program_abi::ProgramABI) {
loop {
let mut old_to_new_id: HashMap<usize, usize> = HashMap::new();
let mut deduped_types: Vec<program_abi::TypeDeclaration> = Vec::new();
for decl in json_abi_program.types.iter() {
if let Some(ty) = deduped_types.iter().find(|d| {
d.type_field == decl.type_field
&& d.components == decl.components
&& d.type_parameters == decl.type_parameters
}) {
old_to_new_id.insert(decl.type_id, ty.type_id);
} else {
deduped_types.push(decl.clone());
}
}
if old_to_new_id.is_empty() {
break;
}
json_abi_program.types = deduped_types;
update_all_types(json_abi_program, &old_to_new_id);
}
json_abi_program
.types
.sort_by(|t1, t2| t1.type_field.cmp(&t2.type_field));
let mut old_to_new_id: HashMap<usize, usize> = HashMap::new();
for (ix, decl) in json_abi_program.types.iter_mut().enumerate() {
old_to_new_id.insert(decl.type_id, ix);
decl.type_id = ix;
}
update_all_types(json_abi_program, &old_to_new_id);
}
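/// Rewrites every type ID referenced by the JSON ABI (function inputs and
/// outputs, type declarations, logged types, message types, and
/// configurables) according to the given old-to-new ID mapping.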
fn update_all_types(
json_abi_program: &mut program_abi::ProgramABI,
old_to_new_id: &HashMap<usize, usize>,
) {
for func in json_abi_program.functions.iter_mut() {
for input in func.inputs.iter_mut() {
update_json_type_application(input, old_to_new_id);
}
update_json_type_application(&mut func.output, old_to_new_id);
}
for decl in json_abi_program.types.iter_mut() {
update_json_type_declaration(decl, old_to_new_id);
}
if let Some(logged_types) = &mut json_abi_program.logged_types {
for logged_type in logged_types.iter_mut() {
update_json_type_application(&mut logged_type.application, old_to_new_id);
}
}
if let Some(messages_types) = &mut json_abi_program.messages_types {
for message_type in messages_types.iter_mut() {
update_json_type_application(&mut message_type.application, old_to_new_id);
}
}
if let Some(configurables) = &mut json_abi_program.configurables {
for configurable in configurables.iter_mut() {
update_json_type_application(&mut configurable.application, old_to_new_id);
}
}
}
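/// Remaps the type ID of a type application and recurses into its type
/// arguments.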
fn update_json_type_application(
type_application: &mut program_abi::TypeApplication,
old_to_new_id: &HashMap<usize, usize>,
) {
if let Some(new_id) = old_to_new_id.get(&type_application.type_id) {
type_application.type_id = *new_id;
}
if let Some(args) = &mut type_application.type_arguments {
for arg in args.iter_mut() {
update_json_type_application(arg, old_to_new_id);
}
}
}
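/// Remaps the type parameters and components of a type declaration.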
fn update_json_type_declaration(
type_declaration: &mut program_abi::TypeDeclaration,
old_to_new_id: &HashMap<usize, usize>,
) {
if let Some(params) = &mut type_declaration.type_parameters {
for param in params.iter_mut() {
if let Some(new_id) = old_to_new_id.get(param) {
*param = *new_id;
}
}
}
if let Some(components) = &mut type_declaration.components {
for component in components.iter_mut() {
update_json_type_application(component, old_to_new_id);
}
}
}
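/// The result of compiling a package through its lexed, parsed, and
/// (optionally) typed stages.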
pub struct Programs {
pub lexed: LexedProgram,
pub parsed: ParseProgram,
pub typed: Option<ty::TyProgram>,
}
impl Programs {
pub fn new(
lexed: LexedProgram,
parsed: ParseProgram,
typed: Option<ty::TyProgram>,
) -> Programs {
Programs {
lexed,
parsed,
typed,
}
}
}
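/// Type-checks each package in the build plan in compilation order without
/// generating bytecode, returning one `CompileResult` per package. Checking
/// stops early at the first package that fails to parse or type-check, and an
/// error is returned if the plan contains no packages at all.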
pub fn check(
plan: &BuildPlan,
build_target: BuildTarget,
terse_mode: bool,
include_tests: bool,
engines: Engines<'_>,
) -> anyhow::Result<Vec<CompileResult<Programs>>> {
let mut lib_namespace_map = Default::default();
let mut source_map = SourceMap::new();
let compiled_contract_deps = HashMap::new();
let mut results = vec![];
for &node in plan.compilation_order.iter() {
let pkg = &plan.graph[node];
let manifest = &plan.manifest_map()[&pkg.id()];
let constants = manifest.config_time_constants();
let dep_namespace = dependency_namespace(
&lib_namespace_map,
&compiled_contract_deps,
&plan.graph,
node,
constants,
engines,
)
.expect("failed to create dependency namespace");
let CompileResult {
value,
mut warnings,
mut errors,
} = parse(manifest, build_target, terse_mode, include_tests, engines)?;
let (lexed, parsed) = match value {
None => {
results.push(CompileResult::new(None, warnings, errors));
return Ok(results);
}
Some(modules) => modules,
};
let ast_result = sway_core::parsed_to_ast(engines, &parsed, dep_namespace, None);
warnings.extend(ast_result.warnings);
errors.extend(ast_result.errors);
let typed_program = match ast_result.value {
None => {
let value = Some(Programs::new(lexed, parsed, None));
results.push(CompileResult::new(value, warnings, errors));
return Ok(results);
}
Some(typed_program) => typed_program,
};
if let TreeType::Library { .. } = typed_program.kind.tree_type() {
lib_namespace_map.insert(node, typed_program.root.namespace.clone());
}
source_map.insert_dependency(manifest.dir());
let value = Some(Programs::new(lexed, parsed, Some(typed_program)));
results.push(CompileResult::new(value, warnings, errors));
}
if results.is_empty() {
bail!("unable to check sway program: build plan contains no packages")
}
Ok(results)
}
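/// Parses the entry point of the given package manifest using a debug build
/// profile with the given terse mode, returning the lexed and parsed
/// programs. `include_tests` controls whether test code is parsed.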
pub fn parse(
manifest: &PackageManifestFile,
build_target: BuildTarget,
terse_mode: bool,
include_tests: bool,
engines: Engines<'_>,
) -> anyhow::Result<CompileResult<(LexedProgram, ParseProgram)>> {
let profile = BuildProfile {
terse: terse_mode,
..BuildProfile::debug()
};
let source = manifest.entry_string()?;
let sway_build_config = sway_build_config(
manifest.dir(),
&manifest.entry_path(),
build_target,
&profile,
)?
.include_tests(include_tests);
Ok(sway_core::parse(source, engines, Some(&sway_build_config)))
}
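/// Walks `dir` recursively, looking for a package manifest whose project name
/// matches `pkg_name`, and returns the path to that manifest if found.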
pub fn find_within(dir: &Path, pkg_name: &str) -> Option<PathBuf> {
walkdir::WalkDir::new(dir)
.into_iter()
.filter_map(Result::ok)
.filter(|entry| entry.path().ends_with(constants::MANIFEST_FILE_NAME))
.find_map(|entry| {
let path = entry.path();
let manifest = PackageManifest::from_file(path).ok()?;
if manifest.project.name == pkg_name {
Some(path.to_path_buf())
} else {
None
}
})
}
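/// Like `find_within`, but returns the directory containing the matching
/// manifest rather than the path to the manifest itself.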
pub fn find_dir_within(dir: &Path, pkg_name: &str) -> Option<PathBuf> {
find_within(dir, pkg_name).and_then(|path| path.parent().map(Path::to_path_buf))
}
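// Verifies that workspace member nodes are returned in dependency order: the
// library first, then the contract, then the script.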
#[test]
fn test_root_pkg_order() {
let current_dir = env!("CARGO_MANIFEST_DIR");
let manifest_dir = PathBuf::from(current_dir)
.parent()
.unwrap()
.join("test/src/e2e_vm_tests/test_programs/should_pass/forc/workspace_building/");
let manifest_file = ManifestFile::from_dir(&manifest_dir).unwrap();
let member_manifests = manifest_file.member_manifests().unwrap();
let lock_path = manifest_file.lock_path().unwrap();
let build_plan =
BuildPlan::from_lock_and_manifests(&lock_path, &member_manifests, false, false).unwrap();
let graph = build_plan.graph();
let order: Vec<String> = build_plan
.member_nodes()
.map(|node| graph[node].name.clone())
.collect();
assert_eq!(order, vec!["test_lib", "test_contract", "test_script"])
}
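// Round-trips pinned git source strings through `SourceGitPinned::from_str`
// and their `Display` implementation.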
#[test]
fn test_source_git_pinned_parsing() {
let strings = [
"git+https://github.com/foo/bar?branch=baz#64092602dd6158f3e41d775ed889389440a2cd86",
"git+https://github.com/fuellabs/sway-lib-std?tag=v0.1.0#0000000000000000000000000000000000000000",
"git+https://github.com/fuellabs/sway-lib-core?tag=v0.0.1#0000000000000000000000000000000000000000",
"git+https://some-git-host.com/owner/repo?rev#FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF",
"git+https://some-git-host.com/owner/repo?default-branch#AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA",
];
let expected = [
SourceGitPinned {
source: SourceGit {
repo: Url::parse("https://github.com/foo/bar").unwrap(),
reference: GitReference::Branch("baz".to_string()),
},
commit_hash: "64092602dd6158f3e41d775ed889389440a2cd86".to_string(),
},
SourceGitPinned {
source: SourceGit {
repo: Url::parse("https://github.com/fuellabs/sway-lib-std").unwrap(),
reference: GitReference::Tag("v0.1.0".to_string()),
},
commit_hash: "0000000000000000000000000000000000000000".to_string(),
},
SourceGitPinned {
source: SourceGit {
repo: Url::parse("https://github.com/fuellabs/sway-lib-core").unwrap(),
reference: GitReference::Tag("v0.0.1".to_string()),
},
commit_hash: "0000000000000000000000000000000000000000".to_string(),
},
SourceGitPinned {
source: SourceGit {
repo: Url::parse("https://some-git-host.com/owner/repo").unwrap(),
reference: GitReference::Rev(
"FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF".to_string(),
),
},
commit_hash: "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF".to_string(),
},
SourceGitPinned {
source: SourceGit {
repo: Url::parse("https://some-git-host.com/owner/repo").unwrap(),
reference: GitReference::DefaultBranch,
},
commit_hash: "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA".to_string(),
},
];
for (&string, expected) in strings.iter().zip(&expected) {
let parsed = SourceGitPinned::from_str(string).unwrap();
assert_eq!(&parsed, expected);
let serialized = expected.to_string();
assert_eq!(&serialized, string);
}
}
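/// Constructs the error reported when no package manifest can be found in
/// `dir` or any of its parent directories.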
pub fn manifest_file_missing(dir: &Path) -> anyhow::Error {
let message = format!(
"could not find `{}` in `{}` or any parent directory",
constants::MANIFEST_FILE_NAME,
dir.display()
);
Error::msg(message)
}
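/// Constructs the error reported when parsing a project fails, joining all
/// compile errors into a single message.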
pub fn parsing_failed(project_name: &str, errors: Vec<CompileError>) -> anyhow::Error {
let error = errors
.iter()
.map(|e| format!("{}", e))
.collect::<Vec<String>>()
.join("\n");
let message = format!("Parsing {} failed:\n{}", project_name, error);
Error::msg(message)
}
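/// Constructs the error reported when a project's program type does not match
/// any of the expected tree types.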
pub fn wrong_program_type(
project_name: &str,
expected_types: Vec<TreeType>,
parse_type: TreeType,
) -> anyhow::Error {
let message = format!(
"{} is not a '{:?}' it is a '{:?}'",
project_name, expected_types, parse_type
);
Error::msg(message)
}
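/// Constructs the error reported when no response is received from the node
/// at `node_url`.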
pub fn fuel_core_not_running(node_url: &str) -> anyhow::Error {
let message = format!("could not get a response from node at the URL {}. Start a node with `fuel-core`. See https://github.com/FuelLabs/fuel-core#running for more information", node_url);
Error::msg(message)
}