use crate::{
lock::Lock,
manifest::{BuildProfile, ConfigTimeConstant, Dependency, Manifest, ManifestFile},
CORE, STD,
};
use anyhow::{anyhow, bail, Context, Error, Result};
use forc_util::{
default_output_directory, find_file_name, git_checkouts_directory, kebab_to_snake_case,
print_on_failure, print_on_success, print_on_success_library,
};
use fuel_gql_client::fuel_tx::{Contract, StorageSlot};
use petgraph::{
self,
visit::{Bfs, Dfs, EdgeRef, Walker},
Directed, Direction,
};
use serde::{Deserialize, Serialize};
use std::{
collections::{hash_map, BTreeMap, BTreeSet, HashMap, HashSet},
fmt,
fs::{self, File},
hash::{Hash, Hasher},
path::{Path, PathBuf},
str::FromStr,
};
use sway_core::{
semantic_analysis::namespace, source_map::SourceMap, BytecodeCompilationResult,
CompileAstResult, CompileError, CompileResult, ParseProgram, TreeType,
};
use sway_types::{JsonABIProgram, JsonTypeApplication, JsonTypeDeclaration};
use sway_utils::constants;
use tracing::{info, warn};
use url::Url;
/// Index type used by the package graph.
type GraphIx = u32;
/// Graph nodes are pinned packages.
type Node = Pinned;
/// Graph edges carry the name the parent manifest uses for the dependency.
type Edge = DependencyName;
/// The package dependency graph. Edges point from dependent package to
/// dependency (see `fetch_deps`, which adds `node -> dep_node` edges).
pub type Graph = petgraph::stable_graph::StableGraph<Node, Edge, Directed, GraphIx>;
pub type EdgeIx = petgraph::graph::EdgeIndex<GraphIx>;
pub type NodeIx = petgraph::graph::NodeIndex<GraphIx>;
/// Maps a pinned package's unique ID to its loaded manifest.
pub type ManifestMap = HashMap<PinnedId, ManifestFile>;
/// A unique ID for a pinned package, produced by hashing the package's name
/// and its pinned source (see `PinnedId::new`).
#[derive(Copy, Clone, Debug, Eq, Hash, PartialEq, Deserialize, Serialize)]
pub struct PinnedId(u64);
/// The result of successfully compiling a package.
pub struct Compiled {
    /// The generated JSON ABI for the program.
    pub json_abi_program: JsonABIProgram,
    /// Storage slot initializers collected from the typed program.
    pub storage_slots: Vec<StorageSlot>,
    /// The compiled bytecode. Empty for library packages (see `compile`).
    pub bytecode: Vec<u8>,
    /// Whether the program is a script, contract, predicate or library.
    pub tree_type: TreeType,
}
/// A package descriptor prior to pinning: a name plus where to source it from.
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd, Deserialize, Serialize)]
pub struct Pkg {
    /// The package's name.
    pub name: String,
    /// Where the package should be fetched from.
    pub source: Source,
}
/// A package whose source has been pinned to an exact, reproducible version.
#[derive(Clone, Debug, Eq, Hash, PartialEq, Deserialize, Serialize)]
pub struct Pinned {
    /// The package's name.
    pub name: String,
    /// The pinned (exact) source of the package.
    pub source: SourcePinned,
}
/// Where a package may be sourced from, prior to pinning.
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd, Deserialize, Serialize)]
pub enum Source {
    /// The root project being built, located at the given path.
    Root(PathBuf),
    /// A remote git repository and the reference within it to fetch.
    Git(SourceGit),
    /// A local dependency at the given (canonicalized) path.
    Path(PathBuf),
    /// A registry dependency by version. Not yet supported (see `pin_pkg`).
    Registry(SourceRegistry),
}
/// An unpinned git source: a repository URL and the reference to fetch.
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd, Deserialize, Serialize)]
pub struct SourceGit {
    /// The URL of the git repository.
    pub repo: Url,
    /// The branch, tag, revision or default branch to check out.
    pub reference: GitReference,
}
/// The reference within a git repository that a dependency points at.
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd, Deserialize, Serialize)]
pub enum GitReference {
    /// A named branch.
    Branch(String),
    /// A tag name.
    Tag(String),
    /// An arbitrary revision string (as accepted by `revparse_single`).
    Rev(String),
    /// Whatever the remote's `HEAD` points at.
    DefaultBranch,
}
/// An unpinned registry source, identified by a semver version.
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd, Deserialize, Serialize)]
pub struct SourceRegistry {
    /// The requested version of the package.
    pub version: semver::Version,
}
/// A git source pinned to an exact commit hash.
#[derive(Clone, Debug, Eq, Hash, PartialEq, Deserialize, Serialize)]
pub struct SourceGitPinned {
    /// The original (unpinned) git source.
    pub source: SourceGit,
    /// The exact commit the reference resolved to at pin time.
    pub commit_hash: String,
}
/// A path source pinned relative to its "path root": the nearest ancestor in
/// the dependency graph that is not itself a path dependency (a git, registry
/// or root package — see `find_path_root`).
#[derive(Clone, Debug, Eq, Hash, PartialEq, Deserialize, Serialize)]
pub struct SourcePathPinned {
    /// The pinned ID of this path dependency's path root.
    pub path_root: PinnedId,
}
/// A registry source pinned to an exact version.
#[derive(Clone, Debug, Eq, Hash, PartialEq, Deserialize, Serialize)]
pub struct SourceRegistryPinned {
    /// The original (unpinned) registry source.
    pub source: SourceRegistry,
    /// The exact version resolved at pin time.
    pub version: semver::Version,
}
/// A package source that has been pinned to an exact, reproducible version.
#[derive(Clone, Debug, Eq, Hash, PartialEq, Deserialize, Serialize)]
pub enum SourcePinned {
    /// The root project itself; needs no pinning information.
    Root,
    /// A git source pinned to an exact commit.
    Git(SourceGitPinned),
    /// A path source pinned via its path root.
    Path(SourcePathPinned),
    /// A registry source pinned to an exact version.
    Registry(SourceRegistryPinned),
}
/// A fully resolved plan for building a project: the pinned dependency graph,
/// the loaded manifest for every pinned package, and a valid compilation order.
#[derive(Clone)]
pub struct BuildPlan {
    // The pinned package dependency graph.
    graph: Graph,
    // Manifest for each package in `graph`, keyed by pinned ID.
    manifest_map: ManifestMap,
    // Node indices in a valid order of compilation (dependencies first).
    compilation_order: Vec<NodeIx>,
}
/// Error parsing a `PinnedId` from its hex string form.
#[derive(Clone, Debug)]
pub struct PinnedIdParseError;
/// Error parsing a `SourcePathPinned` from its string form.
#[derive(Clone, Debug)]
pub struct SourcePathPinnedParseError;
/// The ways parsing a pinned git source string can fail, one variant per
/// component of the `git+<url>?<reference>#<commit-hash>` syntax.
#[derive(Clone, Debug)]
pub enum SourceGitPinnedParseError {
    Prefix,
    Url,
    Reference,
    CommitHash,
}
/// Error parsing a `SourcePinned` from its string form.
#[derive(Clone, Debug)]
pub struct SourcePinnedParseError;
/// The name a parent manifest uses to refer to a dependency — the weight
/// carried by each graph edge.
pub type DependencyName = String;
impl BuildPlan {
    /// Create a new build plan for the project from scratch: fetch and pin
    /// every dependency declared by the manifest, then determine the
    /// compilation order.
    pub fn from_manifest(manifest: &ManifestFile, offline: bool) -> Result<Self> {
        validate_version(manifest)?;
        let mut graph = Graph::default();
        let mut manifest_map = ManifestMap::default();
        fetch_graph(manifest, offline, &mut graph, &mut manifest_map)?;
        // The graph was just built from the manifest, so the set of invalid
        // edges returned here is not needed.
        validate_graph(&graph, manifest);
        let compilation_order = compilation_order(&graph)?;
        Ok(Self {
            graph,
            manifest_map,
            compilation_order,
        })
    }
    /// Build a plan from the lock file where possible, re-fetching whatever is
    /// missing or no longer matches the manifest. If the resulting pinned
    /// graph differs from the lock file, the lock file is rewritten — unless
    /// `locked` is set, in which case an error is returned instead.
    pub fn from_lock_and_manifest(
        manifest: &ManifestFile,
        locked: bool,
        offline: bool,
    ) -> Result<Self> {
        validate_version(manifest)?;
        // Records why a new lock file would need to be written, if at all.
        let mut new_lock_cause = None;
        let lock_path = forc_util::lock_path(manifest.dir());
        let lock = Lock::from_path(&lock_path).unwrap_or_else(|e| {
            // NOTE(review): matching on the OS error message string is brittle
            // (locale/platform dependent) — consider `io::ErrorKind::NotFound`.
            new_lock_cause = if e.to_string().contains("No such file or directory") {
                Some(anyhow!("lock file did not exist"))
            } else {
                Some(e)
            };
            Lock::default()
        });
        let mut graph = lock.to_graph().unwrap_or_else(|e| {
            new_lock_cause = Some(anyhow!("Invalid lock: {}", e));
            Graph::default()
        });
        // Strip any locked entries that no longer agree with the manifest.
        let invalid_deps = validate_graph(&graph, manifest);
        remove_deps(&mut graph, &manifest.project.name, &invalid_deps);
        let mut manifest_map = graph_to_manifest_map(manifest.clone(), &graph)?;
        // Fetch anything the (possibly pruned) locked graph is missing.
        let _added = fetch_graph(manifest, offline, &mut graph, &mut manifest_map)?;
        let compilation_order = compilation_order(&graph)?;
        let plan = Self {
            graph,
            manifest_map,
            compilation_order,
        };
        // Compare the freshly pinned graph against the lock file on disk.
        let new_lock = Lock::from_graph(plan.graph());
        let lock_diff = new_lock.diff(&lock);
        if !lock_diff.removed.is_empty() || !lock_diff.added.is_empty() {
            new_lock_cause.get_or_insert(anyhow!("lock file did not match manifest"));
        }
        if let Some(cause) = new_lock_cause {
            // `--locked` forbids rewriting the lock file.
            if locked {
                bail!(
                    "The lock file {} needs to be updated (Cause: {}) \
                    but --locked was passed to prevent this.",
                    lock_path.to_string_lossy(),
                    cause,
                );
            }
            info!(" Creating a new `Forc.lock` file. (Cause: {})", cause);
            crate::lock::print_diff(&manifest.project.name, &lock_diff);
            let string = toml::ser::to_string_pretty(&new_lock)
                .map_err(|e| anyhow!("failed to serialize lock file: {}", e))?;
            fs::write(&lock_path, &string)
                .map_err(|e| anyhow!("failed to write lock file: {}", e))?;
            info!(" Created new lock file at {}", lock_path.display());
        }
        Ok(plan)
    }
    /// View the plan's pinned package dependency graph.
    pub fn graph(&self) -> &Graph {
        &self.graph
    }
    /// View the map from pinned package IDs to their loaded manifests.
    pub fn manifest_map(&self) -> &ManifestMap {
        &self.manifest_map
    }
    /// Node indices in a valid order of compilation (dependencies first).
    pub fn compilation_order(&self) -> &[NodeIx] {
        &self.compilation_order
    }
}
fn potential_proj_nodes<'a>(g: &'a Graph, proj_name: &'a str) -> impl 'a + Iterator<Item = NodeIx> {
g.node_indices()
.filter(|&n| g.edges_directed(n, Direction::Incoming).next().is_none())
.filter(move |&n| g[n].name == proj_name)
}
/// Locate the unique project node in the graph.
///
/// Errors if no candidate exists, or if more than one does.
fn find_proj_node(graph: &Graph, proj_name: &str) -> Result<NodeIx> {
    let mut potentials = potential_proj_nodes(graph, proj_name);
    let first = potentials
        .next()
        .ok_or_else(|| anyhow!("graph contains no project node"))?;
    if potentials.next().is_some() {
        return Err(anyhow!("graph contains more than one project node"));
    }
    Ok(first)
}
/// Check that the currently running forc toolchain satisfies the manifest's
/// optional `forc_version` minimum requirement.
///
/// Returns an error pointing at the installation docs when the toolchain is
/// too old. Manifests without a `forc_version` entry always pass.
fn validate_version(pkg_manifest: &ManifestFile) -> Result<()> {
    // Only enforce when the project actually declares a minimum version.
    // (Idiom: `if let` replaces the old `match` with an empty `None` arm.)
    if let Some(min_forc_version) = &pkg_manifest.project.forc_version {
        let crate_version = env!("CARGO_PKG_VERSION");
        let toolchain_version = semver::Version::parse(crate_version)?;
        if toolchain_version < *min_forc_version {
            bail!(
                "{:?} requires forc version {} but current forc version is {}\nUpdate the toolchain by following: https://fuellabs.github.io/sway/v{}/introduction/installation.html",
                pkg_manifest.project.name,
                min_forc_version,
                crate_version,
                crate_version
            );
        }
    }
    Ok(())
}
/// Validate the locked graph against the project manifest, returning the set
/// of edges that no longer match and should be removed.
fn validate_graph(graph: &Graph, proj_manifest: &ManifestFile) -> BTreeSet<EdgeIx> {
    match find_proj_node(graph, &proj_manifest.project.name) {
        // Without a project root nothing in the graph can be trusted — flag
        // every edge for removal.
        Err(_) => graph.edge_indices().collect(),
        Ok(proj_node) => {
            let mut visited = HashSet::new();
            validate_deps(graph, proj_node, proj_manifest, &mut visited)
        }
    }
}
/// Recursively validate the outgoing dependency edges of `node` against its
/// manifest, collecting the edges that fail validation.
///
/// `visited` guards against revisiting shared dependency nodes. Edges whose
/// dependency fails `validate_dep` are collected rather than recursed into.
fn validate_deps(
    graph: &Graph,
    node: NodeIx,
    node_manifest: &ManifestFile,
    visited: &mut HashSet<NodeIx>,
) -> BTreeSet<EdgeIx> {
    let mut remove = BTreeSet::default();
    for edge in graph.edges_directed(node, Direction::Outgoing) {
        let dep_name = edge.weight();
        let dep_node = edge.target();
        match validate_dep(graph, node_manifest, dep_name, dep_node) {
            Err(_) => {
                remove.insert(edge.id());
            }
            Ok(dep_manifest) => {
                // Only recurse the first time we reach a dependency node.
                // (The redundant trailing `continue` was removed.)
                if visited.insert(dep_node) {
                    let rm = validate_deps(graph, dep_node, &dep_manifest, visited);
                    remove.extend(rm);
                }
            }
        }
    }
    remove
}
/// Check that the graph's `dep_node` still matches what `node_manifest`
/// declares for the dependency named `dep_name`, returning the dependency's
/// loaded manifest on success.
fn validate_dep(
    graph: &Graph,
    node_manifest: &ManifestFile,
    dep_name: &str,
    dep_node: NodeIx,
) -> Result<ManifestFile> {
    // Reconstruct the on-disk location (this also validates path roots).
    let dep_path = dep_path(graph, node_manifest, dep_name, dep_node).map_err(|e| {
        anyhow!(
            "failed to construct path for dependency {:?}: {}",
            dep_name,
            e
        )
    })?;
    // The dependency's manifest must still be loadable from that location.
    let dep_manifest = ManifestFile::from_dir(&dep_path)?;
    // The graph node's source must agree with the (patched) manifest entry.
    let dep_entry = node_manifest
        .dep(dep_name)
        .ok_or_else(|| anyhow!("no entry in parent manifest"))?;
    let dep_source = dep_to_source_patched(node_manifest, dep_name, dep_entry)?;
    let dep_pkg = graph[dep_node].unpinned(&dep_path);
    if dep_pkg.source != dep_source {
        bail!("dependency node's source does not match manifest entry");
    }
    validate_dep_manifest(&graph[dep_node], &dep_manifest)?;
    Ok(dep_manifest)
}
/// Ensure a dependency's own manifest is acceptable: it must be a library,
/// its project name must match the name used to depend on it, and its
/// `forc_version` requirement (if any) must be satisfied.
fn validate_dep_manifest(dep: &Pinned, dep_manifest: &ManifestFile) -> Result<()> {
    // Only library packages may be depended upon.
    if !matches!(dep_manifest.program_type()?, TreeType::Library { .. }) {
        bail!(
            "\"{}\" is not a library! Depending on a non-library package is not supported.",
            dep.name
        );
    }
    // The resolved package name must agree with the dependency's own manifest.
    if dep.name != dep_manifest.project.name {
        bail!(
            "dependency name {:?} must match the manifest project name {:?} \
            unless `package = {:?}` is specified in the dependency declaration",
            dep.name,
            dep_manifest.project.name,
            dep_manifest.project.name,
        );
    }
    validate_version(dep_manifest)?;
    Ok(())
}
/// Determine the on-disk directory of the given dependency node based on its
/// pinned source kind.
fn dep_path(
    graph: &Graph,
    node_manifest: &ManifestFile,
    dep_name: &str,
    dep_node: NodeIx,
) -> Result<PathBuf> {
    let dep = &graph[dep_node];
    match &dep.source {
        // Git dependencies live in the checkouts directory under the pinned
        // commit; search the checkout for the package's directory.
        SourcePinned::Git(git) => {
            let repo_path = git_commit_path(&dep.name, &git.source.repo, &git.commit_hash);
            find_dir_within(&repo_path, &dep.name).ok_or_else(|| {
                anyhow!(
                    "failed to find package `{}` in {}",
                    dep.name,
                    git.to_string()
                )
            })
        }
        SourcePinned::Path(src) => {
            // Confirm the recorded path root matches the graph structure.
            validate_path_root(graph, dep_node, src.path_root)?;
            // First, try the path from the dependency declaration itself.
            if let Some(path) = node_manifest.dep_path(dep_name) {
                if path.exists() {
                    return Ok(path);
                }
            }
            // Otherwise, the path may come from a `[patch]` table entry.
            for (_, patch_map) in node_manifest.patches() {
                if let Some(Dependency::Detailed(details)) = patch_map.get(dep_name) {
                    if let Some(ref rel_path) = details.path {
                        if let Ok(path) = node_manifest.dir().join(rel_path).canonicalize() {
                            if path.exists() {
                                return Ok(path);
                            }
                        }
                    }
                }
            }
            bail!(
                "no dependency or patch with name {:?} in manifest of {:?}",
                dep_name,
                node_manifest.project.name
            )
        }
        SourcePinned::Registry(_reg) => unreachable!("registry dependencies not yet supported"),
        SourcePinned::Root => unreachable!("a `Root` node cannot be a dependency"),
    }
}
/// Remove the given set of invalid edges from the graph, then prune any nodes
/// left unreachable from the project node.
///
/// If the graph has no project node or contains a cycle, it is cleared
/// entirely so it can be rebuilt from scratch.
fn remove_deps(graph: &mut Graph, proj_name: &str, edges_to_remove: &BTreeSet<EdgeIx>) {
    let proj_node = match find_proj_node(graph, proj_name) {
        Ok(node) => node,
        Err(_) => {
            graph.clear();
            return;
        }
    };
    // Capture the topological order before mutating; `StableGraph` indices
    // stay valid across the removals below.
    let node_removal_order = match petgraph::algo::toposort(&*graph, None) {
        Ok(nodes) => nodes,
        Err(_) => {
            graph.clear();
            return;
        }
    };
    for &edge in edges_to_remove {
        graph.remove_edge(edge);
    }
    // Visit in topological order so that parents are pruned before children,
    // letting whole orphaned subtrees unravel node by node.
    let mut nodes = node_removal_order.into_iter();
    // NOTE(review): this assumes the project node always sorts first, i.e.
    // it is the graph's only root at this point — confirm for edge cases.
    assert_eq!(nodes.next(), Some(proj_node));
    for node in nodes {
        if !has_parent(graph, node) {
            graph.remove_node(node);
        }
    }
}
/// Whether anything in the graph depends on the given node, i.e. whether it
/// has at least one incoming edge.
fn has_parent(graph: &Graph, node: NodeIx) -> bool {
    graph
        .neighbors_directed(node, Direction::Incoming)
        .next()
        .is_some()
}
impl GitReference {
    /// Resolve this reference to a concrete commit OID within the given
    /// (already fetched) repository.
    pub fn resolve(&self, repo: &git2::Repository) -> Result<git2::Oid> {
        // Tags are fetched under `refs/remotes/origin/tags/*` (see the refspec
        // in `git_ref_to_refspecs`); peel annotated tags through to the commit.
        fn resolve_tag(repo: &git2::Repository, tag: &str) -> Result<git2::Oid> {
            let refname = format!("refs/remotes/origin/tags/{}", tag);
            let id = repo.refname_to_id(&refname)?;
            let obj = repo.find_object(id, None)?;
            let obj = obj.peel(git2::ObjectType::Commit)?;
            Ok(obj.id())
        }
        // Branches are fetched under `refs/remotes/origin/<branch>`.
        fn resolve_branch(repo: &git2::Repository, branch: &str) -> Result<git2::Oid> {
            let name = format!("origin/{}", branch);
            let b = repo
                .find_branch(&name, git2::BranchType::Remote)
                .with_context(|| format!("failed to find branch `{}`", branch))?;
            b.get()
                .target()
                .ok_or_else(|| anyhow::format_err!("branch `{}` did not have a target", branch))
        }
        // The default branch is whatever the fetched remote `HEAD` points at.
        fn resolve_default_branch(repo: &git2::Repository) -> Result<git2::Oid> {
            let head_id = repo.refname_to_id("refs/remotes/origin/HEAD")?;
            let head = repo.find_object(head_id, None)?;
            Ok(head.peel(git2::ObjectType::Commit)?.id())
        }
        // Arbitrary revision strings; if the revision names an annotated tag,
        // use the tag's target commit.
        fn resolve_rev(repo: &git2::Repository, rev: &str) -> Result<git2::Oid> {
            let obj = repo.revparse_single(rev)?;
            match obj.as_tag() {
                Some(tag) => Ok(tag.target_id()),
                None => Ok(obj.id()),
            }
        }
        match self {
            GitReference::Tag(s) => {
                resolve_tag(repo, s).with_context(|| format!("failed to find tag `{}`", s))
            }
            GitReference::Branch(s) => resolve_branch(repo, s),
            GitReference::DefaultBranch => resolve_default_branch(repo),
            GitReference::Rev(s) => resolve_rev(repo, s),
        }
    }
}
impl Pinned {
    /// The unique ID of this pinned package, hashed from its name and source.
    pub fn id(&self) -> PinnedId {
        PinnedId::new(&self.name, &self.source)
    }
    /// Reconstruct the unpinned `Pkg` for this package, using `path` as the
    /// location for root- and path-sourced packages.
    pub fn unpinned(&self, path: &Path) -> Pkg {
        let source = match &self.source {
            SourcePinned::Root => Source::Root(path.to_path_buf()),
            SourcePinned::Git(git) => Source::Git(git.source.clone()),
            SourcePinned::Path(_) => Source::Path(path.to_path_buf()),
            SourcePinned::Registry(reg) => Source::Registry(reg.source.clone()),
        };
        Pkg {
            name: self.name.clone(),
            source,
        }
    }
}
impl PinnedId {
pub fn new(name: &str, source: &SourcePinned) -> Self {
let mut hasher = hash_map::DefaultHasher::default();
name.hash(&mut hasher);
source.hash(&mut hasher);
Self(hasher.finish())
}
}
impl SourcePathPinned {
    /// Prefix used in the string form of a pinned path source (`path+...`).
    pub const PREFIX: &'static str = "path";
}
impl SourceGitPinned {
    /// Prefix used in the string form of a pinned git source (`git+...`).
    pub const PREFIX: &'static str = "git";
}
impl fmt::Display for PinnedId {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{:016X}", self.0)
}
}
impl fmt::Display for SourcePathPinned {
    /// `path+from-root-<pinned-id>`
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let Self { path_root } = self;
        write!(f, "{}+from-root-{}", Self::PREFIX, path_root)
    }
}
impl fmt::Display for SourceGitPinned {
    /// `git+<url>?<reference>#<commit-hash>`
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}+", Self::PREFIX)?;
        write!(f, "{}?{}", self.source.repo, self.source.reference)?;
        write!(f, "#{}", self.commit_hash)
    }
}
impl fmt::Display for GitReference {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            GitReference::Branch(ref s) => write!(f, "branch={}", s),
            GitReference::Tag(ref s) => write!(f, "tag={}", s),
            // The revision string is deliberately omitted: `FromStr` for
            // `SourceGitPinned` reconstructs `Rev` from the pinned commit
            // hash, so only the marker `rev` is needed for a round-trip.
            GitReference::Rev(ref _s) => write!(f, "rev"),
            GitReference::DefaultBranch => write!(f, "default-branch"),
        }
    }
}
impl fmt::Display for SourcePinned {
    /// Delegate to the inner pinned source's display form; the root project
    /// is simply `root`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            SourcePinned::Root => f.write_str("root"),
            SourcePinned::Path(src) => write!(f, "{}", src),
            SourcePinned::Git(src) => write!(f, "{}", src),
            SourcePinned::Registry(_reg) => unimplemented!("pkg registries not yet implemented"),
        }
    }
}
impl FromStr for PinnedId {
type Err = PinnedIdParseError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
Ok(Self(
u64::from_str_radix(s, 16).map_err(|_| PinnedIdParseError)?,
))
}
}
impl FromStr for SourcePathPinned {
    type Err = SourcePathPinnedParseError;
    /// Parse a pinned path source of the form `path+from-root-<pinned-id>`.
    ///
    /// Uses `strip_prefix` rather than the previous `find(..) != Some(0)` /
    /// `split("from-root-").nth(1)` approach, which also accepted malformed
    /// input with junk before the `from-root-` marker.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let s = s.trim();
        // Strip the `path+` prefix.
        let s = s
            .strip_prefix(Self::PREFIX)
            .and_then(|rest| rest.strip_prefix('+'))
            .ok_or(SourcePathPinnedParseError)?;
        // The remainder must be `from-root-<hex pinned id>`.
        let path_root = s
            .strip_prefix("from-root-")
            .ok_or(SourcePathPinnedParseError)?
            .parse()
            .map_err(|_| SourcePathPinnedParseError)?;
        Ok(Self { path_root })
    }
}
impl FromStr for SourceGitPinned {
    type Err = SourceGitPinnedParseError;
    /// Parse a pinned git source of the form:
    /// `git+<url>?<reference>#<commit-hash>`
    /// where `<reference>` is `branch=<name>`, `tag=<name>`, `rev` or
    /// `default-branch`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let s = s.trim();
        // Strip the `git+` prefix.
        let s = s
            .strip_prefix(Self::PREFIX)
            .and_then(|rest| rest.strip_prefix('+'))
            .ok_or(SourceGitPinnedParseError::Prefix)?;
        // Split the URL from the reference. The previous implementation
        // indexed one past the URL (`&s[repo_str.len() + 1..]`) and panicked
        // when no `?` was present; `split_once` reports an error instead.
        let (repo_str, s) = s
            .split_once('?')
            .ok_or(SourceGitPinnedParseError::Reference)?;
        let repo = Url::parse(repo_str).map_err(|_| SourceGitPinnedParseError::Url)?;
        // Split the reference from the commit hash.
        let (reference, commit_hash) = s
            .split_once('#')
            .ok_or(SourceGitPinnedParseError::CommitHash)?;
        let commit_hash = commit_hash.to_string();
        validate_git_commit_hash(&commit_hash)
            .map_err(|_| SourceGitPinnedParseError::CommitHash)?;
        const BRANCH: &str = "branch=";
        const TAG: &str = "tag=";
        let reference = if let Some(branch) = reference.strip_prefix(BRANCH) {
            GitReference::Branch(branch.to_string())
        } else if let Some(tag) = reference.strip_prefix(TAG) {
            GitReference::Tag(tag.to_string())
        } else if reference == "rev" {
            // `Display` omits the original revision string; recover it from
            // the pinned commit hash.
            GitReference::Rev(commit_hash.clone())
        } else if reference == "default-branch" {
            GitReference::DefaultBranch
        } else {
            return Err(SourceGitPinnedParseError::Reference);
        };
        let source = SourceGit { repo, reference };
        Ok(Self {
            source,
            commit_hash,
        })
    }
}
impl FromStr for SourcePinned {
type Err = SourcePinnedParseError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let source = if s == "root" {
SourcePinned::Root
} else if let Ok(src) = SourcePathPinned::from_str(s) {
SourcePinned::Path(src)
} else if let Ok(src) = SourceGitPinned::from_str(s) {
SourcePinned::Git(src)
} else {
return Err(SourcePinnedParseError);
};
Ok(source)
}
}
fn validate_git_commit_hash(commit_hash: &str) -> Result<()> {
const LEN: usize = 40;
if commit_hash.len() != LEN {
bail!(
"invalid hash length: expected {}, found {}",
LEN,
commit_hash.len()
);
}
if !commit_hash.chars().all(|c| c.is_ascii_alphanumeric()) {
bail!("hash contains one or more non-ascii-alphanumeric characters");
}
Ok(())
}
impl Default for GitReference {
fn default() -> Self {
Self::DefaultBranch
}
}
/// Produce a valid order of compilation by topologically sorting the reversed
/// graph, so dependencies come before their dependents.
///
/// On a cycle, the error lists each cyclic path found in the graph.
pub fn compilation_order(graph: &Graph) -> Result<Vec<NodeIx>> {
    let reversed = petgraph::visit::Reversed(&graph);
    petgraph::algo::toposort(reversed, None).map_err(|_| {
        // Describe every strongly connected component with more than one node.
        let mut cycles = String::new();
        let scc = petgraph::algo::kosaraju_scc(&graph);
        for cyclic_path in scc.iter().filter(|component| component.len() > 1) {
            // Lead with the last node so the printed path forms a loop.
            let starting_node = &graph[*cyclic_path.last().unwrap()];
            cycles.push_str(&starting_node.name);
            cycles.push_str(" -> ");
            for (i, node) in cyclic_path.iter().enumerate() {
                cycles.push_str(&graph[*node].name);
                if i + 1 != cyclic_path.len() {
                    cycles.push_str(" -> ");
                }
            }
            cycles.push('\n');
        }
        anyhow!("dependency cycle detected: {}", cycles)
    })
}
/// Load the manifest of every package in the graph, keyed by pinned ID.
///
/// Traverses breadth-first from the project node so that each dependency's
/// path can be derived from a parent whose manifest is already loaded.
fn graph_to_manifest_map(proj_manifest: ManifestFile, graph: &Graph) -> Result<ManifestMap> {
    let mut manifest_map = ManifestMap::new();
    // With no project node in the graph there is nothing to load yet.
    let proj_node = match find_proj_node(graph, &proj_manifest.project.name) {
        Ok(node) => node,
        Err(_) => return Ok(manifest_map),
    };
    let proj_id = graph[proj_node].id();
    manifest_map.insert(proj_id, proj_manifest);
    let mut bfs = Bfs::new(graph, proj_node);
    // Skip the project node itself — its manifest was inserted above.
    bfs.next(graph);
    while let Some(dep_node) = bfs.next(graph) {
        // Find any parent whose manifest is already loaded; BFS order
        // guarantees at least one parent has been visited before this node.
        let (parent_manifest, dep_name) = graph
            .edges_directed(dep_node, Direction::Incoming)
            .filter_map(|edge| {
                let parent_node = edge.source();
                let dep_name = edge.weight();
                let parent = &graph[parent_node];
                let parent_manifest = manifest_map.get(&parent.id())?;
                Some((parent_manifest, dep_name))
            })
            .next()
            // NOTE(review): this fires when no visited parent has a loaded
            // manifest; the message may not match the actual failure — confirm.
            .ok_or_else(|| anyhow!("more than one root package detected in graph"))?;
        let dep_path = dep_path(graph, parent_manifest, dep_name, dep_node).map_err(|e| {
            anyhow!(
                "failed to construct path for dependency {:?}: {}",
                dep_name,
                e
            )
        })?;
        let dep_manifest = ManifestFile::from_dir(&dep_path)?;
        let dep = &graph[dep_node];
        manifest_map.insert(dep.id(), dep_manifest);
    }
    Ok(manifest_map)
}
fn validate_path_root(graph: &Graph, path_dep: NodeIx, path_root: PinnedId) -> Result<()> {
let path_root_node = find_path_root(graph, path_dep)?;
if graph[path_root_node].id() != path_root {
bail!(
"invalid `path_root` for path dependency package {:?}",
&graph[path_dep].name
)
}
Ok(())
}
/// Walk up the graph from the given node to find its "path root": the first
/// ancestor that is not itself a path dependency (i.e. a git, registry or
/// root package).
fn find_path_root(graph: &Graph, mut node: NodeIx) -> Result<NodeIx> {
    loop {
        let pkg = &graph[node];
        match &pkg.source {
            SourcePinned::Path(src) => {
                // Step to any one parent; a path dependency with no parent at
                // all is malformed.
                let parent = graph
                    .edges_directed(node, Direction::Incoming)
                    .next()
                    .map(|edge| edge.source())
                    .ok_or_else(|| {
                        anyhow!(
                            "Failed to find path root: `path` dependency \"{}\" has no parent",
                            src
                        )
                    })?;
                node = parent;
            }
            // Any non-path source terminates the walk.
            SourcePinned::Git(_) | SourcePinned::Registry(_) | SourcePinned::Root => {
                return Ok(node);
            }
        }
    }
}
/// A unique identifier for one fetch pass, derived from the root manifest
/// directory and the instant at which fetching began. Used to namespace
/// temporary git checkout directories.
pub fn fetch_id(path: &Path, timestamp: std::time::Instant) -> u64 {
    let mut hasher = hash_map::DefaultHasher::default();
    // Tuple hashing feeds `path` then `timestamp`, matching hashing each in turn.
    (path, timestamp).hash(&mut hasher);
    hasher.finish()
}
/// Fetch and pin any packages declared by `proj_manifest` that are missing
/// from the graph, loading their manifests into `manifest_map`. Returns the
/// set of newly added nodes.
fn fetch_graph(
    proj_manifest: &ManifestFile,
    offline: bool,
    graph: &mut Graph,
    manifest_map: &mut ManifestMap,
) -> Result<HashSet<NodeIx>> {
    // Ensure the graph contains a root node for the project itself.
    let proj_node = match find_proj_node(graph, &proj_manifest.project.name) {
        Ok(proj_node) => proj_node,
        Err(_) => {
            let name = proj_manifest.project.name.clone();
            let source = SourcePinned::Root;
            let pkg = Pinned { name, source };
            let pkg_id = pkg.id();
            manifest_map.insert(pkg_id, proj_manifest.clone());
            graph.add_node(pkg)
        }
    };
    // A unique ID for this fetch pass, used for temporary checkout dirs.
    let fetch_ts = std::time::Instant::now();
    let fetch_id = fetch_id(proj_manifest.dir(), fetch_ts);
    let path_root = graph[proj_node].id();
    // Seed `fetched` with everything already in the graph so existing pins
    // are reused rather than re-fetched.
    let mut fetched = graph
        .node_indices()
        .map(|n| {
            let pinned = &graph[n];
            let manifest = &manifest_map[&pinned.id()];
            let pkg = pinned.unpinned(manifest.dir());
            (pkg, n)
        })
        .collect();
    let mut visited = HashSet::default();
    fetch_deps(
        fetch_id,
        offline,
        proj_node,
        path_root,
        graph,
        manifest_map,
        &mut fetched,
        &mut visited,
    )
}
/// Recursively fetch and pin the dependencies of `node`, reusing
/// already-fetched packages where possible. Returns nodes newly added to the
/// graph.
#[allow(clippy::too_many_arguments)]
fn fetch_deps(
    fetch_id: u64,
    offline: bool,
    node: NodeIx,
    path_root: PinnedId,
    graph: &mut Graph,
    manifest_map: &mut ManifestMap,
    fetched: &mut HashMap<Pkg, NodeIx>,
    visited: &mut HashSet<NodeIx>,
) -> Result<HashSet<NodeIx>> {
    let mut added = HashSet::default();
    let parent_id = graph[node].id();
    // Clone the dependency list out so `manifest_map` can be borrowed mutably
    // inside the loop below.
    let deps: Vec<_> = manifest_map[&parent_id]
        .deps()
        .map(|(n, d)| (n.clone(), d.clone()))
        .collect();
    for (dep_name, dep) in deps {
        // A `package` field lets a dependency be declared under another name.
        let name = dep.package().unwrap_or(&dep_name).to_string();
        let source = dep_to_source_patched(&manifest_map[&parent_id], &name, &dep)
            .context("Failed to source dependency")?;
        // Pin and add the package only if this exact source is new.
        let dep_pkg = Pkg { name, source };
        let dep_node = match fetched.entry(dep_pkg) {
            hash_map::Entry::Occupied(entry) => *entry.get(),
            hash_map::Entry::Vacant(entry) => {
                let dep_pinned = pin_pkg(fetch_id, path_root, entry.key(), manifest_map, offline)?;
                let dep_node = graph.add_node(dep_pinned);
                added.insert(dep_node);
                *entry.insert(dep_node)
            }
        };
        // Ensure an edge exists under the name the parent uses for this dep.
        graph.update_edge(node, dep_node, dep_name.to_string());
        // Recurse into each node only once.
        if !visited.insert(dep_node) {
            continue;
        }
        let dep_pinned = &graph[dep_node];
        let dep_pkg_id = dep_pinned.id();
        validate_dep_manifest(dep_pinned, &manifest_map[&dep_pkg_id]).map_err(|e| {
            let parent = &graph[node];
            anyhow!(
                "dependency of {:?} named {:?} is invalid: {}",
                parent.name,
                dep_name,
                e
            )
        })?;
        // Path dependencies inherit their parent's path root; any other
        // source kind starts a new path-root scope.
        let path_root = match dep_pinned.source {
            SourcePinned::Root | SourcePinned::Git(_) | SourcePinned::Registry(_) => dep_pkg_id,
            SourcePinned::Path(_) => path_root,
        };
        added.extend(fetch_deps(
            fetch_id,
            offline,
            dep_node,
            path_root,
            graph,
            manifest_map,
            fetched,
            visited,
        )?);
    }
    Ok(added)
}
/// Checkout directory name for a repository: `<pkg-name>-<hash-of-url>`.
fn git_repo_dir_name(name: &str, repo: &Url) -> String {
    format!("{}-{:x}", name, hash_url(repo))
}
/// Hash a URL to a 64-bit value for use in checkout directory names.
fn hash_url(url: &Url) -> u64 {
    let mut hasher = hash_map::DefaultHasher::default();
    url.hash(&mut hasher);
    hasher.finish()
}
/// Directory for a temporary fetch of the given repo, namespaced by the
/// current fetch pass so concurrent fetches don't collide.
fn tmp_git_repo_dir(fetch_id: u64, name: &str, repo: &Url) -> PathBuf {
    let dir_name = format!("{:x}-{}", fetch_id, git_repo_dir_name(name, repo));
    git_checkouts_directory().join("tmp").join(dir_name)
}
/// Translate a `GitReference` into the refspecs to fetch, plus whether tags
/// should also be downloaded.
fn git_ref_to_refspecs(reference: &GitReference) -> (Vec<String>, bool) {
    match reference {
        GitReference::Branch(s) => (
            vec![format!("+refs/heads/{0}:refs/remotes/origin/{0}", s)],
            false,
        ),
        GitReference::Tag(s) => (
            vec![format!("+refs/tags/{0}:refs/remotes/origin/tags/{0}", s)],
            false,
        ),
        // A fully-qualified ref can be fetched directly.
        GitReference::Rev(s) if s.starts_with("refs/") => (vec![format!("+{0}:{0}", s)], false),
        // Otherwise fetch everything (branches, HEAD and tags) so the
        // revision can be resolved locally.
        GitReference::Rev(_) => (
            vec![
                "+refs/heads/*:refs/remotes/origin/*".to_string(),
                "+HEAD:refs/remotes/origin/HEAD".to_string(),
            ],
            true,
        ),
        GitReference::DefaultBranch => {
            (vec!["+HEAD:refs/remotes/origin/HEAD".to_string()], false)
        }
    }
}
/// Initialise a temporary git repository, fetch the given source's refspecs
/// into it, run `f` on the repository, then remove the temporary directory.
fn with_tmp_git_repo<F, O>(fetch_id: u64, name: &str, source: &SourceGit, f: F) -> Result<O>
where
    F: FnOnce(git2::Repository) -> Result<O>,
{
    // Clear any stale temporary checkout left by a previous run.
    let repo_dir = tmp_git_repo_dir(fetch_id, name, &source.repo);
    if repo_dir.exists() {
        let _ = std::fs::remove_dir_all(&repo_dir);
    }
    let repo = git2::Repository::init(&repo_dir)
        .map_err(|e| anyhow!("failed to init repo at \"{}\": {}", repo_dir.display(), e))?;
    let (refspecs, tags) = git_ref_to_refspecs(&source.reference);
    let mut fetch_opts = git2::FetchOptions::new();
    if tags {
        fetch_opts.download_tags(git2::AutotagOption::All);
    }
    repo.remote_anonymous(source.repo.as_str())?
        .fetch(&refspecs, Some(&mut fetch_opts), None)
        .with_context(|| format!("failed to fetch `{}`", &source.repo))?;
    let output = f(repo)?;
    // Best-effort cleanup; note this is skipped if `f` returns an error above.
    let _ = std::fs::remove_dir_all(&repo_dir);
    Ok(output)
}
/// Pin the given git source to an exact commit hash by fetching its reference
/// into a temporary repository and resolving it.
pub fn pin_git(fetch_id: u64, name: &str, source: SourceGit) -> Result<SourceGitPinned> {
    let commit_hash = with_tmp_git_repo(fetch_id, name, &source, |repo| {
        // Resolve the declared reference to a concrete commit ID.
        let commit_id = source
            .reference
            .resolve(&repo)
            .with_context(|| "failed to resolve reference".to_string())?;
        Ok(format!("{}", commit_id))
    })?;
    Ok(SourceGitPinned {
        source,
        commit_hash,
    })
}
/// Pin the given package, loading its manifest into `manifest_map` and, for
/// git sources, fetching the pinned checkout to disk if not already cached.
fn pin_pkg(
    fetch_id: u64,
    path_root: PinnedId,
    pkg: &Pkg,
    manifest_map: &mut ManifestMap,
    offline: bool,
) -> Result<Pinned> {
    let name = pkg.name.clone();
    let pinned = match &pkg.source {
        // The root project is already on disk — just record its manifest.
        Source::Root(path) => {
            let source = SourcePinned::Root;
            let pinned = Pinned { name, source };
            let id = pinned.id();
            let manifest = ManifestFile::from_dir(path)?;
            manifest_map.insert(id, manifest);
            pinned
        }
        // Path dependencies are pinned relative to their path root.
        Source::Path(path) => {
            let path_pinned = SourcePathPinned { path_root };
            let source = SourcePinned::Path(path_pinned);
            let pinned = Pinned { name, source };
            let id = pinned.id();
            let manifest = ManifestFile::from_dir(path)?;
            manifest_map.insert(id, manifest);
            pinned
        }
        Source::Git(ref git_source) => {
            // Pinning a git reference requires network access.
            if offline {
                bail!(
                    "Unable to fetch pkg {:?} from {:?} in offline mode",
                    name,
                    git_source.repo
                );
            }
            let pinned_git = pin_git(fetch_id, &name, git_source.clone())?;
            let repo_path =
                git_commit_path(&name, &pinned_git.source.repo, &pinned_git.commit_hash);
            let source = SourcePinned::Git(pinned_git.clone());
            let pinned = Pinned { name, source };
            let id = pinned.id();
            if let hash_map::Entry::Vacant(entry) = manifest_map.entry(id) {
                // Only fetch the checkout if this exact commit isn't cached.
                if !repo_path.exists() {
                    info!(" Fetching {}", pinned_git.to_string());
                    fetch_git(fetch_id, &pinned.name, &pinned_git)?;
                }
                let path = find_dir_within(&repo_path, &pinned.name).ok_or_else(|| {
                    anyhow!(
                        "failed to find package `{}` in {}",
                        pinned.name,
                        pinned_git.to_string()
                    )
                })?;
                let manifest = ManifestFile::from_dir(&path)?;
                entry.insert(manifest);
            }
            pinned
        }
        Source::Registry(ref _source) => {
            if offline {
                bail!("Unable to fetch pkg {:?} in offline mode", name);
            }
            bail!("registry dependencies are not yet supported");
        }
    };
    Ok(pinned)
}
/// The permanent checkout location for a pinned git package:
/// `<checkouts>/<name>-<url-hash>/<commit-hash>`.
pub fn git_commit_path(name: &str, repo: &Url, commit_hash: &str) -> PathBuf {
    git_checkouts_directory()
        .join(git_repo_dir_name(name, repo))
        .join(commit_hash)
}
/// Fetch the pinned commit and check it out into the permanent checkouts
/// directory, returning the checkout path.
pub fn fetch_git(fetch_id: u64, name: &str, pinned: &SourceGitPinned) -> Result<PathBuf> {
    let path = git_commit_path(name, &pinned.source.repo, &pinned.commit_hash);
    // Fetch into a temporary repo, then materialise the pinned commit's tree
    // at `path`.
    with_tmp_git_repo(fetch_id, name, &pinned.source, |repo| {
        let id = git2::Oid::from_str(&pinned.commit_hash)?;
        repo.set_head_detached(id)?;
        // Replace any partial/stale checkout for this commit.
        if path.exists() {
            let _ = std::fs::remove_dir_all(&path);
        }
        std::fs::create_dir_all(&path)?;
        let mut checkout = git2::build::CheckoutBuilder::new();
        checkout.force().target_dir(&path);
        repo.checkout_head(Some(&mut checkout))?;
        Ok(())
    })?;
    Ok(path)
}
/// Convert a manifest `Dependency` declaration into an unpinned `Source`,
/// resolving relative paths against the declaring package's directory.
fn dep_to_source(pkg_path: &Path, dep: &Dependency) -> Result<Source> {
    let source = match dep {
        // Bare version strings (registry shorthand) are not yet supported.
        Dependency::Simple(ref ver_str) => {
            bail!(
                "Unsupported dependency declaration in \"{}\": `{}` - \
                currently only `git` and `path` dependencies are supported",
                pkg_path.display(),
                ver_str
            )
        }
        Dependency::Detailed(ref det) => match (&det.path, &det.version, &det.git) {
            // `path = "..."` — canonicalized relative to the declaring package.
            (Some(relative_path), _, _) => {
                let path = pkg_path.join(relative_path);
                let canonical_path = path.canonicalize().map_err(|e| {
                    anyhow!("Failed to canonicalize dependency path {:?}: {}", path, e)
                })?;
                Source::Path(canonical_path)
            }
            // `git = "..."` — with at most one of `branch`/`tag`/`rev`.
            (_, _, Some(repo)) => {
                let reference = match (&det.branch, &det.tag, &det.rev) {
                    (Some(branch), None, None) => GitReference::Branch(branch.clone()),
                    (None, Some(tag), None) => GitReference::Tag(tag.clone()),
                    (None, None, Some(rev)) => GitReference::Rev(rev.clone()),
                    (None, None, None) => GitReference::DefaultBranch,
                    _ => bail!(
                        "git dependencies support at most one reference: \
                        either `branch`, `tag` or `rev`"
                    ),
                };
                let repo = Url::parse(repo)?;
                let source = SourceGit { repo, reference };
                Source::Git(source)
            }
            _ => {
                bail!("unsupported set of fields for dependency: {:?}", dep);
            }
        },
    };
    Ok(source)
}
/// Look up a `[patch]` entry for the given dependency. Only git sources can
/// be patched; the patch table is keyed by repository URL.
fn dep_source_patch<'manifest>(
    manifest: &'manifest ManifestFile,
    dep_name: &str,
    dep_source: &Source,
) -> Option<&'manifest Dependency> {
    if let Source::Git(git) = dep_source {
        let patches = manifest.patch(git.repo.as_str())?;
        return patches.get(dep_name);
    }
    None
}
/// Replace the dependency's source with its `[patch]` entry when one exists;
/// otherwise return the source unchanged.
fn apply_patch(manifest: &ManifestFile, dep_name: &str, dep_source: &Source) -> Result<Source> {
    if let Some(patch) = dep_source_patch(manifest, dep_name, dep_source) {
        dep_to_source(manifest.dir(), patch)
    } else {
        Ok(dep_source.clone())
    }
}
/// Resolve a manifest dependency declaration to a `Source`, then apply any
/// matching `[patch]` entry.
fn dep_to_source_patched(
    manifest: &ManifestFile,
    dep_name: &str,
    dep: &Dependency,
) -> Result<Source> {
    dep_to_source(manifest.dir(), dep)
        .and_then(|unpatched| apply_patch(manifest, dep_name, &unpatched))
}
/// Construct the `sway_core::BuildConfig` for a package from its manifest
/// directory, entry file and the selected build profile's print options.
pub fn sway_build_config(
    manifest_dir: &Path,
    entry_path: &Path,
    build_profile: &BuildProfile,
) -> Result<sway_core::BuildConfig> {
    // Locate the entry file relative to the manifest directory.
    let file_name = find_file_name(manifest_dir, entry_path)?;
    let base_config = sway_core::BuildConfig::root_from_file_name_and_manifest_path(
        file_name.to_path_buf(),
        manifest_dir.to_path_buf(),
    );
    // Apply the profile's output options.
    Ok(base_config
        .print_finalized_asm(build_profile.print_finalized_asm)
        .print_intermediate_asm(build_profile.print_intermediate_asm)
        .print_ir(build_profile.print_ir)
        .generate_logged_types(build_profile.generate_logged_types))
}
/// Construct the namespace a node is compiled against: the project's
/// config-time constants plus one submodule per direct dependency. If `core`
/// is not a direct dependency but is reachable through the graph, it is also
/// inserted (presumably so implicitly-referenced `core` items resolve —
/// confirm against `sway_core`'s prelude handling).
pub fn dependency_namespace(
    namespace_map: &HashMap<NodeIx, namespace::Module>,
    graph: &Graph,
    node: NodeIx,
    constants: BTreeMap<String, ConfigTimeConstant>,
) -> Result<namespace::Module, vec1::Vec1<CompileError>> {
    let mut namespace = namespace::Module::default_with_constants(constants)?;
    let mut core_added = false;
    for edge in graph.edges_directed(node, Direction::Outgoing) {
        let dep_node = edge.target();
        let dep_namespace = &namespace_map[&dep_node];
        // Dependency names may be kebab-case; module names are snake_case.
        let dep_name = kebab_to_snake_case(edge.weight());
        namespace.insert_submodule(dep_name, dep_namespace.clone());
        let dep = &graph[dep_node];
        if dep.name == CORE {
            core_added = true;
        }
    }
    // If `core` wasn't a direct dependency, pull it in transitively.
    if !core_added {
        if let Some(core_node) = find_core_dep(graph, node) {
            let core_namespace = &namespace_map[&core_node];
            namespace.insert_submodule(CORE.to_string(), core_namespace.clone());
        }
    }
    Ok(namespace)
}
/// Find a node providing the `core` package reachable from `node`, if any:
/// first among direct dependencies, then recursively via a `std` dependency,
/// and finally by a full depth-first search of the subtree.
fn find_core_dep(graph: &Graph, node: NodeIx) -> Option<NodeIx> {
    let pkg = &graph[node];
    // `core` itself never needs a `core` dependency injected.
    if pkg.name == CORE {
        return None;
    }
    let mut maybe_std = None;
    for edge in graph.edges_directed(node, Direction::Outgoing) {
        let dep_node = edge.target();
        let dep = &graph[dep_node];
        match &dep.name[..] {
            CORE => return Some(dep_node),
            STD => maybe_std = Some(dep_node),
            _ => (),
        }
    }
    // `std` is the usual provider of `core`.
    if let Some(std) = maybe_std {
        return find_core_dep(graph, std);
    }
    // Last resort: search the whole reachable subtree.
    for dep_node in Dfs::new(graph, node).iter(graph) {
        let dep = &graph[dep_node];
        if dep.name == CORE {
            return Some(dep_node);
        }
    }
    None
}
/// Parse and type-check the package's entry source against the given
/// namespace, producing the AST compilation result.
pub fn compile_ast(
    manifest: &ManifestFile,
    build_profile: &BuildProfile,
    namespace: namespace::Module,
) -> Result<CompileAstResult> {
    let source = manifest.entry_string()?;
    let entry_path = manifest.entry_path();
    let build_config = sway_build_config(manifest.dir(), &entry_path, build_profile)?;
    Ok(sway_core::compile_to_ast(source, namespace, Some(&build_config)))
}
/// Compile `pkg` from its manifest's entry point, using `namespace` to
/// resolve items from already-compiled dependencies.
///
/// Returns the compiled artifacts plus, for library packages, the package's
/// root namespace so that dependents can link against it. Libraries produce
/// an empty bytecode vector; contracts, predicates and scripts are lowered
/// through ASM all the way to bytecode.
pub fn compile(
    pkg: &Pinned,
    manifest: &ManifestFile,
    build_profile: &BuildProfile,
    namespace: namespace::Module,
    source_map: &mut SourceMap,
) -> Result<(Compiled, Option<namespace::Root>)> {
    // When the profile's `time_phases` flag is set, wraps an expression and
    // prints the wall-clock time the phase took; otherwise evaluates it as-is.
    macro_rules! time_expr {
        ($description:expr, $expression:expr) => {{
            if build_profile.time_phases {
                let expr_start = std::time::Instant::now();
                let output = { $expression };
                println!(
                    " Time elapsed to {}: {:?}",
                    $description,
                    expr_start.elapsed()
                );
                output
            } else {
                $expression
            }
        }};
    }
    let entry_path = manifest.entry_path();
    let sway_build_config = time_expr!(
        "produce `sway_core::BuildConfig`",
        sway_build_config(manifest.dir(), &entry_path, build_profile,)?
    );
    let silent_mode = build_profile.silent;
    // Parse and type-check the whole program first.
    let ast_res = time_expr!(
        "compile to ast",
        compile_ast(manifest, build_profile, namespace,)?
    );
    match &ast_res {
        CompileAstResult::Failure { warnings, errors } => {
            print_on_failure(silent_mode, warnings, errors);
            bail!("Failed to compile {}", pkg.name);
        }
        CompileAstResult::Success {
            typed_program,
            warnings,
        } => {
            if build_profile.print_ast {
                tracing::info!("{:#?}", typed_program);
            }
            let mut types = vec![];
            let json_abi_program = time_expr!(
                "generate JSON ABI program",
                typed_program.generate_json_abi_program(&mut types)
            );
            let storage_slots = typed_program.storage_slots.clone();
            let tree_type = typed_program.kind.tree_type();
            match tree_type {
                // Libraries produce no bytecode; their namespace is returned
                // instead so that dependent packages can import from them.
                TreeType::Library { .. } => {
                    print_on_success_library(silent_mode, &pkg.name, warnings);
                    let bytecode = vec![];
                    let lib_namespace = typed_program.root.namespace.clone();
                    let compiled = Compiled {
                        json_abi_program,
                        storage_slots,
                        bytecode,
                        tree_type,
                    };
                    Ok((compiled, Some(lib_namespace.into())))
                }
                // Executable program kinds are lowered AST -> ASM -> bytecode.
                TreeType::Contract | TreeType::Predicate | TreeType::Script => {
                    let asm_res = time_expr!(
                        "compile ast to asm",
                        sway_core::ast_to_asm(ast_res, &sway_build_config)
                    );
                    let bc_res = time_expr!(
                        "compile asm to bytecode",
                        sway_core::asm_to_bytecode(asm_res, source_map)
                    );
                    match bc_res {
                        BytecodeCompilationResult::Success { bytes, warnings } => {
                            print_on_success(silent_mode, &pkg.name, &warnings, &tree_type);
                            let bytecode = bytes;
                            let compiled = Compiled {
                                json_abi_program,
                                storage_slots,
                                bytecode,
                                tree_type,
                            };
                            Ok((compiled, None))
                        }
                        // Libraries were already returned in the branch above,
                        // so the backend can never report one here.
                        BytecodeCompilationResult::Library { .. } => {
                            unreachable!("compilation of library program types is handled above")
                        }
                        BytecodeCompilationResult::Failure { errors, warnings } => {
                            print_on_failure(silent_mode, &warnings, &errors);
                            bail!("Failed to compile {}", pkg.name);
                        }
                    }
                }
            }
        }
    }
}
/// Options controlling a package build, typically populated from CLI flags
/// and consumed by `build_with_options`.
#[derive(Default)]
pub struct BuildOptions {
    /// Path to the package directory; defaults to the current directory.
    pub path: Option<String>,
    /// Print the typed program AST after type checking (OR'd into the profile).
    pub print_ast: bool,
    /// Print the finalized ASM (OR'd into the profile).
    pub print_finalized_asm: bool,
    /// Print the intermediate ASM (OR'd into the profile).
    pub print_intermediate_asm: bool,
    /// Print the generated IR (OR'd into the profile).
    pub print_ir: bool,
    /// If set, additionally write the raw bytecode to this file.
    pub binary_outfile: Option<String>,
    /// If set, write the JSON-serialized source map to this file.
    pub debug_outfile: Option<String>,
    /// Forwarded to build-plan creation; presumably avoids network access
    /// when resolving dependencies — confirm in `BuildPlan::from_lock_and_manifest`.
    pub offline_mode: bool,
    /// Suppress diagnostic output (OR'd into the profile's `silent`).
    pub silent_mode: bool,
    /// Override for the artifact output directory.
    pub output_directory: Option<String>,
    /// Write the JSON ABI compactly rather than pretty-printed.
    pub minify_json_abi: bool,
    /// Write the storage-slot JSON compactly rather than pretty-printed.
    pub minify_json_storage_slots: bool,
    /// Forwarded to build-plan creation; presumably requires the lock file
    /// to be up to date — confirm in `BuildPlan::from_lock_and_manifest`.
    pub locked: bool,
    /// Name of the manifest build profile to select.
    pub build_profile: Option<String>,
    /// Shortcut selecting the "release" profile; takes precedence over
    /// `build_profile` (with a warning) when both are given.
    pub release: bool,
    /// Print the time taken by each compilation phase (OR'd into the profile).
    pub time_phases: bool,
    /// Generate logged types in the JSON ABI (OR'd into the profile).
    pub generate_logged_types: bool,
}
/// Suffix appended to the project name for the output file holding a
/// script's bytecode hash.
pub const SWAY_BIN_HASH_SUFFIX: &str = "-bin-hash";
/// Suffix appended to the project name for the output file holding a
/// predicate's root.
pub const SWAY_BIN_ROOT_SUFFIX: &str = "-bin-root";
/// Build the package described by `build_options`, write all artifacts
/// (bytecode, JSON ABI, and the program-kind-specific extras: storage slots,
/// predicate root, or script bytecode hash) into the output directory, and
/// return the compiled package.
pub fn build_with_options(build_options: BuildOptions) -> Result<Compiled> {
    let key_debug: String = "debug".to_string();
    let key_release: String = "release".to_string();
    let BuildOptions {
        path,
        binary_outfile,
        debug_outfile,
        print_ast,
        print_finalized_asm,
        print_intermediate_asm,
        print_ir,
        offline_mode,
        silent_mode,
        output_directory,
        minify_json_abi,
        minify_json_storage_slots,
        locked,
        build_profile,
        release,
        time_phases,
        generate_logged_types,
    } = build_options;
    // Resolve the profile name: the `release` flag wins over an explicitly
    // named profile (with a warning); the default is "debug".
    let mut selected_build_profile = key_debug;
    match &build_profile {
        Some(build_profile) => {
            if release {
                warn!(
                    "You specified both {} and 'release' profiles. Using the 'release' profile",
                    build_profile
                );
                selected_build_profile = key_release;
            } else {
                selected_build_profile = build_profile.clone();
            }
        }
        None => {
            if release {
                selected_build_profile = key_release;
            }
        }
    }
    // Locate the manifest and construct the dependency build plan.
    let this_dir = if let Some(ref path) = path {
        PathBuf::from(path)
    } else {
        std::env::current_dir()?
    };
    let manifest = ManifestFile::from_dir(&this_dir)?;
    let plan = BuildPlan::from_lock_and_manifest(&manifest, locked, offline_mode)?;
    // Fetch the selected profile from the manifest (falling back to the
    // default profile with a warning), then overlay the CLI flags on top.
    let mut profile = manifest
        .build_profile(&selected_build_profile)
        .cloned()
        .unwrap_or_else(|| {
            warn!(
                "provided profile option {} is not present in the manifest file. \
                 Using default profile.",
                selected_build_profile
            );
            Default::default()
        });
    profile.print_ast |= print_ast;
    profile.print_ir |= print_ir;
    profile.print_finalized_asm |= print_finalized_asm;
    profile.print_intermediate_asm |= print_intermediate_asm;
    profile.silent |= silent_mode;
    profile.time_phases |= time_phases;
    profile.generate_logged_types |= generate_logged_types;
    // Run the actual build over the whole plan.
    let (compiled, source_map) = build(&plan, &profile)?;
    // Optional extra outputs requested on the command line.
    if let Some(outfile) = binary_outfile {
        fs::write(&outfile, &compiled.bytecode)?;
    }
    if let Some(outfile) = debug_outfile {
        let source_map_json = serde_json::to_vec(&source_map).expect("JSON serialization failed");
        fs::write(outfile, &source_map_json)?;
    }
    // Determine and create the output directory (a per-profile subdirectory
    // of the default output dir unless overridden).
    let output_dir = output_directory
        .map(PathBuf::from)
        .unwrap_or_else(|| default_output_directory(manifest.dir()).join(selected_build_profile));
    if !output_dir.exists() {
        fs::create_dir_all(&output_dir)?;
    }
    // Always write the bytecode as `<name>.bin`.
    let bin_path = output_dir
        .join(&manifest.project.name)
        .with_extension("bin");
    fs::write(&bin_path, &compiled.bytecode)?;
    // Write the JSON ABI as `<name>-abi.json` when the program exposes any
    // functions.
    if !compiled.json_abi_program.functions.is_empty() {
        let json_abi_program_stem = format!("{}-abi", manifest.project.name);
        let json_abi_program_path = output_dir
            .join(&json_abi_program_stem)
            .with_extension("json");
        let file = File::create(json_abi_program_path)?;
        let res = if minify_json_abi {
            serde_json::to_writer(&file, &compiled.json_abi_program)
        } else {
            serde_json::to_writer_pretty(&file, &compiled.json_abi_program)
        };
        res?;
    }
    info!(" Bytecode size is {} bytes.", compiled.bytecode.len());
    // Program-kind-specific artifacts.
    match compiled.tree_type {
        // Contracts: persist the storage slot layout as JSON.
        TreeType::Contract => {
            let json_storage_slots_stem = format!("{}-storage_slots", manifest.project.name);
            let json_storage_slots_path = output_dir
                .join(&json_storage_slots_stem)
                .with_extension("json");
            let file = File::create(json_storage_slots_path)?;
            let res = if minify_json_storage_slots {
                serde_json::to_writer(&file, &compiled.storage_slots)
            } else {
                serde_json::to_writer_pretty(&file, &compiled.storage_slots)
            };
            res?;
        }
        // Predicates: compute and persist the predicate root.
        TreeType::Predicate => {
            let root = format!("0x{}", Contract::root_from_code(&compiled.bytecode));
            let root_file_name = format!("{}{}", &manifest.project.name, SWAY_BIN_ROOT_SUFFIX);
            let root_path = output_dir.join(root_file_name);
            fs::write(root_path, &root)?;
            info!(" Predicate root: {}", root);
        }
        // Scripts: compute and persist the bytecode hash.
        TreeType::Script => {
            let bytecode_hash = format!("0x{}", fuel_crypto::Hasher::hash(&compiled.bytecode));
            let hash_file_name = format!("{}{}", &manifest.project.name, SWAY_BIN_HASH_SUFFIX);
            let hash_path = output_dir.join(hash_file_name);
            fs::write(hash_path, &bytecode_hash)?;
            info!(" Script bytecode hash: {}", bytecode_hash);
        }
        // Remaining kind (library) produces no extra artifact.
        _ => (),
    }
    Ok(compiled)
}
/// Build every package in `plan` in compilation order, merging each
/// package's JSON ABI fragments and storage slots into one combined program,
/// and return the final package's compiled output along with the accumulated
/// source map.
pub fn build(plan: &BuildPlan, profile: &BuildProfile) -> anyhow::Result<(Compiled, SourceMap)> {
    sway_core::clear_lazy_statics();
    // Namespaces exported by already-built library packages, keyed by graph node.
    let mut namespace_map = Default::default();
    let mut source_map = SourceMap::new();
    let mut json_abi_program = JsonABIProgram {
        types: vec![],
        functions: vec![],
        logged_types: vec![],
    };
    let mut storage_slots = vec![];
    let mut bytecode = vec![];
    let mut tree_type = None;
    for &node in &plan.compilation_order {
        let pkg = &plan.graph()[node];
        let manifest = &plan.manifest_map()[&pkg.id()];
        let constants = manifest.config_time_constants();
        // Assemble the namespace this package sees from its dependencies'
        // exported namespaces; report failures instead of panicking.
        let dep_namespace = match dependency_namespace(&namespace_map, &plan.graph, node, constants)
        {
            Ok(o) => o,
            Err(errs) => {
                print_on_failure(profile.silent, &[], &errs);
                bail!("Failed to compile {}", pkg.name);
            }
        };
        let res = compile(pkg, manifest, profile, dep_namespace, &mut source_map)?;
        let (compiled, maybe_namespace) = res;
        // Only library packages return a namespace for their dependents.
        if let Some(namespace) = maybe_namespace {
            namespace_map.insert(node, namespace.into());
        }
        // Merge this package's ABI fragments into the combined program.
        json_abi_program
            .types
            .extend(compiled.json_abi_program.types);
        json_abi_program
            .functions
            .extend(compiled.json_abi_program.functions);
        json_abi_program
            .logged_types
            .extend(compiled.json_abi_program.logged_types);
        storage_slots.extend(compiled.storage_slots);
        // Overwritten every iteration: the surviving values belong to the
        // last package in the compilation order (the build root).
        bytecode = compiled.bytecode;
        tree_type = Some(compiled.tree_type);
        source_map.insert_dependency(manifest.dir());
    }
    // Deduplicate and renumber type ids across the merged ABI.
    standardize_json_abi_types(&mut json_abi_program);
    let tree_type =
        tree_type.ok_or_else(|| anyhow!("build plan must contain at least one package"))?;
    let compiled = Compiled {
        bytecode,
        json_abi_program,
        storage_slots,
        tree_type,
    };
    Ok((compiled, source_map))
}
/// Deduplicate type declarations in the merged JSON ABI and renumber all
/// type ids into a compact, deterministic ordering.
///
/// The loop repeatedly maps each duplicate declaration's id onto the id of
/// its first-seen equivalent, rewrites every reference, and runs again until
/// no duplicates remain (merging ids can reveal new duplicates). Finally the
/// declarations are sorted by type name and re-indexed `0..n`.
fn standardize_json_abi_types(json_abi_program: &mut JsonABIProgram) {
    loop {
        // Map from each duplicate's id to its representative's id.
        // NOTE(review): deduplication assumes `JsonTypeDeclaration`'s
        // `Hash`/`Eq` compare structural content rather than `type_id` —
        // confirm in `sway-types`.
        let mut old_to_new_id: HashMap<usize, usize> = HashMap::new();
        let mut types_set: HashSet<JsonTypeDeclaration> = HashSet::new();
        for decl in json_abi_program.types.iter_mut() {
            if let Some(ty) = types_set.get(decl) {
                old_to_new_id.insert(decl.type_id, ty.type_id);
            } else {
                types_set.insert(decl.clone());
            }
        }
        // Fixed point reached: no duplicates were found this pass.
        if old_to_new_id.is_empty() {
            break;
        }
        // Keep exactly one declaration per unique type — the representative
        // whose id survived in `types_set`.
        let mut filtered_types = vec![];
        for t in json_abi_program.types.iter() {
            if let Some(ty) = types_set.get(t) {
                if ty.type_id == t.type_id {
                    filtered_types.push((*ty).clone());
                    types_set.remove(t);
                }
            }
        }
        json_abi_program.types = filtered_types;
        // Rewrite every reference to a removed duplicate's id.
        update_all_types(json_abi_program, &old_to_new_id);
    }
    // Sort declarations by type name for a deterministic layout, then
    // renumber ids into the compact range 0..n and fix up all references.
    json_abi_program
        .types
        .sort_by(|t1, t2| t1.type_field.cmp(&t2.type_field));
    let mut old_to_new_id: HashMap<usize, usize> = HashMap::new();
    for (ix, decl) in json_abi_program.types.iter_mut().enumerate() {
        old_to_new_id.insert(decl.type_id, ix);
        decl.type_id = ix;
    }
    update_all_types(json_abi_program, &old_to_new_id);
}
/// Apply `old_to_new_id` to every type id that appears anywhere in the ABI
/// program: function inputs and outputs, type declarations, and logged types.
fn update_all_types(json_abi_program: &mut JsonABIProgram, old_to_new_id: &HashMap<usize, usize>) {
    for function in json_abi_program.functions.iter_mut() {
        function
            .inputs
            .iter_mut()
            .for_each(|input| update_json_type_application(input, old_to_new_id));
        update_json_type_application(&mut function.output, old_to_new_id);
    }
    json_abi_program
        .types
        .iter_mut()
        .for_each(|decl| update_json_type_declaration(decl, old_to_new_id));
    json_abi_program
        .logged_types
        .iter_mut()
        .for_each(|logged| update_json_type_application(&mut logged.logged_type, old_to_new_id));
}
/// Remap the type id of a type application (and, recursively, of all its
/// generic type arguments) according to `old_to_new_id`.
fn update_json_type_application(
    type_application: &mut JsonTypeApplication,
    old_to_new_id: &HashMap<usize, usize>,
) {
    // Remap this application's own id when it appears in the mapping.
    if let Some(&new_id) = old_to_new_id.get(&type_application.type_id) {
        type_application.type_id = new_id;
    }
    // Recurse into any generic type arguments.
    if let Some(args) = type_application.type_arguments.as_mut() {
        for arg in args {
            update_json_type_application(arg, old_to_new_id);
        }
    }
}
/// Remap the type ids referenced by a type declaration's type parameters and
/// components according to `old_to_new_id`.
fn update_json_type_declaration(
    type_declaration: &mut JsonTypeDeclaration,
    old_to_new_id: &HashMap<usize, usize>,
) {
    // Type parameters hold bare ids; rewrite each one that was remapped.
    if let Some(params) = type_declaration.type_parameters.as_mut() {
        for param in params {
            if let Some(&new_id) = old_to_new_id.get(param) {
                *param = new_id;
            }
        }
    }
    // Components are full type applications; recurse through each.
    if let Some(components) = type_declaration.components.as_mut() {
        for component in components {
            update_json_type_application(component, old_to_new_id);
        }
    }
}
pub fn check(
plan: &BuildPlan,
silent_mode: bool,
) -> anyhow::Result<(CompileResult<ParseProgram>, CompileAstResult)> {
sway_core::clear_lazy_statics();
let mut namespace_map = Default::default();
let mut source_map = SourceMap::new();
for (i, &node) in plan.compilation_order.iter().enumerate() {
let pkg = &plan.graph[node];
let manifest = &plan.manifest_map()[&pkg.id()];
let constants = manifest.config_time_constants();
let dep_namespace =
dependency_namespace(&namespace_map, &plan.graph, node, constants).expect("TODO");
let parsed_result = parse(manifest, silent_mode)?;
let parse_program = match &parsed_result.value {
None => bail!("unable to parse"),
Some(program) => program,
};
let ast_result = sway_core::parsed_to_ast(parse_program, dep_namespace, false);
let typed_program = match &ast_result {
CompileAstResult::Failure { .. } => bail!("unable to type check"),
CompileAstResult::Success { typed_program, .. } => typed_program,
};
if let TreeType::Library { .. } = typed_program.kind.tree_type() {
namespace_map.insert(node, typed_program.root.namespace.clone());
}
source_map.insert_dependency(manifest.dir());
if i == plan.compilation_order.len() - 1 {
return Ok((parsed_result, ast_result));
}
}
bail!("unable to check sway program: build plan contains no packages")
}
/// Parse the manifest's entry point into a `ParseProgram` using a debug
/// build profile, optionally silencing diagnostic output.
pub fn parse(
    manifest: &ManifestFile,
    silent_mode: bool,
) -> anyhow::Result<CompileResult<ParseProgram>> {
    // A debug profile with only the silence flag overridden.
    let profile = BuildProfile {
        silent: silent_mode,
        ..BuildProfile::debug()
    };
    let source = manifest.entry_string()?;
    let entry_path = manifest.entry_path();
    let build_config = sway_build_config(manifest.dir(), &entry_path, &profile)?;
    Ok(sway_core::parse(source, Some(&build_config)))
}
/// Recursively search `dir` for a package manifest whose project name
/// matches `pkg_name`, returning the path to the manifest file if found.
///
/// Unreadable directory entries and unparsable manifests are skipped.
pub fn find_within(dir: &Path, pkg_name: &str) -> Option<PathBuf> {
    for entry in walkdir::WalkDir::new(dir).into_iter().flatten() {
        let path = entry.path();
        if !path.ends_with(constants::MANIFEST_FILE_NAME) {
            continue;
        }
        let manifest = match Manifest::from_file(path) {
            Ok(manifest) => manifest,
            Err(_) => continue,
        };
        if manifest.project.name == pkg_name {
            return Some(path.to_path_buf());
        }
    }
    None
}
/// Like `find_within`, but returns the directory containing the matching
/// manifest rather than the manifest file path itself.
pub fn find_dir_within(dir: &Path, pkg_name: &str) -> Option<PathBuf> {
    let manifest_path = find_within(dir, pkg_name)?;
    manifest_path.parent().map(Path::to_path_buf)
}
/// Round-trip test for `SourceGitPinned`'s `FromStr`/`Display`: each pinned
/// git source string must parse to the expected value, and serializing that
/// value must reproduce the original string exactly.
#[test]
fn test_source_git_pinned_parsing() {
    // Inputs cover branch, tag, rev, and default-branch references.
    let strings = [
        "git+https://github.com/foo/bar?branch=baz#64092602dd6158f3e41d775ed889389440a2cd86",
        "git+https://github.com/fuellabs/sway-lib-std?tag=v0.1.0#0000000000000000000000000000000000000000",
        "git+https://github.com/fuellabs/sway-lib-core?tag=v0.0.1#0000000000000000000000000000000000000000",
        "git+https://some-git-host.com/owner/repo?rev#FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF",
        "git+https://some-git-host.com/owner/repo?default-branch#AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA",
    ];
    // Expected parse results, positionally matching `strings`.
    let expected = [
        SourceGitPinned {
            source: SourceGit {
                repo: Url::parse("https://github.com/foo/bar").unwrap(),
                reference: GitReference::Branch("baz".to_string()),
            },
            commit_hash: "64092602dd6158f3e41d775ed889389440a2cd86".to_string(),
        },
        SourceGitPinned {
            source: SourceGit {
                repo: Url::parse("https://github.com/fuellabs/sway-lib-std").unwrap(),
                reference: GitReference::Tag("v0.1.0".to_string()),
            },
            commit_hash: "0000000000000000000000000000000000000000".to_string(),
        },
        SourceGitPinned {
            source: SourceGit {
                repo: Url::parse("https://github.com/fuellabs/sway-lib-core").unwrap(),
                reference: GitReference::Tag("v0.0.1".to_string()),
            },
            commit_hash: "0000000000000000000000000000000000000000".to_string(),
        },
        SourceGitPinned {
            source: SourceGit {
                repo: Url::parse("https://some-git-host.com/owner/repo").unwrap(),
                reference: GitReference::Rev(
                    "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF".to_string(),
                ),
            },
            commit_hash: "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF".to_string(),
        },
        SourceGitPinned {
            source: SourceGit {
                repo: Url::parse("https://some-git-host.com/owner/repo").unwrap(),
                reference: GitReference::DefaultBranch,
            },
            commit_hash: "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA".to_string(),
        },
    ];
    // Parse each string and check both directions of the round trip.
    for (&string, expected) in strings.iter().zip(&expected) {
        let parsed = SourceGitPinned::from_str(string).unwrap();
        assert_eq!(&parsed, expected);
        let serialized = expected.to_string();
        assert_eq!(&serialized, string);
    }
}
pub fn manifest_file_missing(dir: &Path) -> anyhow::Error {
let message = format!(
"could not find `{}` in `{}` or any parent directory",
constants::MANIFEST_FILE_NAME,
dir.display()
);
Error::msg(message)
}
/// Error summarizing all parse errors for `project_name`, rendered one per
/// line beneath a header.
pub fn parsing_failed(project_name: &str, errors: Vec<CompileError>) -> anyhow::Error {
    // `to_string` is the idiomatic (and clippy-preferred) way to render a
    // `Display` value, replacing the previous `format!("{}", e)`.
    let error = errors
        .iter()
        .map(|e| e.to_string())
        .collect::<Vec<String>>()
        .join("\n");
    let message = format!("Parsing {} failed: \n{}", project_name, error);
    Error::msg(message)
}
pub fn wrong_program_type(
project_name: &str,
expected_types: Vec<TreeType>,
parse_type: TreeType,
) -> anyhow::Error {
let message = format!(
"{} is not a '{:?}' it is a '{:?}'",
project_name, expected_types, parse_type
);
Error::msg(message)
}
pub fn fuel_core_not_running(node_url: &str) -> anyhow::Error {
let message = format!("could not get a response from node at the URL {}. Start a node with `fuel-core`. See https://github.com/FuelLabs/fuel-core#running for more information", node_url);
Error::msg(message)
}