#![allow(clippy::doc_overindented_list_items)]
use core::fmt;
use std::cmp::Ordering;
use std::collections::BTreeMap;
use std::sync::LazyLock;
use nonempty::NonEmpty;
use serde::{Deserialize, Serialize};
use serde_json as json;
use thiserror::Error;
use crate::git;
use crate::git::canonical;
use crate::git::canonical::Canonical;
use crate::git::fmt::refspec::QualifiedPattern;
use crate::git::fmt::Qualified;
use crate::git::fmt::{refname, RefString};
use crate::identity::{doc, Did};
/// Wildcard character recognized inside reference patterns.
const ASTERISK: char = '*';

/// Protected namespace prefix: rules may never target `refs/rad` (see
/// `TryFrom<QualifiedPattern> for Pattern`).
static REFS_RAD: LazyLock<RefString> = LazyLock::new(|| refname!("refs/rad"));

/// Private marker trait restricting which types may be used for [`Rule`]'s
/// generic parameters during deserialization (see the `serde(bound)` on
/// `Rule`). Only the raw pair `Allowed`/`usize` implements it.
trait Sealed {}
impl Sealed for Allowed {}
impl Sealed for usize {}
/// A qualified reference pattern (e.g. `refs/heads/*`) used as the key of a
/// rule.
///
/// Serialized as its underlying [`QualifiedPattern`]; deserialization goes
/// through `TryFrom<QualifiedPattern>`, which rejects patterns under the
/// protected `refs/rad` namespace.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(into = "QualifiedPattern", try_from = "QualifiedPattern")]
pub struct Pattern(QualifiedPattern<'static>);
impl fmt::Display for Pattern {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str(self.0.as_str())
}
}
impl From<Pattern> for QualifiedPattern<'static> {
fn from(Pattern(pattern): Pattern) -> Self {
pattern
}
}
impl<'a> TryFrom<QualifiedPattern<'a>> for Pattern {
    type Error = PatternError;

    /// Accepts any qualified pattern except those under the protected
    /// `refs/rad` namespace.
    fn try_from(pattern: QualifiedPattern<'a>) -> Result<Self, Self::Error> {
        let pattern = pattern.to_owned();
        if pattern.starts_with(REFS_RAD.as_str()) {
            return Err(PatternError::ProtectedRef {
                prefix: (*REFS_RAD).clone(),
                pattern,
            });
        }
        Ok(Self(pattern))
    }
}

impl<'a> TryFrom<Qualified<'a>> for Pattern {
    type Error = PatternError;

    /// A literal reference name is treated as a wildcard-free pattern.
    fn try_from(name: Qualified<'a>) -> Result<Self, Self::Error> {
        QualifiedPattern::from(name).try_into()
    }
}
impl Pattern {
    /// Returns `true` if `refname` is matched by this pattern.
    ///
    /// In a git refspec a single `*` matches across path separators, so the
    /// pattern is translated into an equivalent glob before matching: a
    /// trailing `*` becomes `**/*` and an interior `*` becomes `**`.
    pub fn matches(&self, refname: &Qualified) -> bool {
        let text = self.0.as_str();
        let spec = match text.split_once(ASTERISK) {
            // No wildcard: the pattern is a literal reference name.
            None => text.to_string(),
            // Trailing `*`: match any suffix, at any depth.
            Some((prefix, suffix)) if suffix.is_empty() => format!("{prefix}**/*"),
            // Interior `*`: match any infix, at any depth.
            Some((prefix, suffix)) => format!("{prefix}**{suffix}"),
        };
        fast_glob::glob_match(&spec, refname.as_str())
    }
}
impl AsRef<QualifiedPattern<'static>> for Pattern {
    /// Borrows the underlying raw pattern.
    fn as_ref(&self) -> &QualifiedPattern<'static> {
        &self.0
    }
}

impl PartialOrd for Pattern {
    // Delegates to the total order defined by `Ord` below, as required for
    // `PartialOrd`/`Ord` consistency.
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl Ord for Pattern {
    /// Orders patterns from most specific to least specific, so that in a
    /// sorted collection (e.g. the `BTreeMap` inside [`Rules`]) the first
    /// matching pattern encountered is the most specific one.
    fn cmp(&self, other: &Self) -> Ordering {
        /// The ordering decision made for a single pair of path components.
        /// A `MatchLength` (wildcard-based) decision takes precedence over a
        /// `Lexicographic` one when results are merged.
        #[derive(Debug, Clone, Copy)]
        #[repr(i8)]
        enum ComponentOrdering {
            MatchLength(Ordering),
            Lexicographic(Ordering),
        }

        impl ComponentOrdering {
            // Folds the next component's decision into the running result:
            // an undecided (Equal) result adopts the newcomer of the same
            // kind; any `MatchLength` result replaces a `Lexicographic`
            // one; otherwise the already-decided result is kept.
            fn merge(&mut self, other: Self) {
                *self = match (*self, other) {
                    (Self::Lexicographic(Ordering::Equal), Self::Lexicographic(other)) => {
                        Self::Lexicographic(other)
                    }
                    (Self::Lexicographic(_), Self::MatchLength(other)) => Self::MatchLength(other),
                    (Self::MatchLength(Ordering::Equal), Self::MatchLength(other)) => {
                        Self::MatchLength(other)
                    }
                    // Already decided: keep the existing result.
                    (clone, _) => clone,
                }
            }
        }

        impl From<ComponentOrdering> for Ordering {
            fn from(value: ComponentOrdering) -> Self {
                match value {
                    ComponentOrdering::MatchLength(ordering) => ordering,
                    ComponentOrdering::Lexicographic(ordering) => ordering,
                }
            }
        }

        impl Default for ComponentOrdering {
            // Start undecided: equal until a component differs.
            fn default() -> Self {
                Self::Lexicographic(Ordering::Equal)
            }
        }

        use git::fmt::refspec::Component;

        // Compares one component from each pattern. A component containing a
        // wildcard is *less* specific and therefore sorts `Greater` (later);
        // among two wildcard components, an earlier wildcard position or a
        // shorter component is less specific (hence the `.reverse()` calls).
        fn cmp_component(lhs: Component<'_>, rhs: Component<'_>) -> ComponentOrdering {
            let (l, r) = (lhs.as_str(), rhs.as_str());
            match (l.find(ASTERISK), r.find(ASTERISK)) {
                (Some(_), None) => ComponentOrdering::MatchLength(Ordering::Greater),
                (None, Some(_)) => ComponentOrdering::MatchLength(Ordering::Less),
                (Some(li), Some(ri)) => {
                    if li != ri {
                        ComponentOrdering::MatchLength(li.cmp(&ri).reverse())
                    } else if l.len() != r.len() {
                        ComponentOrdering::MatchLength(l.len().cmp(&r.len()).reverse())
                    } else {
                        ComponentOrdering::Lexicographic(l.cmp(r))
                    }
                }
                (None, None) => ComponentOrdering::Lexicographic(l.cmp(r)),
            }
        }

        let mut result = ComponentOrdering::default();
        let mut lhs = self.0.components();
        let mut rhs = other.0.components();
        loop {
            match (lhs.next(), rhs.next()) {
                // A pattern with more components is considered more specific
                // and sorts first, regardless of component-level results.
                (None, Some(_)) => return Ordering::Greater,
                (Some(_), None) => return Ordering::Less,
                (Some(lhs), Some(rhs)) => {
                    result.merge(cmp_component(lhs, rhs));
                }
                (None, None) => return result.into(),
            }
        }
    }
}
/// A rule as (de)serialized, before delegates and threshold are validated.
pub type RawRule = Rule<Allowed, usize>;

impl RawRule {
    /// Validates this raw rule into a [`ValidRule`].
    ///
    /// `Allowed::Delegates` is resolved to a concrete delegate set via
    /// `resolve`; an explicit `Allowed::Set` must itself form a valid
    /// delegate set. The numeric threshold is then checked against the
    /// resolved set.
    ///
    /// # Errors
    ///
    /// Returns [`ValidationError::Delegates`] for an invalid DID set, or
    /// [`ValidationError::Threshold`] for a threshold the set cannot meet.
    pub fn validate<R>(self, resolve: &mut R) -> Result<ValidRule, ValidationError>
    where
        R: Fn() -> doc::Delegates,
    {
        // Destructure all fields up front: we own `self`, so the explicit
        // DID set can be moved into `doc::Delegates::new` instead of cloned.
        let Self {
            allow,
            threshold,
            extensions,
        } = self;
        let allow = match allow {
            Allowed::Delegates => ResolvedDelegates::Delegates(resolve()),
            Allowed::Set(set) => ResolvedDelegates::Set(doc::Delegates::new(set)?),
        };
        // The threshold is only meaningful relative to the resolved set.
        let threshold = doc::Threshold::new(threshold, &allow)?;
        Ok(Rule {
            allow,
            threshold,
            extensions,
        })
    }
}
/// Unvalidated rules keyed by pattern, exactly as (de)serialized.
///
/// The `BTreeMap` keeps patterns in `Pattern`'s specificity order.
#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)]
pub struct RawRules {
    // `flatten` makes the map (de)serialize as the top-level JSON object.
    #[serde(flatten)]
    pub rules: BTreeMap<Pattern, RawRule>,
}
impl RawRules {
    /// Iterates all rules in pattern order (most specific first).
    pub fn iter(&self) -> impl Iterator<Item = (&Pattern, &RawRule)> {
        self.rules.iter()
    }

    /// Inserts a rule, returning the rule previously stored under `pattern`,
    /// if any.
    pub fn insert(&mut self, pattern: Pattern, rule: RawRule) -> Option<RawRule> {
        self.rules.insert(pattern, rule)
    }

    /// Removes and returns the rule stored under `pattern`, if any.
    pub fn remove(&mut self, pattern: &Pattern) -> Option<RawRule> {
        self.rules.remove(pattern)
    }

    /// Returns `true` if some pattern is *textually* equal to `refname`
    /// (no wildcard expansion is performed).
    pub fn exact_match(&self, refname: &Qualified) -> bool {
        let refname = refname.as_str();
        self.rules
            .iter()
            .any(|(pattern, _)| pattern.0.as_str() == refname)
    }

    /// Iterates all rules whose pattern matches `refname`.
    ///
    /// `refname` is cloned so the returned iterator does not keep borrowing
    /// the caller's reference.
    // NOTE(review): the `'a` lifetime parameter is not used by any argument
    // and looks vestigial — confirm before removing, since removing it
    // changes the signature for turbofish callers.
    pub fn matches<'a, 'b>(
        &self,
        refname: &Qualified<'b>,
    ) -> impl Iterator<Item = (&Pattern, &RawRule)> + use<'a, '_, 'b> {
        let refname = refname.clone();
        self.rules
            .iter()
            .filter(move |(pattern, _)| pattern.matches(&refname))
    }
}
impl Extend<(Pattern, RawRule)> for RawRules {
fn extend<T: IntoIterator<Item = (Pattern, RawRule)>>(&mut self, iter: T) {
self.rules.extend(iter)
}
}
impl From<BTreeMap<Pattern, RawRule>> for RawRules {
fn from(rules: BTreeMap<Pattern, RawRule>) -> Self {
RawRules { rules }
}
}
impl FromIterator<(Pattern, RawRule)> for RawRules {
fn from_iter<T: IntoIterator<Item = (Pattern, RawRule)>>(iter: T) -> Self {
iter.into_iter().collect::<BTreeMap<_, _>>().into()
}
}
impl IntoIterator for RawRules {
type Item = (Pattern, RawRule);
type IntoIter = std::collections::btree_map::IntoIter<Pattern, RawRule>;
fn into_iter(self) -> Self::IntoIter {
self.rules.into_iter()
}
}
/// A rule whose delegates and threshold have been validated.
pub type ValidRule = Rule<ResolvedDelegates, doc::Threshold>;

impl ValidRule {
    /// Builds the implicit rule for a repository's default branch
    /// `refs/heads/<name>`: an allow-set seeded with the given delegate, the
    /// minimum threshold, and no extension fields.
    ///
    /// # Errors
    ///
    /// Returns a [`PatternError`] if the branch name yields a protected
    /// pattern.
    pub fn default_branch(
        did: Did,
        name: &git::fmt::RefStr,
    ) -> Result<(Pattern, Self), PatternError> {
        let rule = Self {
            allow: ResolvedDelegates::Delegates(doc::Delegates::from(did)),
            threshold: doc::Threshold::MIN,
            extensions: json::Map::new(),
        };
        let pattern = Pattern::try_from(git::refs::branch(name).to_owned())?;
        Ok((pattern, rule))
    }
}
impl From<ValidRule> for RawRule {
fn from(rule: ValidRule) -> Self {
let Rule {
allow,
threshold,
extensions,
} = rule;
Self {
allow: allow.into(),
threshold: threshold.into(),
extensions,
}
}
}
/// Who is allowed to sign updates under a rule, before resolution.
///
/// Serialized either as the literal string `"delegates"` (whoever the
/// identity document's delegates are at validation time) or, untagged, as a
/// non-empty list of DIDs.
#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)]
pub enum Allowed {
    #[serde(rename = "delegates")]
    #[default]
    Delegates,
    #[serde(untagged)]
    Set(NonEmpty<Did>),
}
impl From<NonEmpty<Did>> for Allowed {
fn from(dids: NonEmpty<Did>) -> Self {
Self::Set(dids)
}
}
impl From<Did> for Allowed {
fn from(did: Did) -> Self {
Self::Set(NonEmpty::new(did))
}
}
/// The resolved form of [`Allowed`]: both variants carry a concrete,
/// validated delegate set, but remember whether the set came from the
/// identity document (`Delegates`) or was listed explicitly (`Set`), so that
/// serialization (via the `Allowed` conversion) round-trips the original
/// spelling.
#[derive(Clone, Debug, PartialEq, Eq, Serialize)]
#[serde(into = "Allowed")]
pub enum ResolvedDelegates {
    Delegates(doc::Delegates),
    Set(doc::Delegates),
}
impl From<ResolvedDelegates> for Allowed {
fn from(ds: ResolvedDelegates) -> Self {
match ds {
ResolvedDelegates::Delegates(_) => Self::Delegates,
ResolvedDelegates::Set(ds) => Self::Set(ds.into()),
}
}
}
impl std::ops::Deref for ResolvedDelegates {
type Target = doc::Delegates;
fn deref(&self) -> &Self::Target {
match self {
ResolvedDelegates::Delegates(ds) => ds,
ResolvedDelegates::Set(ds) => ds,
}
}
}
/// A reference name paired with the validated rule that governs it.
#[derive(Debug)]
pub struct MatchedRule<'a> {
    refname: Qualified<'a>,
    rule: ValidRule,
}

impl MatchedRule<'_> {
    /// The matched reference name.
    pub fn refname(&self) -> &Qualified<'_> {
        &self.refname
    }

    /// The rule that matched.
    pub fn rule(&self) -> &ValidRule {
        &self.rule
    }

    /// The resolved delegate set of the matched rule (via
    /// `ResolvedDelegates`' `Deref` coercion).
    pub fn allowed(&self) -> &doc::Delegates {
        self.rule().allowed()
    }

    /// The signature threshold of the matched rule.
    pub fn threshold(&self) -> &doc::Threshold {
        self.rule().threshold()
    }
}
/// A set of validated rules keyed by pattern.
///
/// Only serializable: construction must go through [`Rules::from_raw`] (or
/// `FromIterator`) so that delegates and thresholds are already validated.
#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize)]
pub struct Rules {
    #[serde(flatten)]
    rules: BTreeMap<Pattern, ValidRule>,
}
impl FromIterator<(Pattern, ValidRule)> for Rules {
fn from_iter<T: IntoIterator<Item = (Pattern, ValidRule)>>(iter: T) -> Self {
Self {
rules: iter.into_iter().collect(),
}
}
}
impl<'a> IntoIterator for &'a Rules {
type Item = (&'a Pattern, &'a ValidRule);
type IntoIter = std::collections::btree_map::Iter<'a, Pattern, ValidRule>;
fn into_iter(self) -> Self::IntoIter {
self.rules.iter()
}
}
impl IntoIterator for Rules {
type Item = (Pattern, ValidRule);
type IntoIter = std::collections::btree_map::IntoIter<Pattern, ValidRule>;
fn into_iter(self) -> Self::IntoIter {
self.rules.into_iter()
}
}
impl Extend<(Pattern, ValidRule)> for Rules {
fn extend<T: IntoIterator<Item = (Pattern, ValidRule)>>(&mut self, iter: T) {
self.rules.extend(iter)
}
}
impl From<Rules> for RawRules {
fn from(Rules { rules }: Rules) -> Self {
Self {
rules: rules
.into_iter()
.map(|(pattern, rule)| (pattern, rule.into()))
.collect(),
}
}
}
impl Rules {
    /// Iterates rules in pattern order (most specific first).
    pub fn iter(&self) -> impl Iterator<Item = (&Pattern, &ValidRule)> {
        self.rules.iter()
    }

    /// Returns `true` if no rules are present.
    pub fn is_empty(&self) -> bool {
        self.rules.is_empty()
    }

    /// Validates a set of raw rules, resolving `Allowed::Delegates` entries
    /// through `resolve`.
    ///
    /// Fails on the first rule that does not validate. Duplicate patterns
    /// resolve to the last entry, as with any map `collect`.
    pub fn from_raw<R>(
        rules: impl IntoIterator<Item = (Pattern, RawRule)>,
        resolve: &mut R,
    ) -> Result<Self, ValidationError>
    where
        R: Fn() -> doc::Delegates,
    {
        let valid = rules
            .into_iter()
            .map(|(pattern, rule)| rule.validate(resolve).map(|rule| (pattern, rule)))
            .collect::<Result<_, _>>()?;
        Ok(Self { rules: valid })
    }

    /// Iterates all rules whose pattern matches `refname`, most specific
    /// first (per `Pattern`'s `Ord`).
    pub fn matches<'a>(
        &self,
        refname: &Qualified<'a>,
    ) -> impl Iterator<Item = (&Pattern, &ValidRule)> + use<'a, '_> {
        // Clone so the returned iterator doesn't borrow the caller's
        // `refname` reference.
        let refname_cloned = refname.clone();
        self.rules
            .iter()
            .filter(move |(pattern, _)| pattern.matches(&refname_cloned))
    }

    /// Starts a canonical-reference evaluation for `refname` using the most
    /// specific matching rule, or returns `None` when no rule matches.
    pub fn canonical<'a, 'b, 'r, R>(
        &'a self,
        refname: Qualified<'b>,
        repo: &'r R,
    ) -> Option<Canonical<'b, 'a, 'r, R, canonical::Initial>>
    where
        R: canonical::effects::Ancestry
            + canonical::effects::FindMergeBase
            + canonical::effects::FindObjects,
    {
        self.matches(&refname)
            .next()
            .map(|(_, rule)| Canonical::new(refname, rule, repo))
    }
}
/// A single rule: who may update matching references (`allow`) and how many
/// of them must agree (`threshold`), plus arbitrary JSON extension fields.
///
/// Deserialization of the generic parameters is restricted via the private
/// `Sealed` trait (implemented only for `Allowed` and `usize`); see
/// [`RawRule`] and [`ValidRule`] for the two instantiations used.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(bound(deserialize = "D: Sealed + Deserialize<'de>, T: Sealed + Deserialize<'de>"))]
pub struct Rule<D, T> {
    allow: D,
    threshold: T,
    // Unknown JSON fields are retained here for forward compatibility and
    // omitted from output when empty.
    #[serde(skip_serializing_if = "json::Map::is_empty")]
    #[serde(flatten)]
    extensions: json::Map<String, json::Value>,
}
impl<D, T> Rule<D, T> {
    /// Creates a rule with no extension fields.
    pub fn new(allow: D, threshold: T) -> Self {
        let extensions = json::Map::new();
        Self {
            allow,
            threshold,
            extensions,
        }
    }

    /// The allow-set of this rule.
    pub fn allowed(&self) -> &D {
        &self.allow
    }

    /// The signature threshold of this rule.
    pub fn threshold(&self) -> &T {
        &self.threshold
    }

    /// The retained extension (unknown JSON) fields.
    pub fn extensions(&self) -> &json::Map<String, json::Value> {
        &self.extensions
    }

    /// Merges `extensions` into this rule's extension fields.
    pub fn add_extensions(&mut self, extensions: impl Into<json::Map<String, json::Value>>) {
        let incoming: json::Map<String, json::Value> = extensions.into();
        self.extensions.extend(incoming);
    }
}
/// Errors constructing a [`Pattern`].
#[derive(Debug, Error)]
pub enum PatternError {
    // Raised for patterns under the protected `refs/rad` namespace.
    #[error("cannot create rule for '{pattern}' since references under '{prefix}' are protected")]
    ProtectedRef {
        prefix: RefString,
        pattern: QualifiedPattern<'static>,
    },
}

/// Errors validating a [`RawRule`] into a [`ValidRule`].
#[derive(Debug, Error)]
pub enum ValidationError {
    #[error(transparent)]
    Threshold(#[from] doc::ThresholdError),
    #[error(transparent)]
    Delegates(#[from] doc::DelegatesError),
    #[error("cannot create rule for reserved `rad` references '{pattern}'")]
    RadRef { pattern: QualifiedPattern<'static> },
}

/// Errors surfaced while evaluating a canonical reference.
#[derive(Debug, Error)]
pub enum CanonicalError {
    #[error(transparent)]
    Git(#[from] crate::git::raw::Error),
}
#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod tests {
use std::collections::BTreeMap;
use nonempty::nonempty;
use crate::crypto::{test::signer::MockSigner, Signer};
use crate::git;
use crate::git::fmt::qualified_pattern;
use crate::git::fmt::RefString;
use crate::identity::doc::Doc;
use crate::identity::Visibility;
use crate::node::device::Device;
use crate::rad;
use crate::storage::refs::{IDENTITY_BRANCH, IDENTITY_ROOT, SIGREFS_BRANCH, SIGREFS_PARENT};
use crate::storage::{git::transport, ReadStorage};
use crate::test::{arbitrary, fixtures};
use crate::Storage;
use super::*;
    /// Asserts that `rule` survives a JSON serialize/deserialize roundtrip.
    fn roundtrip(rule: &Rule<Allowed, usize>) {
        let json = serde_json::to_string(rule).unwrap();
        assert_eq!(
            *rule,
            serde_json::from_str(&json).unwrap(),
            "failed to roundtrip: {json}"
        )
    }

    /// Parses a DID string, panicking on invalid input.
    fn did(s: &str) -> Did {
        s.parse().unwrap()
    }

    /// Wraps a qualified pattern into a `Pattern`, panicking if protected.
    fn pattern(qp: QualifiedPattern<'static>) -> Pattern {
        Pattern::try_from(qp).unwrap()
    }

    /// Resolver passed to `validate`/`from_raw`: the document's delegates.
    fn resolve_from_doc(doc: &Doc) -> doc::Delegates {
        doc.delegates().clone()
    }

    /// Creates a commit on top of `head` and an annotated tag pointing at
    /// it, returning the tag's object id.
    fn tag(name: RefString, head: git::raw::Oid, repo: &git::raw::Repository) -> git::Oid {
        let commit = fixtures::commit(name.as_str(), &[head], repo);
        let target = repo.find_object(commit.into(), None).unwrap();
        let tagger = repo.signature().unwrap();
        repo.tag(name.as_str(), &target, &tagger, name.as_str(), false)
            .unwrap()
            .into()
    }
#[test]
fn test_roundtrip() {
let rule1 = Rule::new(Allowed::Delegates, 1);
let rule2 = Rule::new(Allowed::Delegates, 1);
let rule3 = Rule::new(Allowed::Delegates, 1);
let mut rule4 = Rule::new(
Allowed::Set(nonempty![
did("did:key:z6MkpQTLwr8QyADGmBGAMsGttvWzP4PojUMs4hREZW5T5E3K"),
did("did:key:z6MknG1nYDftMYUQ7eTBSGgqB2PL1xK5Pif33J3sRym3e8ye"),
]),
2,
);
rule4.add_extensions(
serde_json::json!({
"foo": "bar",
"quux": 5,
})
.as_object()
.cloned()
.unwrap(),
);
roundtrip(&rule1);
roundtrip(&rule2);
roundtrip(&rule3);
roundtrip(&rule4);
}
#[test]
fn test_deserialization() {
let examples = r#"
{
"refs/heads/main": {
"threshold": 2,
"allow": [
"did:key:z6MkpQTLwr8QyADGmBGAMsGttvWzP4PojUMs4hREZW5T5E3K",
"did:key:z6MknG1nYDftMYUQ7eTBSGgqB2PL1xK5Pif33J3sRym3e8ye"
]
},
"refs/tags/releases/*": {
"threshold": 2,
"allow": [
"did:key:z6MknLWe8A7UJxvTfY36JcB8XrP1KTLb5HFTX38hEmdY3b56",
"did:key:z6Mkq2E5Se5H9gk1DsL1EMwR2t4CqSg3GFkNN2UeG4FNqXoP",
"did:key:z6MkqRmXW5fbP9hJ1Y8j2N4CgVdJ2XJ6TsyXYf3FQ2NJgXax"
]
},
"refs/heads/development": {
"threshold": 1,
"allow": [
"did:key:z6MkhH7ENYE62JAjTiRZPU71MGZ6xCwnbyHHWfrBu3fr6PVG"
]
},
"refs/heads/release/*": {
"threshold": 1,
"allow": "delegates"
}
}
"#;
let expected = [
(
pattern(qualified_pattern!("refs/heads/main")),
Rule::new(
Allowed::Set(nonempty![
did("did:key:z6MkpQTLwr8QyADGmBGAMsGttvWzP4PojUMs4hREZW5T5E3K"),
did("did:key:z6MknG1nYDftMYUQ7eTBSGgqB2PL1xK5Pif33J3sRym3e8ye"),
]),
2,
),
),
(
pattern(qualified_pattern!("refs/tags/releases/*")),
Rule::new(
Allowed::Set(nonempty![
did("did:key:z6MknLWe8A7UJxvTfY36JcB8XrP1KTLb5HFTX38hEmdY3b56"),
did("did:key:z6Mkq2E5Se5H9gk1DsL1EMwR2t4CqSg3GFkNN2UeG4FNqXoP"),
did("did:key:z6MkqRmXW5fbP9hJ1Y8j2N4CgVdJ2XJ6TsyXYf3FQ2NJgXax")
]),
2,
),
),
(
pattern(qualified_pattern!("refs/heads/development")),
Rule::new(
Allowed::Set(nonempty![did(
"did:key:z6MkhH7ENYE62JAjTiRZPU71MGZ6xCwnbyHHWfrBu3fr6PVG"
)]),
1,
),
),
(
pattern(qualified_pattern!("refs/heads/release/*")),
Rule::new(Allowed::Delegates, 1),
),
]
.into_iter()
.collect::<RawRules>();
let rules = serde_json::from_str::<BTreeMap<Pattern, RawRule>>(examples)
.unwrap()
.into();
assert_eq!(expected, rules)
}
#[test]
fn test_order() {
assert!(
pattern(qualified_pattern!("refs/heads/a/b/c/d/*"))
< pattern(qualified_pattern!("refs/heads/*/x")),
"example 1"
);
assert!(
pattern(qualified_pattern!("refs/heads/a"))
< pattern(qualified_pattern!("refs/heads/*")),
"example 2.a"
);
assert!(
pattern(qualified_pattern!("refs/heads/abc"))
< pattern(qualified_pattern!("refs/heads/a*")),
"example 2.a"
);
assert!(
pattern(qualified_pattern!("refs/heads/a/b/*"))
< pattern(qualified_pattern!("refs/heads/a/*/c")),
"example 2.a"
);
assert!(
pattern(qualified_pattern!("refs/heads/aa*"))
< pattern(qualified_pattern!("refs/heads/a*")),
"example 2.b.A"
);
assert!(
pattern(qualified_pattern!("refs/heads/a*b"))
< pattern(qualified_pattern!("refs/heads/a*")),
"example 2.b.B"
);
let pattern01 = pattern(qualified_pattern!("refs/tags/*"));
let pattern02 = pattern(qualified_pattern!("refs/tags/v1"));
let pattern04 = pattern(qualified_pattern!("refs/tags/v1.0.0"));
let pattern05 = pattern(qualified_pattern!("refs/tags/release/v1.0.0"));
let pattern03 = pattern(qualified_pattern!("refs/heads/main"));
let pattern06 = pattern(qualified_pattern!("refs/tags/*/v1.0.0"));
let pattern07 = pattern(qualified_pattern!("refs/tags/x*"));
let pattern08 = pattern(qualified_pattern!("refs/tags/xx*"));
let pattern09 = pattern(qualified_pattern!("refs/foos/*"));
let pattern10 = pattern(qualified_pattern!("refs/heads/a"));
let pattern11 = pattern(qualified_pattern!("refs/heads/b"));
let pattern12 = pattern(qualified_pattern!("refs/heads/a/*"));
let pattern13 = pattern(qualified_pattern!("refs/heads/b/*"));
let pattern14 = pattern(qualified_pattern!("refs/heads/a/*/ab"));
let pattern15 = pattern(qualified_pattern!("refs/heads/a/*/a"));
let pattern16 = pattern(qualified_pattern!("refs/heads/a/*/b"));
let pattern17 = pattern(qualified_pattern!("refs/heads/a/*/a"));
assert!(
pattern06 < pattern02,
"match for 06 is always more specific since it has more components"
);
assert!(pattern02 < pattern01, "match for 02 is also match for 01");
assert!(pattern08 < pattern07, "match for 08 is also match for 07");
assert!(pattern02 == pattern02);
assert!(pattern02 < pattern04);
assert!(pattern03 < pattern01);
assert!(pattern09 < pattern01);
assert!(pattern10 < pattern11);
assert!(pattern12 < pattern13);
assert!(pattern15 < pattern14);
assert!(
pattern17 < pattern16,
"matches have same length, but lexicographically, 'a' < 'b'"
);
let pattern18 = pattern(qualified_pattern!("refs/tags/release/candidates/*"));
let pattern19 = pattern(qualified_pattern!("refs/tags/release/*"));
let pattern20 = pattern(qualified_pattern!("refs/tags/*"));
assert!(pattern18 < pattern19);
assert!(pattern19 < pattern20);
let pattern21 = pattern(qualified_pattern!("refs/heads/dev"));
assert!(pattern21 < pattern03);
let mut patterns = [
pattern01.clone(),
pattern02.clone(),
pattern03.clone(),
pattern04.clone(),
pattern05.clone(),
pattern06.clone(),
];
patterns.sort();
assert_eq!(
patterns,
[pattern05, pattern06, pattern03, pattern02, pattern04, pattern01]
);
}
#[test]
fn test_deserialize_extensions() {
let example = r#"
{
"threshold": 2,
"allow": [
"did:key:z6MkpQTLwr8QyADGmBGAMsGttvWzP4PojUMs4hREZW5T5E3K",
"did:key:z6MknG1nYDftMYUQ7eTBSGgqB2PL1xK5Pif33J3sRym3e8ye"
],
"foo": "bar",
"quux": 5
}
"#;
let rule = serde_json::from_str::<Rule<Allowed, usize>>(example).unwrap();
assert!(!rule.extensions().is_empty());
let extensions = rule.extensions();
assert_eq!(
extensions.get("foo"),
Some(serde_json::Value::String("bar".to_string())).as_ref()
);
assert_eq!(
extensions.get("quux"),
Some(serde_json::Value::Number(5.into())).as_ref()
);
}
#[test]
fn test_rule_validate_success() {
let doc = arbitrary::gen::<Doc>(1);
let delegates = Allowed::Set(doc.delegates().as_ref().clone());
let threshold = doc.majority();
let rule = Rule::new(delegates, threshold);
let result = rule.validate(&mut || resolve_from_doc(&doc));
assert!(result.is_ok(), "failed to validate doc: {result:?}");
let rule = Rule::new(Allowed::Delegates, 1);
let result = rule.validate(&mut || resolve_from_doc(&doc));
assert!(result.is_ok(), "failed to validate doc: {result:?}");
}
#[test]
fn test_rule_validate_failures() {
let doc = arbitrary::gen::<Doc>(1);
let pattern = pattern(qualified_pattern!("refs/heads/main"));
assert!(matches!(
Rule::new(Allowed::Delegates, 256).validate(&mut || resolve_from_doc(&doc)),
Err(ValidationError::Threshold(_))
));
let threshold = doc.delegates().len().saturating_add(1);
assert!(matches!(
Rule::new(Allowed::Delegates, threshold).validate(&mut || resolve_from_doc(&doc)),
Err(ValidationError::Threshold(_))
));
let delegates = NonEmpty::from_vec(arbitrary::vec::<Did>(256)).unwrap();
assert!(matches!(
Rule::new(delegates.into(), 1).validate(&mut || resolve_from_doc(&doc)),
Err(ValidationError::Delegates(_))
));
let delegates = nonempty![
did("did:key:z6MknLWe8A7UJxvTfY36JcB8XrP1KTLb5HFTX38hEmdY3b56"),
did("did:key:z6MknLWe8A7UJxvTfY36JcB8XrP1KTLb5HFTX38hEmdY3b56")
];
let expected = Rule {
allow: ResolvedDelegates::Set(
doc::Delegates::new(nonempty![did(
"did:key:z6MknLWe8A7UJxvTfY36JcB8XrP1KTLb5HFTX38hEmdY3b56"
)])
.unwrap(),
),
threshold: doc::Threshold::MIN,
extensions: json::Map::new(),
};
assert_eq!(
Rule::new(delegates.into(), 1)
.validate(&mut || resolve_from_doc(&doc))
.unwrap(),
expected,
);
let rules = vec![
(pattern.clone(), Rule::new(Allowed::Delegates, 1)),
(
pattern.clone(),
Rule::new(doc.delegates().as_ref().clone().into(), 1),
),
];
let expected = [(
pattern,
Rule::new(
ResolvedDelegates::Set(doc.delegates().clone()),
doc::Threshold::MIN,
),
)]
.into_iter()
.collect::<Rules>();
assert_eq!(
Rules::from_raw(rules, &mut || resolve_from_doc(&doc)).unwrap(),
expected
);
}
#[test]
fn test_canonical() {
let tempdir = tempfile::tempdir().unwrap();
let storage = Storage::open(tempdir.path().join("storage"), fixtures::user()).unwrap();
transport::local::register(storage.clone());
let delegate = Device::mock_from_seed([0xff; 32]);
let contributor = MockSigner::from_seed([0xfe; 32]);
let (repo, head) = fixtures::repository(tempdir.path().join("working"));
let (rid, doc, _) = rad::init(
&repo,
"heartwood".try_into().unwrap(),
"Radicle Heartwood Protocol & Stack",
git::fmt::refname!("master"),
Visibility::default(),
&delegate,
&storage,
)
.unwrap();
let mut doc = doc.edit();
doc.delegate(contributor.public_key().into());
let failing_tag = git::fmt::refname!("release/candidates/v1.0");
let tags = [
git::fmt::refname!("v1.0"),
git::fmt::refname!("release/v1.0"),
failing_tag.clone(),
git::fmt::refname!("qa/v1.0"),
]
.into_iter()
.map(|name| {
(
git::fmt::lit::refs_tags(name.clone()).into(),
tag(name, head, &repo),
)
})
.collect::<BTreeMap<Qualified, _>>();
git::push(
&repo,
&rad::REMOTE_NAME,
[
(
&git::fmt::qualified!("refs/tags/v1.0"),
&git::fmt::qualified!("refs/tags/v1.0"),
),
(
&git::fmt::qualified!("refs/tags/release/v1.0"),
&git::fmt::qualified!("refs/tags/release/v1.0"),
),
(
&git::fmt::qualified!("refs/tags/release/candidates/v1.0"),
&git::fmt::qualified!("refs/tags/release/candidates/v1.0"),
),
(
&git::fmt::qualified!("refs/tags/qa/v1.0"),
&git::fmt::qualified!("refs/tags/qa/v1.0"),
),
],
)
.unwrap();
let rules = Rules::from_raw(
[
(
pattern(qualified_pattern!("refs/tags/*")),
Rule::new(Allowed::Delegates, 1),
),
(
pattern(qualified_pattern!("refs/tags/release/*")),
Rule::new(Allowed::Delegates, 1),
),
(
pattern(qualified_pattern!("refs/tags/release/candidates/*")),
Rule::new(Allowed::Delegates, 2),
),
],
&mut || resolve_from_doc(&doc.clone().verified().unwrap()),
)
.unwrap();
let stored = storage.repository(rid).unwrap();
let failing = git::fmt::Qualified::from(git::fmt::lit::refs_tags(failing_tag));
for (refname, oid) in tags.into_iter() {
let canonical = rules
.canonical(refname.clone(), &stored)
.unwrap_or_else(|| {
panic!("there should be a matching rule for {refname}, rules: {rules:#?}")
});
if refname == failing {
assert!(canonical.find_objects().unwrap().quorum().is_err());
} else {
assert_eq!(
canonical
.find_objects()
.unwrap()
.quorum()
.unwrap_or_else(|e| panic!("quorum error for {refname}: {e}")),
canonical::Quorum {
refname,
object: canonical::Object::Tag { id: oid },
}
)
}
}
}
#[test]
fn test_special_branches() {
assert!(Pattern::try_from((*IDENTITY_BRANCH).clone()).is_err());
assert!(Pattern::try_from((*SIGREFS_BRANCH).clone()).is_err());
assert!(Pattern::try_from((*SIGREFS_PARENT).clone()).is_err());
assert!(Pattern::try_from((*IDENTITY_ROOT).clone()).is_err());
}
}