mod rs;
pub mod md;
#[cfg(feature = "plain-consensus")]
pub mod plain;
#[cfg(feature = "ns-vote")]
pub mod vote;
#[cfg(feature = "build_docs")]
mod build;
#[cfg(feature = "parse2")]
use {
crate::parse2::{self, ArgumentStream}, };
#[cfg(feature = "parse2")]
pub use {
parse2_impls::ProtoStatusesNetdocParseAccumulator, };
use crate::doc::authcert::{AuthCert, AuthCertKeyIds};
use crate::parse::keyword::Keyword;
use crate::parse::parser::{Section, SectionRules, SectionRulesBuilder};
use crate::parse::tokenize::{Item, ItemResult, NetDocReader};
use crate::types::misc::*;
use crate::util::PeekableIterator;
use crate::{Error, KeywordEncodable, NetdocErrorKind as EK, NormalItemArgument, Pos, Result};
use std::collections::{BTreeSet, HashMap, HashSet};
use std::fmt::{self, Display};
use std::result::Result as StdResult;
use std::str::FromStr;
use std::sync::Arc;
use std::{net, result, time};
use tor_error::{HasKind, internal};
use tor_protover::Protocols;
use derive_deftly::{Deftly, define_derive_deftly};
use digest::Digest;
use std::sync::LazyLock;
use tor_checkable::{ExternallySigned, timed::TimerangeBound};
use tor_llcrypto as ll;
use tor_llcrypto::pk::rsa::RsaIdentity;
use serde::{Deserialize, Deserializer};
#[cfg(feature = "build_docs")]
pub use build::MdConsensusBuilder;
#[cfg(all(feature = "build_docs", feature = "plain-consensus"))]
pub use build::PlainConsensusBuilder;
#[cfg(feature = "build_docs")]
ns_export_each_flavor! {
ty: RouterStatusBuilder;
}
ns_export_each_variety! {
ty: RouterStatus, Preamble;
}
#[deprecated]
#[cfg(feature = "ns_consensus")]
pub use PlainConsensus as NsConsensus;
#[deprecated]
#[cfg(feature = "ns_consensus")]
pub use PlainRouterStatus as NsRouterStatus;
#[deprecated]
#[cfg(feature = "ns_consensus")]
pub use UncheckedPlainConsensus as UncheckedNsConsensus;
#[deprecated]
#[cfg(feature = "ns_consensus")]
pub use UnvalidatedPlainConsensus as UnvalidatedNsConsensus;
#[cfg(feature = "ns-vote")]
pub use rs::{RouterStatusMdDigestsVote, SoftwareVersion};
/// Argument-position placeholder for a publication date-time that is parsed
/// and then deliberately discarded.
///
/// (The `parse2` implementation consumes two whitespace-separated arguments —
/// presumably date and time — and ignores their values.)
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, Ord, PartialOrd, Default)]
#[allow(clippy::exhaustive_structs)]
pub struct IgnoredPublicationTimeSp;
/// The lifetime of a networkstatus document.
///
/// Derived accessors and a validating constructor are generated by the
/// `Lifetime` derive-deftly template below; the constructor enforces
/// `valid_after < fresh_until < valid_until`.
#[derive(Clone, Debug, Deftly)]
#[derive_deftly(Lifetime)]
#[cfg_attr(feature = "parse2", derive_deftly(NetdocParseableFields))]
pub struct Lifetime {
    /// Time at which the document becomes valid.
    #[cfg_attr(feature = "parse2", deftly(netdoc(single_arg)))]
    valid_after: Iso8601TimeSp,
    /// Time until which the document is considered fresh.
    #[cfg_attr(feature = "parse2", deftly(netdoc(single_arg)))]
    fresh_until: Iso8601TimeSp,
    /// Time after which the document is no longer valid.
    #[cfg_attr(feature = "parse2", deftly(netdoc(single_arg)))]
    valid_until: Iso8601TimeSp,
}
// Derive-deftly template generating a checked constructor and per-field
// accessors for `Lifetime`, driven by its field list.
define_derive_deftly! {
    Lifetime:
    impl Lifetime {
        // Constructor taking one `SystemTime` per field, in declaration
        // order (valid_after, fresh_until, valid_until).
        pub fn new(
            $( $fname: time::SystemTime, )
        ) -> Result<Self> {
            let self_ = Lifetime {
                $( $fname: $fname.into(), )
            };
            // Reject any lifetime that is not strictly ordered.
            if self_.valid_after < self_.fresh_until && self_.fresh_until < self_.valid_until {
                Ok(self_)
            } else {
                Err(EK::InvalidLifetime.err())
            }
        }
        $(
            ${fattrs doc}
            // Accessor: return this field as a `SystemTime`.
            pub fn $fname(&self) -> time::SystemTime {
                *self.$fname
            }
        )
        // Return true when `when` falls inside [valid_after, valid_until],
        // inclusive at both ends.
        pub fn valid_at(&self, when: time::SystemTime) -> bool {
            *self.valid_after <= when && when <= *self.valid_until
        }
        // Duration from valid_after to fresh_until.  Panics if fresh_until
        // precedes valid_after, which `new` disallows; other construction
        // paths are assumed to uphold the same ordering.
        pub fn voting_period(&self) -> time::Duration {
            let valid_after = self.valid_after();
            let fresh_until = self.fresh_until();
            fresh_until
                .duration_since(valid_after)
                .expect("Mis-formed lifetime")
        }
    }
}
use derive_deftly_template_Lifetime;
/// A single consensus-method number, as used in `consensus-methods` and
/// `consensus-method` items.
#[derive(Debug, Clone, Default, Eq, PartialEq, Ord, PartialOrd, Hash, Copy)] #[derive(derive_more::From, derive_more::Into, derive_more::Display, derive_more::FromStr)]
pub struct ConsensusMethod(u32);
/// Marker: a ConsensusMethod is encodable/parseable as a normal item argument.
impl NormalItemArgument for ConsensusMethod {}
/// A set of distinct consensus methods, as found in a `consensus-methods`
/// item.
#[derive(Debug, Clone, Default, Eq, PartialEq)]
#[cfg_attr(feature = "parse2", derive(Deftly), derive_deftly(ItemValueParseable))]
#[non_exhaustive]
pub struct ConsensusMethods {
    /// The methods, deduplicated and stored in ascending order.
    pub methods: BTreeSet<ConsensusMethod>,
}
#[cfg(feature = "parse2")]
pub mod consensus_methods_comma_separated {
    //! Parsing for a comma-separated `consensus-methods` argument.
    use super::*;
    use parse2::ArgumentError as AE;
    use std::result::Result;

    /// Parse the next argument as a comma-separated list of distinct
    /// consensus methods.
    ///
    /// Fails with `Missing` when there is no argument, and with `Invalid`
    /// when an entry does not parse or appears more than once.
    pub fn from_args<'s>(args: &mut ArgumentStream<'s>) -> Result<ConsensusMethods, AE> {
        let list = args.next().ok_or(AE::Missing)?;
        let mut methods = BTreeSet::new();
        for piece in list.split(',') {
            let method = piece.parse().map_err(|_| AE::Invalid)?;
            let newly_added = methods.insert(method);
            if !newly_added {
                // Duplicate entries are rejected outright.
                return Err(AE::Invalid);
            }
        }
        Ok(ConsensusMethods { methods })
    }
}
/// A string-keyed map of network parameters (`params` /
/// `bandwidth-weights` style `Keyword=Value` lists).
#[derive(Debug, Clone, Default, Eq, PartialEq)]
pub struct NetParams<T> {
    /// Map from parameter name to value.
    params: HashMap<String, T>,
}
impl<T> NetParams<T> {
#[allow(unused)]
pub fn new() -> Self {
NetParams {
params: HashMap::new(),
}
}
pub fn get<A: AsRef<str>>(&self, v: A) -> Option<&T> {
self.params.get(v.as_ref())
}
pub fn iter(&self) -> impl Iterator<Item = (&String, &T)> {
self.params.iter()
}
pub fn set(&mut self, k: String, v: T) {
self.params.insert(k, v);
}
}
impl<K: Into<String>, T> FromIterator<(K, T)> for NetParams<T> {
    /// Build a NetParams from (key, value) pairs, converting keys to owned
    /// strings.  Later duplicates overwrite earlier ones.
    fn from_iter<I: IntoIterator<Item = (K, T)>>(i: I) -> Self {
        let params = i
            .into_iter()
            .map(|(key, value)| (key.into(), value))
            .collect();
        NetParams { params }
    }
}
impl<T> std::iter::Extend<(String, T)> for NetParams<T> {
    /// Insert every (key, value) pair from `iter`, replacing existing keys.
    fn extend<I: IntoIterator<Item = (String, T)>>(&mut self, iter: I) {
        for (key, value) in iter {
            self.params.insert(key, value);
        }
    }
}
impl<'de, T> Deserialize<'de> for NetParams<T>
where
    T: Deserialize<'de>,
{
    /// Deserialize a NetParams as a plain map of string keys to `T`.
    fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        HashMap::deserialize(deserializer).map(|params| NetParams { params })
    }
}
/// A set of recommended and required subprotocol versions for one role.
#[derive(Debug, Clone, Default, PartialEq, serde::Serialize, serde::Deserialize)]
pub struct ProtoStatus {
    /// Protocols that are recommended; lacking any of these yields
    /// `ProtocolSupportError::MissingRecommended`.
    recommended: Protocols,
    /// Protocols that are required; lacking any of these yields
    /// `ProtocolSupportError::MissingRequired`.
    required: Protocols,
}
impl ProtoStatus {
    /// Check whether `supported_protocols` satisfies this object.
    ///
    /// A missing *required* protocol takes precedence: it is reported even
    /// if recommended protocols are missing as well.
    pub fn check_protocols(
        &self,
        supported_protocols: &Protocols,
    ) -> StdResult<(), ProtocolSupportError> {
        let unsupported_required = self.required.difference(supported_protocols);
        if !unsupported_required.is_empty() {
            return Err(ProtocolSupportError::MissingRequired(unsupported_required));
        }
        let unsupported_recommended = self.recommended.difference(supported_protocols);
        if unsupported_recommended.is_empty() {
            Ok(())
        } else {
            Err(ProtocolSupportError::MissingRecommended(
                unsupported_recommended,
            ))
        }
    }
}
/// A subprotocol that is recommended or required in the consensus is
/// missing from this implementation.
#[derive(Clone, Debug, thiserror::Error)]
#[cfg_attr(test, derive(PartialEq))]
#[non_exhaustive]
pub enum ProtocolSupportError {
    /// At least one required protocol is unimplemented (severe; see
    /// `should_shutdown`).
    #[error("Required protocols are not implemented: {0}")]
    MissingRequired(Protocols),
    /// At least one recommended protocol is unimplemented.
    #[error("Recommended protocols are not implemented: {0}")]
    MissingRecommended(Protocols),
}
impl ProtocolSupportError {
    /// Return true if this error is severe enough that the implementation
    /// should shut down: only missing *required* protocols qualify.
    pub fn should_shutdown(&self) -> bool {
        match self {
            Self::MissingRequired(_) => true,
            _ => false,
        }
    }
}
impl HasKind for ProtocolSupportError {
fn kind(&self) -> tor_error::ErrorKind {
tor_error::ErrorKind::SoftwareDeprecated
}
}
/// Recommended and required protocol versions for both roles.
#[derive(Clone, Debug, Default, PartialEq, serde::Serialize, serde::Deserialize)]
pub struct ProtoStatuses {
    /// Status when acting as a client.
    client: ProtoStatus,
    /// Status when acting as a relay.
    relay: ProtoStatus,
}
impl ProtoStatuses {
    /// Return the client-role protocol status.
    pub fn client(&self) -> &ProtoStatus {
        let Self { client, .. } = self;
        client
    }
    /// Return the relay-role protocol status.
    pub fn relay(&self) -> &ProtoStatus {
        let Self { relay, .. } = self;
        relay
    }
}
/// The flavor of a consensus document; see `name()` for the on-the-wire
/// flavor strings.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Ord, PartialOrd)]
#[allow(clippy::exhaustive_enums)]
pub enum ConsensusFlavor {
    /// A "microdesc"-flavored consensus.
    Microdesc,
    /// A plain ("ns") consensus.
    Plain,
}
impl ConsensusFlavor {
    /// Return the on-the-wire name for this flavor.
    pub fn name(&self) -> &'static str {
        match self {
            ConsensusFlavor::Microdesc => "microdesc",
            ConsensusFlavor::Plain => "ns",
        }
    }
    /// Look up the flavor with the given (optional) name.
    ///
    /// An absent name is treated the same as "ns": the plain flavor.
    pub fn from_opt_name(name: Option<&str>) -> Result<Self> {
        match name {
            None | Some("ns") => Ok(ConsensusFlavor::Plain),
            Some("microdesc") => Ok(ConsensusFlavor::Microdesc),
            Some(other) => {
                Err(EK::BadDocumentType.with_msg(format!("unrecognized flavor {:?}", other)))
            }
        }
    }
}
/// A single signature on a networkstatus document (one
/// `directory-signature` item).
#[derive(Debug, Clone)]
#[non_exhaustive]
pub struct Signature {
    /// Name of the digest algorithm the signature covers ("sha1" or
    /// "sha256" are the recognized values).
    pub digestname: String,
    /// Key fingerprints identifying the authority that signed.
    pub key_ids: AuthCertKeyIds,
    /// The raw signature bytes (the item's SIGNATURE object).
    pub signature: Vec<u8>,
}
/// A collection of signatures on a document, together with the document
/// digests those signatures cover.
#[derive(Debug, Clone)]
#[non_exhaustive]
pub struct SignatureGroup {
    /// SHA-256 digest used for "sha256"-named signatures, if known.
    pub sha256: Option<[u8; 32]>,
    /// SHA-1 digest used for "sha1"-named signatures, if known.
    pub sha1: Option<[u8; 20]>,
    /// The signatures themselves.
    pub signatures: Vec<Signature>,
}
/// A 32-byte shared-random value; base64-encoded on the wire.
#[derive(
    Debug, Clone, Copy, Eq, PartialEq, derive_more::From, derive_more::Into, derive_more::AsRef,
)]
pub struct SharedRandVal([u8; 32]);
/// A shared-random status: one current or previous shared-random value,
/// with the reveal count it was computed from.
#[derive(Debug, Clone, Deftly)]
#[non_exhaustive]
#[cfg_attr(feature = "parse2", derive_deftly(ItemValueParseable))]
#[cfg_attr(feature = "encode", derive_deftly(ItemValueEncodable))]
pub struct SharedRandStatus {
    /// Number of reveals this value is based on.
    pub n_reveals: u8,
    /// The shared-random value itself.
    pub value: SharedRandVal,
    /// Optional timestamp argument — NOTE(review): presumably present in
    /// votes but not consensuses; confirm against dir-spec.
    pub timestamp: Option<Iso8601TimeNoSp>,
}
/// Information about a directory authority, from a `dir-source` item.
#[derive(Debug, Clone)]
#[non_exhaustive]
pub struct DirSource {
    /// The authority's nickname.
    pub nickname: String,
    /// The authority's RSA identity fingerprint.
    pub identity: RsaIdentity,
    /// The authority's IP address.
    pub ip: net::IpAddr,
    /// Port for its directory service.
    pub dir_port: u16,
    /// Port for its OR service.
    pub or_port: u16,
}
/// The weight assigned to a relay in a consensus `w` line.
#[non_exhaustive]
#[derive(Debug, Clone, Copy)]
pub enum RelayWeight {
    /// A weight not derived from a measurement (`Unmeasured=1`, or no
    /// `Bandwidth=` entry at all).
    Unmeasured(u32),
    /// A weight derived from a measured `Bandwidth=` value.
    Measured(u32),
}
impl RelayWeight {
    /// Return true if this weight came from a measurement.
    pub fn is_measured(&self) -> bool {
        match self {
            RelayWeight::Measured(_) => true,
            RelayWeight::Unmeasured(_) => false,
        }
    }
    /// Return true if this weight is nonzero, whether measured or not.
    pub fn is_nonzero(&self) -> bool {
        match self {
            RelayWeight::Unmeasured(0) | RelayWeight::Measured(0) => false,
            _ => true,
        }
    }
}
/// Information about one voter whose vote went into a consensus
/// (a `dir-source` section in the consensus body).
#[derive(Debug, Clone)]
#[non_exhaustive]
pub struct ConsensusVoterInfo {
    /// The voter's directory-source information.
    pub dir_source: DirSource,
    /// The voter's contact line, taken verbatim.
    pub contact: String,
    /// Digest of the voter's vote (from the `vote-digest` item).
    pub vote_digest: Vec<u8>,
}
/// The footer of a consensus document.
#[derive(Debug, Clone)]
#[non_exhaustive]
pub struct Footer {
    /// The `bandwidth-weights` parameters; empty when the item is absent.
    pub weights: NetParams<i32>,
}
/// A consensus in the microdescriptor ("microdesc") flavor.
pub type MdConsensus = md::Consensus;
/// A microdesc consensus whose signatures have not yet been validated.
pub type UnvalidatedMdConsensus = md::UnvalidatedConsensus;
/// A microdesc consensus that has not yet been checked at all.
pub type UncheckedMdConsensus = md::UncheckedConsensus;
/// A consensus in the plain ("ns") flavor.
#[cfg(feature = "plain-consensus")]
pub type PlainConsensus = plain::Consensus;
/// A plain consensus whose signatures have not yet been validated.
#[cfg(feature = "plain-consensus")]
pub type UnvalidatedPlainConsensus = plain::UnvalidatedConsensus;
/// A plain consensus that has not yet been checked at all.
#[cfg(feature = "plain-consensus")]
pub type UncheckedPlainConsensus = plain::UncheckedConsensus;
// Keywords recognized in networkstatus documents (consensuses and votes),
// mapped onto the `NetstatusKwd` enum used by the section rules below.
decl_keyword! {
    #[non_exhaustive]
    #[allow(missing_docs)]
    pub NetstatusKwd {
        // Header / preamble items.
        "network-status-version" => NETWORK_STATUS_VERSION,
        "vote-status" => VOTE_STATUS,
        "consensus-methods" => CONSENSUS_METHODS,
        "consensus-method" => CONSENSUS_METHOD,
        "published" => PUBLISHED,
        "valid-after" => VALID_AFTER,
        "fresh-until" => FRESH_UNTIL,
        "valid-until" => VALID_UNTIL,
        "voting-delay" => VOTING_DELAY,
        "client-versions" => CLIENT_VERSIONS,
        "server-versions" => SERVER_VERSIONS,
        "known-flags" => KNOWN_FLAGS,
        "flag-thresholds" => FLAG_THRESHOLDS,
        "recommended-client-protocols" => RECOMMENDED_CLIENT_PROTOCOLS,
        "required-client-protocols" => REQUIRED_CLIENT_PROTOCOLS,
        "recommended-relay-protocols" => RECOMMENDED_RELAY_PROTOCOLS,
        "required-relay-protocols" => REQUIRED_RELAY_PROTOCOLS,
        "params" => PARAMS,
        "bandwidth-file-headers" => BANDWIDTH_FILE_HEADERS,
        "bandwidth-file-digest" => BANDWIDTH_FILE_DIGEST,
        // Shared-random items.
        "shared-rand-previous-value" => SHARED_RAND_PREVIOUS_VALUE,
        "shared-rand-current-value" => SHARED_RAND_CURRENT_VALUE,
        // Voter (dir-source) section items.
        "dir-source" => DIR_SOURCE,
        "contact" => CONTACT,
        "legacy-dir-key" => LEGACY_DIR_KEY,
        "shared-rand-participate" => SHARED_RAND_PARTICIPATE,
        "shared-rand-commit" => SHARED_RAND_COMMIT,
        "vote-digest" => VOTE_DIGEST,
        "dir-key-certificate-version" => DIR_KEY_CERTIFICATE_VERSION,
        // Routerstatus (single-letter) items.
        "r" => RS_R,
        "a" => RS_A,
        "s" => RS_S,
        "v" => RS_V,
        "pr" => RS_PR,
        "w" => RS_W,
        "p" => RS_P,
        "m" => RS_M,
        "id" => RS_ID,
        // Footer items.
        "directory-footer" => DIRECTORY_FOOTER,
        "bandwidth-weights" => BANDWIDTH_WEIGHTS,
        "directory-signature" => DIRECTORY_SIGNATURE,
    }
}
/// Item rules shared by the header sections of all networkstatus flavors.
///
/// Kept as a *builder* so flavor-specific rule sets can clone and extend it.
static NS_HEADER_RULES_COMMON_: LazyLock<SectionRulesBuilder<NetstatusKwd>> = LazyLock::new(|| {
    use NetstatusKwd::*;
    let mut rules = SectionRules::builder();
    rules.add(NETWORK_STATUS_VERSION.rule().required().args(1..=2));
    rules.add(VOTE_STATUS.rule().required().args(1..));
    rules.add(VALID_AFTER.rule().required());
    rules.add(FRESH_UNTIL.rule().required());
    rules.add(VALID_UNTIL.rule().required());
    rules.add(VOTING_DELAY.rule().args(2..));
    rules.add(CLIENT_VERSIONS.rule());
    rules.add(SERVER_VERSIONS.rule());
    rules.add(KNOWN_FLAGS.rule().required());
    rules.add(RECOMMENDED_CLIENT_PROTOCOLS.rule().args(1..));
    rules.add(RECOMMENDED_RELAY_PROTOCOLS.rule().args(1..));
    rules.add(REQUIRED_CLIENT_PROTOCOLS.rule().args(1..));
    rules.add(REQUIRED_RELAY_PROTOCOLS.rule().args(1..));
    rules.add(PARAMS.rule());
    rules
});
/// Item rules for the header section of a consensus: the common header
/// rules plus consensus-only items.
static NS_HEADER_RULES_CONSENSUS: LazyLock<SectionRules<NetstatusKwd>> = LazyLock::new(|| {
    use NetstatusKwd::*;
    let mut rules = NS_HEADER_RULES_COMMON_.clone();
    rules.add(CONSENSUS_METHOD.rule().args(1..=1));
    rules.add(SHARED_RAND_PREVIOUS_VALUE.rule().args(2..));
    rules.add(SHARED_RAND_CURRENT_VALUE.rule().args(2..));
    // Unknown items are tolerated and may carry an object.
    rules.add(UNRECOGNIZED.rule().may_repeat().obj_optional());
    rules.build()
});
/// Item rules for a voter-info (`dir-source`) section in a consensus.
static NS_VOTERINFO_RULES_CONSENSUS: LazyLock<SectionRules<NetstatusKwd>> = LazyLock::new(|| {
    use NetstatusKwd::*;
    let mut rules = SectionRules::builder();
    rules.add(DIR_SOURCE.rule().required().args(6..));
    rules.add(CONTACT.rule().required());
    rules.add(VOTE_DIGEST.rule().required());
    // Unknown items are tolerated and may carry an object.
    rules.add(UNRECOGNIZED.rule().may_repeat().obj_optional());
    rules.build()
});
/// Item rules shared by routerstatus sections of all consensus flavors.
///
/// Kept as a *builder*: the flavor-specific rules add their own `r` (and
/// `m`) requirements before building.
static NS_ROUTERSTATUS_RULES_COMMON_: LazyLock<SectionRulesBuilder<NetstatusKwd>> =
    LazyLock::new(|| {
        use NetstatusKwd::*;
        let mut rules = SectionRules::builder();
        rules.add(RS_A.rule().may_repeat().args(1..));
        rules.add(RS_S.rule().required());
        rules.add(RS_V.rule());
        rules.add(RS_PR.rule().required());
        rules.add(RS_W.rule());
        rules.add(RS_P.rule().args(2..));
        // Unknown items are tolerated and may carry an object.
        rules.add(UNRECOGNIZED.rule().may_repeat().obj_optional());
        rules
    });
/// Routerstatus rules for a plain ("ns") consensus: the common rules plus
/// an 8-or-more-argument `r` line.
static NS_ROUTERSTATUS_RULES_PLAIN: LazyLock<SectionRules<NetstatusKwd>> = LazyLock::new(|| {
    use NetstatusKwd::*;
    let mut rules = NS_ROUTERSTATUS_RULES_COMMON_.clone();
    rules.add(RS_R.rule().required().args(8..));
    rules.build()
});
/// Routerstatus rules for a microdesc consensus: the common rules plus a
/// 6-or-more-argument `r` line and a required `m` (microdesc digest) line.
static NS_ROUTERSTATUS_RULES_MDCON: LazyLock<SectionRules<NetstatusKwd>> = LazyLock::new(|| {
    use NetstatusKwd::*;
    let mut rules = NS_ROUTERSTATUS_RULES_COMMON_.clone();
    rules.add(RS_R.rule().required().args(6..));
    rules.add(RS_M.rule().required().args(1..));
    rules.build()
});
/// Item rules for the footer section of a consensus.
static NS_FOOTER_RULES: LazyLock<SectionRules<NetstatusKwd>> = LazyLock::new(|| {
    use NetstatusKwd::*;
    let mut rules = SectionRules::builder();
    rules.add(DIRECTORY_FOOTER.rule().required().no_args());
    rules.add(BANDWIDTH_WEIGHTS.rule());
    // Unknown items are tolerated and may carry an object.
    rules.add(UNRECOGNIZED.rule().may_repeat().obj_optional());
    rules.build()
});
impl ProtoStatus {
    /// Construct a ProtoStatus from a parsed section, reading the
    /// recommended protocols from `recommend_token` and the required
    /// protocols from `required_token`.
    ///
    /// An absent item yields an empty protocol set, not an error.
    fn from_section(
        sec: &Section<'_, NetstatusKwd>,
        recommend_token: NetstatusKwd,
        required_token: NetstatusKwd,
    ) -> Result<ProtoStatus> {
        /// Parse an optional item's arguments as a protocol list; `None`
        /// becomes the empty set.
        fn parse(t: Option<&Item<'_, NetstatusKwd>>) -> Result<Protocols> {
            if let Some(item) = t {
                item.args_as_str()
                    .parse::<Protocols>()
                    .map_err(|e| EK::BadArgument.at_pos(item.pos()).with_source(e))
            } else {
                Ok(Protocols::new())
            }
        }
        let recommended = parse(sec.get(recommend_token))?;
        let required = parse(sec.get(required_token))?;
        Ok(ProtoStatus {
            recommended,
            required,
        })
    }
    /// Return the required protocols in this object.
    pub fn required_protocols(&self) -> &Protocols {
        &self.required
    }
    /// Return the recommended protocols in this object.
    pub fn recommended_protocols(&self) -> &Protocols {
        &self.recommended
    }
}
impl<T> std::str::FromStr for NetParams<T>
where
    T: std::str::FromStr,
    T::Err: std::error::Error,
{
    type Err = Error;
    /// Parse a space-separated list of `Key=Value` pairs.
    ///
    /// Runs of extra spaces are ignored.  A pair with no `=`, or whose
    /// value fails to parse as `T`, is an error.  Duplicate keys keep the
    /// last value (HashMap collection semantics).
    fn from_str(s: &str) -> Result<Self> {
        /// Parse a single `key=value` pair into (owned key, parsed value).
        fn parse_pair<U>(p: &str) -> Result<(String, U)>
        where
            U: std::str::FromStr,
            U::Err: std::error::Error,
        {
            // splitn(2, ..) keeps any later '=' inside the value.
            let parts: Vec<_> = p.splitn(2, '=').collect();
            if parts.len() != 2 {
                return Err(EK::BadArgument
                    .at_pos(Pos::at(p))
                    .with_msg("Missing = in key=value list"));
            }
            let num = parts[1].parse::<U>().map_err(|e| {
                EK::BadArgument
                    .at_pos(Pos::at(parts[1]))
                    .with_msg(e.to_string())
            })?;
            Ok((parts[0].to_string(), num))
        }
        let params = s
            .split(' ')
            .filter(|p| !p.is_empty())
            .map(parse_pair)
            .collect::<Result<HashMap<_, _>>>()?;
        Ok(NetParams { params })
    }
}
impl FromStr for SharedRandVal {
    type Err = Error;
    /// Parse a base64-encoded shared-random value; it must decode to
    /// exactly 32 bytes.
    fn from_str(s: &str) -> Result<Self> {
        let decoded: B64 = s.parse()?;
        Ok(SharedRandVal(decoded.into_array()?))
    }
}
impl Display for SharedRandVal {
    /// Format the value the same way it appears on the wire: base64.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let encoded = B64::from(self.0.to_vec());
        Display::fmt(&encoded, f)
    }
}
/// Marker: a SharedRandVal is encodable/parseable as a normal item argument.
impl NormalItemArgument for SharedRandVal {}
impl SharedRandStatus {
    /// Parse a `shared-rand-{previous,current}-value` item into a
    /// SharedRandStatus.
    ///
    /// Returns an internal error if called on any other keyword.
    fn from_item(item: &Item<'_, NetstatusKwd>) -> Result<Self> {
        match item.kwd() {
            NetstatusKwd::SHARED_RAND_PREVIOUS_VALUE | NetstatusKwd::SHARED_RAND_CURRENT_VALUE => {}
            _ => {
                return Err(Error::from(internal!(
                    "wrong keyword {:?} on shared-random value",
                    item.kwd()
                ))
                .at_pos(item.pos()));
            }
        }
        // Arguments: reveal count, base64 value, optional timestamp.
        let n_reveals: u8 = item.parse_arg(0)?;
        let value: SharedRandVal = item.parse_arg(1)?;
        let timestamp = item.parse_optional_arg::<Iso8601TimeNoSp>(2)?;
        Ok(SharedRandStatus {
            n_reveals,
            value,
            timestamp,
        })
    }
    /// Return the shared random value itself.
    pub fn value(&self) -> &SharedRandVal {
        &self.value
    }
    /// Return this value's timestamp, if it has one.
    pub fn timestamp(&self) -> Option<std::time::SystemTime> {
        self.timestamp.map(|t| t.0)
    }
}
impl DirSource {
    /// Parse a `dir-source` item into a DirSource.
    ///
    /// Returns an internal error if called on any other keyword.
    fn from_item(item: &Item<'_, NetstatusKwd>) -> Result<Self> {
        if item.kwd() != NetstatusKwd::DIR_SOURCE {
            return Err(
                Error::from(internal!("Bad keyword {:?} on dir-source", item.kwd()))
                    .at_pos(item.pos()),
            );
        }
        let nickname = item.required_arg(0)?.to_string();
        let identity = item.parse_arg::<Fingerprint>(1)?.into();
        // NOTE(review): argument 2 is skipped — presumably the hostname
        // field of the dir-source line; confirm against dir-spec.
        let ip = item.parse_arg(3)?;
        let dir_port = item.parse_arg(4)?;
        let or_port = item.parse_arg(5)?;
        Ok(DirSource {
            nickname,
            identity,
            ip,
            dir_port,
            or_port,
        })
    }
}
impl ConsensusVoterInfo {
    /// Parse a voter-info section (beginning with `dir-source`) from a
    /// consensus into a ConsensusVoterInfo.
    fn from_section(sec: &Section<'_, NetstatusKwd>) -> Result<ConsensusVoterInfo> {
        use NetstatusKwd::*;
        // NOTE(review): assumes the caller never hands us an empty section —
        // the explicit allow(unwrap_used) suggests that invariant; confirm
        // at the call sites.
        #[allow(clippy::unwrap_used)]
        let first = sec.first_item().unwrap();
        if first.kwd() != DIR_SOURCE {
            return Err(Error::from(internal!(
                "Wrong keyword {:?} at start of voter info",
                first.kwd()
            ))
            .at_pos(first.pos()));
        }
        let dir_source = DirSource::from_item(sec.required(DIR_SOURCE)?)?;
        // The contact line is kept verbatim, not parsed.
        let contact = sec.required(CONTACT)?.args_as_str().to_string();
        let vote_digest = sec.required(VOTE_DIGEST)?.parse_arg::<B16>(0)?.into();
        Ok(ConsensusVoterInfo {
            dir_source,
            contact,
            vote_digest,
        })
    }
}
impl Default for RelayWeight {
fn default() -> RelayWeight {
RelayWeight::Unmeasured(0)
}
}
impl RelayWeight {
    /// Parse a routerstatus `w` line into a RelayWeight.
    ///
    /// Returns an internal error if the item is not a `w` line at all.
    fn from_item(item: &Item<'_, NetstatusKwd>) -> Result<RelayWeight> {
        if item.kwd() != NetstatusKwd::RS_W {
            return Err(
                Error::from(internal!("Wrong keyword {:?} on W line", item.kwd()))
                    .at_pos(item.pos()),
            );
        }
        // The arguments of a `w` line are a space-separated Key=Value list.
        let params = item.args_as_str().parse()?;
        // Fix: restored `&params` here; the `&` reference had been mangled
        // into mojibake by a bad encoding round-trip, so this line could
        // not compile.
        Self::from_net_params(&params).map_err(|e| e.at_pos(item.pos()))
    }
    /// Compute a RelayWeight from a parsed `Bandwidth`/`Unmeasured`
    /// parameter list.
    ///
    /// * No `Bandwidth` entry: an unmeasured zero weight.
    /// * `Unmeasured` absent or `0`: a measured weight.
    /// * `Unmeasured=1`: an unmeasured weight.
    /// * Any other `Unmeasured` value is an error.
    fn from_net_params(params: &NetParams<u32>) -> Result<RelayWeight> {
        let bw = params.params.get("Bandwidth");
        let unmeas = params.params.get("Unmeasured");
        let bw = match bw {
            None => return Ok(RelayWeight::Unmeasured(0)),
            Some(b) => *b,
        };
        match unmeas {
            None | Some(0) => Ok(RelayWeight::Measured(bw)),
            Some(1) => Ok(RelayWeight::Unmeasured(bw)),
            _ => Err(EK::BadArgument.with_msg("unmeasured value")),
        }
    }
}
#[cfg(feature = "parse2")]
mod parse2_impls {
    //! Implementations of the `parse2` parsing traits for netstatus types.
    use super::*;
    use parse2::ArgumentError as AE;
    use parse2::ErrorProblem as EP;
    use parse2::{ArgumentStream, ItemArgumentParseable, ItemValueParseable};
    use parse2::{KeywordRef, NetdocParseableFields, UnparsedItem};
    use paste::paste;
    use std::result::Result;
    // Generates a helper struct with one `<role>_<side>_protocols` field per
    // `role side;` entry, and forwards the `NetdocParseableFields`
    // implementation for `ProtoStatuses` through that helper.
    macro_rules! impl_proto_statuses { { $( $rr:ident $cr:ident; )* } => { paste! {
        #[derive(Deftly)]
        #[derive_deftly(NetdocParseableFields)]
        #[allow(unreachable_pub)]
        pub struct ProtoStatusesParseHelper {
            $(
                #[deftly(netdoc(default))]
                [<$rr _ $cr _protocols>]: Protocols,
            )*
        }
        pub use ProtoStatusesParseHelperNetdocParseAccumulator
            as ProtoStatusesNetdocParseAccumulator;
        impl NetdocParseableFields for ProtoStatuses {
            type Accumulator = ProtoStatusesNetdocParseAccumulator;
            fn is_item_keyword(kw: KeywordRef<'_>) -> bool {
                ProtoStatusesParseHelper::is_item_keyword(kw)
            }
            fn accumulate_item(
                acc: &mut Self::Accumulator,
                item: UnparsedItem<'_>,
            ) -> Result<(), EP> {
                ProtoStatusesParseHelper::accumulate_item(acc, item)
            }
            fn finish(acc: Self::Accumulator) -> Result<Self, EP> {
                let parse = ProtoStatusesParseHelper::finish(acc)?;
                let mut out = ProtoStatuses::default();
                $(
                    out.$cr.$rr = parse.[< $rr _ $cr _protocols >];
                )*
                Ok(out)
            }
        }
    } } }
    impl_proto_statuses! {
        required client;
        required relay;
        recommended client;
        recommended relay;
    }
    /// Parse the remaining arguments of an item as an i32 parameter list.
    impl ItemValueParseable for NetParams<i32> {
        fn from_unparsed(item: parse2::UnparsedItem<'_>) -> Result<Self, EP> {
            item.check_no_object()?;
            item.args_copy()
                .into_remaining()
                .parse()
                .map_err(item.invalid_argument_handler("parameters"))
        }
    }
    /// Parse the remaining arguments of a `w` item as a relay weight.
    impl ItemValueParseable for RelayWeight {
        fn from_unparsed(item: parse2::UnparsedItem<'_>) -> Result<Self, EP> {
            item.check_no_object()?;
            (|| {
                let params = item.args_copy().into_remaining().parse()?;
                // Fix: restored `&params` here; the `&` reference had been
                // mangled into mojibake by a bad encoding round-trip.
                Self::from_net_params(&params)
            })()
            .map_err(item.invalid_argument_handler("weights"))
        }
    }
    /// Parse the remaining arguments of a `v` item as a software version.
    impl ItemValueParseable for rs::SoftwareVersion {
        fn from_unparsed(mut item: parse2::UnparsedItem<'_>) -> Result<Self, EP> {
            item.check_no_object()?;
            item.args_mut()
                .into_remaining()
                .parse()
                .map_err(item.invalid_argument_handler("version"))
        }
    }
    /// Consume (and discard) the two arguments making up a publication
    /// date-time.
    impl ItemArgumentParseable for IgnoredPublicationTimeSp {
        fn from_args(a: &mut ArgumentStream) -> Result<IgnoredPublicationTimeSp, AE> {
            let mut next_arg = || a.next().ok_or(AE::Missing);
            let _: &str = next_arg()?;
            let _: &str = next_arg()?;
            Ok(IgnoredPublicationTimeSp)
        }
    }
}
impl Footer {
    /// Parse a directory-footer section into a Footer.
    ///
    /// `bandwidth-weights` is optional; when absent, the weights are
    /// parsed from the empty string (yielding an empty map).
    fn from_section(sec: &Section<'_, NetstatusKwd>) -> Result<Footer> {
        use NetstatusKwd::*;
        // The `directory-footer` marker item itself must be present.
        sec.required(DIRECTORY_FOOTER)?;
        let weights = sec
            .maybe(BANDWIDTH_WEIGHTS)
            .args_as_str()
            .unwrap_or("")
            .parse()?;
        Ok(Footer { weights })
    }
}
/// Outcome of checking one signature against a set of certificates.
enum SigCheckResult {
    /// The signature verified against a matching certificate.
    Valid,
    /// A matching certificate was found, but verification failed.
    Invalid,
    /// No certificate matching the signature's key IDs was provided.
    MissingCert,
}
impl Signature {
    /// Parse a `directory-signature` item into a Signature.
    ///
    /// With three or more arguments, the first names the digest algorithm;
    /// with two, the algorithm defaults to "sha1".
    fn from_item(item: &Item<'_, NetstatusKwd>) -> Result<Signature> {
        if item.kwd() != NetstatusKwd::DIRECTORY_SIGNATURE {
            return Err(Error::from(internal!(
                "Wrong keyword {:?} for directory signature",
                item.kwd()
            ))
            .at_pos(item.pos()));
        }
        let (alg, id_fp, sk_fp) = if item.n_args() > 2 {
            (
                item.required_arg(0)?,
                item.required_arg(1)?,
                item.required_arg(2)?,
            )
        } else {
            // Two-argument form: the algorithm is implicitly sha1.
            ("sha1", item.required_arg(0)?, item.required_arg(1)?)
        };
        let digestname = alg.to_string();
        let id_fingerprint = id_fp.parse::<Fingerprint>()?.into();
        let sk_fingerprint = sk_fp.parse::<Fingerprint>()?.into();
        let key_ids = AuthCertKeyIds {
            id_fingerprint,
            sk_fingerprint,
        };
        // The signature bytes live in the item's SIGNATURE object.
        let signature = item.obj("SIGNATURE")?;
        Ok(Signature {
            digestname,
            key_ids,
            signature,
        })
    }
    /// Return true if `cert` carries exactly the key IDs that made this
    /// signature.
    fn matches_cert(&self, cert: &AuthCert) -> bool {
        cert.key_ids() == self.key_ids
    }
    /// Find the first certificate in `certs`, if any, matching this
    /// signature's key IDs.
    fn find_cert<'a>(&self, certs: &'a [AuthCert]) -> Option<&'a AuthCert> {
        certs.iter().find(|&c| self.matches_cert(c))
    }
    /// Try to verify this signature over `signed_digest` using `certs`.
    fn check_signature(&self, signed_digest: &[u8], certs: &[AuthCert]) -> SigCheckResult {
        match self.find_cert(certs) {
            None => SigCheckResult::MissingCert,
            Some(cert) => {
                let key = cert.signing_key();
                match key.verify(signed_digest, &self.signature[..]) {
                    Ok(()) => SigCheckResult::Valid,
                    Err(_) => SigCheckResult::Invalid,
                }
            }
        }
    }
}
impl SignatureGroup {
    /// Partition this group's signatures by whether a matching certificate
    /// exists in `certs`.
    ///
    /// Returns the number of distinct authority identities that are
    /// covered, plus the signatures with no matching certificate.
    /// Multiple signatures from one identity count once.
    fn list_missing(&self, certs: &[AuthCert]) -> (usize, Vec<&Signature>) {
        let mut ok: HashSet<RsaIdentity> = HashSet::new();
        let mut missing = Vec::new();
        for sig in &self.signatures {
            let id_fingerprint = &sig.key_ids.id_fingerprint;
            if ok.contains(id_fingerprint) {
                continue;
            }
            if sig.find_cert(certs).is_some() {
                ok.insert(*id_fingerprint);
                continue;
            }
            missing.push(sig);
        }
        (ok.len(), missing)
    }
    /// Return true if signatures from more than half of `authorities` are
    /// present, so fetching certificates could let the document validate.
    ///
    /// No cryptographic verification happens here.
    fn could_validate(&self, authorities: &[&RsaIdentity]) -> bool {
        let mut signed_by: HashSet<RsaIdentity> = HashSet::new();
        for sig in &self.signatures {
            let id_fp = &sig.key_ids.id_fingerprint;
            if signed_by.contains(id_fp) {
                // Only count one signature per authority.
                continue;
            }
            if authorities.contains(&id_fp) {
                signed_by.insert(*id_fp);
            }
        }
        signed_by.len() > (authorities.len() / 2)
    }
    /// Return true if more than half of `n_authorities` distinct
    /// authorities have a cryptographically valid signature in this group,
    /// verified against `certs`.
    fn validate(&self, n_authorities: usize, certs: &[AuthCert]) -> bool {
        let mut ok: HashSet<RsaIdentity> = HashSet::new();
        for sig in &self.signatures {
            let id_fingerprint = &sig.key_ids.id_fingerprint;
            if ok.contains(id_fingerprint) {
                // Only count one valid signature per authority.
                continue;
            }
            // Select the document digest matching the signature's named
            // algorithm; unknown algorithm names are skipped.
            let d: Option<&[u8]> = match sig.digestname.as_ref() {
                "sha256" => self.sha256.as_ref().map(|a| &a[..]),
                "sha1" => self.sha1.as_ref().map(|a| &a[..]),
                _ => None,
            };
            if d.is_none() {
                continue;
            }
            // Safe: the is_none() check above guarantees Some here.
            #[allow(clippy::unwrap_used)]
            match sig.check_signature(d.as_ref().unwrap(), certs) {
                SigCheckResult::Valid => {
                    ok.insert(*id_fingerprint);
                }
                _ => continue,
            }
        }
        ok.len() > (n_authorities / 2)
    }
}
// Unit tests: consensus parsing & signature validation, weight and
// net-parameter parsing, shared-random values, and protocol statuses.
#[cfg(test)]
mod test {
    #![allow(clippy::bool_assert_comparison)]
    #![allow(clippy::clone_on_copy)]
    #![allow(clippy::dbg_macro)]
    #![allow(clippy::mixed_attributes_style)]
    #![allow(clippy::print_stderr)]
    #![allow(clippy::print_stdout)]
    #![allow(clippy::single_char_pattern)]
    #![allow(clippy::unwrap_used)]
    #![allow(clippy::unchecked_time_subtraction)]
    #![allow(clippy::useless_vec)]
    #![allow(clippy::needless_pass_by_value)]
    use super::*;
    use hex_literal::hex;
    #[cfg(all(feature = "ns-vote", feature = "parse2"))]
    use {
        crate::parse2::{NetdocUnverified as _, ParseInput, parse_netdoc},
        std::fs,
    };
    /// Test authority certificates matching `CONSENSUS`.
    const CERTS: &str = include_str!("../../testdata/authcerts2.txt");
    /// A microdesc-flavored consensus signed by the keys in `CERTS`.
    const CONSENSUS: &str = include_str!("../../testdata/mdconsensus1.txt");
    /// Certificates for the plain-consensus test data.
    #[cfg(feature = "plain-consensus")]
    const PLAIN_CERTS: &str = include_str!("../../testdata2/cached-certs");
    /// A plain ("ns") flavored consensus.
    #[cfg(feature = "plain-consensus")]
    const PLAIN_CONSENSUS: &str = include_str!("../../testdata2/cached-consensus");
    /// Read one of the deliberately-broken consensus files from
    /// `testdata/bad-mdconsensus`.
    fn read_bad(fname: &str) -> String {
        use std::fs;
        use std::path::PathBuf;
        let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
        path.push("testdata");
        path.push("bad-mdconsensus");
        path.push(fname);
        fs::read_to_string(path).unwrap()
    }
    /// End-to-end: parse a microdesc consensus, check its signatures, and
    /// inspect the first relay's fields.
    #[test]
    fn parse_and_validate_md() -> Result<()> {
        use std::net::SocketAddr;
        use tor_checkable::{SelfSigned, Timebound};
        let mut certs = Vec::new();
        for cert in AuthCert::parse_multiple(CERTS)? {
            let cert = cert?.check_signature()?.dangerously_assume_timely();
            certs.push(cert);
        }
        let auth_ids: Vec<_> = certs.iter().map(|c| c.id_fingerprint()).collect();
        assert_eq!(certs.len(), 3);
        let (_, _, consensus) = MdConsensus::parse(CONSENSUS)?;
        let consensus = consensus.dangerously_assume_timely().set_n_authorities(3);
        assert!(consensus.authorities_are_correct(&auth_ids));
        assert!(consensus.authorities_are_correct(&auth_ids[0..1]));
        {
            let bad_auth_id = (*b"xxxxxxxxxxxxxxxxxxxx").into();
            assert!(!consensus.authorities_are_correct(&[&bad_auth_id]));
        }
        let missing = consensus.key_is_correct(&[]).err().unwrap();
        assert_eq!(3, missing.len());
        assert!(consensus.key_is_correct(&certs).is_ok());
        let missing = consensus.key_is_correct(&certs[0..1]).err().unwrap();
        assert_eq!(2, missing.len());
        // The same certificate three times only covers one authority.
        let same_three_times = vec![certs[0].clone(), certs[0].clone(), certs[0].clone()];
        let missing = consensus.key_is_correct(&same_three_times).err().unwrap();
        assert_eq!(2, missing.len());
        assert!(consensus.is_well_signed(&same_three_times).is_err());
        assert!(consensus.key_is_correct(&certs).is_ok());
        let consensus = consensus.check_signature(&certs)?;
        assert_eq!(6, consensus.relays().len());
        let r0 = &consensus.relays()[0];
        assert_eq!(
            r0.md_digest(),
            &hex!("73dabe0a0468f4f7a67810a18d11e36731bb1d2ec3634db459100609f3b3f535")
        );
        assert_eq!(
            r0.rsa_identity().as_bytes(),
            &hex!("0a3057af2910415794d8ea430309d9ac5f5d524b")
        );
        assert!(!r0.weight().is_measured());
        assert!(!r0.weight().is_nonzero());
        let pv = &r0.protovers();
        assert!(pv.supports_subver("HSDir", 2));
        assert!(!pv.supports_subver("HSDir", 3));
        let ip4 = "127.0.0.1:5002".parse::<SocketAddr>().unwrap();
        let ip6 = "[::1]:5002".parse::<SocketAddr>().unwrap();
        assert!(r0.addrs().any(|a| a == ip4));
        assert!(r0.addrs().any(|a| a == ip6));
        Ok(())
    }
    /// End-to-end: parse a plain ("ns") consensus and check its signatures.
    #[test]
    #[cfg(feature = "plain-consensus")]
    fn parse_and_validate_ns() -> Result<()> {
        use tor_checkable::{SelfSigned, Timebound};
        let mut certs = Vec::new();
        for cert in AuthCert::parse_multiple(PLAIN_CERTS)? {
            let cert = cert?.check_signature()?.dangerously_assume_timely();
            certs.push(cert);
        }
        let auth_ids: Vec<_> = certs.iter().map(|c| c.id_fingerprint()).collect();
        assert_eq!(certs.len(), 4);
        let (_, _, consensus) = PlainConsensus::parse(PLAIN_CONSENSUS)?;
        let consensus = consensus.dangerously_assume_timely().set_n_authorities(3);
        assert!(consensus.authorities_are_correct(&auth_ids));
        assert!(consensus.authorities_are_correct(&auth_ids[0..1]));
        assert!(consensus.key_is_correct(&certs).is_ok());
        let _consensus = consensus.check_signature(&certs)?;
        Ok(())
    }
    /// Smoke test: parse a vote file via the parse2 machinery.
    #[test]
    #[cfg(all(feature = "ns-vote", feature = "parse2"))]
    fn parse2_vote() -> anyhow::Result<()> {
        let file = "testdata2/v3-status-votes--1";
        let text = fs::read_to_string(file)?;
        use crate::parse2::poc::netstatus::NetworkStatusUnverifiedVote;
        let input = ParseInput::new(&text, file);
        let doc: NetworkStatusUnverifiedVote = parse_netdoc(&input)?;
        println!("{doc:?}");
        println!("{:#?}", doc.inspect_unverified().0.r[0]);
        Ok(())
    }
    /// Each deliberately-broken consensus file must fail with the exact
    /// expected error (message and position).
    #[test]
    fn test_bad() {
        use crate::Pos;
        /// Assert that parsing the named bad file fails exactly with `e`.
        fn check(fname: &str, e: &Error) {
            let content = read_bad(fname);
            let res = MdConsensus::parse(&content);
            assert!(res.is_err());
            assert_eq!(&res.err().unwrap(), e);
        }
        check(
            "bad-flags",
            &EK::BadArgument
                .at_pos(Pos::from_line(27, 1))
                .with_msg("Flags out of order"),
        );
        check(
            "bad-md-digest",
            &EK::BadArgument
                .at_pos(Pos::from_line(40, 3))
                .with_msg("Invalid base64"),
        );
        check(
            "bad-weight",
            &EK::BadArgument
                .at_pos(Pos::from_line(67, 141))
                .with_msg("invalid digit found in string"),
        );
        check(
            "bad-weights",
            &EK::BadArgument
                .at_pos(Pos::from_line(51, 13))
                .with_msg("invalid digit found in string"),
        );
        check(
            "wrong-order",
            &EK::WrongSortOrder.at_pos(Pos::from_line(52, 1)),
        );
        check(
            "wrong-start",
            &EK::UnexpectedToken
                .with_msg("vote-status")
                .at_pos(Pos::from_line(1, 1)),
        );
        check("wrong-version", &EK::BadDocumentVersion.with_msg("10"));
    }
    /// Parse a string holding exactly one netdoc item.
    fn gettok(s: &str) -> Result<Item<'_, NetstatusKwd>> {
        let mut reader = NetDocReader::new(s)?;
        let tok = reader.next().unwrap();
        assert!(reader.next().is_none());
        tok
    }
    /// RelayWeight parsing: measured/unmeasured/zero and error cases.
    #[test]
    fn test_weight() {
        let w = gettok("w Unmeasured=1 Bandwidth=6\n").unwrap();
        let w = RelayWeight::from_item(&w).unwrap();
        assert!(!w.is_measured());
        assert!(w.is_nonzero());
        let w = gettok("w Bandwidth=10\n").unwrap();
        let w = RelayWeight::from_item(&w).unwrap();
        assert!(w.is_measured());
        assert!(w.is_nonzero());
        let w = RelayWeight::default();
        assert!(!w.is_measured());
        assert!(!w.is_nonzero());
        // No Bandwidth entry at all: unmeasured zero.
        let w = gettok("w Mustelid=66 Cheato=7 Unmeasured=1\n").unwrap();
        let w = RelayWeight::from_item(&w).unwrap();
        assert!(!w.is_measured());
        assert!(!w.is_nonzero());
        // Wrong keyword is an error.
        let w = gettok("r foo\n").unwrap();
        let w = RelayWeight::from_item(&w);
        assert!(w.is_err());
        let w = gettok("r Bandwidth=6 Unmeasured=Frog\n").unwrap();
        let w = RelayWeight::from_item(&w);
        assert!(w.is_err());
        let w = gettok("r Bandwidth=6 Unmeasured=3\n").unwrap();
        let w = RelayWeight::from_item(&w);
        assert!(w.is_err());
    }
    /// NetParams string parsing: valid list plus two malformed ones.
    #[test]
    fn test_netparam() {
        let p = "Hello=600 Goodbye=5 Fred=7"
            .parse::<NetParams<u32>>()
            .unwrap();
        assert_eq!(p.get("Hello"), Some(&600_u32));
        let p = "Hello=Goodbye=5 Fred=7".parse::<NetParams<u32>>();
        assert!(p.is_err());
        let p = "Hello=Goodbye Fred=7".parse::<NetParams<u32>>();
        assert!(p.is_err());
    }
    /// Shared-random items with and without a timestamp, plus an error case.
    #[test]
    fn test_sharedrand() {
        let sr =
            gettok("shared-rand-previous-value 9 5LodY4yWxFhTKtxpV9wAgNA9N8flhUCH0NqQv1/05y4\n")
                .unwrap();
        let sr = SharedRandStatus::from_item(&sr).unwrap();
        assert_eq!(sr.n_reveals, 9);
        assert_eq!(
            sr.value.0,
            hex!("e4ba1d638c96c458532adc6957dc0080d03d37c7e5854087d0da90bf5ff4e72e")
        );
        assert!(sr.timestamp.is_none());
        let sr2 = gettok(
            "shared-rand-current-value 9 \
            5LodY4yWxFhTKtxpV9wAgNA9N8flhUCH0NqQv1/05y4 2022-01-20T12:34:56\n",
        )
        .unwrap();
        let sr2 = SharedRandStatus::from_item(&sr2).unwrap();
        assert_eq!(sr2.n_reveals, sr.n_reveals);
        assert_eq!(sr2.value.0, sr.value.0);
        assert_eq!(
            sr2.timestamp.unwrap().0,
            humantime::parse_rfc3339("2022-01-20T12:34:56Z").unwrap()
        );
        let sr = gettok("foo bar\n").unwrap();
        let sr = SharedRandStatus::from_item(&sr);
        assert!(sr.is_err());
    }
    /// check_protocols: ok, missing-recommended, and missing-required.
    #[test]
    fn test_protostatus() {
        let my_protocols: Protocols = "Link=7 Cons=1-5 Desc=3-10".parse().unwrap();
        let outcome = ProtoStatus {
            recommended: "Link=7".parse().unwrap(),
            required: "Desc=5".parse().unwrap(),
        }
        .check_protocols(&my_protocols);
        assert!(outcome.is_ok());
        let outcome = ProtoStatus {
            recommended: "Microdesc=4 Link=7".parse().unwrap(),
            required: "Desc=5".parse().unwrap(),
        }
        .check_protocols(&my_protocols);
        assert_eq!(
            outcome,
            Err(ProtocolSupportError::MissingRecommended(
                "Microdesc=4".parse().unwrap()
            ))
        );
        let outcome = ProtoStatus {
            recommended: "Microdesc=4 Link=7".parse().unwrap(),
            required: "Desc=5 Cons=5-12 Wombat=15".parse().unwrap(),
        }
        .check_protocols(&my_protocols);
        assert_eq!(
            outcome,
            Err(ProtocolSupportError::MissingRequired(
                "Cons=6-12 Wombat=15".parse().unwrap()
            ))
        );
    }
    /// ProtoStatuses serde round-trip, plus deserializing explicit JSON.
    #[test]
    fn serialize_protostatus() {
        let ps = ProtoStatuses {
            client: ProtoStatus {
                recommended: "Link=1-5 LinkAuth=2-5".parse().unwrap(),
                required: "Link=5 LinkAuth=3".parse().unwrap(),
            },
            relay: ProtoStatus {
                recommended: "Wombat=20-30 Knish=20-30".parse().unwrap(),
                required: "Wombat=20-22 Knish=25-27".parse().unwrap(),
            },
        };
        let json = serde_json::to_string(&ps).unwrap();
        let ps2 = serde_json::from_str(json.as_str()).unwrap();
        assert_eq!(ps, ps2);
        let ps3: ProtoStatuses = serde_json::from_str(
            r#"{
"client":{
"required":"Link=5 LinkAuth=3",
"recommended":"Link=1-5 LinkAuth=2-5"
},
"relay":{
"required":"Wombat=20-22 Knish=25-27",
"recommended":"Wombat=20-30 Knish=20-30"
}
}"#,
        )
        .unwrap();
        assert_eq!(ps, ps3);
    }
}