#[allow(unused_imports)]
use alloc::collections::BTreeMap;
#[allow(unused_imports)]
use core::marker::PhantomData;
use jacquard_common::CowStr;
#[allow(unused_imports)]
use jacquard_common::deps::codegen::unicode_segmentation::UnicodeSegmentation;
use jacquard_common::types::collection::{Collection, RecordError};
use jacquard_common::types::string::{AtUri, Cid, Datetime};
use jacquard_common::types::uri::{RecordUri, UriError};
use jacquard_common::xrpc::XrpcResp;
use jacquard_derive::{IntoStatic, lexicon};
use jacquard_lexicon::lexicon::LexiconDoc;
use jacquard_lexicon::schema::LexiconSchema;
#[allow(unused_imports)]
use jacquard_lexicon::validation::{ConstraintError, ValidationPath};
use serde::{Serialize, Deserialize};
use crate::com_atproto::repo::strong_ref::StrongRef;
use crate::tech_tokimeki::takibi::log;
/// Record for the `tech.tokimeki.takibi.log` collection — per the embedded
/// lexicon doc, "adding wood to the fire", implicitly recording visible
/// sparks as a form of silent appreciation.
///
/// NOTE(review): the `#[lexicon]` attribute macro appears to inject an
/// `extra_data` field (the builders below construct one) — confirm against
/// the macro's expansion.
#[lexicon]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, IntoStatic)]
#[serde(rename_all = "camelCase", rename = "tech.tokimeki.takibi.log", tag = "$type")]
pub struct Log<'a> {
// Required creation timestamp (serialized as `createdAt`).
pub created_at: Datetime,
// Optional list of visible sparks; schema caps it at 20 entries
// (enforced in `LexiconSchema::validate` below).
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(borrow)]
pub visible_sparks: Option<Vec<log::SparkRef<'a>>>,
}
/// Response shape for a `com.atproto.repo.getRecord` call that returns a
/// `tech.tokimeki.takibi.log` record (see `LogRecord::Output`).
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, IntoStatic)]
#[serde(rename_all = "camelCase")]
pub struct LogGetRecordOutput<'a> {
// CID of the returned record version, when the server includes one.
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(borrow)]
pub cid: Option<Cid<'a>>,
// AT-URI identifying the record.
#[serde(borrow)]
pub uri: AtUri<'a>,
// The record payload itself.
#[serde(borrow)]
pub value: Log<'a>,
}
/// `tech.tokimeki.takibi.log#sparkRef` — per the embedded lexicon doc, a
/// reference to a visible spark with timing information.
///
/// NOTE(review): `#[lexicon]` appears to inject an `extra_data` field here
/// too (see `SparkRefBuilder::build`) — confirm against the macro's expansion.
#[lexicon]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, IntoStatic)]
#[serde(rename_all = "camelCase")]
pub struct SparkRef<'a> {
// Elapsed time value; schema bounds are 0..=10000 (enforced in
// `LexiconSchema::validate`). Units are not stated in the schema.
pub elapsed: i64,
// Strong reference (uri + cid) to the spark record.
#[serde(borrow)]
pub spark: StrongRef<'a>,
}
impl<'a> Log<'a> {
    /// Parses `uri` into a typed [`RecordUri`] for this collection.
    ///
    /// # Errors
    /// Returns a [`UriError`] when the input is not a valid AT-URI or does
    /// not form a valid record URI.
    pub fn uri(
        uri: impl Into<CowStr<'a>>,
    ) -> Result<RecordUri<'a, LogRecord>, UriError> {
        let at_uri = AtUri::new_cow(uri.into())?;
        RecordUri::try_from_uri(at_uri)
    }
}
/// Zero-sized marker type standing for the `tech.tokimeki.takibi.log`
/// collection in XRPC and `Collection` plumbing; carries no data.
#[derive(Debug, Serialize, Deserialize)]
pub struct LogRecord;
// getRecord responses for this collection decode as JSON into
// `LogGetRecordOutput`; failures decode as the shared `RecordError`.
impl XrpcResp for LogRecord {
const NSID: &'static str = "tech.tokimeki.takibi.log";
const ENCODING: &'static str = "application/json";
type Output<'de> = LogGetRecordOutput<'de>;
type Err<'de> = RecordError<'de>;
}
impl From<LogGetRecordOutput<'_>> for Log<'_> {
fn from(output: LogGetRecordOutput<'_>) -> Self {
use jacquard_common::IntoStatic;
output.value.into_static()
}
}
// Ties the record struct to its collection NSID and marker record type.
impl Collection for Log<'_> {
const NSID: &'static str = "tech.tokimeki.takibi.log";
type Record = LogRecord;
}
// The marker type is also a `Collection`, pointing at itself, so APIs can
// accept either the data struct or the marker.
impl Collection for LogRecord {
const NSID: &'static str = "tech.tokimeki.takibi.log";
type Record = LogRecord;
}
impl<'a> LexiconSchema for Log<'a> {
    fn nsid() -> &'static str {
        "tech.tokimeki.takibi.log"
    }
    fn def_name() -> &'static str {
        "main"
    }
    fn lexicon_doc() -> LexiconDoc<'static> {
        lexicon_doc_tech_tokimeki_takibi_log()
    }
    /// Enforces the schema's `maxLength: 20` constraint on `visibleSparks`;
    /// an absent list is trivially valid.
    fn validate(&self) -> Result<(), ConstraintError> {
        match self.visible_sparks.as_deref() {
            Some(sparks) if sparks.len() > 20usize => {
                Err(ConstraintError::MaxLength {
                    path: ValidationPath::from_field("visible_sparks"),
                    max: 20usize,
                    actual: sparks.len(),
                })
            }
            _ => Ok(()),
        }
    }
}
impl<'a> LexiconSchema for SparkRef<'a> {
    fn nsid() -> &'static str {
        "tech.tokimeki.takibi.log"
    }
    fn def_name() -> &'static str {
        "sparkRef"
    }
    fn lexicon_doc() -> LexiconDoc<'static> {
        lexicon_doc_tech_tokimeki_takibi_log()
    }
    /// Enforces the schema bounds on `elapsed` (maximum 10000, minimum 0).
    /// The maximum is checked first, preserving the original error priority.
    fn validate(&self) -> Result<(), ConstraintError> {
        let elapsed = self.elapsed;
        if elapsed > 10000i64 {
            return Err(ConstraintError::Maximum {
                path: ValidationPath::from_field("elapsed"),
                max: 10000i64,
                actual: elapsed,
            });
        }
        if elapsed < 0i64 {
            return Err(ConstraintError::Minimum {
                path: ValidationPath::from_field("elapsed"),
                min: 0i64,
                actual: elapsed,
            });
        }
        Ok(())
    }
}
/// Type-state markers for [`LogBuilder`]: tracks at compile time whether the
/// required `created_at` field has been set, so `build` is only callable on
/// a fully-initialized builder.
pub mod log_state {
pub use crate::builder_types::{Set, Unset, IsSet, IsUnset};
#[allow(unused)]
use ::core::marker::PhantomData;
// Private seal so downstream crates cannot implement `State` themselves.
mod sealed {
pub trait Sealed {}
}
// One associated type per required field; `Set`/`Unset` record progress.
pub trait State: sealed::Sealed {
type CreatedAt;
}
// Initial state: nothing set yet.
pub struct Empty(());
impl sealed::Sealed for Empty {}
impl State for Empty {
type CreatedAt = Unset;
}
// State transition applied when `created_at` is assigned.
pub struct SetCreatedAt<S: State = Empty>(PhantomData<fn() -> S>);
impl<S: State> sealed::Sealed for SetCreatedAt<S> {}
impl<S: State> State for SetCreatedAt<S> {
type CreatedAt = Set<members::created_at>;
}
// Marker tokens naming each settable member.
#[allow(non_camel_case_types)]
pub mod members {
pub struct created_at(());
}
}
/// Type-state builder for [`Log`]; see [`log_state`] for the compile-time
/// tracking of which fields have been set.
pub struct LogBuilder<'a, S: log_state::State> {
_state: PhantomData<fn() -> S>,
// Tuple slots: (created_at, visible_sparks).
_fields: (Option<Datetime>, Option<Vec<log::SparkRef<'a>>>),
_lifetime: PhantomData<&'a ()>,
}
impl<'a> Log<'a> {
pub fn new() -> LogBuilder<'a, log_state::Empty> {
LogBuilder::new()
}
}
impl<'a> LogBuilder<'a, log_state::Empty> {
    /// Creates an empty builder with no fields set.
    pub fn new() -> Self {
        Self {
            _state: PhantomData,
            _fields: (None, None),
            _lifetime: PhantomData,
        }
    }
}
impl<'a, S> LogBuilder<'a, S>
where
    S: log_state::State,
    S::CreatedAt: log_state::IsUnset,
{
    /// Sets the required `createdAt` field, advancing the type state so the
    /// setter cannot be called twice.
    pub fn created_at(
        self,
        value: impl Into<Datetime>,
    ) -> LogBuilder<'a, log_state::SetCreatedAt<S>> {
        let (_, visible_sparks) = self._fields;
        LogBuilder {
            _state: PhantomData,
            _fields: (Some(value.into()), visible_sparks),
            _lifetime: PhantomData,
        }
    }
}
impl<'a, S: log_state::State> LogBuilder<'a, S> {
    /// Sets the optional `visibleSparks` field; accepts anything convertible
    /// into an `Option<Vec<_>>`.
    pub fn visible_sparks(
        self,
        value: impl Into<Option<Vec<log::SparkRef<'a>>>>,
    ) -> Self {
        self.maybe_visible_sparks(value.into())
    }
    /// Sets or clears the optional `visibleSparks` field from an `Option`.
    pub fn maybe_visible_sparks(
        mut self,
        value: Option<Vec<log::SparkRef<'a>>>,
    ) -> Self {
        self._fields.1 = value;
        self
    }
}
impl<'a, S> LogBuilder<'a, S>
where
    S: log_state::State,
    S::CreatedAt: log_state::IsSet,
{
    /// Finishes the builder. Only callable once `created_at` has been set
    /// (enforced by the `IsSet` bound), so the inner `unwrap` cannot fail.
    pub fn build(self) -> Log<'a> {
        let (created_at, visible_sparks) = self._fields;
        Log {
            created_at: created_at.unwrap(),
            visible_sparks,
            extra_data: Default::default(),
        }
    }
    /// Like [`Self::build`], but attaches the given extra (unrecognized)
    /// data map instead of an empty default.
    pub fn build_with_data(
        self,
        extra_data: BTreeMap<
            jacquard_common::deps::smol_str::SmolStr,
            jacquard_common::types::value::Data<'a>,
        >,
    ) -> Log<'a> {
        let (created_at, visible_sparks) = self._fields;
        Log {
            created_at: created_at.unwrap(),
            visible_sparks,
            extra_data: Some(extra_data),
        }
    }
}
/// Builds the static lexicon document for `tech.tokimeki.takibi.log`.
///
/// Two defs: `main` (the record, keyed by TID, with `createdAt` required and
/// `visibleSparks` capped at 20 items) and `sparkRef` (an object with a
/// required `spark` strong-ref and `elapsed` integer bounded to 0..=10000).
/// The bounds here mirror the hand-checked constraints in the
/// `LexiconSchema::validate` impls above.
fn lexicon_doc_tech_tokimeki_takibi_log() -> LexiconDoc<'static> {
#[allow(unused_imports)]
use jacquard_common::{CowStr, deps::smol_str::SmolStr, types::blob::MimeType};
use jacquard_lexicon::lexicon::*;
use alloc::collections::BTreeMap;
LexiconDoc {
lexicon: Lexicon::Lexicon1,
id: CowStr::new_static("tech.tokimeki.takibi.log"),
defs: {
let mut map = BTreeMap::new();
// def "main": the record itself.
map.insert(
SmolStr::new_static("main"),
LexUserType::Record(LexRecord {
description: Some(
CowStr::new_static(
"A log record - adding wood to the fire. Implicitly records visible sparks as a form of 'silent appreciation'.",
),
),
key: Some(CowStr::new_static("tid")),
record: LexRecordRecord::Object(LexObject {
required: Some(vec![SmolStr::new_static("createdAt")]),
properties: {
#[allow(unused_mut)]
let mut map = BTreeMap::new();
// createdAt: datetime-formatted string.
map.insert(
SmolStr::new_static("createdAt"),
LexObjectProperty::String(LexString {
format: Some(LexStringFormat::Datetime),
..Default::default()
}),
);
// visibleSparks: array of local #sparkRef, at most 20.
map.insert(
SmolStr::new_static("visibleSparks"),
LexObjectProperty::Array(LexArray {
description: Some(
CowStr::new_static(
"Sparks visible at the moment of adding wood, with elapsed time for decay scoring",
),
),
items: LexArrayItem::Ref(LexRef {
r#ref: CowStr::new_static("#sparkRef"),
..Default::default()
}),
max_length: Some(20usize),
..Default::default()
}),
);
map
},
..Default::default()
}),
..Default::default()
}),
);
// def "sparkRef": helper object referenced by visibleSparks.
map.insert(
SmolStr::new_static("sparkRef"),
LexUserType::Object(LexObject {
description: Some(
CowStr::new_static(
"Reference to a visible spark with timing information",
),
),
required: Some(
vec![
SmolStr::new_static("spark"), SmolStr::new_static("elapsed")
],
),
properties: {
#[allow(unused_mut)]
let mut map = BTreeMap::new();
// elapsed: integer bounded to 0..=10000.
map.insert(
SmolStr::new_static("elapsed"),
LexObjectProperty::Integer(LexInteger {
minimum: Some(0i64),
maximum: Some(10000i64),
..Default::default()
}),
);
// spark: strong reference to the spark record.
map.insert(
SmolStr::new_static("spark"),
LexObjectProperty::Ref(LexRef {
r#ref: CowStr::new_static("com.atproto.repo.strongRef"),
..Default::default()
}),
);
map
},
..Default::default()
}),
);
map
},
..Default::default()
}
}
/// Type-state markers for [`SparkRefBuilder`]: tracks at compile time
/// whether the required `spark` and `elapsed` fields have been set.
pub mod spark_ref_state {
pub use crate::builder_types::{Set, Unset, IsSet, IsUnset};
#[allow(unused)]
use ::core::marker::PhantomData;
// Private seal so downstream crates cannot implement `State` themselves.
mod sealed {
pub trait Sealed {}
}
// One associated type per required field.
pub trait State: sealed::Sealed {
type Spark;
type Elapsed;
}
// Initial state: nothing set yet.
pub struct Empty(());
impl sealed::Sealed for Empty {}
impl State for Empty {
type Spark = Unset;
type Elapsed = Unset;
}
// Transition applied when `spark` is assigned; `elapsed` status carries over.
pub struct SetSpark<S: State = Empty>(PhantomData<fn() -> S>);
impl<S: State> sealed::Sealed for SetSpark<S> {}
impl<S: State> State for SetSpark<S> {
type Spark = Set<members::spark>;
type Elapsed = S::Elapsed;
}
// Transition applied when `elapsed` is assigned; `spark` status carries over.
pub struct SetElapsed<S: State = Empty>(PhantomData<fn() -> S>);
impl<S: State> sealed::Sealed for SetElapsed<S> {}
impl<S: State> State for SetElapsed<S> {
type Spark = S::Spark;
type Elapsed = Set<members::elapsed>;
}
// Marker tokens naming each settable member.
#[allow(non_camel_case_types)]
pub mod members {
pub struct spark(());
pub struct elapsed(());
}
}
/// Type-state builder for [`SparkRef`]; see [`spark_ref_state`] for the
/// compile-time tracking of which fields have been set.
pub struct SparkRefBuilder<'a, S: spark_ref_state::State> {
_state: PhantomData<fn() -> S>,
// Tuple slots: (elapsed, spark).
_fields: (Option<i64>, Option<StrongRef<'a>>),
_lifetime: PhantomData<&'a ()>,
}
impl<'a> SparkRef<'a> {
pub fn new() -> SparkRefBuilder<'a, spark_ref_state::Empty> {
SparkRefBuilder::new()
}
}
impl<'a> SparkRefBuilder<'a, spark_ref_state::Empty> {
    /// Creates an empty builder with no fields set.
    pub fn new() -> Self {
        Self {
            _state: PhantomData,
            _fields: (None, None),
            _lifetime: PhantomData,
        }
    }
}
impl<'a, S> SparkRefBuilder<'a, S>
where
    S: spark_ref_state::State,
    S::Elapsed: spark_ref_state::IsUnset,
{
    /// Sets the required `elapsed` field, advancing the type state so the
    /// setter cannot be called twice.
    pub fn elapsed(
        self,
        value: impl Into<i64>,
    ) -> SparkRefBuilder<'a, spark_ref_state::SetElapsed<S>> {
        let (_, spark) = self._fields;
        SparkRefBuilder {
            _state: PhantomData,
            _fields: (Some(value.into()), spark),
            _lifetime: PhantomData,
        }
    }
}
impl<'a, S> SparkRefBuilder<'a, S>
where
    S: spark_ref_state::State,
    S::Spark: spark_ref_state::IsUnset,
{
    /// Sets the required `spark` strong reference, advancing the type state
    /// so the setter cannot be called twice.
    pub fn spark(
        self,
        value: impl Into<StrongRef<'a>>,
    ) -> SparkRefBuilder<'a, spark_ref_state::SetSpark<S>> {
        let (elapsed, _) = self._fields;
        SparkRefBuilder {
            _state: PhantomData,
            _fields: (elapsed, Some(value.into())),
            _lifetime: PhantomData,
        }
    }
}
impl<'a, S> SparkRefBuilder<'a, S>
where
    S: spark_ref_state::State,
    S::Spark: spark_ref_state::IsSet,
    S::Elapsed: spark_ref_state::IsSet,
{
    /// Finishes the builder. Only callable once both fields have been set
    /// (enforced by the `IsSet` bounds), so the inner `unwrap`s cannot fail.
    pub fn build(self) -> SparkRef<'a> {
        let (elapsed, spark) = self._fields;
        SparkRef {
            elapsed: elapsed.unwrap(),
            spark: spark.unwrap(),
            extra_data: Default::default(),
        }
    }
    /// Like [`Self::build`], but attaches the given extra (unrecognized)
    /// data map instead of an empty default.
    pub fn build_with_data(
        self,
        extra_data: BTreeMap<
            jacquard_common::deps::smol_str::SmolStr,
            jacquard_common::types::value::Data<'a>,
        >,
    ) -> SparkRef<'a> {
        let (elapsed, spark) = self._fields;
        SparkRef {
            elapsed: elapsed.unwrap(),
            spark: spark.unwrap(),
            extra_data: Some(extra_data),
        }
    }
}