#[allow(unused_imports)]
use alloc::collections::BTreeMap;
#[allow(unused_imports)]
use core::marker::PhantomData;
use jacquard_common::CowStr;
#[allow(unused_imports)]
use jacquard_common::deps::codegen::unicode_segmentation::UnicodeSegmentation;
use jacquard_common::types::collection::{Collection, RecordError};
use jacquard_common::types::string::{AtUri, Cid, Datetime};
use jacquard_common::types::uri::{RecordUri, UriError};
use jacquard_common::xrpc::XrpcResp;
use jacquard_derive::{IntoStatic, lexicon};
use jacquard_lexicon::lexicon::LexiconDoc;
use jacquard_lexicon::schema::LexiconSchema;
#[allow(unused_imports)]
use jacquard_lexicon::validation::{ConstraintError, ValidationPath};
use serde::{Serialize, Deserialize};
/// Record type `tech.tokimeki.takibi.spark`: a short message ("spark") with a
/// creation timestamp. Serialized with a `$type` tag and camelCase field names.
///
/// NOTE(review): the `#[lexicon]` attribute appears to inject an `extra_data`
/// field (the builder's `build()` below initializes one) — confirm against the
/// macro expansion before relying on the field list shown here.
#[lexicon]
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, IntoStatic)]
#[serde(rename_all = "camelCase", rename = "tech.tokimeki.takibi.spark", tag = "$type")]
pub struct Spark<'a> {
// Serialized as `createdAt` (camelCase rename above).
pub created_at: Datetime,
// Record text; validated to <=100 bytes / <=30 graphemes in `validate()`.
#[serde(borrow)]
pub text: CowStr<'a>,
}
/// Response shape for fetching a single spark record (used as
/// `SparkRecord::Output` below): the record's URI, optional CID, and the
/// deserialized record value.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, IntoStatic)]
#[serde(rename_all = "camelCase")]
pub struct SparkGetRecordOutput<'a> {
// Omitted from serialization when absent.
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(borrow)]
pub cid: Option<Cid<'a>>,
#[serde(borrow)]
pub uri: AtUri<'a>,
#[serde(borrow)]
pub value: Spark<'a>,
}
impl<'a> Spark<'a> {
    /// Parse `uri` into a typed record URI for this collection.
    ///
    /// Fails with a `UriError` if the string is not a valid AT-URI or does
    /// not address a `SparkRecord`.
    pub fn uri(
        uri: impl Into<CowStr<'a>>,
    ) -> Result<RecordUri<'a, SparkRecord>, UriError> {
        let at_uri = AtUri::new_cow(uri.into())?;
        RecordUri::try_from_uri(at_uri)
    }
}
/// Zero-sized marker type serving as the XRPC response / collection handle
/// for the `tech.tokimeki.takibi.spark` record (see the `XrpcResp` and
/// `Collection` impls below).
#[derive(Debug, Serialize, Deserialize)]
pub struct SparkRecord;
impl XrpcResp for SparkRecord {
// NSID of the record collection this response belongs to.
const NSID: &'static str = "tech.tokimeki.takibi.spark";
const ENCODING: &'static str = "application/json";
// Successful responses deserialize into the getRecord output shape above.
type Output<'de> = SparkGetRecordOutput<'de>;
type Err<'de> = RecordError<'de>;
}
impl From<SparkGetRecordOutput<'_>> for Spark<'_> {
fn from(output: SparkGetRecordOutput<'_>) -> Self {
use jacquard_common::IntoStatic;
output.value.into_static()
}
}
impl Collection for Spark<'_> {
const NSID: &'static str = "tech.tokimeki.takibi.spark";
// Fetching a record from this collection yields the marker's response types.
type Record = SparkRecord;
}
// The marker type is its own collection handle, mirroring the impl for
// `Spark` so either type can be used where a `Collection` is expected.
impl Collection for SparkRecord {
const NSID: &'static str = "tech.tokimeki.takibi.spark";
type Record = SparkRecord;
}
impl<'a> LexiconSchema for Spark<'a> {
    /// NSID of the lexicon this record belongs to.
    fn nsid() -> &'static str {
        "tech.tokimeki.takibi.spark"
    }

    /// Name of this record's definition within the lexicon document.
    fn def_name() -> &'static str {
        "main"
    }

    /// The full lexicon document describing this record type.
    fn lexicon_doc() -> LexiconDoc<'static> {
        lexicon_doc_tech_tokimeki_takibi_spark()
    }

    /// Check the record against the schema's `text` constraints:
    /// at most 100 UTF-8 bytes and at most 30 grapheme clusters.
    fn validate(&self) -> Result<(), ConstraintError> {
        let text: &str = self.text.as_ref();

        // Byte-length limit first (the schema's `maxLength: 100`), matching
        // the original check order.
        let byte_len = text.len();
        if byte_len > 100 {
            return Err(ConstraintError::MaxLength {
                path: ValidationPath::from_field("text"),
                max: 100,
                actual: byte_len,
            });
        }

        // Grapheme-cluster limit (`maxGraphemes: 30`).
        let grapheme_count = UnicodeSegmentation::graphemes(text, true).count();
        if grapheme_count > 30 {
            return Err(ConstraintError::MaxGraphemes {
                path: ValidationPath::from_field("text"),
                max: 30,
                actual: grapheme_count,
            });
        }

        Ok(())
    }
}
/// Typestate machinery for `SparkBuilder`: each `State` tracks, at the type
/// level, whether `text` and `created_at` have been set, so `build()` is only
/// callable once both are present.
pub mod spark_state {
pub use crate::builder_types::{Set, Unset, IsSet, IsUnset};
#[allow(unused)]
use ::core::marker::PhantomData;
// Sealing prevents downstream crates from adding their own `State` impls.
mod sealed {
pub trait Sealed {}
}
pub trait State: sealed::Sealed {
type Text;
type CreatedAt;
}
// Initial state: nothing set yet.
pub struct Empty(());
impl sealed::Sealed for Empty {}
impl State for Empty {
type Text = Unset;
type CreatedAt = Unset;
}
// State transition: marks `text` as set, leaving `created_at` unchanged.
pub struct SetText<S: State = Empty>(PhantomData<fn() -> S>);
impl<S: State> sealed::Sealed for SetText<S> {}
impl<S: State> State for SetText<S> {
type Text = Set<members::text>;
type CreatedAt = S::CreatedAt;
}
// State transition: marks `created_at` as set, leaving `text` unchanged.
pub struct SetCreatedAt<S: State = Empty>(PhantomData<fn() -> S>);
impl<S: State> sealed::Sealed for SetCreatedAt<S> {}
impl<S: State> State for SetCreatedAt<S> {
type Text = S::Text;
type CreatedAt = Set<members::created_at>;
}
// Field marker types used as `Set<_>` parameters above.
#[allow(non_camel_case_types)]
pub mod members {
pub struct text(());
pub struct created_at(());
}
}
/// Typestate builder for `Spark`. Field storage is a tuple of
/// `(created_at, text)` options; the `S: State` parameter records which have
/// been filled in (both `PhantomData` members are zero-sized).
pub struct SparkBuilder<'a, S: spark_state::State> {
_state: PhantomData<fn() -> S>,
_fields: (Option<Datetime>, Option<CowStr<'a>>),
_lifetime: PhantomData<&'a ()>,
}
impl<'a> Spark<'a> {
/// Start building a `Spark`; `text` and `created_at` must be set before
/// `build()` becomes available.
pub fn new() -> SparkBuilder<'a, spark_state::Empty> {
SparkBuilder::new()
}
}
impl<'a> SparkBuilder<'a, spark_state::Empty> {
    /// Create a builder in the empty state: no fields set, so only the
    /// `text`/`created_at` setters are available.
    pub fn new() -> Self {
        SparkBuilder {
            _state: PhantomData,
            _fields: (None, None),
            _lifetime: PhantomData,
        }
    }
}
// Clippy `new_without_default`: a public argument-less `new()` should also be
// reachable via `Default` so generic code can construct the builder.
impl<'a> Default for SparkBuilder<'a, spark_state::Empty> {
    fn default() -> Self {
        Self::new()
    }
}
impl<'a, S> SparkBuilder<'a, S>
where
    S: spark_state::State,
    S::CreatedAt: spark_state::IsUnset,
{
    /// Set the `createdAt` field. Advances the typestate to `SetCreatedAt<S>`
    /// so the field cannot be set a second time.
    pub fn created_at(
        self,
        value: impl Into<Datetime>,
    ) -> SparkBuilder<'a, spark_state::SetCreatedAt<S>> {
        let (_, text) = self._fields;
        SparkBuilder {
            _state: PhantomData,
            _fields: (Some(value.into()), text),
            _lifetime: PhantomData,
        }
    }
}
impl<'a, S> SparkBuilder<'a, S>
where
    S: spark_state::State,
    S::Text: spark_state::IsUnset,
{
    /// Set the `text` field. Advances the typestate to `SetText<S>` so the
    /// field cannot be set a second time.
    pub fn text(
        self,
        value: impl Into<CowStr<'a>>,
    ) -> SparkBuilder<'a, spark_state::SetText<S>> {
        let (created_at, _) = self._fields;
        SparkBuilder {
            _state: PhantomData,
            _fields: (created_at, Some(value.into())),
            _lifetime: PhantomData,
        }
    }
}
impl<'a, S> SparkBuilder<'a, S>
where
    S: spark_state::State,
    S::Text: spark_state::IsSet,
    S::CreatedAt: spark_state::IsSet,
{
    /// Finish the builder. Only callable once both required fields are set
    /// (enforced by the `IsSet` bounds), so the unwraps below cannot fail.
    pub fn build(self) -> Spark<'a> {
        let (created_at, text) = self._fields;
        Spark {
            created_at: created_at.unwrap(),
            text: text.unwrap(),
            // NOTE(review): `extra_data` is not in the visible `Spark`
            // definition — presumably injected by `#[lexicon]`; confirm.
            extra_data: Default::default(),
        }
    }

    /// Like `build`, but attaches a map of unrecognized fields to carry
    /// alongside the schema-defined ones.
    pub fn build_with_data(
        self,
        extra_data: BTreeMap<
            jacquard_common::deps::smol_str::SmolStr,
            jacquard_common::types::value::Data<'a>,
        >,
    ) -> Spark<'a> {
        let (created_at, text) = self._fields;
        Spark {
            created_at: created_at.unwrap(),
            text: text.unwrap(),
            extra_data: Some(extra_data),
        }
    }
}
/// Build the static lexicon document for `tech.tokimeki.takibi.spark`:
/// a single `main` record definition keyed by TID, whose `text` property is
/// constrained to 100 bytes / 30 graphemes (matching `Spark::validate`).
fn lexicon_doc_tech_tokimeki_takibi_spark() -> LexiconDoc<'static> {
#[allow(unused_imports)]
use jacquard_common::{CowStr, deps::smol_str::SmolStr, types::blob::MimeType};
use jacquard_lexicon::lexicon::*;
use alloc::collections::BTreeMap;
LexiconDoc {
lexicon: Lexicon::Lexicon1,
id: CowStr::new_static("tech.tokimeki.takibi.spark"),
defs: {
let mut map = BTreeMap::new();
map.insert(
SmolStr::new_static("main"),
LexUserType::Record(LexRecord {
description: Some(
CowStr::new_static(
"A spark record - throwing a short message into the fire. Something you like or words that cheer you up.",
),
),
// Records are keyed by TID (timestamp identifier).
key: Some(CowStr::new_static("tid")),
record: LexRecordRecord::Object(LexObject {
// Both properties are required.
required: Some(
vec![
SmolStr::new_static("text"),
SmolStr::new_static("createdAt")
],
),
properties: {
#[allow(unused_mut)]
let mut map = BTreeMap::new();
map.insert(
SmolStr::new_static("createdAt"),
LexObjectProperty::String(LexString {
format: Some(LexStringFormat::Datetime),
..Default::default()
}),
);
map.insert(
SmolStr::new_static("text"),
LexObjectProperty::String(LexString {
description: Some(
CowStr::new_static("Short text (up to 30 graphemes)"),
),
max_length: Some(100usize),
max_graphemes: Some(30usize),
..Default::default()
}),
);
map
},
..Default::default()
}),
..Default::default()
}),
);
map
},
..Default::default()
}
}