use super::super::base::*;
use crate::symbols::*;
use anyhow::{anyhow, Result};
use narsese::lexical::Term as TermLexical;
fn get_identifier(term: &TermLexical) -> String {
match term {
TermLexical::Atom { prefix, .. } => prefix.clone(),
TermLexical::Compound { connecter, .. } => connecter.clone(),
TermLexical::Set {
left_bracket,
right_bracket,
..
} => left_bracket.to_string() + right_bracket,
TermLexical::Statement { copula, .. } => copula.clone(),
}
}
/// Folding context: remembers variable names in first-appearance order so
/// that each distinct name maps to a stable numeric id.
#[derive(Debug, Clone)]
struct FoldContext {
    // index + 1 == the numeric id assigned to the name stored at that index
    var_id_map: Vec<String>,
}
impl FoldContext {
    /// Creates an empty context with no variables registered yet.
    fn new() -> Self {
        Self {
            var_id_map: Vec::new(),
        }
    }
}
/// Folds a lexical term into an internal [`Term`] using a fresh
/// variable-renaming context.
///
/// # Errors
/// Propagates any folding error from [`fold_term`].
#[inline]
pub fn lexical_fold(term: TermLexical) -> Result<Term> {
    fold_term(term, &mut FoldContext::new())
}
/// Folds a lexical term into an internal [`Term`], renaming variables via `context`.
///
/// Dispatches on the identifier returned by [`get_identifier`]:
/// * atoms → word / placeholder / variable (variables are renumbered to
///   1-based ids in first-appearance order);
/// * sets & compounds → the matching `Term::make_*` constructor;
/// * statements → the matching relation, with the instance/property relations
///   expanded into inheritance over extensional/intensional sets
///   (e.g. instance: `<{subject} --> predicate>`).
///
/// # Errors
/// Fails when the identifier is unknown, when a constructor rejects its
/// arguments (e.g. empty component list, wrong arity), or when folding an
/// inner term fails.
fn fold_term(term: TermLexical, context: &mut FoldContext) -> Result<Term> {
    /// Resolves a named variable to its numeric id: reuses the id of a
    /// previously-seen name, otherwise registers the name and assigns the
    /// next 1-based id.
    #[inline]
    fn update_var(
        var_type: impl Into<String>,
        original_name: String,
        context: &mut FoldContext,
    ) -> Term {
        match context
            .var_id_map
            .iter()
            .position(|stored_name| &original_name == stored_name)
        {
            // Already registered: ids are 1-based, `position` is 0-based.
            Some(existed) => Term::from_var_similar(var_type, existed + 1),
            // First occurrence: the new length doubles as the new id.
            None => {
                context.var_id_map.push(original_name);
                Term::from_var_similar(var_type, context.var_id_map.len())
            }
        }
    }
    // Builds the "invalid term" error; test builds include the source location.
    macro_rules! make_error {
        () => {
            if cfg!(test) {
                anyhow::anyhow!("词项无效 @ {}:{}", file!(), line!())
            } else {
                anyhow::anyhow!("词项无效")
            }
        };
    }
    let identifier = get_identifier(&term);
    use TermLexical::*;
    // `.ok_or_else(|| make_error!())` rather than `.ok_or(make_error!())`:
    // the eager form constructs (and allocates) the error even when the
    // constructor succeeds; the closure defers it to the failure path.
    let term = match (identifier.as_str(), term) {
        (WORD, Atom { name, .. }) => Term::make_word(name),
        (PLACEHOLDER, Atom { .. }) => Term::make_placeholder(),
        (VAR_INDEPENDENT, Atom { name, .. }) => update_var(VAR_INDEPENDENT, name, context),
        (VAR_DEPENDENT, Atom { name, .. }) => update_var(VAR_DEPENDENT, name, context),
        (VAR_QUERY, Atom { name, .. }) => update_var(VAR_QUERY, name, context),
        (SET_EXT_OPERATOR, Set { terms, .. }) => {
            Term::make_set_ext_arg(fold_inner_lexical_vec(terms, context)?)
                .ok_or_else(|| make_error!())?
        }
        (SET_INT_OPERATOR, Set { terms, .. }) => {
            Term::make_set_int_arg(fold_inner_lexical_vec(terms, context)?)
                .ok_or_else(|| make_error!())?
        }
        (INTERSECTION_EXT_OPERATOR, Compound { terms, .. }) => {
            Term::make_intersection_ext_arg(fold_inner_lexical_vec(terms, context)?)
                .ok_or_else(|| make_error!())?
        }
        (INTERSECTION_INT_OPERATOR, Compound { terms, .. }) => {
            Term::make_intersection_int_arg(fold_inner_lexical_vec(terms, context)?)
                .ok_or_else(|| make_error!())?
        }
        // Differences are strictly binary; other arities fall through to the
        // catch-all error arm at the bottom.
        (DIFFERENCE_EXT_OPERATOR, Compound { terms, .. }) if terms.len() == 2 => {
            let mut iter = terms.into_iter();
            let term1 = fold_inner_lexical(iter.next().unwrap(), context)?;
            let term2 = fold_inner_lexical(iter.next().unwrap(), context)?;
            Term::make_difference_ext(term1, term2).ok_or_else(|| make_error!())?
        }
        (DIFFERENCE_INT_OPERATOR, Compound { terms, .. }) if terms.len() == 2 => {
            let mut iter = terms.into_iter();
            let term1 = fold_inner_lexical(iter.next().unwrap(), context)?;
            let term2 = fold_inner_lexical(iter.next().unwrap(), context)?;
            Term::make_difference_int(term1, term2).ok_or_else(|| make_error!())?
        }
        (PRODUCT_OPERATOR, Compound { terms, .. }) => {
            Term::make_product_arg(fold_inner_lexical_vec(terms, context)?)
                .ok_or_else(|| make_error!())?
        }
        // Images: a placeholder index of 0 degrades the image into a product
        // (see `fold_lexical_terms_as_image`).
        (IMAGE_EXT_OPERATOR, Compound { terms, .. }) => {
            let (i, terms) = fold_lexical_terms_as_image(terms, context)?;
            match i {
                0 => Term::make_product_arg(terms).ok_or_else(|| make_error!())?,
                _ => Term::make_image_ext_vec(terms).ok_or_else(|| make_error!())?,
            }
        }
        (IMAGE_INT_OPERATOR, Compound { terms, .. }) => {
            let (i, terms) = fold_lexical_terms_as_image(terms, context)?;
            match i {
                0 => Term::make_product_arg(terms).ok_or_else(|| make_error!())?,
                _ => Term::make_image_int_vec(terms).ok_or_else(|| make_error!())?,
            }
        }
        (CONJUNCTION_OPERATOR, Compound { terms, .. }) => {
            Term::make_conjunction_arg(fold_inner_lexical_vec(terms, context)?)
                .ok_or_else(|| make_error!())?
        }
        (DISJUNCTION_OPERATOR, Compound { terms, .. }) => {
            Term::make_disjunction_arg(fold_inner_lexical_vec(terms, context)?)
                .ok_or_else(|| make_error!())?
        }
        // Negation is strictly unary.
        (NEGATION_OPERATOR, Compound { terms, .. }) if terms.len() == 1 => {
            let inner = fold_inner_lexical(terms.into_iter().next().unwrap(), context)?;
            Term::make_negation(inner).ok_or_else(|| make_error!())?
        }
        (
            INHERITANCE_RELATION,
            Statement {
                subject, predicate, ..
            },
        ) => Term::make_inheritance(
            fold_inner_lexical(*subject, context)?,
            fold_inner_lexical(*predicate, context)?,
        )
        .ok_or_else(|| make_error!())?,
        (
            SIMILARITY_RELATION,
            Statement {
                subject, predicate, ..
            },
        ) => Term::make_similarity(
            fold_inner_lexical(*subject, context)?,
            fold_inner_lexical(*predicate, context)?,
        )
        .ok_or_else(|| make_error!())?,
        (
            IMPLICATION_RELATION,
            Statement {
                subject, predicate, ..
            },
        ) => Term::make_implication(
            fold_inner_lexical(*subject, context)?,
            fold_inner_lexical(*predicate, context)?,
        )
        .ok_or_else(|| make_error!())?,
        (
            EQUIVALENCE_RELATION,
            Statement {
                subject, predicate, ..
            },
        ) => Term::make_equivalence(
            fold_inner_lexical(*subject, context)?,
            fold_inner_lexical(*predicate, context)?,
        )
        .ok_or_else(|| make_error!())?,
        // Instance relation expands to `<{subject} --> predicate>`.
        (
            INSTANCE_RELATION,
            Statement {
                subject, predicate, ..
            },
        ) => Term::make_inheritance(
            Term::make_set_ext_arg(vec![fold_inner_lexical(*subject, context)?])
                .ok_or_else(|| make_error!())?,
            fold_inner_lexical(*predicate, context)?,
        )
        .ok_or_else(|| make_error!())?,
        // Property relation expands to `<subject --> [predicate]>`.
        (
            PROPERTY_RELATION,
            Statement {
                subject, predicate, ..
            },
        ) => Term::make_inheritance(
            fold_inner_lexical(*subject, context)?,
            Term::make_set_int_arg(vec![fold_inner_lexical(*predicate, context)?])
                .ok_or_else(|| make_error!())?,
        )
        .ok_or_else(|| make_error!())?,
        // Instance-property relation expands to `<{subject} --> [predicate]>`.
        (
            INSTANCE_PROPERTY_RELATION,
            Statement {
                subject, predicate, ..
            },
        ) => Term::make_inheritance(
            Term::make_set_ext_arg(vec![fold_inner_lexical(*subject, context)?])
                .ok_or_else(|| make_error!())?,
            Term::make_set_int_arg(vec![fold_inner_lexical(*predicate, context)?])
                .ok_or_else(|| make_error!())?,
        )
        .ok_or_else(|| make_error!())?,
        // Any other identifier/shape combination is illegal.
        (identifier, this) => return Err(anyhow!("标识符为「{identifier}」的非法词项:{this:?}")),
    };
    Ok(term)
}
/// Folds a term that appears *inside* another term, rejecting placeholders:
/// a placeholder is only legal as a direct component of an ext/int image.
#[inline]
fn fold_inner_lexical(term: TermLexical, context: &mut FoldContext) -> Result<Term> {
    let folded = fold_term(term, context)?;
    match folded.is_placeholder() {
        true => Err(anyhow!("词法折叠错误:占位符仅能直属于 外延像/内涵像 词项")),
        false => Ok(folded),
    }
}
/// Folds every element of `terms` as an inner term (placeholders rejected)
/// and validates the resulting list is non-empty.
#[inline]
fn fold_inner_lexical_vec(terms: Vec<TermLexical>, context: &mut FoldContext) -> Result<Vec<Term>> {
    let folded = terms
        .into_iter()
        .map(|term| fold_inner_lexical(term, context))
        .collect::<Result<Vec<_>>>()?;
    check_folded_terms(folded)
}
/// Rejects an empty folded-term list: NAL does not allow empty compounds.
#[inline]
fn check_folded_terms(v: Vec<Term>) -> Result<Vec<Term>> {
    if v.is_empty() {
        Err(anyhow!("词法折叠错误:NAL不允许构造空集"))
    } else {
        Ok(v)
    }
}
/// Folds a compound's components for an image (ext/int) construction.
///
/// Returns `(placeholder_index, folded_terms)`, where the index is the
/// position of the placeholder in the ORIGINAL component list:
/// * a placeholder at position 0 is dropped from the output; the callers in
///   `fold_term` interpret index 0 as "build a product instead of an image";
/// * a placeholder at any later position is kept in the output, since image
///   term vectors carry the placeholder.
///
/// NOTE(review): if no placeholder occurs at all, the index stays 0 and the
/// callers also fall back to a product — presumably intentional; confirm.
#[inline]
fn fold_lexical_terms_as_image(
    terms: Vec<TermLexical>,
    context: &mut FoldContext,
) -> Result<(usize, Vec<Term>)> {
    let mut v = vec![];
    let mut placeholder_index = 0;
    for (i, term) in terms.into_iter().enumerate() {
        // Folded with `fold_term` (not `fold_inner_lexical`) so that
        // placeholders are legal at this level.
        let term: Term = fold_term(term, context)?;
        match term.is_placeholder() {
            true => {
                placeholder_index = i;
                // Leading placeholder (i == 0) is dropped; any other is kept.
                if i > 0 {
                    v.push(term);
                }
            }
            false => v.push(term),
        }
    }
    // Reject empty results (NAL forbids empty compounds).
    Ok((placeholder_index, check_folded_terms(v)?))
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{ok, util::AResult};
    use nar_dev_utils::macro_once;
    use narsese::{
        conversion::{
            inter_type::lexical_fold::TryFoldInto,
            string::impl_lexical::format_instances::FORMAT_ASCII,
        },
        lexical::Term as LexicalTerm,
        lexical_nse_term as l_term,
    };
    /// Valid terms: the three folding entry points (`try_from`,
    /// `try_fold_into`, `from_lexical`) must all succeed and agree.
    #[test]
    fn test_lexical_fold() -> AResult {
        // Folds `t` three different ways and asserts the results are identical.
        fn test(t: LexicalTerm) -> Result<Term> {
            print!("{:?} => ", FORMAT_ASCII.format(&t));
            let term_1 = Term::try_from(t.clone())?;
            let term_2 = t.clone().try_fold_into(&())?;
            let term_3 = Term::from_lexical(t)?;
            assert_eq!(term_1, term_2);
            assert_eq!(term_1, term_3);
            assert_eq!(term_2, term_3);
            let term = term_1;
            println!("{:?}", term.format_name());
            Ok(term)
        }
        macro_once! {
            // Each literal below is parsed via `l_term!` and folded; any
            // error aborts the test.
            macro test($($term:literal)*) {
                $(
                    test(l_term!($term))?;
                )*
            }
            "<A --> B>"
            "(&&, C, B, A, (/, A, _, B))"
            "[2, 1, 0, $0, #1, ?2]"
            "<A <-> {B}>" "<{A} <=> B>" "<{SELF} ==> (--, [good])>"
        }
        ok!()
    }
    /// Invalid terms: folding must fail for every literal listed.
    #[test]
    fn test_lexical_fold_err() -> AResult {
        // Expects `Term::try_from` to fail for `t`; prints the error.
        fn test(t: LexicalTerm) -> AResult {
            let t_s = FORMAT_ASCII.format(&t);
            let e = Term::try_from(t.clone()).expect_err(&format!("非法词项{t_s:?}异常通过解析"));
            println!("{t_s:?} => {e}");
            ok!()
        }
        macro_once! {
            macro test($($term:literal)*) {
                $(
                    test(l_term!($term))?;
                )*
            }
            // Unsupported identifiers and wrong arities.
            "^operator" "<(*, {SELF}, x, y) --> ^left>" "<X =/> Y>" "<X =|> Y>" "<X </> Y>" "+123" "(&/, 1, 2, 3)" "(&|, 3, 2, 1)" "(-, A, B, C)"
            "(-, A)"
            "(--, A, B)"
            // Placeholders outside ext/int image terms.
            "{_}"
            "{A, B, _}"
            "[_]"
            "[A, B, _]"
            "<A --> _>"
            "<A <-> _>"
            "<A ==> _>"
            "<A <=> _>"
            "<_ --> _>"
            "<_ <-> _>"
            "<_ ==> _>"
            "<_ <=> _>"
            "(&, _, A, B)"
            "(-, _, B)"
            "(-, A, _)"
            "(--, _)"
            "(&&, (*, [A, B, _]), A, B)"
        }
        ok!()
    }
    /// Variable renaming: distinct names get 1-based numeric ids in
    /// first-appearance order, consistently across both folding paths.
    #[test]
    fn test_var_map() -> AResult {
        let lexical = l_term!(<(&&,<(*,{$1},{$2},$d)-->方向>,<(*,{$1},$c)-->格点状态>,<(*,{$2},无缺陷)-->格点状态>)==><(*,$d,$c,{$1},{$2})-->[同色连空]>>);
        println!("{}", FORMAT_ASCII.format(&lexical));
        let term1 = Term::from_lexical(lexical.clone())?;
        let term1_s = term1.format_ascii();
        println!("{term1_s}");
        let mut context = FoldContext::new();
        let term2 = fold_term(lexical.clone(), &mut context)?;
        let term2_s = term2.format_ascii();
        println!("{term2_s}");
        // Both folding paths must render to the same string.
        assert_eq!(term1_s, term2_s);
        println!("{:?}", context);
        for (var_i, original_name) in context.var_id_map.iter().enumerate() {
            println!("{original_name} => {}", var_i + 1);
        }
        // Expected first-appearance order: $1 → 1, $2 → 2, $d → 3, $c → 4.
        let expected = [("1", 1), ("2", 2), ("d", 3), ("c", 4)];
        for (original_name, var_i) in expected.iter() {
            assert_eq!(context.var_id_map[*var_i - 1], *original_name);
        }
        ok!()
    }
}