use variable::Variable;
use std::collections::HashMap;
use std::ops::Add;

/// Maps variable symbols (single characters) to the numeric values used during evaluation.
pub type VariableValues = HashMap<char, f64>;

/// Terms are the basic mathematical building blocks from which expressions and more complex entities are formed.
///
/// The `Term` data type (currently) represents basic polynomial components, which can be assigned a numeric value with `Term::evaluate` or `Term::reduce`.
#[derive(Clone)]
pub enum Term {
	/// Represents a term which is simply a variable, one of the two foundational term types.
	///
	/// The value of the variable is looked up against the given variable values when `Term::evaluate` is called.
	Variable(Variable),
	/// Represents a constant term, one of the two foundational term types.
	///
	/// The value of this term is fixed and is calculated by simply unpacking the associated value.
	Constant(f64),
	/// Represents a sum of multiple terms.
	///
	/// To calculate the value of this term, the components are evaluated and summed in ascending order of index.
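	///
	/// For example (a minimal sketch using `Term::reduce`, which is documented below):
	///
	/// ```
	/// use cassie::Term;
	/// // 1 + 2 + 3 = 6
	/// let s = Term::Sum(vec![Term::Constant(1.0), Term::Constant(2.0), Term::Constant(3.0)]);
	/// assert!((s.reduce().unwrap() - 6.0).abs() < 0.00001);
	/// ```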
	Sum(Vec<Term>),
	/// Represents a difference of terms.
	///
	/// The first term is used as-is; each subsequent term is evaluated and subtracted from the running result, in ascending order of index.
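	///
	/// For example (a minimal sketch using `Term::reduce`, which is documented below):
	///
	/// ```
	/// use cassie::Term;
	/// // 10 - 3 - 2 = 5
	/// let d = Term::Difference(vec![Term::Constant(10.0), Term::Constant(3.0), Term::Constant(2.0)]);
	/// assert!((d.reduce().unwrap() - 5.0).abs() < 0.00001);
	/// ```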
	Difference(Vec<Term>),
	/// Represents a product of terms.
	///
	/// All terms are multiplied together after evaluation, with evaluation proceeding in ascending index order.
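	///
	/// For example (a minimal sketch using `Term::reduce`, which is documented below):
	///
	/// ```
	/// use cassie::Term;
	/// // 2 * 3 * 4 = 24
	/// let p = Term::Product(vec![Term::Constant(2.0), Term::Constant(3.0), Term::Constant(4.0)]);
	/// assert!((p.reduce().unwrap() - 24.0).abs() < 0.00001);
	/// ```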
	Product(Vec<Term>),
	/// Represents a quotient of terms.
	///
	/// The first term is evaluated, then divided by each following term in order of ascending index (each term is used immediately after evaluation). Fairly aggressive sanity checks are performed to prevent division by zero; if this continues to pester you, consider multiplying by the inverse instead.
	///
	/// This variant should be considered unstable; it is only due to typing constraints that simplification is implemented for more than two subterms. **Consider using `Term::Product` instead, if possible.**
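	///
	/// For example (a minimal sketch using `Term::reduce`, which is documented below):
	///
	/// ```
	/// use cassie::Term;
	/// // 12 / 3 / 2 = 2
	/// let q = Term::Quotient(vec![Term::Constant(12.0), Term::Constant(3.0), Term::Constant(2.0)]);
	/// assert!((q.reduce().unwrap() - 2.0).abs() < 0.00001);
	/// ```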
	Quotient(Vec<Term>) // Look into limiting vector sizes to avoid confusion (due to bad input)
}

impl Term {
	/// Evaluates a term to its numerical value.
	///
	/// # Examples
	/// ```
	/// use cassie::{Term, Variable};
	/// use std::collections::HashMap;
	///
	/// let x: Variable = "x".parse().unwrap();
	/// let x = Term::Variable(x);
	/// let c = Term::Constant(100.0);
	/// let s = x + c;
	/// let mut values = HashMap::new();
	/// values.insert('x', 28.0);
	/// assert!((s.evaluate(&values).unwrap() - 128.0).abs() < 0.00001);
	/// ```
	pub fn evaluate(&self, values: &VariableValues) -> Result<f64, String> {
		self.eval(Some(values))
	}
	/// Evaluates a term to its numerical value, assuming only constants (no variables specified).
	///
	/// # Errors
	/// This method is functionally identical to calling `Term::evaluate` with an empty value table, so it shares that method's error conditions.
	/// Most significantly, **if a variable is present in `self`, this method will return an `Err`**, since the variable's value cannot be resolved.
	/// Like `Term::evaluate`, it panics if a `Difference` or `Quotient` contains no subterms, because the first subterm is indexed directly.
	/// 
	/// # Examples
	/// ```
	/// use cassie::Term;
	/// let c = Term::Constant(64.0);
	/// assert!((c.reduce().unwrap() - 64.0).abs() < 0.00001);
	///
	/// let b = Term::Constant(64.0);
	/// let a = Term::Constant(36.0);
	/// let c = &a + &b;
	/// assert!((a.reduce().unwrap() - 36.0).abs() < 0.00001);
	/// assert!((b.reduce().unwrap() - 64.0).abs() < 0.00001);
	/// assert!((c.reduce().unwrap() - 100.0).abs() < 0.00001);
	/// ```
	pub fn reduce(&self) -> Result<f64, String> {
		self.eval(None)
	}

	/// Shared implementation behind `evaluate` and `reduce`; `values` is `None` when no variable table was supplied.
	fn eval(&self, values: Option<&VariableValues>) -> Result<f64, String> {
		use Term::*;
		match *self {
			Constant(value) => Ok(value),
			Sum(ref terms) => {
				let mut sum = 0.0;
				for term in terms {
					match term.eval(values) {
						Ok(value) => {
							sum += value;
						}, Err(e) => {
							return Err(e);
						}
					};
				}
				Ok(sum) // dim sum for a twosome
			}, Difference(ref terms) => {
				let first = terms[0].eval(values);
				if first.is_err() { return first; }
				let mut difference = first.unwrap();
				for term in terms[1..].iter() {
					match term.eval(values) {
						Ok(value) => {
							difference -= value;
						}, Err(e) => {
							return Err(e);
						}
					};
				}
				Ok(difference)
			}, Product(ref terms) => {
				let mut product = 1.0;
				for term in terms {
					match term.eval(values) {
						Ok(value) => {
							product *= value;
						}, Err(e) => {
							return Err(e);
						}
					};
				}
				Ok(product)
			}, Quotient(ref terms) => {
				let first = terms[0].eval(values);
				if first.is_err() { return first; }
				let mut quotient = first.unwrap();
				// Only the terms after the first act as divisors; the first term is the dividend.
				for term in terms[1..].iter() {
					match term.eval(values) {
						Ok(divisor) => {
							if divisor.abs() < 0.00000000000000001 {
								return Err("Attempted division by zero.".to_string());
							}
							quotient /= divisor;
						}, Err(e) => {
							return Err(e);
						}
					};
				}
				Ok(quotient)
			}, Variable(ref variable) => {
				if let Some(v) = values {
					if let Some(value) = v.get(&variable.symbol) {
						Ok(*value)
					} else {
						Err(format!("No value provided for variable {}", variable.symbol))
					}
				} else {
					Err(format!("No variable values provided (looking for {})", variable.symbol))
				}
			}
		}
	}
}

impl<'a, 'b> Add<&'b Term> for &'a Term { // We clone things a lot just in case a mutable operation is later defined on Term; we don't want to be chasing those bugs!

	type Output = Term;

	fn add(self, another: &'b Term) -> Term {
		match *self {
			Term::Sum(ref terms) => {
				match *another {
					Term::Sum(ref more) => {
						let mut terms = terms.clone();
						for term in more {
							terms.push(term.clone());
						}
						Term::Sum(terms)
					}, _ => {
						let mut terms = terms.clone();
						terms.push(another.clone());
						Term::Sum(terms)
					}
				}
			}, _ => {
				match *another {
					Term::Sum(ref terms) => {
						let mut terms = terms.clone();
						terms.push(self.clone());
						Term::Sum(terms)
					}, _ => {
						Term::Sum(vec!(self.clone(), another.clone()))
					}
				}
			}
		}
	}
}

impl Add for Term { // Owned addition delegates to the by-reference implementation above.

	type Output = Term;

	fn add(self, another: Term) -> Term {
		&self + &another
	}
}
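
// A minimal sketch of unit tests for the behaviors documented above. The expected values
// are assumptions drawn from the doc comments rather than an authoritative test suite.
#[cfg(test)]
mod tests {
	use super::*;

	#[test]
	fn quotient_reports_division_by_zero() {
		// Division by (something extremely close to) zero should surface as an Err, not a panic.
		let q = Term::Quotient(vec![Term::Constant(1.0), Term::Constant(0.0)]);
		assert!(q.reduce().is_err());
	}

	#[test]
	fn variable_requires_a_value() {
		// With no value table supplied, evaluating a variable must fail.
		let x: Variable = "x".parse().unwrap();
		let term = Term::Variable(x);
		assert!(term.reduce().is_err());
	}

	#[test]
	fn adding_two_sums_merges_their_terms() {
		// `&Sum + &Sum` should flatten into a single Sum containing all subterms.
		let a = Term::Sum(vec![Term::Constant(1.0), Term::Constant(2.0)]);
		let b = Term::Sum(vec![Term::Constant(3.0)]);
		match &a + &b {
			Term::Sum(terms) => assert_eq!(terms.len(), 3),
			_ => panic!("expected a Term::Sum")
		}
	}
}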