use std::collections::HashSet;
use std::convert::TryFrom;
use std::fs::read_to_string;
use std::path::PathBuf;

use crate::error::MissingFileError;
use crate::models::{Account, Comment, Currency, HasName, Payee, Transaction};
use crate::parser::utils::count_decimals;
use crate::{models, CommonOpts, List};
use pest::Parser;

mod include;
pub mod tokenizers;
pub(crate) mod utils;
pub mod value_expr;

use tokenizers::transaction;

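/// Pest parser generated from the grammar at `grammar/grammar.pest`.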
#[derive(Parser)]
#[grammar = "grammar/grammar.pest"]
pub struct GrammarParser;

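/// Everything gathered from the input: the lists of accounts, payees and
/// commodities, the raw transactions, prices, comments and tags, and the
/// files that were read.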
#[derive(Debug, Clone)]
pub struct ParsedLedger {
    pub accounts: List<Account>,
    pub payees: List<Payee>,
    pub commodities: List<Currency>,
    pub transactions: Vec<Transaction<transaction::RawPosting>>,
    pub prices: Vec<models::ParsedPrice>,
    pub comments: Vec<Comment>,
    pub tags: Vec<models::Tag>,
    pub files: Vec<PathBuf>,
}

impl Default for ParsedLedger {
    fn default() -> Self {
        ParsedLedger::new()
    }
}

impl ParsedLedger {
    pub fn new() -> Self {
        ParsedLedger {
            accounts: List::<Account>::new(),
            payees: List::<models::Payee>::new(),
            commodities: List::<Currency>::new(),
            transactions: vec![],
            prices: vec![],
            comments: vec![],
            tags: vec![],
            files: vec![],
        }
    }
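
    /// Merges the contents of `other` into `self`. The `Vec` fields of
    /// `other` are drained in the process.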
    pub fn append(&mut self, other: &mut ParsedLedger) {
        self.accounts.append(&other.accounts);
        self.payees.append(&other.payees);
        self.commodities.append(&other.commodities);
        self.transactions.append(&mut other.transactions);
        self.comments.append(&mut other.comments);
        self.tags.append(&mut other.tags);
        self.prices.append(&mut other.prices);
        self.files.append(&mut other.files);
    }

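    /// Total number of items of every kind held in the ledger.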
    pub fn len(&self) -> usize {
        self.accounts.len()
            + self.payees.len()
            + self.commodities.len()
            + self.transactions.len()
            + self.prices.len()
            + self.comments.len()
            + self.tags.len()
    }

    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
}

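/// Tokenizes ledger content, coming either from a file or from a raw
/// string, into a [`ParsedLedger`]. `seen_files` records the files already
/// visited so that circular `include` chains can be detected.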
#[derive(Debug, Clone)]
pub struct Tokenizer<'a> {
    file: Option<&'a PathBuf>,
    content: String,
    seen_files: HashSet<&'a PathBuf>,
}

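/// Builds a [`Tokenizer`] from a journal file, eagerly reading its content
/// and marking the file itself as seen.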
impl<'a> TryFrom<&'a PathBuf> for Tokenizer<'a> {
    type Error = Box<dyn std::error::Error>;
    fn try_from(file: &'a PathBuf) -> Result<Self, Self::Error> {
        match read_to_string(file) {
            Ok(content) => {
                let mut seen_files: HashSet<&PathBuf> = HashSet::new();
                seen_files.insert(file);
                Ok(Tokenizer {
                    file: Some(file),
                    content,
                    seen_files,
                })
            }
            Err(err) => match err.kind() {
                std::io::ErrorKind::NotFound => Err(Box::new(
                    MissingFileError::JournalFileDoesNotExistError(file.to_path_buf()),
                )),
                _ => Err(Box::new(err)),
            },
        }
    }
}

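/// Builds a [`Tokenizer`] from an in-memory string with no backing file.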
impl<'a> From<String> for Tokenizer<'a> {
    fn from(content: String) -> Self {
        Tokenizer {
            file: None,
            content,
            seen_files: HashSet::new(),
        }
    }
}

impl<'a> Tokenizer<'a> {
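    /// Parses the loaded content into a [`ParsedLedger`], starting with an
    /// empty set of commodities.
    ///
    /// A minimal sketch of the call, mirroring the tests at the bottom of
    /// this file (the options are built from a fake argument list):
    ///
    /// ```ignore
    /// let mut tokenizer = Tokenizer::from(String::new());
    /// let ledger = tokenizer.tokenize(&CommonOpts::from_iter(["", "-f", ""].iter()));
    /// assert!(ledger.is_empty());
    /// ```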
    pub fn tokenize(&'a mut self, options: &CommonOpts) -> ParsedLedger {
        self.tokenize_with_currencies(options, None)
    }

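    /// Like [`Tokenizer::tokenize`], but seeds the commodity list with
    /// `defined_currencies`, as the `include` handler does when passing on
    /// the commodities already gathered by the including file.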
    pub fn tokenize_with_currencies(
        &'a mut self,
        options: &CommonOpts,
        defined_currencies: Option<&List<Currency>>,
    ) -> ParsedLedger {
        let mut ledger: ParsedLedger = ParsedLedger::new();
        if let Some(x) = defined_currencies {
            ledger.commodities.append(x);
        }
        if let Some(file) = self.file {
            ledger.files.push(file.clone());
        }
        match GrammarParser::parse(Rule::journal, self.content.as_str()) {
            Ok(mut parsed) => {
                let elements = parsed.next().unwrap().into_inner();
                for element in elements {
                    match element.as_rule() {
                        Rule::directive => {
                            let inner = element.into_inner().next().unwrap();
                            match inner.as_rule() {
                                Rule::include => {
                                    let mut new_ledger =
                                        self.include(inner, options, &ledger.commodities).unwrap();
                                    ledger.append(&mut new_ledger);
                                }
                                Rule::price => {
                                    ledger.prices.push(self.parse_price(inner));
                                }
                                Rule::tag_dir => {
                                    ledger.tags.push(self.parse_tag(inner));
                                }
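                                // The commodity may already exist (for
                                // example, auto-created from a posting); if
                                // so, carry over the precision inferred so
                                // far before replacing it.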
                                Rule::commodity => {
                                    let commodity = self.parse_commodity(inner);
                                    if let Ok(old_commodity) =
                                        ledger.commodities.get(commodity.get_name())
                                    {
                                        commodity.update_precision(old_commodity.get_precision());
                                    }
                                    ledger.commodities.remove(&commodity);
                                    ledger.commodities.insert(commodity);
                                }
                                Rule::account_dir => {
                                    ledger.accounts.insert(self.parse_account(inner));
                                }
                                Rule::payee_dir => {
                                    ledger.payees.insert(self.parse_payee(inner));
                                }
                                _ => {}
                            }
                        }
                        Rule::transaction | Rule::automated_transaction => {
                            let transaction = self.parse_transaction(element);
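                            // Every currency used by a posting must be
                            // declared: panic if `options.pedantic` is set,
                            // warn if `options.strict` is set; undeclared
                            // commodities are then created on the fly with a
                            // precision taken from their format.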
                            for posting in transaction.postings.borrow().iter() {
                                let currencies = &[
                                    (&posting.money_currency, &posting.money_format),
                                    (&posting.cost_currency, &posting.cost_format),
                                    (&posting.balance_currency, &posting.balance_format),
                                ];
                                for (currency, format) in currencies {
                                    if let Some(c) = currency {
                                        match ledger.commodities.get(c) {
                                            Err(_) => {
                                                if options.pedantic {
                                                    panic!("Error: commodity {} not declared.", c);
                                                }
                                                if options.strict {
                                                    eprintln!(
                                                        "Warning: commodity {} not declared.",
                                                        c
                                                    );
                                                }

                                                let commodity = Currency::from(c.as_str());
                                                if let Some(format_string) = format {
                                                    commodity.update_precision(count_decimals(
                                                        format_string.as_str(),
                                                    ));
                                                }
                                                ledger.commodities.insert(commodity);
                                            }
                                            Ok(c) => {
                                                if let Some(format_string) = format {
                                                    c.update_precision(count_decimals(
                                                        format_string.as_str(),
                                                    ));
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                            ledger.transactions.push(transaction);
                        }
                        _ => {}
                    }
                }
            }
            Err(e) => {
                if let Some(file) = &self.file {
                    eprintln!("Can't parse {:?}", file);
                }
                eprintln!("{}", e);
            }
        }

        ledger
    }
}

#[cfg(test)]
mod tests {
    use structopt::StructOpt;

    use super::*;

    #[test]
    fn test_empty_string() {
        let content = "".to_string();
        let mut tokenizer = Tokenizer::from(content);
        let items = tokenizer.tokenize(&CommonOpts::from_iter(["", "-f", ""].iter()));
        assert_eq!(items.len(), 0, "Should be empty");
    }

    #[test]
    fn test_only_newlines() {
        let content = "\n\n\n\n\n".to_string();
        let mut tokenizer = Tokenizer::from(content);
        let items = tokenizer.tokenize(&CommonOpts::from_iter(["", "-f", ""].iter()));
        assert_eq!(items.len(), 0, "Should be empty");
    }
}