path: root/src/parser.rs
use crate::journal::Journal;

use std::fs;
use std::iter::Peekable;

extern crate itertools;
extern crate time;

use itertools::Itertools;
use time::Date;


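/// A single lexical token recognised by the journal lexer below.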
#[derive(Debug, Eq, PartialEq, Clone)]
pub enum Token {
    Word(String),
    DateSep,
    AccountSep,
    DecimalSep,
    Newline,
    Space,
    Indent,
    Marker(char),
    Comment(char),
    Numeric(String),
}

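/// A streaming lexer over a character iterator; each call to `next` yields one `Token`.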
pub struct Lexer<I: Iterator<Item=char>> {
    iter: Peekable<I>
}

impl<I: Iterator<Item=char>> Lexer<I> {
    pub fn new(iter: I) -> Lexer<I> {
        Lexer {
            iter: iter.peekable()
        }
    }
}

impl<I: Iterator<Item=char>> Iterator for Lexer<I> {
    type Item = Token;

    fn next(&mut self) -> Option<Token> {
        // Peek at the next character without consuming it; '`' doubles as an
        // end-of-input sentinel so the match below can end the token stream.
        let ch = *self.iter.peek().unwrap_or(&'`');
        match ch {
            /* alphanumeric */
            c if c.is_alphabetic() => {
                // Consume a run of alphabetic characters as a single word.
                Some(Token::Word(self.iter
                        .peeking_take_while(|&c| c.is_alphabetic()).collect()))
            },
            c if c.is_numeric() => {
                // Consume a run of digits as a single numeric literal.
                Some(Token::Numeric(self.iter
                        .peeking_take_while(|&c| c.is_numeric()).collect()))
            },
            /* whitespace */
            ' ' => {
                self.iter.next();
                Some(Token::Space)
            },
            '\n' => {
                self.iter.next();
                Some(Token::Newline)
            },
            '\t' => {
                self.iter.next();
                Some(Token::Indent)
            },
            /* separators */
            '/' => {
                self.iter.next();
                Some(Token::DateSep)
            },
            ':' => {
                self.iter.next();
                Some(Token::AccountSep)
            },
            ',' | '.' => {
                self.iter.next();
                Some(Token::DecimalSep)
            },
            /* comments */
            ';' | '#' | '%' => {
                self.iter.next();
                Some(Token::Comment(ch))
            },
            /* markers */
            '*' | '!' | '@' | '-' => {
                self.iter.next();
                Some(Token::Marker(ch))
            },
            /* end of input (sentinel) */
            '`' => None,
            /* anything else: consume the character and keep lexing */
            _ => {
                self.iter.next();
                self.next()
            },
        }
    }
}


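/// Lex an entire input string into a vector of tokens.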
pub fn lex(text: &str) -> Vec<Token> {
    Lexer::new(text.chars()).collect()
}



// Placeholder for the parser proper; parsing is not implemented yet.
struct Parser {}

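/// Parse the journal file at `name`.
///
/// For now this is a stub: it reads the file, prints the raw text and each
/// lexed token for inspection, and returns an empty `Journal`.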
pub fn parse(name: &str) -> Journal {
    let text = fs::read_to_string(name).expect("Cannot open file");

    println!("{:?}", text);

    for token in lex(&text) {
        println!("{:?}", token);
    }

    Journal {
        accounts: vec![],
        commodities: vec![],
        transactions: vec![],
    }
}
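
// Illustrative test, not part of the original file: it assumes a
// ledger-style `YYYY/MM/DD * Payee` header line, which is a guess at the
// journal syntax this lexer is meant to handle.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn lexes_a_transaction_header() {
        assert_eq!(
            lex("2023/01/01 * Store"),
            vec![
                Token::Numeric("2023".to_string()),
                Token::DateSep,
                Token::Numeric("01".to_string()),
                Token::DateSep,
                Token::Numeric("01".to_string()),
                Token::Space,
                Token::Marker('*'),
                Token::Space,
                Token::Word("Store".to_string()),
            ]
        );
    }
}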