// git.proxmox.com Git - proxmox-backup.git / src/config/network/lexer.rs
// (commit: "src/config/network.rs: make it compatible with pve")
1 use std::io::BufRead;
2 use std::iter::Iterator;
3 use std::collections::{HashMap, VecDeque};
4
5 use lazy_static::lazy_static;
6
/// Token kinds produced by the network configuration lexer.
///
/// Each token is paired with its source text by the lexer, so the
/// variants here only classify the word; `Text` is the catch-all for
/// anything that is not a recognized keyword.
//
// `Eq` and `Hash` are derived in addition to `PartialEq` so the token
// can be used as a `HashMap`/`HashSet` key and compared totally; for a
// fieldless `Copy` enum these derives are free and backward-compatible.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum Token {
    /// Any word that is not a recognized keyword.
    Text,
    /// A `#` comment line; the lexer strips the marker and trims the text.
    Comment,
    DHCP,
    /// End of a physical line.
    Newline,
    Address,
    Auto,
    Gateway,
    Inet,
    Inet6,
    Iface,
    Loopback,
    Manual,
    Netmask,
    Static,
    /// Marker prepended to indented (attribute) lines of an iface stanza.
    Attribute,
    MTU,
    BridgePorts,
    BridgeVlanAware,
    BondSlaves,
    /// Emitted exactly once when the input is exhausted.
    EOF,
}
30
31 lazy_static! {
32 static ref KEYWORDS: HashMap<&'static str, Token> = {
33 let mut map = HashMap::new();
34 map.insert("address", Token::Address);
35 map.insert("auto", Token::Auto);
36 map.insert("dhcp", Token::DHCP);
37 map.insert("gateway", Token::Gateway);
38 map.insert("inet", Token::Inet);
39 map.insert("inet6", Token::Inet6);
40 map.insert("iface", Token::Iface);
41 map.insert("loopback", Token::Loopback);
42 map.insert("manual", Token::Manual);
43 map.insert("netmask", Token::Netmask);
44 map.insert("static", Token::Static);
45 map.insert("mtu", Token::MTU);
46 map.insert("bridge-ports", Token::BridgePorts);
47 map.insert("bridge_ports", Token::BridgePorts);
48 map.insert("bridge-vlan-aware", Token::BridgeVlanAware);
49 map.insert("bridge_vlan_aware", Token::BridgeVlanAware);
50 map.insert("bond-slaves", Token::BondSlaves);
51 map.insert("bond_slaves", Token::BondSlaves);
52 map
53 };
54 }
55
/// Streaming tokenizer for network interface configuration text.
///
/// Reads the input line by line and, via its `Iterator` impl, yields
/// `(Token, String)` pairs: one pair per whitespace-separated word, a
/// `Token::Newline` after each drained line, and a single `Token::EOF`
/// before the iterator terminates.
pub struct Lexer<R> {
    // the underlying line-oriented input source
    input: R,
    // how many times end-of-input was observed; ensures Token::EOF is
    // yielded exactly once before the iterator returns None
    eof_count: usize,
    // tokens of the line currently being drained, or None when the next
    // line still has to be read from `input`
    cur_line: Option<VecDeque<(Token, String)>>,
}
61
62 impl <R: BufRead> Lexer<R> {
63
64 pub fn new(input: R) -> Self {
65 Self { input, eof_count: 0, cur_line: None }
66 }
67
68 fn split_line(line: &str) -> VecDeque<(Token, String)> {
69 if line.starts_with("#") {
70 let mut res = VecDeque::new();
71 res.push_back((Token::Comment, line[1..].trim().to_string()));
72 return res;
73 }
74 let mut list: VecDeque<(Token, String)> = line.split_ascii_whitespace().map(|text| {
75 let token = KEYWORDS.get(text).unwrap_or(&Token::Text);
76 (*token, text.to_string())
77 }).collect();
78
79 if line.starts_with(|c: char| c.is_ascii_whitespace() && c != '\n') {
80 list.push_front((Token::Attribute, String::from("\t")));
81 }
82 list
83 }
84 }
85
86 impl <R: BufRead> Iterator for Lexer<R> {
87
88 type Item = Result<(Token, String), std::io::Error>;
89
90 fn next(&mut self) -> Option<Self::Item> {
91 if self.cur_line.is_none() {
92 let mut line = String::new();
93 match self.input.read_line(&mut line) {
94 Err(err) => return Some(Err(err)),
95 Ok(0) => {
96 self.eof_count += 1;
97 if self.eof_count == 1 { return Some(Ok((Token::EOF, String::new()))); }
98 return None;
99 }
100 _ => {}
101 }
102 self.cur_line = Some(Self::split_line(&line));
103 }
104
105 match self.cur_line {
106 Some(ref mut cur_line) => {
107 if cur_line.is_empty() {
108 self.cur_line = None;
109 return Some(Ok((Token::Newline, String::from("\n"))));
110 } else {
111 let (token, text) = cur_line.pop_front().unwrap();
112 return Some(Ok((token, text)));
113 }
114 }
115 None => {
116 return None;
117 }
118 }
119 }
120 }