Skip to content

Commit 7cc0aaa

Browse files
committed
support customizing key width; add attribute tests for lexer and parser
1 parent d21da45 commit 7cc0aaa

5 files changed

Lines changed: 225 additions & 47 deletions

File tree

layouts/40.tvkl

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
:| Tab | Q | E | R | T | Y | U | I | O | 'P' | Back |-
2-
:| Caps | A | S | D | F | G | H | J | K | L | Enter |-
3-
:| Shift | / | Z | X | C | V | B | N | M | , | . |-
4-
:| Ctrl | Win | Alt | Space | Alt | App | Ctrl |-
1+
:| Tab [$6] | Q | W | E | R | T | Y | U | I | O | P | Back [$7] |-
2+
:| Caps [$7] | A | S | D | F | G | H | J | K | L | Enter [$8] |-
3+
:| LShift [$8] | / | Z | X | C | V | B | N | M | , | . | RShift [$8] |-
4+
:| LCtrl [$7] | Win [$6] | LAlt [$7] | Space | RAlt [$7] | App [$6] | RCtrl [$7] |-

layouts/60.tvkl

Whitespace-only changes.

src/lexer.rs

Lines changed: 68 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -4,12 +4,19 @@ use std::str::Chars;
44

55
#[derive(Debug, PartialEq, Clone, Copy)]
66
pub enum TokenType {
7-
LineHead, // ":"
8-
LineTail, // "-"
9-
Split, // "|"
7+
LineHead, // ":"
8+
LineTail, // "-"
9+
Split, // "|"
1010
Name,
11+
Number, // "$12"
12+
LBracket, // "["
13+
RBracket, // "]"
14+
LBrace, // "{"
15+
RBrace, // "}"
1116
}
1217

18+
const RESERVE_SYMBOL: [char; 9] = [':', '-', '|', '\'', '[', ']', '{', '}', '$'];
19+
1320
#[derive(Debug, PartialEq, Clone)]
1421
pub struct Token {
1522
pub token_type: TokenType,
@@ -41,6 +48,26 @@ impl<'a> Lexer<'a> {
4148
self.src.next();
4249
Some(Token { token_type: TokenType::Split, value: "|".to_string() })
4350
}
51+
'$' => {
52+
self.src.next();
53+
Some(self.collect_number())
54+
}
55+
'[' => {
56+
self.src.next();
57+
Some(Token { token_type: TokenType::LBracket, value: "[".to_string() })
58+
}
59+
']' => {
60+
self.src.next();
61+
Some(Token { token_type: TokenType::RBracket, value: "]".to_string() })
62+
}
63+
'{' => {
64+
self.src.next();
65+
Some(Token { token_type: TokenType::LBrace, value: "{".to_string() })
66+
}
67+
'}' => {
68+
self.src.next();
69+
Some(Token { token_type: TokenType::RBrace, value: "}".to_string() })
70+
}
4471
'\'' => {
4572
Some(self.collect_quoted_name())
4673
}
@@ -76,10 +103,23 @@ impl<'a> Lexer<'a> {
76103
Token { token_type: TokenType::Name, value }
77104
}
78105

106+
fn collect_number(&mut self) -> Token {
107+
let mut value = String::new();
108+
while let Some(&c) = self.src.peek() {
109+
if c.is_numeric() && !RESERVE_SYMBOL.contains(&c) {
110+
value.push(c);
111+
self.src.next();
112+
} else {
113+
break;
114+
}
115+
}
116+
Token { token_type: TokenType::Number, value }
117+
}
118+
79119
fn collect_plain_name(&mut self) -> Token {
80120
let mut value = String::new();
81121
while let Some(&c) = self.src.peek() {
82-
if c.is_whitespace() || vec![':', '-', '|', '\''].contains(&c) {
122+
if c.is_whitespace() || RESERVE_SYMBOL.contains(&c) {
83123
break;
84124
}
85125
value.push(c);
@@ -102,7 +142,7 @@ mod tests{
102142
use super::*;
103143

104144
#[test]
105-
fn input_to_token(){
145+
fn simple_input(){
106146
let input = ":| A | '|' | 'P' | Back |-";
107147

108148
let mut lexer = Lexer::new(input);
@@ -124,7 +164,30 @@ mod tests{
124164
];
125165

126166
assert_eq!(tokens, right_result);
167+
}
168+
169+
#[test]
170+
fn specify_length(){
171+
let input = ":| A | Back [$10] |-";
172+
173+
let mut lexer = Lexer::new(input);
174+
let tokens:Vec<Token> = lexer.tokenization();
127175

176+
let right_result = vec![
177+
Token { token_type: TokenType::LineHead, value: String::from(":") },
178+
Token { token_type: TokenType::Split, value: String::from("|") },
179+
Token { token_type: TokenType::Name, value: String::from("A")},
180+
Token { token_type: TokenType::Split, value: String::from("|") },
181+
Token { token_type: TokenType::Name, value: String::from("Back") },
182+
Token { token_type: TokenType::LBracket, value: String::from("[") },
183+
Token { token_type: TokenType::Number, value: String::from("10") },
184+
Token { token_type: TokenType::RBracket, value: String::from("]") },
185+
Token { token_type: TokenType::Split, value: String::from("|") },
186+
Token { token_type: TokenType::LineTail, value: String::from("-") },
187+
];
188+
189+
assert_eq!(tokens, right_result);
128190
}
191+
129192

130193
}

src/parser.rs

Lines changed: 147 additions & 36 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,10 @@ pub struct Parser {
99
current: usize,
1010
}
1111

12+
pub struct Attr {
13+
width: u16,
14+
}
15+
1216
impl Parser {
1317
pub fn new(tokens: Vec<Token>) -> Self {
1418
Self { tokens, current: 0 }
@@ -31,18 +35,6 @@ impl Parser {
3135
}
3236
}
3337

34-
fn expect(&mut self, ty: TokenType, value: String) -> Result<(), ParserError> {
35-
let c = self.advance().unwrap();
36-
if c.token_type == ty && c.value == value {
37-
Ok(())
38-
} else {
39-
Err(ParserError::Err(format!(
40-
"Expected '{:?}' {:?}, found '{:?}' {:?}",
41-
ty, value, c.token_type, c.value
42-
)))
43-
}
44-
}
45-
4638
fn consume(&mut self, ty: TokenType) -> Result<&Token, ParserError> {
4739
let t = self.peek()?;
4840
if t.token_type == ty {
@@ -70,13 +62,19 @@ impl Parser {
7062
self.consume(TokenType::LineHead)?;
7163
self.consume(TokenType::Split)?;
7264

73-
while self.peek()?.token_type != TokenType::LineTail {
65+
while self.current < self.tokens.len() && self.peek()?.token_type != TokenType::LineTail {
7466
let name_token = self.consume(TokenType::Name)?;
75-
let name_str = &name_token.value;
67+
let name_str = name_token.value.clone();
68+
69+
let mut attr = get_default_width(&name_str);
70+
71+
if self.peek()?.token_type == TokenType::LBracket {
72+
self.parse_attr(&mut attr)?;
73+
}
7674

7775
row.push(Button {
78-
rdev_key: get_rdev_key(name_str),
79-
width: get_default_width(name_str),
76+
rdev_key: get_rdev_key(&name_str),
77+
width: attr.width,
8078
name: Arc::from(name_str.as_str()),
8179
});
8280

@@ -86,6 +84,29 @@ impl Parser {
8684
self.consume(TokenType::LineTail)?;
8785
Ok(row)
8886
}
87+
88+
fn parse_attr(&mut self, attr: &mut Attr) -> Result<(), ParserError> {
89+
self.consume(TokenType::LBracket)?;
90+
91+
while self.peek()?.token_type != TokenType::RBracket {
92+
match self.peek()?.token_type {
93+
TokenType::Number => self.parse_width(attr)?,
94+
_ => return Err(ParserError::Err("Invalid attribute".to_string())),
95+
}
96+
}
97+
98+
self.consume(TokenType::RBracket)?;
99+
Ok(())
100+
}
101+
102+
fn parse_width(&mut self, attr: &mut Attr) -> Result<(), ParserError> {
103+
let num_token = self.consume(TokenType::Number)?;
104+
attr.width = num_token
105+
.value
106+
.parse::<u16>()
107+
.map_err(|_| ParserError::Err("Invalid number format".to_string()))?;
108+
Ok(())
109+
}
89110
}
90111

91112
fn get_rdev_key(name: &str) -> Option<Key> {
@@ -142,12 +163,10 @@ fn get_rdev_key(name: &str) -> Option<Key> {
142163
}
143164
}
144165

145-
fn get_default_width(name: &str) -> u16 {
166+
fn get_default_width(name: &str) -> Attr {
146167
match name.to_lowercase().as_str() {
147-
"space" => 20,
148-
"lshift" | "rshift" | "enter" => 8,
149-
"tab" | "back" | "caps" | "ctrl" => 6,
150-
_ => 4,
168+
"space" => Attr { width: 20 },
169+
_ => Attr { width: 4 },
151170
}
152171
}
153172

@@ -158,20 +177,38 @@ mod tests {
158177

159178
// Helper to create a Name token
160179
fn t_name(val: &str) -> Token {
161-
Token { token_type: TokenType::Name, value: val.to_string() }
180+
Token {
181+
token_type: TokenType::Name,
182+
value: val.to_string(),
183+
}
162184
}
163185

164186
#[test]
165187
fn test_parser_success() {
166188
// Input sequence for: :| Tab | 'P' | -
167189
let tokens = vec![
168-
Token { token_type: TokenType::LineHead, value: ":".into() },
169-
Token { token_type: TokenType::Split, value: "|".into() },
190+
Token {
191+
token_type: TokenType::LineHead,
192+
value: ":".into(),
193+
},
194+
Token {
195+
token_type: TokenType::Split,
196+
value: "|".into(),
197+
},
170198
t_name("Tab"),
171-
Token { token_type: TokenType::Split, value: "|".into() },
199+
Token {
200+
token_type: TokenType::Split,
201+
value: "|".into(),
202+
},
172203
t_name("P"),
173-
Token { token_type: TokenType::Split, value: "|".into() },
174-
Token { token_type: TokenType::LineTail, value: "-".into() },
204+
Token {
205+
token_type: TokenType::Split,
206+
value: "|".into(),
207+
},
208+
Token {
209+
token_type: TokenType::LineTail,
210+
value: "-".into(),
211+
},
175212
];
176213

177214
let mut parser = Parser::new(tokens);
@@ -188,33 +225,107 @@ mod tests {
188225
fn test_parser_invalid_sequence() {
189226
// Missing the leading ":" -> | Q | -
190227
let tokens = vec![
191-
Token { token_type: TokenType::Split, value: "|".into() },
228+
Token {
229+
token_type: TokenType::Split,
230+
value: "|".into(),
231+
},
192232
t_name("Q"),
193-
Token { token_type: TokenType::Split, value: "|".into() },
194-
Token { token_type: TokenType::LineTail, value: "-".into() },
233+
Token {
234+
token_type: TokenType::Split,
235+
value: "|".into(),
236+
},
237+
Token {
238+
token_type: TokenType::LineTail,
239+
value: "-".into(),
240+
},
195241
];
196242

197243
let mut parser = Parser::new(tokens);
198244
let result = parser.parse();
199-
245+
200246
assert!(result.is_err());
201247
}
202248

203249
#[test]
204250
fn test_parser_missing_split() {
205251
// Missing the pipe between names: :| A B | -
206252
let tokens = vec![
207-
Token { token_type: TokenType::LineHead, value: ":".into() },
208-
Token { token_type: TokenType::Split, value: "|".into() },
253+
Token {
254+
token_type: TokenType::LineHead,
255+
value: ":".into(),
256+
},
257+
Token {
258+
token_type: TokenType::Split,
259+
value: "|".into(),
260+
},
209261
t_name("A"),
210262
t_name("B"), // Error here: Parser expects Split (|) after Name
211-
Token { token_type: TokenType::Split, value: "|".into() },
212-
Token { token_type: TokenType::LineTail, value: "-".into() },
263+
Token {
264+
token_type: TokenType::Split,
265+
value: "|".into(),
266+
},
267+
Token {
268+
token_type: TokenType::LineTail,
269+
value: "-".into(),
270+
},
213271
];
214272

215273
let mut parser = Parser::new(tokens);
216274
let result = parser.parse();
217-
275+
218276
assert!(result.is_err());
219277
}
278+
279+
#[test]
280+
fn test_parser_with_attr() {
281+
// Input sequence for: :| Tab [$10] | 'P' | -
282+
let tokens = vec![
283+
Token {
284+
token_type: TokenType::LineHead,
285+
value: ":".into(),
286+
},
287+
Token {
288+
token_type: TokenType::Split,
289+
value: "|".into(),
290+
},
291+
t_name("Tab"),
292+
Token {
293+
token_type: TokenType::LBracket,
294+
value: "[".into(),
295+
},
296+
Token {
297+
token_type: TokenType::Number,
298+
value: "10".into(),
299+
},
300+
Token {
301+
token_type: TokenType::RBracket,
302+
value: "]".into(),
303+
},
304+
Token {
305+
token_type: TokenType::Split,
306+
value: "|".into(),
307+
},
308+
t_name("P"),
309+
Token {
310+
token_type: TokenType::Split,
311+
value: "|".into(),
312+
},
313+
Token {
314+
token_type: TokenType::LineTail,
315+
value: "-".into(),
316+
},
317+
];
318+
319+
let mut parser = Parser::new(tokens);
320+
let result = parser.parse().unwrap();
321+
322+
assert_eq!(result.layer.len(), 1);
323+
assert_eq!(result.layer[0][0].name.as_ref(), "Tab");
324+
assert_eq!(result.layer[0][0].rdev_key, Some(Key::Tab));
325+
assert_eq!(result.layer[0][0].width, 10);
326+
assert_eq!(result.layer[0][1].name.as_ref(), "P");
327+
assert_eq!(result.layer[0][1].rdev_key, Some(Key::KeyP));
328+
assert_eq!(result.layer[0][1].width, 4);
329+
}
330+
220331
}

tvkl/grammar/tvkl.gram

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,6 @@
1-
line := ":" "|" ( ( "'" name "'" | name ) "|")* "-"
2-
name := *
1+
line := ":" "|" ( ( "'" name "'" | name ) attrDecl? "|")* "-"
2+
attrDecl := "[" attr+ "]"
3+
attr := length
4+
length := number
5+
number := "$" NUMBER
6+
name := *

0 commit comments

Comments
 (0)