1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
|
use std::io::Error;
use std::path::PathBuf;
use std::process::ExitStatus;
// Operator grammar recognized by the tokenizer:
//   >   redirect stdout, overwriting the target file
//   >>  redirect stdout, appending to the target file
//   |   pipe (a single `|` is currently consumed without effect — see `tokenize`)
//   :   separates commands; the next command runs regardless of the previous status
//   &&  the next command runs only if the previous one succeeded
//   ||  the next command runs only if the previous one failed
mod command;
use command::*;
/// One full input line, parsed into its `:`-separated commands,
/// kept in the order they appeared on the line.
pub struct CommandLine(Vec<CommandInfo>);
impl CommandLine {
pub fn new(line: &str) -> Self {
let split = line.split(':');
let mut v = CommandLine(Vec::new());
for x in split {
v.0.push(tokenize(x));
}
v
}
pub fn run(
&self,
home: &PathBuf,
mut status: Option<ExitStatus>,
) -> Result<Option<ExitStatus>, Error> {
for cmd in &self.0 {
match cmd.run(&home, &status)? {
RunResult::Command(s) => status = Some(s),
RunResult::Builtin => {}
}
}
Ok(status)
}
}
/// What the token currently being accumulated will become once flushed.
enum TokenType {
    // An ordinary argument, pushed onto `CommandInfo::args`.
    Argument,
    // Target filename of a `>` redirect (truncate/overwrite).
    StdoutFileOverwrite,
    // Target filename of a `>>` redirect (append).
    StdoutFileAppend,
}
/// Tokenize a single command (one `:`-separated segment of a line) into a
/// `CommandInfo`: its arguments, an optional stdout redirect (`>` / `>>`),
/// and its run condition (`&&` / `||`).
///
/// Quoting rules: `'…'` groups words containing spaces; inside or outside
/// quotes, `''`, `'>` and `'&` escape a literal `'`, `>` or `&`.
/// NOTE(review): a bare `>` / `&` / `|` is always treated as an operator,
/// even between quotes — escaping is the only way to get the literal char.
pub fn tokenize(line: &str) -> CommandInfo {
    let mut r = CommandInfo::new();
    let mut iter = line.chars().peekable();
    let mut token = String::new();
    let mut token_type = TokenType::Argument;
    let mut quote = false;
    while let Some(i) = iter.next() {
        match i {
            // Spaces separate tokens unless quoted.
            ' ' => {
                if quote {
                    token.push(' ');
                }
            }
            // `''`, `'>`, `'&` are escapes; a lone `'` toggles quoting.
            '\'' => match iter.peek() {
                Some(&'\'') | Some(&'>') | Some(&'&') => {
                    token.push(iter.next().unwrap());
                }
                _ => {
                    quote = !quote;
                }
            },
            // `>>` appends, `>` overwrites; the *next* token is the target.
            '>' => {
                if iter.peek() == Some(&'>') {
                    token_type = TokenType::StdoutFileAppend;
                    iter.next();
                } else {
                    token_type = TokenType::StdoutFileOverwrite;
                }
            }
            // `&&`: run this command only on previous success.
            // A single `&` is consumed without effect.
            '&' => {
                if iter.peek() == Some(&'&') {
                    r.when = RunOn::ExitSuccess;
                    iter.next();
                }
            }
            // `||`: run this command only on previous failure.
            // A single `|` (pipe) is consumed without effect.
            '|' => {
                if iter.peek() == Some(&'|') {
                    r.when = RunOn::ExitFailure;
                    iter.next();
                }
            }
            _ => {
                token.push(i);
            }
        }
        // Flush the token at an unquoted space boundary or end of input.
        if !token.is_empty() && ((iter.peek() == Some(&' ') && !quote) || iter.peek().is_none()) {
            match token_type {
                TokenType::Argument => r.args.push(token),
                TokenType::StdoutFileOverwrite => r.stdout = Redirect::FileOverwrite(token),
                TokenType::StdoutFileAppend => r.stdout = Redirect::FileAppend(token),
            }
            token = String::new();
            // BUG FIX: reset after flushing. Previously `token_type` stayed
            // in redirect mode, so in `cmd >out arg` the trailing `arg` was
            // misparsed as another stdout redirect (clobbering `out`)
            // instead of being pushed as an argument.
            token_type = TokenType::Argument;
        }
    }
    r
}
#[test]
fn test_tokenizer() {
    // Plain arguments, quoted grouping, and the `''` escape.
    let plain = tokenize("ls -l");
    assert_eq!(plain.args, vec!("ls", "-l"));

    let quoted = tokenize("ls -l 'something else'");
    assert_eq!(quoted.args, vec!("ls", "-l", "something else"));

    let escaped = tokenize("ls -l 'junction jan''s'");
    assert_eq!(escaped.args, vec!("ls", "-l", "junction jan\'s"));

    // Conditional-execution operators populate `when`.
    let on_success = tokenize("&& ls");
    assert_eq!(on_success.args, vec!("ls"));
    assert_eq!(on_success.when, RunOn::ExitSuccess);

    let on_failure = tokenize("|| ls");
    assert_eq!(on_failure.args, vec!("ls"));
    assert_eq!(on_failure.when, RunOn::ExitFailure);

    // Manual-inspection cases for redirects and `'>` escapes — no
    // equality asserts on `stdout` yet.
    /*
    println!("{:?}", tokenize("ls -l something'>"));
    println!("{:?}", tokenize("ls -l something'>'>"));
    println!("{:?}", tokenize("ls -l something >output"));
    println!("{:?}", tokenize("ls -l something > output"));
    println!("{:?}", tokenize("ls -l something >'junction jan''s'"));
    println!("{:?}", tokenize("ls -l something >>output"));
    println!("{:?}", tokenize("ls -l something >> output"));
    println!("{:?}", tokenize("ls -l something >>'junction jan''s'"));
    */
}
|