Class:  MCollective::Matcher::Parser
In:     lib/mcollective/matcher/parser.rb
Parent: Object

Attributes:
  execution_stack  [R]
  scanner          [R]
Create a parser for the given -S input. The input is tokenized by Scanner and parsed immediately; any token, parse or parenthesis errors are reported by raising with the offending input highlighted.

# File lib/mcollective/matcher/parser.rb, line 6
def initialize(args)
  @scanner = Scanner.new(args)
  @execution_stack = []
  @parse_errors = []
  @token_errors = []
  @paren_errors = []
  parse
  exit_with_token_errors if @token_errors.size > 0
  exit_with_parse_errors if @parse_errors.size > 0
  exit_with_paren_errors if @paren_errors.size > 0
end
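
A minimal usage sketch (the filter expression here is a hypothetical example; the exact tokens produced depend on MCollective::Matcher::Scanner):

parser = MCollective::Matcher::Parser.new("environment=development and customer=acme")

# execution_stack and scanner are exposed through read-only accessors
parser.execution_stack.each { |token| p token }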
Exit and highlight the position of any unbalanced parentheses

# File lib/mcollective/matcher/parser.rb, line 37
def exit_with_paren_errors
  @paren_errors.each do |i|
    @scanner.arguments[i] = Util.colorize(:red, @scanner.arguments[i])
  end
  raise "Missing parenthesis found while parsing -S input #{@scanner.arguments.join}"
end
Exit and highlight the token ranges that produced parse errors

# File lib/mcollective/matcher/parser.rb, line 28
def exit_with_parse_errors
  @parse_errors.each do |error_range|
    (error_range[0]..error_range[1]).each do |i|
      @scanner.arguments[i] = Util.colorize(:red, @scanner.arguments[i])
    end
  end
  raise "Parse errors found while parsing -S input #{@scanner.arguments.join}"
end
Exit and highlight any malformed tokens
# File lib/mcollective/matcher/parser.rb, line 19
def exit_with_token_errors
  @token_errors.each do |error_range|
    (error_range[0]..error_range[1]).each do |i|
      @scanner.arguments[i] = Util.colorize(:red, @scanner.arguments[i])
    end
  end
  raise "Malformed token(s) found while parsing -S input #{@scanner.arguments.join}"
end
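
All three exit_with_* helpers colourise the offending slice of the scanner's arguments with Util.colorize and then raise. A sketch of the observable behaviour, assuming a hypothetical filter with an unmatched opening parenthesis:

begin
  # Hypothetical input: the opening "(" is never closed
  MCollective::Matcher::Parser.new("(environment=development and customer=acme")
rescue RuntimeError => e
  # Raised by exit_with_paren_errors once parsing has finished
  puts e.message
end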
Parse the input string, one token at a time, and construct the execution stack
# File lib/mcollective/matcher/parser.rb, line 45
def parse
  pre_index = @scanner.token_index
  p_token, p_token_value = nil
  c_token, c_token_value = @scanner.get_token
  parenth = 0

  while (c_token != nil)
    @scanner.token_index += 1
    n_token, n_token_value = @scanner.get_token

    unless n_token == " "
      # Validate the current token against its previous and next neighbours
      case c_token
      when "bad_token"
        @token_errors << c_token_value

      when "and"
        unless (n_token =~ /not|fstatement|statement|\(/) || (scanner.token_index == scanner.arguments.size) && !(n_token == nil)
          @parse_errors << [pre_index, scanner.token_index]
        end

        if p_token == nil
          @parse_errors << [pre_index - c_token.size, scanner.token_index]
        elsif (p_token == "and" || p_token == "or")
          @parse_errors << [pre_index - 1 - p_token.size, pre_index - 1]
        end

      when "or"
        unless (n_token =~ /not|fstatement|statement|\(/) || (scanner.token_index == scanner.arguments.size) && !(n_token == nil)
          @parse_errors << [pre_index, scanner.token_index]
        end

        if p_token == nil
          @parse_errors << [pre_index - c_token.size, scanner.token_index]
        elsif (p_token == "and" || p_token == "or")
          @parse_errors << [pre_index - 1 - p_token.size, pre_index - 1]
        end

      when "not"
        unless n_token =~ /fstatement|statement|\(|not/ && !(n_token == nil)
          @parse_errors << [pre_index, scanner.token_index]
        end

      when "statement", "fstatement"
        unless n_token =~ /and|or|\)/
          unless scanner.token_index == scanner.arguments.size
            @parse_errors << [pre_index, scanner.token_index]
          end
        end

      when ")"
        unless (n_token =~ /|and|or|not|\(/)
          unless (scanner.token_index == scanner.arguments.size)
            @parse_errors << [pre_index, scanner.token_index]
          end
        end
        # A ")" cancels the most recent unmatched "(", otherwise it is itself recorded as unmatched
        unless @paren_errors.empty?
          @paren_errors.pop
        else
          @paren_errors.push((n_token.nil?) ? scanner.token_index - 1 : scanner.token_index - n_token_value.size)
        end

      when "("
        unless n_token =~ /fstatement|statement|not|\(/
          @parse_errors << [pre_index, scanner.token_index]
        end
        # Remember the position of the "(" until a matching ")" is seen
        @paren_errors.push((n_token.nil?) ? scanner.token_index - 1 : scanner.token_index - n_token_value.size)

      else
        @parse_errors << [pre_index, scanner.token_index]
      end

      # Well formed tokens are pushed onto the execution stack as {type => value}
      unless n_token == " " || c_token == "bad_token"
        @execution_stack << {c_token => c_token_value}
      end

      p_token, p_token_value = c_token, c_token_value
      c_token, c_token_value = n_token, n_token_value
    end
    pre_index = @scanner.token_index
  end
end
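
For reference, parse leaves an array of single-entry hashes keyed by token type in execution_stack. A sketch of its likely shape for a simple compound filter (the exact keys and values depend on how Scanner tokenizes the input):

parser = MCollective::Matcher::Parser.new("environment=development and customer=acme")
parser.execution_stack
# => something along the lines of:
#    [{"statement" => "environment=development"},
#     {"and"       => "and"},
#     {"statement" => "customer=acme"}]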