File: //proc/self/root/opt/puppetlabs/puppet/lib/ruby/vendor_ruby/puppet/external/nagios/parser.rb
#
# DO NOT MODIFY!!!!
# This file is automatically generated by Racc 1.4.9
# from Racc grammar file "".
#
require 'racc/parser.rb'
module Nagios
class Parser < Racc::Parser
module_eval(<<'...end grammar.ry/module_eval...', 'grammar.ry', 50)
require 'strscan'
# Raised by on_error when the input cannot be parsed (carries a message
# with the current line number and a snippet of the unparsed text).
class ::Nagios::Parser::SyntaxError < RuntimeError; end
# Parses a Nagios configuration string and returns the parsed objects.
#
# src - the configuration text to parse.
def parse(src)
  # Pre-2.1 Rubies need the input pinned to a binary encoding before it
  # is handed to StringScanner.
  src.force_encoding("ASCII-8BIT") if (RUBY_VERSION < '2.1.0') && src.respond_to?("force_encoding")
  @ss = StringScanner.new(src)
  # Reset all lexer state for this run.
  @in_parameter_value = false
  @in_object_definition = false
  @done = false
  @line = 1
  @yydebug = true
  do_parse
end
# This tokenizes the outside of object definitions,
# and detects when we start defining an object.
# We ignore whitespaces, comments and inline comments.
# We yield when finding newlines, the "define" keyword,
# the object name and the opening curly bracket.
def tokenize_outside_definitions
  if @ss.skip(/[ \t]+/)
    # whitespace: no token
  elsif @ss.scan(/\#.*$/)
    # comment: no token
  elsif @ss.scan(/;.*$/)
    # inline comment: no token
  elsif (text = @ss.scan(/\n/))
    [:RETURN, text]
  elsif (text = @ss.scan(/\b(define)\b/))
    [:DEFINE, text]
  elsif (text = @ss.scan(/[^{ \t\n]+/))
    # object name: anything that is not a separator or an opening brace
    [:NAME, text]
  elsif (text = @ss.scan(/\{/))
    # opening curly bracket: switch the lexer into definition mode
    @in_object_definition = true
    [:LCURLY, text]
  else
    rest = @ss.string[@ss.pos .. -1]
    raise ScanError, "can not match: '#{rest}'"
  end
end
# This tokenizes until we find the parameter name.
def tokenize_parameter_name
  if @ss.skip(/[ \t]+/)
    # whitespace: no token
  elsif @ss.scan(/\#.*$/)
    # comment: no token
  elsif @ss.scan(/;.*$/)
    # inline comment: no token
  elsif (text = @ss.scan(/\n/))
    [:RETURN, text]
  elsif (text = @ss.scan(/\}/))
    # closing curly bracket: the object definition ends here
    @in_object_definition = false
    [:RCURLY, text]
  elsif !@in_parameter_value && (text = @ss.scan(/\S+/))
    # parameter name: switch to value mode for the rest of the line
    @in_parameter_value = true
    [:PARAM, text]
  else
    rest = @ss.string[@ss.pos .. -1]
    raise ScanError, "can not match: '#{rest}'"
  end
end
# This tokenizes the parameter value.
# There is a special handling for lines containing semicolons :
# - unescaped semicolons are line comments (and should stop parsing of the line)
# - escaped (with backslash \) semicolons should be kept in the parameter value (without the backslash)
def tokenize_parameter_value
  case
  when @ss.skip(/[ \t]+/) # ignore whitespace
    ;
  when @ss.scan(/\#.*$/) # ignore comments
    ;
  when (text = @ss.scan(/\n/)) # newline ends the parameter value
    @in_parameter_value = false
    [:RETURN, text]
  when (text = @ss.scan(/.+$/)) # Value of parameter
    @in_parameter_value = false
    # Special handling of inline comments (;) and escaped semicolons (\;):
    # split the string on escaped semicolons, then rebuild it, joining the
    # kept parts with (unescaped) semicolons and stopping at the first
    # unescaped semicolon, which starts an inline comment.
    parts = text.split('\;', 0)
    text = ""
    parts.each_with_index do |elt, index|
      # Splitting at ';' yields more than one element exactly when this
      # segment contains an inline comment; only its first part is kept.
      linearray = elt.split(';', 0)
      # Re-insert a literal semicolon between kept segments.  Using the
      # index (not `text != ""`) keeps empty segments working.
      text += ';' if index > 0
      # linearray[0] is nil when elt is empty (e.g. a value starting with
      # '\;', since "".split(';') == []); the previous code crashed with
      # a TypeError on such input.
      text += linearray[0].to_s
      # An inline comment was found: stop parsing the rest of the line.
      break if linearray.length > 1
    end
    # Strip spaces between the end of the value and the inline comment.
    [:VALUE, text.strip]
  else
    text = @ss.string[@ss.pos .. -1]
    raise ScanError, "can not match: '#{text}'"
  end
end
# This tokenizes inside an object definition.
# Two cases : parameter name and parameter value
# Inside an object definition we are either reading a parameter name or,
# once one has been seen, its value.
def tokenize_inside_definitions
  @in_parameter_value ? tokenize_parameter_value : tokenize_parameter_name
end
# The lexer. Very simple.
# Lexer entry point: bumps the line counter on newlines and dispatches to
# the tokenizer matching the current lexer state.
def token
  @line += 1 if @ss.peek(1) == "\n"
  if @in_object_definition
    tokenize_inside_definitions
  else
    tokenize_outside_definitions
  end
end
# Returns the next real token, or nil at end of input.
def next_token
  return if @ss.eos?
  tok = nil
  # Loop past empty actions (whitespace and comments yield nil).
  loop do
    tok = token
    break if tok || @ss.eos?
  end
  tok
end
# lex-style hook; always 1.
def yydebug
  return 1
end
# lex-style hook; always 0.
def yywrap
  return 0
end
# Racc error hook: builds a message with the line number and a snippet of
# the remaining input, then raises SyntaxError (except at end of input).
def on_error(token, value, vstack )
  context = @ss.peek(20)
  msg = if value.nil?
          "line #{@line}: syntax error at token '#{token}' : #{context}"
        else
          "line #{@line}: syntax error at value '#{value}' : #{context}"
        end
  msg = "line #{@line}: Unexpected end of file" if @ss.eos?
  if token == '$end'.intern
    puts "okay, this is silly"
  else
    raise ::Nagios::Parser::SyntaxError, msg
  end
end
...end grammar.ry/module_eval...
##### State transition tables begin ###
# Shift/reduce action table (machine-generated by Racc; regenerate from
# grammar.ry instead of editing the numbers by hand).
racc_action_table = [
8, 3, 3, 14, 12, 18, 10, 4, 4, 20,
12, 14, 12, 9, 6, 12 ]
# Validity check values for racc_action_table entries.
racc_action_check = [
5, 0, 5, 13, 9, 13, 8, 0, 5, 14,
14, 11, 12, 6, 3, 20 ]
# Per-state offset into racc_action_table (nil = no shift actions).
racc_action_pointer = [
-1, nil, nil, 11, nil, 0, 8, nil, 6, -4,
nil, 7, 4, -1, 2, nil, nil, nil, nil, nil,
7, nil ]
# Default action per state (negative values reduce by that rule number).
racc_action_default = [
-12, -1, -3, -12, -4, -12, -12, -2, -12, -12,
22, -12, -10, -12, -12, -6, -11, -7, -5, -9,
-12, -8 ]
# Goto tables for nonterminals after a reduce.
racc_goto_table = [
11, 1, 15, 16, 17, 19, 7, 13, 5, nil,
nil, 21 ]
racc_goto_check = [
4, 2, 6, 4, 6, 4, 2, 5, 1, nil,
nil, 4 ]
racc_goto_pointer = [
nil, 8, 1, nil, -9, -4, -9 ]
racc_goto_default = [
nil, nil, nil, 2, nil, nil, nil ]
# Rule table: triples of [RHS length, LHS nonterminal id, reduce method].
racc_reduce_table = [
0, 0, :racc_error,
1, 10, :_reduce_1,
2, 10, :_reduce_2,
1, 11, :_reduce_3,
1, 11, :_reduce_4,
6, 12, :_reduce_5,
1, 14, :_reduce_none,
2, 14, :_reduce_7,
3, 15, :_reduce_8,
2, 15, :_reduce_9,
1, 13, :_reduce_none,
2, 13, :_reduce_none ]
racc_reduce_n = 12
racc_shift_n = 22
# Maps the token symbols emitted by the lexer to internal token ids.
racc_token_table = {
false => 0,
:error => 1,
:DEFINE => 2,
:NAME => 3,
:PARAM => 4,
:LCURLY => 5,
:RCURLY => 6,
:VALUE => 7,
:RETURN => 8 }
racc_nt_base = 9
racc_use_result_var = true
# Argument bundle handed to the Racc runtime by do_parse.
Racc_arg = [
racc_action_table,
racc_action_check,
racc_action_default,
racc_action_pointer,
racc_goto_table,
racc_goto_check,
racc_goto_default,
racc_goto_pointer,
racc_nt_base,
racc_reduce_table,
racc_token_table,
racc_shift_n,
racc_reduce_n,
racc_use_result_var ]
# Human-readable names for token/nonterminal ids (used in diagnostics).
Racc_token_to_s_table = [
"$end",
"error",
"DEFINE",
"NAME",
"PARAM",
"LCURLY",
"RCURLY",
"VALUE",
"RETURN",
"$start",
"decls",
"decl",
"object",
"returns",
"vars",
"var" ]
Racc_debug_parser = false
##### State transition tables end #####
# reduce 0 omitted
module_eval(<<'.,.,', 'grammar.ry', 6)
# decls: decl — pass the declaration through when present.
def _reduce_1(val, _values, result)
  val[0] || result
end
.,.,
module_eval(<<'.,.,', 'grammar.ry', 8)
# decls: decls decl — merge the accumulated and the new declarations,
# dropping nil sides and flattening when both are present.
def _reduce_2(val, _values, result)
  left = val[0]
  right = val[1]
  if right.nil?
    left
  elsif left.nil?
    right
  else
    [left, right].flatten
  end
end
.,.,
module_eval(<<'.,.,', 'grammar.ry', 20)
# decl: object — wrap the single object in an array.
def _reduce_3(val, _values, result)
  [val.first]
end
.,.,
module_eval(<<'.,.,', 'grammar.ry', 21)
# decl: RETURN — a blank line yields no declaration.
def _reduce_4(val, _values, result)
  nil
end
.,.,
module_eval(<<'.,.,', 'grammar.ry', 25)
# object rule: val[1] is the object type NAME, val[4] the hash of
# parameters collected by the vars rules.
def _reduce_5(val, _values, result)
  Nagios::Base.create(val[1], val[4])
end
.,.,
# reduce 6 omitted
module_eval(<<'.,.,', 'grammar.ry', 31)
# vars: vars var — fold the new parameter pair(s) into the accumulated
# hash and return that same hash.
def _reduce_7(val, _values, result)
  accumulated = val[0]
  val[1].each do |param, value|
    accumulated[param] = value
  end
  accumulated
end
.,.,
module_eval(<<'.,.,', 'grammar.ry', 38)
# var: PARAM VALUE returns — a one-entry hash mapping name to value.
def _reduce_8(val, _values, result)
  Hash[val[0], val[1]]
end
.,.,
module_eval(<<'.,.,', 'grammar.ry', 39)
# var: PARAM returns — a valueless parameter maps to the empty string.
def _reduce_9(val, _values, result)
  Hash[val[0], ""]
end
.,.,
# reduce 10 omitted
# reduce 11 omitted
# Default reduce action: pass the first value through unchanged.
def _reduce_none(val, _values, result)
  val.first
end
end # class Parser
end # module Nagios