class YARD::Parser::Ruby::Legacy::TokenList
Public Class Methods
new(content = nil)
click to toggle source
# File lib/yard/parser/ruby/legacy/token_list.rb, line 6
# Builds a token list, optionally seeding it with +content+.
#
# @param [TokenList, Token, String, nil] content initial tokens; a String
#   is tokenized via {#push} (aliased as +<<+). When nil, the list starts empty.
def initialize(content = nil)
  self << content if content
end
Public Instance Methods
push(*tokens)
click to toggle source
@param [TokenList, Token, String] tokens
A list of tokens. If the token is a string, it is parsed with {RubyLex}.
Calls superclass method
# File lib/yard/parser/ruby/legacy/token_list.rb, line 25
# Appends tokens to the list.
#
# @param [TokenList, Token, String] tokens
#   A list of tokens. If the token is a string, it is parsed with {RubyLex}.
# @raise [ArgumentError] when an element is none of the accepted kinds
# @return [TokenList] self, to allow chaining
def push(*tokens)
  tokens.each do |item|
    # case/when dispatch uses Module#=== — identical to the is_a? checks.
    case item
    when TokenList, Array
      concat item
    when Token
      super(item)
    when String
      parse_content(item)
    else
      raise ArgumentError, "Expecting token, list of tokens or string of code to be tokenized. Got #{item.class}"
    end
  end
  self
end
Also aliased as: <<
squeeze(type = TkSPACE)
click to toggle source
# File lib/yard/parser/ruby/legacy/token_list.rb, line 41
# Collapses consecutive runs of tokens of +type+ down to a single token.
#
# NOTE(review): like the original, repeated tokens are mapped to +nil+ rather
# than removed, so the resulting TokenList may contain nil placeholders —
# presumably callers tolerate or strip these; confirm before changing.
#
# @param [Class] type the token class to squeeze (defaults to TkSPACE)
# @return [TokenList] a new list with runs of +type+ reduced to one token
def squeeze(type = TkSPACE)
  prev = nil
  squeezed = map do |tok|
    duplicate = tok.is_a?(type) && prev.is_a?(type)
    prev = tok
    duplicate ? nil : tok
  end
  TokenList.new(squeezed)
end
to_s(full_statement = false, show_block = true)
click to toggle source
# File lib/yard/parser/ruby/legacy/token_list.rb, line 10
# Renders the token list back into source text.
#
# @param [Boolean] full_statement when false, stop emitting at the first
#   TkStatementEnd token
# @param [Boolean] show_block when false, TkBlockContents tokens are
#   rendered as empty strings instead of their text
# @return [String] the concatenated token text
def to_s(full_statement = false, show_block = true)
  pieces = []
  each do |token|
    # Truncate at the statement end unless the caller asked for everything.
    break if !full_statement && TkStatementEnd === token
    hide = !show_block && TkBlockContents === token
    pieces << (hide ? "" : token.text)
  end
  pieces.join
end
Private Instance Methods
convert_token(lex, tk)
click to toggle source
# File lib/yard/parser/ruby/legacy/token_list.rb, line 55
# Normalizes legacy lexer quirks: fuses an identifier followed by ':' into a
# single TkLABEL, and a symbol-begin token with its successor into a TkSYMBOL.
# Any other token passes through unchanged.
#
# @param [RubyLex] lex the lexer (consumed: one extra token in the fused cases)
# @param [Token] tk the token just read from +lex+
# @return [Token] the original token or a fused TkLABEL/TkSYMBOL replacement
def convert_token(lex, tk)
  if TkIDENTIFIER === tk && lex.peek == ':'
    trailing = lex.token # consume the ':' that makes this a label
    label = TkLABEL.new(tk.line_no, tk.char_no, nil)
    label.lex_state = lex.lex_state
    label.set_text(tk.text + trailing.text)
  elsif TkSYMBEG === tk && (trailing = lex.token)
    # Assignment in the condition is deliberate: when the lexer yields no
    # follow-up token, the branch is skipped and tk falls through unchanged.
    symbol = TkSYMBOL.new(tk.line_no, tk.char_no, nil)
    symbol.lex_state = lex.lex_state
    symbol.set_text(tk.text + trailing.text)
  else
    tk
  end
end
parse_content(content)
click to toggle source
# File lib/yard/parser/ruby/legacy/token_list.rb, line 48
# Tokenizes a string of Ruby source and appends each (possibly converted)
# token to this list.
#
# @param [String] content raw Ruby code to lex
# @return [void]
def parse_content(content)
  lexer = RubyLex.new(content)
  loop do
    tok = lexer.token
    break unless tok
    self << convert_token(lexer, tok)
  end
end