# Runs the given block and returns its result.
#
# Kept as a single hook so every generated scanner rule builds its
# token through one place.
#
# @return the value produced by +block+
def action(&block)
  block.call
end
# Reads +filename+ and prepares the scanner with its contents.
#
# @param filename [String] path of the file to tokenize
#
# Uses File.open rather than Kernel#open: Kernel#open spawns a
# subprocess when the path begins with "|", which is a command
# injection risk for caller-supplied paths.
def load_file(filename)
  @filename = filename
  File.open(filename, "r") do |f|
    scan_setup(f.read)
  end
end
# Returns the next token from the scanner as a two-element array of
# token type and matched text, or nil once the input is exhausted.
#
# Rules are tried strictly in order; the first regexp that matches at
# the current scan position wins.  Single characters matched by no
# rule fall through to the catch-all and are returned as themselves
# ([text, text]).
#
# @return [Array(Symbol, String), Array(String, String), nil]
# @raise [ScanError] when nothing matches or the state is unknown
def next_token
  return if @ss.eos?

  # Count lines before consuming, so error positions stay accurate.
  text = @ss.peek(1)
  @lineno += 1 if text == "\n"
  token = case @state
          when nil
            case
            when (text = @ss.scan(/has\([\s]*/))
              action { [:HAS, text] }

            # [^\0-\177] below admits any non-ASCII byte, per the CSS
            # "nonascii" production; the escape alternatives cover
            # hex escapes (with optional trailing whitespace) and
            # single-character escapes.
            when (text = @ss.scan(/[-@]?([_A-Za-z]|[^\0-\177]|\\[0-9A-Fa-f]{1,6}(\r\n|[\s])?|\\[^\n\r\f0-9A-Fa-f])([_A-Za-z0-9-]|[^\0-\177]|\\[0-9A-Fa-f]{1,6}(\r\n|[\s])?|\\[^\n\r\f0-9A-Fa-f])*\([\s]*/))
              action { [:FUNCTION, text] }

            when (text = @ss.scan(/[-@]?([_A-Za-z]|[^\0-\177]|\\[0-9A-Fa-f]{1,6}(\r\n|[\s])?|\\[^\n\r\f0-9A-Fa-f])([_A-Za-z0-9-]|[^\0-\177]|\\[0-9A-Fa-f]{1,6}(\r\n|[\s])?|\\[^\n\r\f0-9A-Fa-f])*/))
              action { [:IDENT, text] }

            when (text = @ss.scan(/\#([_A-Za-z0-9-]|[^\0-\177]|\\[0-9A-Fa-f]{1,6}(\r\n|[\s])?|\\[^\n\r\f0-9A-Fa-f])+/))
              action { [:HASH, text] }

            when (text = @ss.scan(/[\s]*~=[\s]*/))
              action { [:INCLUDES, text] }

            when (text = @ss.scan(/[\s]*\|=[\s]*/))
              action { [:DASHMATCH, text] }

            when (text = @ss.scan(/[\s]*\^=[\s]*/))
              action { [:PREFIXMATCH, text] }

            when (text = @ss.scan(/[\s]*\$=[\s]*/))
              action { [:SUFFIXMATCH, text] }

            when (text = @ss.scan(/[\s]*\*=[\s]*/))
              action { [:SUBSTRINGMATCH, text] }

            when (text = @ss.scan(/[\s]*!=[\s]*/))
              action { [:NOT_EQUAL, text] }

            when (text = @ss.scan(/[\s]*=[\s]*/))
              action { [:EQUAL, text] }

            when (text = @ss.scan(/[\s]*\)/))
              action { [:RPAREN, text] }

            when (text = @ss.scan(/[\s]*\[[\s]*/))
              action { [:LSQUARE, text] }

            when (text = @ss.scan(/[\s]*\]/))
              action { [:RSQUARE, text] }

            when (text = @ss.scan(/[\s]*\+[\s]*/))
              action { [:PLUS, text] }

            when (text = @ss.scan(/[\s]*>[\s]*/))
              action { [:GREATER, text] }

            when (text = @ss.scan(/[\s]*,[\s]*/))
              action { [:COMMA, text] }

            when (text = @ss.scan(/[\s]*~[\s]*/))
              action { [:TILDE, text] }

            when (text = @ss.scan(/\:not\([\s]*/))
              action { [:NOT, text] }

            when (text = @ss.scan(/-?([0-9]+|[0-9]*\.[0-9]+)/))
              action { [:NUMBER, text] }

            # "//" must be tried before "/" or it would never match.
            when (text = @ss.scan(/[\s]*\/\/[\s]*/))
              action { [:DOUBLESLASH, text] }

            when (text = @ss.scan(/[\s]*\/[\s]*/))
              action { [:SLASH, text] }

            when (text = @ss.scan(/U\+[0-9a-f?]{1,6}(-[0-9a-f]{1,6})?/))
              action { [:UNICODE_RANGE, text] }

            when (text = @ss.scan(/[\s]+/))
              action { [:S, text] }

            when (text = @ss.scan(/"([^\n\r\f"]|\n|\r\n|\r|\f|[^\0-\177]|\\[0-9A-Fa-f]{1,6}(\r\n|[\s])?|\\[^\n\r\f0-9A-Fa-f])*"|'([^\n\r\f']|\n|\r\n|\r|\f|[^\0-\177]|\\[0-9A-Fa-f]{1,6}(\r\n|[\s])?|\\[^\n\r\f0-9A-Fa-f])*'/))
              action { [:STRING, text] }

            # Catch-all: any other single character is its own token.
            when (text = @ss.scan(/./))
              action { [text, text] }

            else
              # Was @ss.pos .. 1, which returns nil whenever pos > 1
              # and turned the diagnostic into a TypeError; -1 reports
              # the actual unmatched remainder.
              text = @ss.string[@ss.pos .. -1]
              raise ScanError, "can not match: '" + text + "'"
            end # if

          else
            # Was the bare local `state` (NameError); the tokenizer's
            # state lives in @state.
            raise ScanError, "undefined state: '" + @state.to_s + "'"
          end # case state
  token
end
# Tokenizes the contents of +filename+ and runs the generated parser
# over the resulting token stream.
#
# @param filename [String] path of the file containing the selector
# @return the result of the parser's +do_parse+
def scan_file(filename)
  load_file filename
  do_parse
end
# Resets the tokenizer so that +str+ will be scanned from the start:
# clears the lexer state, rewinds the line counter, and wraps the
# input in a fresh StringScanner.
#
# @param str [String] the selector text to tokenize
def scan_setup(str)
  @state  = nil
  @lineno = 1
  @ss     = StringScanner.new(str)
end
# Tokenizes the string +str+ and runs the generated parser over it.
#
# @param str [String] a CSS selector
# @return the result of the parser's +do_parse+
def scan_str(str)
  scan_setup str
  do_parse
end
This section is disabled; re-run RDoc with the --debug option to generate it.
Generated with the Darkfish RDoc Generator 1.1.6.