OILS / lazylex/html_test.py — view on GitHub | oils.pub

152 lines, 92 significant
#!/usr/bin/env python2
from __future__ import print_function

import unittest

from lazylex import html  # module under test

# Convenience alias for the module's logging helper.
log = html.log

# Shared fixture: the test corpus is read once at import time.
# NOTE(review): path is relative — assumes tests run from the repo root.
with open('lazylex/testdata.html') as f:
    TEST_HTML = f.read()
12
13
def _MakeTagLexer(s):
    """Build a TagLexer over all of s, with its window reset to the full string."""
    tag_lexer = html.TagLexer(s)
    tag_lexer.Reset(0, len(s))
    return tag_lexer
18
19
def _PrintTokens(lex):
    """Log the tag name, then each token id with the substring it covers."""
    log('')
    log('tag = %r', lex.TagName())
    for tok_id, start, end in lex.Tokens():
        log('%s %r', tok_id, lex.s[start:end])
25
26
class TagLexerTest(unittest.TestCase):
    """Tests for TagLexer: tag names, raw attribute access, token iteration."""

    def testTagLexer(self):
        # Invalid input like '< >' is deliberately not exercised here.

        tag_lexer = _MakeTagLexer('<a>')
        _PrintTokens(tag_lexer)

        tag_lexer = _MakeTagLexer('<a novalue>')
        _PrintTokens(tag_lexer)

        # A value-less attribute reads as None.  A separate HasAttr() method
        # could mirror the DOM API:
        # https://developer.mozilla.org/en-US/docs/Web/API/Element/hasAttribute
        self.assertEqual(None, tag_lexer.GetAttrRaw('novalue'))

        tag_lexer = _MakeTagLexer('<a href="double quoted">')
        _PrintTokens(tag_lexer)

        self.assertEqual('double quoted', tag_lexer.GetAttrRaw('href'))
        self.assertEqual(None, tag_lexer.GetAttrRaw('oops'))

        tag_lexer = _MakeTagLexer('<a href=foo class="bar">')
        _PrintTokens(tag_lexer)

        tag_lexer = _MakeTagLexer('<a href=foo class="bar" />')
        _PrintTokens(tag_lexer)

        # Entities in attribute values come back raw, not decoded.
        tag_lexer = _MakeTagLexer('<a href="?foo=1&amp;bar=2" />')
        self.assertEqual('?foo=1&amp;bar=2', tag_lexer.GetAttrRaw('href'))

    def testTagName(self):
        tag_lexer = _MakeTagLexer('<a href=foo class="bar" />')
        self.assertEqual('a', tag_lexer.TagName())

    def testAllAttrs(self):
        """AllAttrsRaw() yields [(name, raw_value)] pairs in source order."""
        # Self-closing tag
        tag_lexer = _MakeTagLexer('<a href=foo class="bar" />')
        self.assertEqual([('href', 'foo'), ('class', 'bar')],
                         tag_lexer.AllAttrsRaw())

        tag_lexer = _MakeTagLexer('<a href="?foo=1&amp;bar=2" />')
        self.assertEqual([('href', '?foo=1&amp;bar=2')],
                         tag_lexer.AllAttrsRaw())
75
76
class LexerTest(unittest.TestCase):
    """Tests for the whole-document lexers in lazylex.html."""

    # Related consumers of this lexing machinery:
    # - IndexLinker in devtools/make_help.py
    # - <pre> sections in doc/html_help.py
    # - TocExtractor in devtools/cmark.py

    def testPstrip(self):
        """Remove anything like this.

        <p><pstrip> </pstrip></p>
        """
        pass

    def testCommentParse(self):
        """The test corpus must lex end-to-end with no Invalid token."""
        n = len(TEST_HTML)
        for tok_id, end_pos in html._Tokens(TEST_HTML, 0, n):
            if tok_id == html.Invalid:
                # Include the position so a corpus regression is easy to find.
                raise RuntimeError(
                    'Invalid token ending at position %d' % end_pos)
            print(tok_id)

    def testValid(self):
        """Check exact (token, end_pos) pairs for a tiny valid document."""
        Tok = html.Tok

        # Generator interface: ValidTokens() yields (tok_id, end_pos).
        lex = html.ValidTokens('<a>hi</a>')

        tok_id, pos = next(lex)
        self.assertEqual(3, pos)
        self.assertEqual(Tok.StartTag, tok_id)

        tok_id, pos = next(lex)
        self.assertEqual(5, pos)
        self.assertEqual(Tok.RawData, tok_id)

        tok_id, pos = next(lex)
        self.assertEqual(9, pos)
        self.assertEqual(Tok.EndTag, tok_id)

        # EndOfStream reports the same position as the final token.
        tok_id, pos = next(lex)
        self.assertEqual(9, pos)
        self.assertEqual(Tok.EndOfStream, tok_id)

        # Object interface: Lexer.Read() until EndOfStream.
        # (Unreachable leftover code that called next() on this Lexer object
        # after a bare 'return' was removed.)
        lex = html.Lexer('<a>hi</a>')
        while True:
            tok_id, pos = lex.Read()
            print('%d %s' % (pos, html.TokenName(tok_id)))
            if tok_id == Tok.EndOfStream:
                break

    def testInvalid(self):
        """A bare '&' after a tag must raise LexError."""
        Tok = html.Tok

        lex = html.ValidTokens('<a>&')

        tok_id, pos = next(lex)
        self.assertEqual(3, pos)
        self.assertEqual(Tok.StartTag, tok_id)

        try:
            tok_id, pos = next(lex)
        except html.LexError as e:
            print(e)
        else:
            self.fail('Expected LexError')
149
150
# Allow running this file directly, e.g.: python2 lazylex/html_test.py
if __name__ == '__main__':
    unittest.main()