Contents of /markup/html/whatpm/t/tokenizer-result.txt


Revision 1.311
Sat Sep 5 10:21:30 2009 UTC by wakaba
Branch: MAIN
Changes since 1.310: +2 -2 lines
File MIME type: text/plain
updated

1 wakaba 1.287 1..1129
2 wakaba 1.273 # Running under perl version 5.010000 for linux
3 wakaba 1.311 # Current time local: Sat Sep 5 19:15:16 2009
4     # Current time GMT: Sat Sep 5 10:15:16 2009
5 wakaba 1.1 # Using Test.pm version 1.25
6 wakaba 1.11 # t/tokenizer/test1.test
7 wakaba 1.20 ok 1
8 wakaba 1.298 not ok 2
9     # Test 2 got: "$VAR1 = [\n [\n qq'DOCTYPE',\n qq'html',\n undef,\n undef,\n 1\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #2)
10     # Expected: "$VAR1 = [\n [\n qq'DOCTYPE',\n qq'HTML',\n undef,\n undef,\n 1\n ]\n ];\n" (Correct Doctype uppercase: qq'<!DOCTYPE HTML>')
11     # Line 4 is changed:
12     # - " qq'HTML',\n"
13     # + " qq'html',\n"
14     # t/HTML-tokenizer.t line 205 is: ok $parser_dump, $expected_dump,
15     not ok 3
16     # Test 3 got: "$VAR1 = [\n [\n qq'DOCTYPE',\n qq'html',\n undef,\n undef,\n 1\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #3)
17     # Expected: "$VAR1 = [\n [\n qq'DOCTYPE',\n qq'HtMl',\n undef,\n undef,\n 1\n ]\n ];\n" (Correct Doctype mixed case: qq'<!DOCTYPE HtMl>')
18     # Line 4 is changed:
19     # - " qq'HtMl',\n"
20     # + " qq'html',\n"
21 wakaba 1.1 ok 4
22 wakaba 1.20 ok 5
23 wakaba 1.1 ok 6
24     ok 7
25     ok 8
26     ok 9
27     ok 10
28     ok 11
29     ok 12
30     ok 13
31     ok 14
32 wakaba 1.130 ok 15
33 wakaba 1.1 ok 16
34     ok 17
35     ok 18
36 wakaba 1.296 not ok 19
37     # Test 19 got: "$VAR1 = [\n [\n qq'Comment',\n qq' --comment '\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #19)
38     # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'Comment',\n qq' --comment '\n ]\n ];\n" (Comment, two central dashes: qq'<!-- --comment -->')
39     # Line 2 is missing:
40     # - " qq'ParseError',\n"
41 wakaba 1.1 ok 20
42     ok 21
43 wakaba 1.25 ok 22
44     ok 23
45 wakaba 1.1 ok 24
46 wakaba 1.22 ok 25
47     ok 26
48     ok 27
49 wakaba 1.1 ok 28
50     ok 29
51     ok 30
52     ok 31
53     ok 32
54     ok 33
55 wakaba 1.18 ok 34
56 wakaba 1.1 ok 35
57     ok 36
58     ok 37
59 wakaba 1.8 ok 38
60 wakaba 1.28 ok 39
61     ok 40
62 wakaba 1.43 ok 41
63     ok 42
64 wakaba 1.286 ok 43
65 wakaba 1.11 # t/tokenizer/test2.test
66 wakaba 1.286 not ok 44
67     # Test 44 got: "$VAR1 = [\n qq'ParseError',\n qq'ParseError',\n [\n qq'DOCTYPE',\n undef,\n undef,\n undef,\n 0\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #44)
68 wakaba 1.47 # Expected: "$VAR1 = [\n qq'ParseError',\n qq'ParseError',\n [\n qq'DOCTYPE',\n qq'',\n undef,\n undef,\n 0\n ]\n ];\n" (DOCTYPE without name: qq'<!DOCTYPE>')
69 wakaba 1.20 # Line 6 is changed:
70 wakaba 1.8 # - " qq'',\n"
71 wakaba 1.20 # + " undef,\n"
72     ok 45
73     ok 46
74     ok 47
75     ok 48
76     ok 49
77     ok 50
78     ok 51
79 wakaba 1.97 ok 52
80     ok 53
81     ok 54
82     ok 55
83 wakaba 1.9 ok 56
84     ok 57
85 wakaba 1.1 ok 58
86     ok 59
87     ok 60
88 wakaba 1.19 ok 61
89 wakaba 1.1 ok 62
90     ok 63
91 wakaba 1.130 ok 64
92 wakaba 1.1 ok 65
93 wakaba 1.240 ok 66
94     ok 67
95     ok 68
96 wakaba 1.1 ok 69
97     ok 70
98 wakaba 1.34 ok 71
99     ok 72
100 wakaba 1.1 ok 73
101     ok 74
102 wakaba 1.21 ok 75
103     ok 76
104 wakaba 1.1 ok 77
105 wakaba 1.141 ok 78
106 wakaba 1.1 ok 79
107 wakaba 1.305 ok 80
108 wakaba 1.34 ok 81
109 wakaba 1.286 # t/tokenizer/test3.test
110 wakaba 1.15 ok 82
111 wakaba 1.1 ok 83
112     ok 84
113 wakaba 1.25 ok 85
114     ok 86
115 wakaba 1.34 ok 87
116 wakaba 1.1 ok 88
117     ok 89
118     ok 90
119     ok 91
120 wakaba 1.296 not ok 92
121     # Test 92 got: "$VAR1 = [\n qq'ParseError',\n [\n qq'Comment',\n qq'--.'\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #92)
122     # Expected: "$VAR1 = [\n qq'ParseError',\n qq'ParseError',\n [\n qq'Comment',\n qq'--.'\n ]\n ];\n" (<!----.: qq'<!----.')
123     # Line 3 is missing:
124     # - " qq'ParseError',\n"
125 wakaba 1.1 ok 93
126     ok 94
127 wakaba 1.8 ok 95
128     ok 96
129     ok 97
130     ok 98
131     ok 99
132     ok 100
133 wakaba 1.96 ok 101
134     ok 102
135     ok 103
136     ok 104
137 wakaba 1.141 ok 105
138 wakaba 1.286 ok 106
139     ok 107
140     ok 108
141     not ok 109
142     # Test 109 got: "$VAR1 = [\n qq'ParseError',\n [\n qq'DOCTYPE',\n undef,\n undef,\n undef,\n 0\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #109)
143 wakaba 1.47 # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'DOCTYPE',\n qq'',\n undef,\n undef,\n 0\n ]\n ];\n" (<!doctype >: qq'<!doctype >')
144 wakaba 1.43 # Line 5 is changed:
145     # - " qq'',\n"
146     # + " undef,\n"
147 wakaba 1.286 not ok 110
148     # Test 110 got: "$VAR1 = [\n qq'ParseError',\n [\n qq'DOCTYPE',\n undef,\n undef,\n undef,\n 0\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #110)
149 wakaba 1.47 # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'DOCTYPE',\n qq'',\n undef,\n undef,\n 0\n ]\n ];\n" (<!doctype : qq'<!doctype ')
150 wakaba 1.43 # Line 5 is changed:
151     # - " qq'',\n"
152     # + " undef,\n"
153 wakaba 1.8 ok 111
154     ok 112
155     ok 113
156 wakaba 1.10 ok 114
157 wakaba 1.287 ok 115
158 wakaba 1.10 ok 116
159     ok 117
160     ok 118
161 wakaba 1.287 ok 119
162 wakaba 1.10 ok 120
163     ok 121
164 wakaba 1.39 ok 122
165 wakaba 1.18 ok 123
166 wakaba 1.287 ok 124
167 wakaba 1.18 ok 125
168     ok 126
169 wakaba 1.20 ok 127
170 wakaba 1.240 ok 128
171 wakaba 1.20 ok 129
172 wakaba 1.287 ok 130
173 wakaba 1.240 ok 131
174 wakaba 1.20 ok 132
175     ok 133
176     ok 134
177 wakaba 1.287 ok 135
178 wakaba 1.20 ok 136
179 wakaba 1.303 not ok 137
180     # Test 137 got: "$VAR1 = [\n qq'ParseError'\n ];\n" (t/HTML-tokenizer.t at line 205 fail #137)
181     # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'EndTag',\n qq'z'\n ]\n ];\n" (</z: qq'</z')
182     # Line 2 is changed:
183     # - " qq'ParseError',\n"
184     # + " qq'ParseError'\n"
185     # Lines 3-3 are missing:
186     # - " [\n"
187     # - " qq'EndTag',\n"
188     # - " qq'z'\n"
189     # - " ]\n"
190 wakaba 1.21 ok 138
191 wakaba 1.306 not ok 139
192     # Test 139 got: "$VAR1 = [\n qq'ParseError'\n ];\n" (t/HTML-tokenizer.t at line 205 fail #139)
193     # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'StartTag',\n qq'z',\n {}\n ]\n ];\n" (<z : qq'<z ')
194     # Line 2 is changed:
195     # - " qq'ParseError',\n"
196     # + " qq'ParseError'\n"
197     # Lines 3-3 are missing:
198     # - " [\n"
199     # - " qq'StartTag',\n"
200     # - " qq'z',\n"
201     # - " {}\n"
202     # - " ]\n"
203 wakaba 1.20 ok 140
204 wakaba 1.306 not ok 141
205     # Test 141 got: "$VAR1 = [\n qq'ParseError',\n qq'ParseError'\n ];\n" (t/HTML-tokenizer.t at line 205 fail #141)
206     # Expected: "$VAR1 = [\n qq'ParseError',\n qq'ParseError',\n [\n qq'StartTag',\n qq'z',\n {}\n ]\n ];\n" (<z/ : qq'<z/ ')
207     # Line 3 is changed:
208     # - " qq'ParseError',\n"
209     # + " qq'ParseError'\n"
210     # Lines 4-4 are missing:
211     # - " [\n"
212     # - " qq'StartTag',\n"
213     # - " qq'z',\n"
214     # - " {}\n"
215     # - " ]\n"
216 wakaba 1.28 ok 142
217 wakaba 1.303 not ok 143
218     # Test 143 got: "$VAR1 = [\n qq'ParseError'\n ];\n" (t/HTML-tokenizer.t at line 205 fail #143)
219     # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'StartTag',\n qq'z',\n {}\n ]\n ];\n" (<z: qq'<z')
220     # Line 2 is changed:
221     # - " qq'ParseError',\n"
222     # + " qq'ParseError'\n"
223     # Lines 3-3 are missing:
224     # - " [\n"
225     # - " qq'StartTag',\n"
226     # - " qq'z',\n"
227     # - " {}\n"
228     # - " ]\n"
229     not ok 144
230     # Test 144 got: "$VAR1 = [\n qq'ParseError'\n ];\n" (t/HTML-tokenizer.t at line 205 fail #144)
231     # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'EndTag',\n qq'z'\n ]\n ];\n" (</z: qq'</z')
232     # Line 2 is changed:
233     # - " qq'ParseError',\n"
234     # + " qq'ParseError'\n"
235     # Lines 3-3 are missing:
236     # - " [\n"
237     # - " qq'EndTag',\n"
238     # - " qq'z'\n"
239     # - " ]\n"
240     not ok 145
241     # Test 145 got: "$VAR1 = [\n qq'ParseError'\n ];\n" (t/HTML-tokenizer.t at line 205 fail #145)
242     # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'StartTag',\n qq'z0',\n {}\n ]\n ];\n" (<z0: qq'<z0')
243     # Line 2 is changed:
244     # - " qq'ParseError',\n"
245     # + " qq'ParseError'\n"
246     # Lines 3-3 are missing:
247     # - " [\n"
248     # - " qq'StartTag',\n"
249     # - " qq'z0',\n"
250     # - " {}\n"
251     # - " ]\n"
252 wakaba 1.286 not ok 146
253     # Test 146 got: "$VAR1 = [\n qq'ParseError',\n qq'ParseError',\n [\n qq'StartTag',\n qq'z',\n {\n 0 => qq''\n }\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #146)
254 wakaba 1.247 # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'StartTag',\n qq'z',\n {\n 0 => qq''\n }\n ]\n ];\n" (<z/0=>: qq'<z/0=>')
255     # Got 1 extra line at line 3:
256     # + " qq'ParseError',\n"
257 wakaba 1.309 not ok 147
258     # Test 147 got: "$VAR1 = [\n qq'ParseError',\n qq'ParseError'\n ];\n" (t/HTML-tokenizer.t at line 205 fail #147)
259     # Expected: "$VAR1 = [\n qq'ParseError',\n qq'ParseError',\n [\n qq'StartTag',\n qq'z',\n {\n 0 => qq''\n }\n ]\n ];\n" (<z/0= : qq'<z/0= ')
260     # Line 3 is changed:
261     # - " qq'ParseError',\n"
262     # + " qq'ParseError'\n"
263     # Lines 4-4 are missing:
264     # - " [\n"
265     # - " qq'StartTag',\n"
266     # - " qq'z',\n"
267     # - " {\n"
268     # - " 0 => qq''\n"
269     # - " }\n"
270     # - " ]\n"
271 wakaba 1.239 ok 148
272 wakaba 1.306 not ok 149
273     # Test 149 got: "$VAR1 = [\n qq'ParseError',\n qq'ParseError'\n ];\n" (t/HTML-tokenizer.t at line 205 fail #149)
274     # Expected: "$VAR1 = [\n qq'ParseError',\n qq'ParseError',\n [\n qq'StartTag',\n qq'z',\n {\n 0 => qq'?'\n }\n ]\n ];\n" (<z/0=? : qq'<z/0=? ')
275     # Line 3 is changed:
276     # - " qq'ParseError',\n"
277     # + " qq'ParseError'\n"
278     # Lines 4-4 are missing:
279     # - " [\n"
280     # - " qq'StartTag',\n"
281     # - " qq'z',\n"
282     # - " {\n"
283     # - " 0 => qq'?'\n"
284     # - " }\n"
285     # - " ]\n"
286 wakaba 1.22 ok 150
287 wakaba 1.130 ok 151
288 wakaba 1.239 ok 152
289 wakaba 1.22 ok 153
290     ok 154
291     ok 155
292     ok 156
293 wakaba 1.28 ok 157
294     ok 158
295 wakaba 1.309 not ok 159
296     # Test 159 got: "$VAR1 = [\n qq'ParseError',\n qq'ParseError'\n ];\n" (t/HTML-tokenizer.t at line 205 fail #159)
297     # Expected: "$VAR1 = [\n qq'ParseError',\n qq'ParseError',\n [\n qq'StartTag',\n qq'z',\n {\n 0 => qq''\n }\n ]\n ];\n" (<z/0 =: qq'<z/0 =')
298     # Line 3 is changed:
299     # - " qq'ParseError',\n"
300     # + " qq'ParseError'\n"
301     # Lines 4-4 are missing:
302     # - " [\n"
303     # - " qq'StartTag',\n"
304     # - " qq'z',\n"
305     # - " {\n"
306     # - " 0 => qq''\n"
307     # - " }\n"
308     # - " ]\n"
309 wakaba 1.239 ok 160
310 wakaba 1.308 not ok 161
311     # Test 161 got: "$VAR1 = [\n qq'ParseError',\n qq'ParseError'\n ];\n" (t/HTML-tokenizer.t at line 205 fail #161)
312     # Expected: "$VAR1 = [\n qq'ParseError',\n qq'ParseError',\n [\n qq'StartTag',\n qq'z',\n {\n 0 => qq''\n }\n ]\n ];\n" (<z/0 : qq'<z/0 ')
313     # Line 3 is changed:
314     # - " qq'ParseError',\n"
315     # + " qq'ParseError'\n"
316     # Lines 4-4 are missing:
317     # - " [\n"
318     # - " qq'StartTag',\n"
319     # - " qq'z',\n"
320     # - " {\n"
321     # - " 0 => qq''\n"
322     # - " }\n"
323     # - " ]\n"
324 wakaba 1.28 ok 162
325     ok 163
326 wakaba 1.307 not ok 164
327     # Test 164 got: "$VAR1 = [\n qq'ParseError',\n qq'ParseError'\n ];\n" (t/HTML-tokenizer.t at line 205 fail #164)
328     # Expected: "$VAR1 = [\n qq'ParseError',\n qq'ParseError',\n [\n qq'StartTag',\n qq'z',\n {\n qq'00' => qq''\n }\n ]\n ];\n" (<z/00: qq'<z/00')
329     # Line 3 is changed:
330     # - " qq'ParseError',\n"
331     # + " qq'ParseError'\n"
332     # Lines 4-4 are missing:
333     # - " [\n"
334     # - " qq'StartTag',\n"
335     # - " qq'z',\n"
336     # - " {\n"
337     # - " qq'00' => qq''\n"
338     # - " }\n"
339     # - " ]\n"
340     not ok 165
341     # Test 165 got: "$VAR1 = [\n qq'ParseError',\n qq'ParseError',\n qq'ParseError'\n ];\n" (t/HTML-tokenizer.t at line 205 fail #165)
342     # Expected: "$VAR1 = [\n qq'ParseError',\n qq'ParseError',\n qq'ParseError',\n [\n qq'StartTag',\n qq'z',\n {\n 0 => qq''\n }\n ]\n ];\n" (<z/0 0: qq'<z/0 0')
343     # Line 4 is changed:
344     # - " qq'ParseError',\n"
345     # + " qq'ParseError'\n"
346     # Lines 5-5 are missing:
347     # - " [\n"
348     # - " qq'StartTag',\n"
349     # - " qq'z',\n"
350     # - " {\n"
351     # - " 0 => qq''\n"
352     # - " }\n"
353     # - " ]\n"
354 wakaba 1.28 ok 166
355     ok 167
356     ok 168
357 wakaba 1.307 not ok 169
358     # Test 169 got: "$VAR1 = [\n qq'ParseError',\n qq'ParseError'\n ];\n" (t/HTML-tokenizer.t at line 205 fail #169)
359     # Expected: "$VAR1 = [\n qq'ParseError',\n qq'ParseError',\n [\n qq'StartTag',\n qq'z',\n {\n qq'0z' => qq''\n }\n ]\n ];\n" (<z/0z: qq'<z/0z')
360     # Line 3 is changed:
361     # - " qq'ParseError',\n"
362     # + " qq'ParseError'\n"
363     # Lines 4-4 are missing:
364     # - " [\n"
365     # - " qq'StartTag',\n"
366     # - " qq'z',\n"
367     # - " {\n"
368     # - " qq'0z' => qq''\n"
369     # - " }\n"
370     # - " ]\n"
371     not ok 170
372     # Test 170 got: "$VAR1 = [\n qq'ParseError',\n qq'ParseError'\n ];\n" (t/HTML-tokenizer.t at line 205 fail #170)
373     # Expected: "$VAR1 = [\n qq'ParseError',\n qq'ParseError',\n [\n qq'StartTag',\n qq'z',\n {\n 0 => qq'',\n qq'z' => qq''\n }\n ]\n ];\n" (<z/0 z: qq'<z/0 z')
374     # Line 3 is changed:
375     # - " qq'ParseError',\n"
376     # + " qq'ParseError'\n"
377     # Lines 4-4 are missing:
378     # - " [\n"
379     # - " qq'StartTag',\n"
380     # - " qq'z',\n"
381     # - " {\n"
382     # - " 0 => qq'',\n"
383     # - " qq'z' => qq''\n"
384     # - " }\n"
385     # - " ]\n"
386 wakaba 1.303 not ok 171
387     # Test 171 got: "$VAR1 = [\n qq'ParseError'\n ];\n" (t/HTML-tokenizer.t at line 205 fail #171)
388     # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'StartTag',\n qq'zz',\n {}\n ]\n ];\n" (<zz: qq'<zz')
389     # Line 2 is changed:
390     # - " qq'ParseError',\n"
391     # + " qq'ParseError'\n"
392     # Lines 3-3 are missing:
393     # - " [\n"
394     # - " qq'StartTag',\n"
395     # - " qq'zz',\n"
396     # - " {}\n"
397     # - " ]\n"
398 wakaba 1.307 not ok 172
399     # Test 172 got: "$VAR1 = [\n qq'ParseError',\n qq'ParseError'\n ];\n" (t/HTML-tokenizer.t at line 205 fail #172)
400     # Expected: "$VAR1 = [\n qq'ParseError',\n qq'ParseError',\n [\n qq'StartTag',\n qq'z',\n {\n qq'z' => qq''\n }\n ]\n ];\n" (<z/z: qq'<z/z')
401     # Line 3 is changed:
402     # - " qq'ParseError',\n"
403     # + " qq'ParseError'\n"
404     # Lines 4-4 are missing:
405     # - " [\n"
406     # - " qq'StartTag',\n"
407     # - " qq'z',\n"
408     # - " {\n"
409     # - " qq'z' => qq''\n"
410     # - " }\n"
411     # - " ]\n"
412 wakaba 1.286 # t/tokenizer/test4.test
413 wakaba 1.299 not ok 173
414 wakaba 1.307 # Test 173 got: "$VAR1 = [\n qq'ParseError',\n qq'ParseError',\n qq'ParseError'\n ];\n" (t/HTML-tokenizer.t at line 205 fail #173)
415 wakaba 1.299 # Expected: "$VAR1 = [\n qq'ParseError',\n qq'ParseError',\n [\n qq'StartTag',\n qq'z',\n {\n 0 => qq'',\n qq'<' => qq''\n }\n ]\n ];\n" (< in attribute name: qq'<z/0 <')
416 wakaba 1.307 # Line 4 is changed:
417     # - " [\n"
418     # + " qq'ParseError'\n"
419     # Lines 5-5 are missing:
420     # - " qq'StartTag',\n"
421     # - " qq'z',\n"
422     # - " {\n"
423     # - " 0 => qq'',\n"
424     # - " qq'<' => qq''\n"
425     # - " }\n"
426     # - " ]\n"
427 wakaba 1.293 not ok 174
428     # Test 174 got: "$VAR1 = [\n qq'ParseError',\n qq'ParseError',\n [\n qq'StartTag',\n qq'z',\n {\n qq'x' => qq'<'\n }\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #174)
429     # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'StartTag',\n qq'z',\n {\n qq'x' => qq'<'\n }\n ]\n ];\n" (< in attribute value: qq'<z x=<')
430     # Got 1 extra line at line 3:
431     # + " qq'ParseError',\n"
432 wakaba 1.286 ok 175
433     ok 176
434     not ok 177
435     # Test 177 got: "$VAR1 = [\n qq'ParseError',\n qq'ParseError',\n [\n qq'StartTag',\n qq'z',\n {\n qq'=' => qq''\n }\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #177)
436 wakaba 1.247 # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'StartTag',\n qq'z',\n {\n qq'=' => qq''\n }\n ]\n ];\n" (== attribute: qq'<z ==>')
437     # Got 1 extra line at line 3:
438     # + " qq'ParseError',\n"
439 wakaba 1.28 ok 178
440 wakaba 1.33 ok 179
441 wakaba 1.34 ok 180
442 wakaba 1.38 ok 181
443     ok 182
444 wakaba 1.43 ok 183
445     ok 184
446     ok 185
447     ok 186
448     ok 187
449     ok 188
450 wakaba 1.240 ok 189
451     ok 190
452 wakaba 1.43 ok 191
453     ok 192
454     ok 193
455     ok 194
456     ok 195
457     ok 196
458 wakaba 1.306 not ok 197
459     # Test 197 got: "$VAR1 = [\n qq'ParseError'\n ];\n" (t/HTML-tokenizer.t at line 205 fail #197)
460     # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'StartTag',\n qq'z',\n {}\n ]\n ];\n" (CR EOF in tag name: qq'<z\x{0D}')
461     # Line 2 is changed:
462     # - " qq'ParseError',\n"
463     # + " qq'ParseError'\n"
464     # Lines 3-3 are missing:
465     # - " [\n"
466     # - " qq'StartTag',\n"
467     # - " qq'z',\n"
468     # - " {}\n"
469     # - " ]\n"
470 wakaba 1.96 ok 198
471     ok 199
472 wakaba 1.286 ok 200
473 wakaba 1.96 ok 201
474 wakaba 1.130 ok 202
475 wakaba 1.43 ok 203
476     ok 204
477     ok 205
478     ok 206
479     ok 207
480     ok 208
481     ok 209
482     ok 210
483     ok 211
484     ok 212
485     ok 213
486     ok 214
487 wakaba 1.240 ok 215
488     ok 216
489 wakaba 1.43 ok 217
490     ok 218
491     ok 219
492     ok 220
493 wakaba 1.141 ok 221
494 wakaba 1.286 ok 222
495 wakaba 1.298 not ok 223
496     # Test 223 got: "$VAR1 = [\n [\n qq'DOCTYPE',\n qq'html',\n qq'AbC',\n qq'XyZ',\n 1\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #223)
497     # Expected: "$VAR1 = [\n [\n qq'DOCTYPE',\n qq'HtMl',\n qq'AbC',\n qq'XyZ',\n 1\n ]\n ];\n" (Doctype public case-sensitivity (1): qq'<!DoCtYpE HtMl PuBlIc "AbC" "XyZ">')
498     # Line 4 is changed:
499     # - " qq'HtMl',\n"
500     # + " qq'html',\n"
501     not ok 224
502     # Test 224 got: "$VAR1 = [\n [\n qq'DOCTYPE',\n qq'html',\n qq'aBc',\n qq'xYz',\n 1\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #224)
503     # Expected: "$VAR1 = [\n [\n qq'DOCTYPE',\n qq'hTmL',\n qq'aBc',\n qq'xYz',\n 1\n ]\n ];\n" (Doctype public case-sensitivity (2): qq'<!dOcTyPe hTmL pUbLiC "aBc" "xYz">')
504     # Line 4 is changed:
505     # - " qq'hTmL',\n"
506     # + " qq'html',\n"
507     not ok 225
508     # Test 225 got: "$VAR1 = [\n [\n qq'DOCTYPE',\n qq'html',\n undef,\n qq'XyZ',\n 1\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #225)
509     # Expected: "$VAR1 = [\n [\n qq'DOCTYPE',\n qq'HtMl',\n undef,\n qq'XyZ',\n 1\n ]\n ];\n" (Doctype system case-sensitivity (1): qq'<!DoCtYpE HtMl SyStEm "XyZ">')
510     # Line 4 is changed:
511     # - " qq'HtMl',\n"
512     # + " qq'html',\n"
513     not ok 226
514     # Test 226 got: "$VAR1 = [\n [\n qq'DOCTYPE',\n qq'html',\n undef,\n qq'xYz',\n 1\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #226)
515     # Expected: "$VAR1 = [\n [\n qq'DOCTYPE',\n qq'hTmL',\n undef,\n qq'xYz',\n 1\n ]\n ];\n" (Doctype system case-sensitivity (2): qq'<!dOcTyPe hTmL sYsTeM "xYz">')
516     # Line 4 is changed:
517     # - " qq'hTmL',\n"
518     # + " qq'html',\n"
519 wakaba 1.286 not ok 227
520     # Test 227 got: "$VAR1 = [\n qq'ParseError',\n qq'ParseError',\n [\n qq'Comment',\n qq'doc'\n ],\n [\n qq'Character',\n qq'\\x{FFFD}'\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #227)
521 wakaba 1.130 # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'Comment',\n qq'doc'\n ],\n qq'ParseError',\n [\n qq'Character',\n qq'\\x{FFFD}'\n ]\n ];\n" (U+0000 in lookahead region after non-matching character: qq'<!doc>\x{00}')
522     # Got 1 extra line at line 3:
523     # + " qq'ParseError',\n"
524     # Line 8 is missing:
525     # - " qq'ParseError',\n"
526 wakaba 1.43 ok 228
527     ok 229
528     ok 230
529     ok 231
530     ok 232
531     ok 233
532     ok 234
533     ok 235
534 wakaba 1.141 ok 236
535 wakaba 1.43 ok 237
536     ok 238
537     ok 239
538     ok 240
539     ok 241
540 wakaba 1.287 ok 242
541 wakaba 1.43 ok 243
542 wakaba 1.287 ok 244
543 wakaba 1.286 # t/tokenizer/contentModelFlags.test
544 wakaba 1.43 ok 245
545     ok 246
546     ok 247
547     ok 248
548 wakaba 1.141 ok 249
549 wakaba 1.43 ok 250
550     ok 251
551     ok 252
552     ok 253
553     ok 254
554     ok 255
555 wakaba 1.141 ok 256
556 wakaba 1.43 ok 257
557 wakaba 1.286 # t/tokenizer/escapeFlag.test
558 wakaba 1.43 ok 258
559     ok 259
560     ok 260
561     ok 261
562     ok 262
563 wakaba 1.206 ok 263
564 wakaba 1.43 ok 264
565     ok 265
566     ok 266
567 wakaba 1.286 # t/tokenizer/entities.test
568 wakaba 1.43 ok 267
569     ok 268
570     ok 269
571     ok 270
572     ok 271
573     ok 272
574     ok 273
575     ok 274
576     ok 275
577     ok 276
578     ok 277
579     ok 278
580     ok 279
581     ok 280
582     ok 281
583     ok 282
584     ok 283
585     ok 284
586     ok 285
587     ok 286
588     ok 287
589     ok 288
590     ok 289
591     ok 290
592     ok 291
593     ok 292
594     ok 293
595     ok 294
596     ok 295
597     ok 296
598     ok 297
599     ok 298
600     ok 299
601     ok 300
602     ok 301
603     ok 302
604     ok 303
605     ok 304
606     ok 305
607     ok 306
608     ok 307
609     ok 308
610     ok 309
611     ok 310
612     ok 311
613     ok 312
614     ok 313
615     ok 314
616     ok 315
617     ok 316
618     ok 317
619     ok 318
620     ok 319
621     ok 320
622     ok 321
623     ok 322
624     ok 323
625     ok 324
626     ok 325
627     ok 326
628     ok 327
629     ok 328
630     ok 329
631     ok 330
632     ok 331
633     ok 332
634     ok 333
635     ok 334
636     ok 335
637     ok 336
638     ok 337
639 wakaba 1.59 ok 338
640     ok 339
641     ok 340
642     ok 341
643     ok 342
644     ok 343
645     ok 344
646     ok 345
647     ok 346
648     ok 347
649 wakaba 1.62 ok 348
650     ok 349
651     ok 350
652     ok 351
653     ok 352
654     ok 353
655     ok 354
656     ok 355
657     ok 356
658     ok 357
659     ok 358
660     ok 359
661 wakaba 1.96 ok 360
662     ok 361
663     ok 362
664     ok 363
665 wakaba 1.129 ok 364
666     ok 365
667     ok 366
668     ok 367
669     ok 368
670     ok 369
671     ok 370
672     ok 371
673     ok 372
674     ok 373
675     ok 374
676     ok 375
677     ok 376
678     ok 377
679     ok 378
680     ok 379
681     ok 380
682     ok 381
683     ok 382
684     ok 383
685     ok 384
686     ok 385
687     ok 386
688     ok 387
689     ok 388
690     ok 389
691     ok 390
692     ok 391
693     ok 392
694     ok 393
695     ok 394
696     ok 395
697     ok 396
698 wakaba 1.130 ok 397
699     ok 398
700     ok 399
701     ok 400
702     ok 401
703     ok 402
704     ok 403
705     ok 404
706     ok 405
707     ok 406
708     ok 407
709     ok 408
710     ok 409
711     ok 410
712     ok 411
713     ok 412
714     ok 413
715     ok 414
716     ok 415
717     ok 416
718 wakaba 1.132 ok 417
719     ok 418
720     ok 419
721     ok 420
722 wakaba 1.136 ok 421
723     ok 422
724     ok 423
725     ok 424
726     ok 425
727     ok 426
728     ok 427
729     ok 428
730     ok 429
731     ok 430
732     ok 431
733     ok 432
734     ok 433
735     ok 434
736 wakaba 1.205 ok 435
737 wakaba 1.136 ok 436
738     ok 437
739     ok 438
740 wakaba 1.205 ok 439
741 wakaba 1.136 ok 440
742     ok 441
743     ok 442
744 wakaba 1.205 ok 443
745 wakaba 1.136 ok 444
746     ok 445
747 wakaba 1.205 ok 446
748 wakaba 1.136 ok 447
749     ok 448
750     ok 449
751     ok 450
752     ok 451
753     ok 452
754     ok 453
755     ok 454
756     ok 455
757     ok 456
758     ok 457
759     ok 458
760     ok 459
761     ok 460
762     ok 461
763     ok 462
764     ok 463
765     ok 464
766     ok 465
767     ok 466
768     ok 467
769     ok 468
770     ok 469
771     ok 470
772     ok 471
773 wakaba 1.141 ok 472
774 wakaba 1.195 ok 473
775     ok 474
776     ok 475
777     ok 476
778     ok 477
779 wakaba 1.205 ok 478
780     ok 479
781     ok 480
782     ok 481
783     ok 482
784     ok 483
785     ok 484
786     ok 485
787     ok 486
788     ok 487
789     ok 488
790     ok 489
791     ok 490
792     ok 491
793     ok 492
794     ok 493
795     ok 494
796     ok 495
797     ok 496
798     ok 497
799     ok 498
800     ok 499
801     ok 500
802     ok 501
803     ok 502
804     ok 503
805     ok 504
806     ok 505
807     ok 506
808     ok 507
809     ok 508
810     ok 509
811     ok 510
812     ok 511
813     ok 512
814     ok 513
815     ok 514
816     ok 515
817     ok 516
818     ok 517
819     ok 518
820     ok 519
821     ok 520
822     ok 521
823     ok 522
824     ok 523
825     ok 524
826     ok 525
827     ok 526
828     ok 527
829     ok 528
830     ok 529
831     ok 530
832     ok 531
833     ok 532
834     ok 533
835     ok 534
836     ok 535
837     ok 536
838     ok 537
839     ok 538
840     ok 539
841 wakaba 1.210 ok 540
842 wakaba 1.205 ok 541
843     ok 542
844     ok 543
845     ok 544
846     ok 545
847     ok 546
848     ok 547
849     ok 548
850     ok 549
851     ok 550
852     ok 551
853     ok 552
854     ok 553
855     ok 554
856     ok 555
857     ok 556
858     ok 557
859     ok 558
860     ok 559
861     ok 560
862     ok 561
863     ok 562
864     ok 563
865     ok 564
866     ok 565
867     ok 566
868     ok 567
869     ok 568
870     ok 569
871     ok 570
872     ok 571
873     ok 572
874     ok 573
875     ok 574
876     ok 575
877     ok 576
878     ok 577
879     ok 578
880     ok 579
881     ok 580
882     ok 581
883     ok 582
884     ok 583
885     ok 584
886     ok 585
887     ok 586
888     ok 587
889     ok 588
890     ok 589
891     ok 590
892     ok 591
893     ok 592
894     ok 593
895     ok 594
896     ok 595
897     ok 596
898     ok 597
899     ok 598
900     ok 599
901     ok 600
902     ok 601
903     ok 602
904     ok 603
905     ok 604
906     ok 605
907     ok 606
908     ok 607
909     ok 608
910     ok 609
911     ok 610
912     ok 611
913     ok 612
914     ok 613
915     ok 614
916     ok 615
917     ok 616
918     ok 617
919     ok 618
920     ok 619
921     ok 620
922     ok 621
923     ok 622
924     ok 623
925     ok 624
926     ok 625
927     ok 626
928     ok 627
929     ok 628
930     ok 629
931     ok 630
932     ok 631
933     ok 632
934     ok 633
935     ok 634
936     ok 635
937     ok 636
938     ok 637
939     ok 638
940     ok 639
941     ok 640
942     ok 641
943     ok 642
944     ok 643
945     ok 644
946     ok 645
947     ok 646
948     ok 647
949     ok 648
950     ok 649
951     ok 650
952     ok 651
953     ok 652
954     ok 653
955     ok 654
956     ok 655
957     ok 656
958     ok 657
959     ok 658
960     ok 659
961     ok 660
962     ok 661
963     ok 662
964     ok 663
965     ok 664
966     ok 665
967     ok 666
968     ok 667
969     ok 668
970     ok 669
971     ok 670
972     ok 671
973     ok 672
974     ok 673
975     ok 674
976     ok 675
977     ok 676
978     ok 677
979     ok 678
980     ok 679
981     ok 680
982     ok 681
983     ok 682
984     ok 683
985     ok 684
986     ok 685
987     ok 686
988     ok 687
989     ok 688
990     ok 689
991     ok 690
992     ok 691
993     ok 692
994     ok 693
995     ok 694
996     ok 695
997     ok 696
998     ok 697
999     ok 698
1000     ok 699
1001     ok 700
1002     ok 701
1003     ok 702
1004     ok 703
1005     ok 704
1006     ok 705
1007     ok 706
1008     ok 707
1009     ok 708
1010     ok 709
1011     ok 710
1012     ok 711
1013     ok 712
1014     ok 713
1015     ok 714
1016     ok 715
1017     ok 716
1018     ok 717
1019     ok 718
1020     ok 719
1021     ok 720
1022     ok 721
1023     ok 722
1024     ok 723
1025     ok 724
1026     ok 725
1027     ok 726
1028     ok 727
1029     ok 728
1030     ok 729
1031     ok 730
1032     ok 731
1033     ok 732
1034     ok 733
1035     ok 734
1036     ok 735
1037     ok 736
1038     ok 737
1039     ok 738
1040     ok 739
1041     ok 740
1042     ok 741
1043     ok 742
1044     ok 743
1045     ok 744
1046     ok 745
1047     ok 746
1048     ok 747
1049     ok 748
1050     ok 749
1051     ok 750
1052     ok 751
1053     ok 752
1054     ok 753
1055     ok 754
1056     ok 755
1057     ok 756
1058     ok 757
1059     ok 758
1060     ok 759
1061     ok 760
1062     ok 761
1063     ok 762
1064     ok 763
1065     ok 764
1066     ok 765
1067     ok 766
1068     ok 767
1069     ok 768
1070     ok 769
1071     ok 770
1072     ok 771
1073     ok 772
1074     ok 773
1075     ok 774
1076     ok 775
1077     ok 776
1078     ok 777
1079     ok 778
1080     ok 779
1081     ok 780
1082     ok 781
1083     ok 782
1084     ok 783
1085     ok 784
1086     ok 785
1087     ok 786
1088     ok 787
1089     ok 788
1090     ok 789
1091     ok 790
1092     ok 791
1093     ok 792
1094     ok 793
1095     ok 794
1096     ok 795
1097     ok 796
1098     ok 797
1099     ok 798
1100     ok 799
1101     ok 800
1102     ok 801
1103     ok 802
1104     ok 803
1105     ok 804
1106     ok 805
1107     ok 806
1108     ok 807
1109     ok 808
1110     ok 809
1111     ok 810
1112     ok 811
1113     ok 812
1114     ok 813
1115     ok 814
1116     ok 815
1117     ok 816
1118     ok 817
1119     ok 818
1120     ok 819
1121     ok 820
1122     ok 821
1123     ok 822
1124     ok 823
1125     ok 824
1126     ok 825
1127     ok 826
1128     ok 827
1129     ok 828
1130     ok 829
1131     ok 830
1132     ok 831
1133     ok 832
1134     ok 833
1135     ok 834
1136     ok 835
1137     ok 836
1138     ok 837
1139     ok 838
1140     ok 839
1141     ok 840
1142     ok 841
1143     ok 842
1144     ok 843
1145     ok 844
1146     ok 845
1147 wakaba 1.286 ok 846
1148     ok 847
1149     ok 848
1150     ok 849
1151     ok 850
1152 wakaba 1.205 # t/tokenizer/xmlViolation.test
1153 wakaba 1.286 not ok 851
1154     # Test 851 got: "$VAR1 = [\n qq'ParseError',\n [\n qq'Character',\n qq'a\\x{FFFF}b'\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #851)
1155 wakaba 1.206 # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'Character',\n qq'a\\x{FFFD}b'\n ]\n ];\n" (Non-XML character: qq'a\x{FFFF}b')
1156     # Line 5 is changed:
1157     # - " qq'a\\x{FFFD}b'\n"
1158     # + " qq'a\\x{FFFF}b'\n"
1159 wakaba 1.286 not ok 852
1160     # Test 852 got: "$VAR1 = [\n [\n qq'Character',\n qq'a\\x{0C}b'\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #852)
1161 wakaba 1.206 # Expected: "$VAR1 = [\n [\n qq'Character',\n qq'a b'\n ]\n ];\n" (Non-XML space: qq'a\x{0C}b')
1162     # Line 4 is changed:
1163     # - " qq'a b'\n"
1164     # + " qq'a\\x{0C}b'\n"
1165 wakaba 1.286 not ok 853
1166 wakaba 1.302 # Test 853 got: "$VAR1 = [\n qq'ParseError',\n [\n qq'Comment',\n qq' foo -- bar '\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #853)
1167 wakaba 1.206 # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'Comment',\n qq' foo - - bar '\n ]\n ];\n" (Double hyphen in comment: qq'<!-- foo -- bar -->')
1168 wakaba 1.302 # Line 5 is changed:
1169 wakaba 1.206 # - " qq' foo - - bar '\n"
1170     # + " qq' foo -- bar '\n"
1171 wakaba 1.286 ok 854
1172 wakaba 1.205 # t/tokenizer-test-1.test
1173     ok 855
1174     ok 856
1175     ok 857
1176     ok 858
1177     ok 859
1178     ok 860
1179     ok 861
1180     ok 862
1181     ok 863
1182     ok 864
1183     ok 865
1184     ok 866
1185     ok 867
1186     ok 868
1187     ok 869
1188     ok 870
1189     ok 871
1190     ok 872
1191     ok 873
1192     ok 874
1193     ok 875
1194     ok 876
1195     ok 877
1196     ok 878
1197     ok 879
1198     ok 880
1199     ok 881
1200     ok 882
1201     ok 883
1202     ok 884
1203     ok 885
1204     ok 886
1205     ok 887
1206     ok 888
1207     ok 889
1208     ok 890
1209     ok 891
1210     ok 892
1211     ok 893
1212     ok 894
1213     ok 895
1214     ok 896
1215     ok 897
1216     ok 898
1217     ok 899
1218     ok 900
1219     ok 901
1220     ok 902
1221     ok 903
1222     ok 904
1223     ok 905
1224     ok 906
1225     ok 907
1226     ok 908
1227     ok 909
1228     ok 910
1229     ok 911
1230     ok 912
1231     ok 913
1232     ok 914
1233     ok 915
1234     ok 916
1235     ok 917
1236     ok 918
1237     ok 919
1238     ok 920
1239     ok 921
1240     ok 922
1241     ok 923
1242     ok 924
1243     ok 925
1244 wakaba 1.298 ok 926
1245     ok 927
1246     not ok 928
1247     # Test 928 got: "$VAR1 = [\n [\n qq'Comment',\n qq'--x'\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #928)
1248 wakaba 1.296 # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'Comment',\n qq'--x'\n ]\n ];\n" (<!----x-->: qq'<!----x-->')
1249     # Line 2 is missing:
1250     # - " qq'ParseError',\n"
1251 wakaba 1.205 ok 929
1252     ok 930
1253     ok 931
1254     ok 932
1255     ok 933
1256     ok 934
1257     ok 935
1258     ok 936
1259     ok 937
1260 wakaba 1.281 ok 938
1261     ok 939
1262     ok 940
1263     ok 941
1264     ok 942
1265     ok 943
1266     ok 944
1267     ok 945
1268 wakaba 1.285 ok 946
1269 wakaba 1.205 ok 947
1270     ok 948
1271     ok 949
1272     ok 950
1273     ok 951
1274     ok 952
1275     ok 953
1276     ok 954
1277     ok 955
1278     ok 956
1279     ok 957
1280     ok 958
1281     ok 959
1282     ok 960
1283     ok 961
1284     ok 962
1285 wakaba 1.286 ok 963
1286     ok 964
1287 wakaba 1.290 ok 965
1288     ok 966
1289     ok 967
1290 wakaba 1.298 ok 968
1291     ok 969
1292     not ok 970
1293     # Test 970 got: "$VAR1 = [\n qq'ParseError',\n qq'ParseError',\n [\n qq'Character',\n qq'\\x{FFFD}\\x{DFFF}'\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #970)
1294 wakaba 1.285 # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'Character',\n qq'\\x{FFFD}'\n ],\n qq'ParseError',\n [\n qq'Character',\n qq'\\x{DFFF}'\n ]\n ];\n" (surrogate character reference: qq'&#xD800;\x{DFFF}')
1295     # Lines 3-3 are missing:
1296     # - " [\n"
1297     # - " qq'Character',\n"
1298     # - " qq'\\x{FFFD}'\n"
1299     # - " ],\n"
1300     # Line 6 is changed:
1301     # - " qq'\\x{DFFF}'\n"
1302     # + " qq'\\x{FFFD}\\x{DFFF}'\n"
1303 wakaba 1.205 ok 971
1304     ok 972
1305     ok 973
1306     ok 974
1307     ok 975
1308     ok 976
1309     ok 977
1310     ok 978
1311     ok 979
1312     ok 980
1313     ok 981
1314     ok 982
1315     ok 983
1316     ok 984
1317     ok 985
1318     ok 986
1319     ok 987
1320     ok 988
1321     ok 989
1322     ok 990
1323     ok 991
1324     ok 992
1325     ok 993
1326     ok 994
1327     ok 995
1328     ok 996
1329     ok 997
1330     ok 998
1331     ok 999
1332     ok 1000
1333     ok 1001
1334     ok 1002
1335     ok 1003
1336     ok 1004
1337     ok 1005
1338     ok 1006
1339     ok 1007
1340     ok 1008
1341     ok 1009
1342     ok 1010
1343     ok 1011
1344     ok 1012
1345     ok 1013
1346     ok 1014
1347     ok 1015
1348     ok 1016
1349     ok 1017
1350     ok 1018
1351 wakaba 1.206 ok 1019
1352     ok 1020
1353     ok 1021
1354     ok 1022
1355     ok 1023
1356     ok 1024
1357     ok 1025
1358 wakaba 1.240 ok 1026
1359 wakaba 1.206 ok 1027
1360     ok 1028
1361     ok 1029
1362 wakaba 1.240 ok 1030
1363 wakaba 1.206 ok 1031
1364     ok 1032
1365     ok 1033
1366 wakaba 1.240 ok 1034
1367 wakaba 1.206 ok 1035
1368     ok 1036
1369 wakaba 1.240 ok 1037
1370 wakaba 1.205 ok 1038
1371     ok 1039
1372 wakaba 1.298 ok 1040
1373     ok 1041
1374 wakaba 1.299 ok 1042
1375 wakaba 1.298 ok 1043
1376 wakaba 1.299 ok 1044
1377 wakaba 1.205 ok 1045
1378     ok 1046
1379     ok 1047
1380     ok 1048
1381     ok 1049
1382     ok 1050
1383     ok 1051
1384     ok 1052
1385     ok 1053
1386     ok 1054
1387     ok 1055
1388     ok 1056
1389     ok 1057
1390     ok 1058
1391     ok 1059
1392     ok 1060
1393     ok 1061
1394 wakaba 1.206 ok 1062
1395     ok 1063
1396     ok 1064
1397     ok 1065
1398     ok 1066
1399     ok 1067
1400     ok 1068
1401 wakaba 1.227 ok 1069
1402     ok 1070
1403     ok 1071
1404     ok 1072
1405     ok 1073
1406 wakaba 1.247 ok 1074
1407     ok 1075
1408     ok 1076
1409     ok 1077
1410     ok 1078
1411     ok 1079
1412     ok 1080
1413 wakaba 1.281 ok 1081
1414     ok 1082
1415     ok 1083
1416     ok 1084
1417     ok 1085
1418     ok 1086
1419     ok 1087
1420     ok 1088
1421     ok 1089
1422     ok 1090
1423     ok 1091
1424     ok 1092
1425     ok 1093
1426     ok 1094
1427     ok 1095
1428     ok 1096
1429     ok 1097
1430 wakaba 1.285 ok 1098
1431     ok 1099
1432     ok 1100
1433     ok 1101
1434     ok 1102
1435     ok 1103
1436     ok 1104
1437     ok 1105
1438 wakaba 1.305 ok 1106
1439 wakaba 1.285 ok 1107
1440     ok 1108
1441     ok 1109
1442 wakaba 1.305 ok 1110
1443 wakaba 1.285 ok 1111
1444     ok 1112
1445     ok 1113
1446     ok 1114
1447     ok 1115
1448     ok 1116
1449     ok 1117
1450     ok 1118
1451     ok 1119
1452     ok 1120
1453     ok 1121
1454     ok 1122
1455     ok 1123
1456     ok 1124
1457     ok 1125
1458     ok 1126
1459     ok 1127
1460 wakaba 1.305 ok 1128
1461 wakaba 1.286 ok 1129
1462 wakaba 1.305 ok 1130
1463 wakaba 1.290 ok 1131
1464     ok 1132
1465 wakaba 1.293 ok 1133
1466     ok 1134
1467 wakaba 1.308 ok 1135
1468 wakaba 1.298 ok 1136
1469 wakaba 1.309 ok 1137
1470 wakaba 1.310 ok 1138
1471 wakaba 1.306 not ok 1139
1472     # Test 1139 got: "$VAR1 = [\n qq'ParseError',\n qq'ParseError',\n [\n qq'EndTag',\n qq'p'\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #1139)
1473     # Expected: "$VAR1 = [\n qq'ParseError',\n qq'ParseError'\n ];\n" (end tag not closed (attribute value single quote): qq'</p class=\x{27}')
1474     # Line 3 is changed:
1475     # - " qq'ParseError'\n"
1476     # + " qq'ParseError',\n"
1477     # Got 4 extra lines at line 4:
1478     # + " [\n"
1479     # + " qq'EndTag',\n"
1480     # + " qq'p'\n"
1481     # + " ]\n"
1482 wakaba 1.301 ok 1140
1483 wakaba 1.306 not ok 1141
1484     # Test 1141 got: "$VAR1 = [\n qq'ParseError',\n qq'ParseError',\n [\n qq'EndTag',\n qq'p'\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #1141)
1485     # Expected: "$VAR1 = [\n qq'ParseError',\n qq'ParseError'\n ];\n" (end tag not closed (attribute value unquoted): qq'</p class=a')
1486     # Line 3 is changed:
1487     # - " qq'ParseError'\n"
1488     # + " qq'ParseError',\n"
1489     # Got 4 extra lines at line 4:
1490     # + " [\n"
1491     # + " qq'EndTag',\n"
1492     # + " qq'p'\n"
1493     # + " ]\n"
1494 wakaba 1.305 ok 1142
1495     ok 1143
1496 wakaba 1.302 ok 1144
1497     ok 1145
1498 wakaba 1.306 not ok 1146
1499     # Test 1146 got: "$VAR1 = [\n qq'ParseError',\n [\n qq'StartTag',\n qq'p',\n {\n qq'class' => qq'a'\n }\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #1146)
1500     # Expected: "$VAR1 = [\n qq'ParseError'\n ];\n" (start tag not closed (after attribute value (quoted)): qq'<p class=\x{27}a\x{27}')
1501     # Line 2 is changed:
1502     # - " qq'ParseError'\n"
1503     # + " qq'ParseError',\n"
1504     # Got 7 extra lines at line 3:
1505     # + " [\n"
1506     # + " qq'StartTag',\n"
1507     # + " qq'p',\n"
1508     # + " {\n"
1509     # + " qq'class' => qq'a'\n"
1510     # + " }\n"
1511     # + " ]\n"
1512     not ok 1147
1513     # Test 1147 got: "$VAR1 = [\n qq'ParseError',\n qq'ParseError',\n [\n qq'EndTag',\n qq'p'\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #1147)
1514     # Expected: "$VAR1 = [\n qq'ParseError',\n qq'ParseError'\n ];\n" (end tag not closed (after attribute value (quoted)): qq'</p class=\x{27}a\x{27}')
1515     # Line 3 is changed:
1516     # - " qq'ParseError'\n"
1517     # + " qq'ParseError',\n"
1518     # Got 4 extra lines at line 4:
1519     # + " [\n"
1520     # + " qq'EndTag',\n"
1521     # + " qq'p'\n"
1522     # + " ]\n"
1523     not ok 1148
1524     # Test 1148 got: "$VAR1 = [\n qq'ParseError',\n [\n qq'StartTag',\n qq'p',\n {\n qq'class' => qq'a'\n }\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #1148)
1525     # Expected: "$VAR1 = [\n qq'ParseError'\n ];\n" (start tag not closed (self-closing start tag): qq'<p class=\x{27}a\x{27}/')
1526     # Line 2 is changed:
1527     # - " qq'ParseError'\n"
1528     # + " qq'ParseError',\n"
1529     # Got 7 extra lines at line 3:
1530     # + " [\n"
1531     # + " qq'StartTag',\n"
1532     # + " qq'p',\n"
1533     # + " {\n"
1534     # + " qq'class' => qq'a'\n"
1535     # + " }\n"
1536     # + " ]\n"
1537     not ok 1149
1538     # Test 1149 got: "$VAR1 = [\n qq'ParseError',\n [\n qq'EndTag',\n qq'p'\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #1149)
1539     # Expected: "$VAR1 = [\n qq'ParseError'\n ];\n" (end tag not closed (self-closing start tag): qq'</p/')
1540     # Line 2 is changed:
1541     # - " qq'ParseError'\n"
1542     # + " qq'ParseError',\n"
1543     # Got 4 extra lines at line 3:
1544     # + " [\n"
1545     # + " qq'EndTag',\n"
1546     # + " qq'p'\n"
1547     # + " ]\n"
1548     not ok 1150
1549     # Test 1150 got: "$VAR1 = [\n qq'ParseError',\n qq'ParseError',\n [\n qq'EndTag',\n qq'p'\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #1150)
1550     # Expected: "$VAR1 = [\n qq'ParseError',\n qq'ParseError'\n ];\n" (end tag not closed (self-closing start tag): qq'</p class=\x{27}a\x{27}/')
1551     # Line 3 is changed:
1552     # - " qq'ParseError'\n"
1553     # + " qq'ParseError',\n"
1554     # Got 4 extra lines at line 4:
1555     # + " [\n"
1556     # + " qq'EndTag',\n"
1557     # + " qq'p'\n"
1558     # + " ]\n"
