/[suikacvs]/markup/html/whatpm/t/tokenizer-result.txt
Revision 1.96
Sun Feb 17 12:37:52 2008 UTC by wakaba
Branch: MAIN
Changes since 1.95: +69 -25 lines
File MIME type: text/plain
html5lib tests updated

1 wakaba 1.62 1..359
2 wakaba 1.1 # Running under perl version 5.008007 for linux
3 wakaba 1.96 # Current time local: Sun Feb 17 21:33:49 2008
4     # Current time GMT: Sun Feb 17 12:33:49 2008
5 wakaba 1.1 # Using Test.pm version 1.25
6 wakaba 1.11 # t/tokenizer/test1.test
7 wakaba 1.20 ok 1
8     ok 2
9     ok 3
10 wakaba 1.1 ok 4
11 wakaba 1.20 ok 5
12 wakaba 1.1 ok 6
13     ok 7
14     ok 8
15     ok 9
16     ok 10
17     ok 11
18     ok 12
19     ok 13
20     ok 14
21     ok 15
22     ok 16
23     ok 17
24     ok 18
25     ok 19
26     ok 20
27     ok 21
28 wakaba 1.25 ok 22
29     ok 23
30 wakaba 1.1 ok 24
31 wakaba 1.22 ok 25
32     ok 26
33     ok 27
34 wakaba 1.1 ok 28
35     ok 29
36     ok 30
37     ok 31
38     ok 32
39     ok 33
40 wakaba 1.18 ok 34
41 wakaba 1.1 ok 35
42     ok 36
43     ok 37
44 wakaba 1.8 ok 38
45 wakaba 1.28 ok 39
46     ok 40
47 wakaba 1.43 ok 41
48     ok 42
49 wakaba 1.11 # t/tokenizer/test2.test
50 wakaba 1.43 not ok 43
51 wakaba 1.48 # Test 43 got: "$VAR1 = [\n qq'ParseError',\n qq'ParseError',\n [\n qq'DOCTYPE',\n undef,\n undef,\n undef,\n 0\n ]\n ];\n" (t/HTML-tokenizer.t at line 158 fail #43)
52 wakaba 1.47 # Expected: "$VAR1 = [\n qq'ParseError',\n qq'ParseError',\n [\n qq'DOCTYPE',\n qq'',\n undef,\n undef,\n 0\n ]\n ];\n" (DOCTYPE without name: qq'<!DOCTYPE>')
53 wakaba 1.20 # Line 6 is changed:
54 wakaba 1.8 # - " qq'',\n"
55 wakaba 1.20 # + " undef,\n"
56 wakaba 1.48 # t/HTML-tokenizer.t line 158 is: ok $parser_dump, $expected_dump,
57 wakaba 1.20 ok 44
58     ok 45
59     ok 46
60     ok 47
61     ok 48
62     ok 49
63     ok 50
64     ok 51
65 wakaba 1.96 not ok 52
66     # Test 52 got: "$VAR1 = [\n qq'ParseError',\n [\n qq'DOCTYPE',\n qq'html',\n qq'>x',\n undef,\n 0\n ]\n ];\n" (t/HTML-tokenizer.t at line 158 fail #52)
67     # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'DOCTYPE',\n qq'html',\n qq'',\n undef,\n 0\n ],\n [\n qq'Character',\n qq'x'\n ]\n ];\n" (DOCTYPE with > in double-quoted publicId: qq'<!DOCTYPE html PUBLIC ">x')
68     # Line 6 is changed:
69     # - " qq'',\n"
70     # + " qq'>x',\n"
71     # Lines 9-9 are missing:
72     # - " ],\n"
73     # - " [\n"
74     # - " qq'Character',\n"
75     # - " qq'x'\n"
76     not ok 53
77     # Test 53 got: "$VAR1 = [\n qq'ParseError',\n [\n qq'DOCTYPE',\n qq'html',\n qq'>x',\n undef,\n 0\n ]\n ];\n" (t/HTML-tokenizer.t at line 158 fail #53)
78     # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'DOCTYPE',\n qq'html',\n qq'',\n undef,\n 0\n ],\n [\n qq'Character',\n qq'x'\n ]\n ];\n" (DOCTYPE with > in single-quoted publicId: qq'<!DOCTYPE html PUBLIC \x{27}>x')
79     # Line 6 is changed:
80     # - " qq'',\n"
81     # + " qq'>x',\n"
82     # Lines 9-9 are missing:
83     # - " ],\n"
84     # - " [\n"
85     # - " qq'Character',\n"
86     # - " qq'x'\n"
87     not ok 54
88     # Test 54 got: "$VAR1 = [\n qq'ParseError',\n [\n qq'DOCTYPE',\n qq'html',\n qq'foo',\n qq'>x',\n 0\n ]\n ];\n" (t/HTML-tokenizer.t at line 158 fail #54)
89     # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'DOCTYPE',\n qq'html',\n qq'foo',\n qq'',\n 0\n ],\n [\n qq'Character',\n qq'x'\n ]\n ];\n" (DOCTYPE with > in double-quoted systemId: qq'<!DOCTYPE html PUBLIC "foo" ">x')
90     # Line 7 is changed:
91     # - " qq'',\n"
92     # + " qq'>x',\n"
93     # Lines 9-9 are missing:
94     # - " ],\n"
95     # - " [\n"
96     # - " qq'Character',\n"
97     # - " qq'x'\n"
98     not ok 55
99     # Test 55 got: "$VAR1 = [\n qq'ParseError',\n [\n qq'DOCTYPE',\n qq'html',\n qq'foo',\n qq'>x',\n 0\n ]\n ];\n" (t/HTML-tokenizer.t at line 158 fail #55)
100     # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'DOCTYPE',\n qq'html',\n qq'foo',\n qq'',\n 0\n ],\n [\n qq'Character',\n qq'x'\n ]\n ];\n" (DOCTYPE with > in single-quoted systemId: qq'<!DOCTYPE html PUBLIC \x{27}foo\x{27} \x{27}>x')
101     # Line 7 is changed:
102     # - " qq'',\n"
103     # + " qq'>x',\n"
104     # Lines 9-9 are missing:
105     # - " ],\n"
106     # - " [\n"
107     # - " qq'Character',\n"
108     # - " qq'x'\n"
109 wakaba 1.9 ok 56
110     ok 57
111 wakaba 1.1 ok 58
112     ok 59
113     ok 60
114 wakaba 1.19 ok 61
115 wakaba 1.1 ok 62
116     ok 63
117     ok 64
118     ok 65
119     ok 66
120     ok 67
121     ok 68
122     ok 69
123     ok 70
124 wakaba 1.34 ok 71
125     ok 72
126 wakaba 1.1 ok 73
127     ok 74
128 wakaba 1.21 ok 75
129     ok 76
130 wakaba 1.1 ok 77
131 wakaba 1.96 # t/tokenizer/test3.test
132 wakaba 1.1 ok 78
133     ok 79
134     ok 80
135 wakaba 1.34 ok 81
136 wakaba 1.15 ok 82
137 wakaba 1.1 ok 83
138     ok 84
139 wakaba 1.25 ok 85
140     ok 86
141 wakaba 1.34 ok 87
142 wakaba 1.1 ok 88
143     ok 89
144     ok 90
145     ok 91
146     ok 92
147     ok 93
148     ok 94
149 wakaba 1.8 ok 95
150     ok 96
151     ok 97
152     ok 98
153     ok 99
154     ok 100
155 wakaba 1.96 ok 101
156     ok 102
157     ok 103
158     ok 104
159     not ok 105
160     # Test 105 got: "$VAR1 = [\n qq'ParseError',\n [\n qq'DOCTYPE',\n undef,\n undef,\n undef,\n 0\n ]\n ];\n" (t/HTML-tokenizer.t at line 158 fail #105)
161 wakaba 1.47 # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'DOCTYPE',\n qq'',\n undef,\n undef,\n 0\n ]\n ];\n" (<!doctype >: qq'<!doctype >')
162 wakaba 1.43 # Line 5 is changed:
163     # - " qq'',\n"
164     # + " undef,\n"
165 wakaba 1.96 not ok 106
166     # Test 106 got: "$VAR1 = [\n qq'ParseError',\n [\n qq'DOCTYPE',\n undef,\n undef,\n undef,\n 0\n ]\n ];\n" (t/HTML-tokenizer.t at line 158 fail #106)
167 wakaba 1.47 # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'DOCTYPE',\n qq'',\n undef,\n undef,\n 0\n ]\n ];\n" (<!doctype : qq'<!doctype ')
168 wakaba 1.43 # Line 5 is changed:
169     # - " qq'',\n"
170     # + " undef,\n"
171 wakaba 1.8 ok 107
172     ok 108
173     ok 109
174     ok 110
175     ok 111
176     ok 112
177     ok 113
178 wakaba 1.10 ok 114
179     ok 115
180     ok 116
181     ok 117
182     ok 118
183     ok 119
184     ok 120
185     ok 121
186 wakaba 1.39 ok 122
187 wakaba 1.18 ok 123
188     ok 124
189     ok 125
190     ok 126
191 wakaba 1.20 ok 127
192     ok 128
193     ok 129
194     ok 130
195     ok 131
196     ok 132
197     ok 133
198     ok 134
199     ok 135
200     ok 136
201 wakaba 1.21 ok 137
202     ok 138
203 wakaba 1.20 ok 139
204     ok 140
205     ok 141
206 wakaba 1.28 ok 142
207 wakaba 1.20 ok 143
208     ok 144
209     ok 145
210     ok 146
211 wakaba 1.22 ok 147
212     ok 148
213     ok 149
214     ok 150
215     ok 151
216     ok 152
217     ok 153
218     ok 154
219     ok 155
220     ok 156
221 wakaba 1.28 ok 157
222     ok 158
223     ok 159
224     ok 160
225     ok 161
226     ok 162
227     ok 163
228     ok 164
229     ok 165
230     ok 166
231     ok 167
232     ok 168
233 wakaba 1.96 # t/tokenizer/test4.test
234 wakaba 1.28 ok 169
235     ok 170
236     ok 171
237     ok 172
238     ok 173
239     ok 174
240     ok 175
241     ok 176
242     ok 177
243     ok 178
244 wakaba 1.33 ok 179
245 wakaba 1.34 ok 180
246 wakaba 1.38 ok 181
247     ok 182
248 wakaba 1.43 ok 183
249     ok 184
250     ok 185
251     ok 186
252     ok 187
253     ok 188
254     ok 189
255     ok 190
256     ok 191
257     ok 192
258     ok 193
259     ok 194
260     ok 195
261     ok 196
262     ok 197
263 wakaba 1.96 ok 198
264     ok 199
265     ok 200
266     ok 201
267     not ok 202
268     # Test 202 got: "$VAR1 = [\n qq'ParseError',\n qq'ParseError',\n [\n qq'Comment',\n qq'doc'\n ],\n [\n qq'Character',\n qq'\\x{FFFD}'\n ]\n ];\n" (t/HTML-tokenizer.t at line 158 fail #202)
269 wakaba 1.47 # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'Comment',\n qq'doc'\n ],\n qq'ParseError',\n [\n qq'Character',\n qq'\\x{FFFD}'\n ]\n ];\n" (U+0000 in lookahead region after non-matching character: qq'<!doc>\x{00}')
270 wakaba 1.43 # Got 1 extra line at line 3:
271     # + " qq'ParseError',\n"
272     # Line 8 is missing:
273     # - " qq'ParseError',\n"
274     ok 203
275     ok 204
276     ok 205
277     ok 206
278     ok 207
279     ok 208
280     ok 209
281     ok 210
282     ok 211
283     ok 212
284     ok 213
285     ok 214
286     ok 215
287     ok 216
288 wakaba 1.96 # t/tokenizer/contentModelFlags.test
289 wakaba 1.43 ok 217
290     ok 218
291     ok 219
292     ok 220
293     ok 221
294     ok 222
295     ok 223
296     ok 224
297     ok 225
298     ok 226
299     ok 227
300     ok 228
301     ok 229
302 wakaba 1.96 # t/tokenizer/escapeFlag.test
303 wakaba 1.43 ok 230
304     ok 231
305     ok 232
306     ok 233
307     ok 234
308     ok 235
309 wakaba 1.96 # t/tokenizer-test-1.test
310 wakaba 1.43 ok 236
311     ok 237
312     ok 238
313     ok 239
314     ok 240
315     ok 241
316     ok 242
317     ok 243
318     ok 244
319     ok 245
320     ok 246
321     ok 247
322     ok 248
323     ok 249
324     ok 250
325     ok 251
326     ok 252
327     ok 253
328     ok 254
329     ok 255
330     ok 256
331     ok 257
332     ok 258
333     ok 259
334     ok 260
335     ok 261
336     ok 262
337     ok 263
338     ok 264
339     ok 265
340     ok 266
341     ok 267
342     ok 268
343     ok 269
344     ok 270
345     ok 271
346     ok 272
347     ok 273
348     ok 274
349     ok 275
350     ok 276
351     ok 277
352     ok 278
353     ok 279
354     ok 280
355     ok 281
356     ok 282
357     ok 283
358     ok 284
359     ok 285
360     ok 286
361     ok 287
362     ok 288
363     ok 289
364     ok 290
365     ok 291
366     ok 292
367     ok 293
368     ok 294
369     ok 295
370     ok 296
371     ok 297
372     ok 298
373     ok 299
374     ok 300
375     ok 301
376     ok 302
377     ok 303
378     ok 304
379     ok 305
380     ok 306
381     ok 307
382     ok 308
383     ok 309
384     ok 310
385     ok 311
386     ok 312
387     ok 313
388     ok 314
389     ok 315
390     ok 316
391     ok 317
392     ok 318
393     ok 319
394     ok 320
395     ok 321
396     ok 322
397     ok 323
398     ok 324
399     ok 325
400     ok 326
401     ok 327
402     ok 328
403     ok 329
404     ok 330
405     ok 331
406     ok 332
407     ok 333
408     ok 334
409     ok 335
410     ok 336
411     ok 337
412 wakaba 1.59 ok 338
413     ok 339
414     ok 340
415     ok 341
416     ok 342
417     ok 343
418     ok 344
419     ok 345
420     ok 346
421     ok 347
422 wakaba 1.62 ok 348
423     ok 349
424     ok 350
425     ok 351
426     ok 352
427     ok 353
428     ok 354
429     ok 355
430     ok 356
431     ok 357
432     ok 358
433     ok 359
434 wakaba 1.96 ok 360
435     ok 361
436     ok 362
437     ok 363
