/[suikacvs]/markup/html/whatpm/t/tokenizer-result.txt
Revision 1.304
Sat Sep 5 10:02:01 2009 UTC by wakaba
Branch: MAIN
Changes since 1.303: +86 -9 lines
File MIME type: text/plain
updated

1 wakaba 1.287 1..1129
2 wakaba 1.273 # Running under perl version 5.010000 for linux
3 wakaba 1.304 # Current time local: Sat Sep 5 19:01:52 2009
4     # Current time GMT: Sat Sep 5 10:01:52 2009
5 wakaba 1.1 # Using Test.pm version 1.25
6 wakaba 1.11 # t/tokenizer/test1.test
7 wakaba 1.20 ok 1
8 wakaba 1.298 not ok 2
9     # Test 2 got: "$VAR1 = [\n [\n qq'DOCTYPE',\n qq'html',\n undef,\n undef,\n 1\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #2)
10     # Expected: "$VAR1 = [\n [\n qq'DOCTYPE',\n qq'HTML',\n undef,\n undef,\n 1\n ]\n ];\n" (Correct Doctype uppercase: qq'<!DOCTYPE HTML>')
11     # Line 4 is changed:
12     # - " qq'HTML',\n"
13     # + " qq'html',\n"
14     # t/HTML-tokenizer.t line 205 is: ok $parser_dump, $expected_dump,
15     not ok 3
16     # Test 3 got: "$VAR1 = [\n [\n qq'DOCTYPE',\n qq'html',\n undef,\n undef,\n 1\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #3)
17     # Expected: "$VAR1 = [\n [\n qq'DOCTYPE',\n qq'HtMl',\n undef,\n undef,\n 1\n ]\n ];\n" (Correct Doctype mixed case: qq'<!DOCTYPE HtMl>')
18     # Line 4 is changed:
19     # - " qq'HtMl',\n"
20     # + " qq'html',\n"
21 wakaba 1.1 ok 4
22 wakaba 1.20 ok 5
23 wakaba 1.1 ok 6
24     ok 7
25     ok 8
26     ok 9
27     ok 10
28     ok 11
29     ok 12
30     ok 13
31     ok 14
32 wakaba 1.130 ok 15
33 wakaba 1.1 ok 16
34     ok 17
35     ok 18
36 wakaba 1.296 not ok 19
37     # Test 19 got: "$VAR1 = [\n [\n qq'Comment',\n qq' --comment '\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #19)
38     # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'Comment',\n qq' --comment '\n ]\n ];\n" (Comment, two central dashes: qq'<!-- --comment -->')
39     # Line 2 is missing:
40     # - " qq'ParseError',\n"
41 wakaba 1.1 ok 20
42     ok 21
43 wakaba 1.25 ok 22
44     ok 23
45 wakaba 1.1 ok 24
46 wakaba 1.22 ok 25
47     ok 26
48     ok 27
49 wakaba 1.1 ok 28
50     ok 29
51     ok 30
52     ok 31
53     ok 32
54     ok 33
55 wakaba 1.18 ok 34
56 wakaba 1.1 ok 35
57     ok 36
58     ok 37
59 wakaba 1.8 ok 38
60 wakaba 1.28 ok 39
61     ok 40
62 wakaba 1.43 ok 41
63     ok 42
64 wakaba 1.286 ok 43
65 wakaba 1.11 # t/tokenizer/test2.test
66 wakaba 1.286 not ok 44
67     # Test 44 got: "$VAR1 = [\n qq'ParseError',\n qq'ParseError',\n [\n qq'DOCTYPE',\n undef,\n undef,\n undef,\n 0\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #44)
68 wakaba 1.47 # Expected: "$VAR1 = [\n qq'ParseError',\n qq'ParseError',\n [\n qq'DOCTYPE',\n qq'',\n undef,\n undef,\n 0\n ]\n ];\n" (DOCTYPE without name: qq'<!DOCTYPE>')
69 wakaba 1.20 # Line 6 is changed:
70 wakaba 1.8 # - " qq'',\n"
71 wakaba 1.20 # + " undef,\n"
72     ok 45
73     ok 46
74     ok 47
75     ok 48
76     ok 49
77     ok 50
78     ok 51
79 wakaba 1.97 ok 52
80     ok 53
81     ok 54
82     ok 55
83 wakaba 1.9 ok 56
84     ok 57
85 wakaba 1.1 ok 58
86     ok 59
87     ok 60
88 wakaba 1.19 ok 61
89 wakaba 1.1 ok 62
90     ok 63
91 wakaba 1.130 ok 64
92 wakaba 1.1 ok 65
93 wakaba 1.240 ok 66
94     ok 67
95     ok 68
96 wakaba 1.1 ok 69
97     ok 70
98 wakaba 1.34 ok 71
99     ok 72
100 wakaba 1.1 ok 73
101     ok 74
102 wakaba 1.21 ok 75
103     ok 76
104 wakaba 1.1 ok 77
105 wakaba 1.141 ok 78
106 wakaba 1.1 ok 79
107 wakaba 1.304 not ok 80
108     # Test 80 got: "$VAR1 = [];\n" (t/HTML-tokenizer.t at line 205 fail #80)
109     # Expected: "$VAR1 = [\n [\n qq'StartTag',\n qq'h',\n {}\n ]\n ];\n" (Start tag with no attributes but space before the greater-than sign: qq'<h >')
110     # Line 1 is changed:
111     # - "$VAR1 = [\n"
112     # + "$VAR1 = [];\n"
113     # Lines 2-2 are missing:
114     # - " [\n"
115     # - " qq'StartTag',\n"
116     # - " qq'h',\n"
117     # - " {}\n"
118     # - " ]\n"
119     # - " ];\n"
120 wakaba 1.34 ok 81
121 wakaba 1.286 # t/tokenizer/test3.test
122 wakaba 1.15 ok 82
123 wakaba 1.1 ok 83
124     ok 84
125 wakaba 1.25 ok 85
126     ok 86
127 wakaba 1.34 ok 87
128 wakaba 1.1 ok 88
129     ok 89
130     ok 90
131     ok 91
132 wakaba 1.296 not ok 92
133     # Test 92 got: "$VAR1 = [\n qq'ParseError',\n [\n qq'Comment',\n qq'--.'\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #92)
134     # Expected: "$VAR1 = [\n qq'ParseError',\n qq'ParseError',\n [\n qq'Comment',\n qq'--.'\n ]\n ];\n" (<!----.: qq'<!----.')
135     # Line 3 is missing:
136     # - " qq'ParseError',\n"
137 wakaba 1.1 ok 93
138     ok 94
139 wakaba 1.8 ok 95
140     ok 96
141     ok 97
142     ok 98
143     ok 99
144     ok 100
145 wakaba 1.96 ok 101
146     ok 102
147     ok 103
148     ok 104
149 wakaba 1.141 ok 105
150 wakaba 1.286 ok 106
151     ok 107
152     ok 108
153     not ok 109
154     # Test 109 got: "$VAR1 = [\n qq'ParseError',\n [\n qq'DOCTYPE',\n undef,\n undef,\n undef,\n 0\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #109)
155 wakaba 1.47 # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'DOCTYPE',\n qq'',\n undef,\n undef,\n 0\n ]\n ];\n" (<!doctype >: qq'<!doctype >')
156 wakaba 1.43 # Line 5 is changed:
157     # - " qq'',\n"
158     # + " undef,\n"
159 wakaba 1.286 not ok 110
160     # Test 110 got: "$VAR1 = [\n qq'ParseError',\n [\n qq'DOCTYPE',\n undef,\n undef,\n undef,\n 0\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #110)
161 wakaba 1.47 # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'DOCTYPE',\n qq'',\n undef,\n undef,\n 0\n ]\n ];\n" (<!doctype : qq'<!doctype ')
162 wakaba 1.43 # Line 5 is changed:
163     # - " qq'',\n"
164     # + " undef,\n"
165 wakaba 1.8 ok 111
166     ok 112
167     ok 113
168 wakaba 1.10 ok 114
169 wakaba 1.287 ok 115
170 wakaba 1.10 ok 116
171     ok 117
172     ok 118
173 wakaba 1.287 ok 119
174 wakaba 1.10 ok 120
175     ok 121
176 wakaba 1.39 ok 122
177 wakaba 1.18 ok 123
178 wakaba 1.287 ok 124
179 wakaba 1.18 ok 125
180     ok 126
181 wakaba 1.20 ok 127
182 wakaba 1.240 ok 128
183 wakaba 1.20 ok 129
184 wakaba 1.287 ok 130
185 wakaba 1.240 ok 131
186 wakaba 1.20 ok 132
187     ok 133
188     ok 134
189 wakaba 1.287 ok 135
190 wakaba 1.20 ok 136
191 wakaba 1.303 not ok 137
192     # Test 137 got: "$VAR1 = [\n qq'ParseError'\n ];\n" (t/HTML-tokenizer.t at line 205 fail #137)
193     # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'EndTag',\n qq'z'\n ]\n ];\n" (</z: qq'</z')
194     # Line 2 is changed:
195     # - " qq'ParseError',\n"
196     # + " qq'ParseError'\n"
197     # Lines 3-3 are missing:
198     # - " [\n"
199     # - " qq'EndTag',\n"
200     # - " qq'z'\n"
201     # - " ]\n"
202 wakaba 1.21 ok 138
203 wakaba 1.239 ok 139
204 wakaba 1.20 ok 140
205     ok 141
206 wakaba 1.28 ok 142
207 wakaba 1.303 not ok 143
208     # Test 143 got: "$VAR1 = [\n qq'ParseError'\n ];\n" (t/HTML-tokenizer.t at line 205 fail #143)
209     # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'StartTag',\n qq'z',\n {}\n ]\n ];\n" (<z: qq'<z')
210     # Line 2 is changed:
211     # - " qq'ParseError',\n"
212     # + " qq'ParseError'\n"
213     # Lines 3-3 are missing:
214     # - " [\n"
215     # - " qq'StartTag',\n"
216     # - " qq'z',\n"
217     # - " {}\n"
218     # - " ]\n"
219     not ok 144
220     # Test 144 got: "$VAR1 = [\n qq'ParseError'\n ];\n" (t/HTML-tokenizer.t at line 205 fail #144)
221     # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'EndTag',\n qq'z'\n ]\n ];\n" (</z: qq'</z')
222     # Line 2 is changed:
223     # - " qq'ParseError',\n"
224     # + " qq'ParseError'\n"
225     # Lines 3-3 are missing:
226     # - " [\n"
227     # - " qq'EndTag',\n"
228     # - " qq'z'\n"
229     # - " ]\n"
230     not ok 145
231     # Test 145 got: "$VAR1 = [\n qq'ParseError'\n ];\n" (t/HTML-tokenizer.t at line 205 fail #145)
232     # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'StartTag',\n qq'z0',\n {}\n ]\n ];\n" (<z0: qq'<z0')
233     # Line 2 is changed:
234     # - " qq'ParseError',\n"
235     # + " qq'ParseError'\n"
236     # Lines 3-3 are missing:
237     # - " [\n"
238     # - " qq'StartTag',\n"
239     # - " qq'z0',\n"
240     # - " {}\n"
241     # - " ]\n"
242 wakaba 1.286 not ok 146
243     # Test 146 got: "$VAR1 = [\n qq'ParseError',\n qq'ParseError',\n [\n qq'StartTag',\n qq'z',\n {\n 0 => qq''\n }\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #146)
244 wakaba 1.247 # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'StartTag',\n qq'z',\n {\n 0 => qq''\n }\n ]\n ];\n" (<z/0=>: qq'<z/0=>')
245     # Got 1 extra line at line 3:
246     # + " qq'ParseError',\n"
247 wakaba 1.130 ok 147
248 wakaba 1.239 ok 148
249 wakaba 1.22 ok 149
250     ok 150
251 wakaba 1.130 ok 151
252 wakaba 1.239 ok 152
253 wakaba 1.22 ok 153
254     ok 154
255     ok 155
256     ok 156
257 wakaba 1.28 ok 157
258     ok 158
259 wakaba 1.239 ok 159
260     ok 160
261 wakaba 1.28 ok 161
262     ok 162
263     ok 163
264     ok 164
265     ok 165
266     ok 166
267     ok 167
268     ok 168
269 wakaba 1.141 ok 169
270 wakaba 1.28 ok 170
271 wakaba 1.303 not ok 171
272     # Test 171 got: "$VAR1 = [\n qq'ParseError'\n ];\n" (t/HTML-tokenizer.t at line 205 fail #171)
273     # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'StartTag',\n qq'zz',\n {}\n ]\n ];\n" (<zz: qq'<zz')
274     # Line 2 is changed:
275     # - " qq'ParseError',\n"
276     # + " qq'ParseError'\n"
277     # Lines 3-3 are missing:
278     # - " [\n"
279     # - " qq'StartTag',\n"
280     # - " qq'zz',\n"
281     # - " {}\n"
282     # - " ]\n"
283 wakaba 1.28 ok 172
284 wakaba 1.286 # t/tokenizer/test4.test
285 wakaba 1.299 not ok 173
286     # Test 173 got: "$VAR1 = [\n qq'ParseError',\n qq'ParseError',\n qq'ParseError',\n [\n qq'StartTag',\n qq'z',\n {\n 0 => qq'',\n qq'<' => qq''\n }\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #173)
287     # Expected: "$VAR1 = [\n qq'ParseError',\n qq'ParseError',\n [\n qq'StartTag',\n qq'z',\n {\n 0 => qq'',\n qq'<' => qq''\n }\n ]\n ];\n" (< in attribute name: qq'<z/0 <')
288     # Got 1 extra line at line 4:
289     # + " qq'ParseError',\n"
290 wakaba 1.293 not ok 174
291     # Test 174 got: "$VAR1 = [\n qq'ParseError',\n qq'ParseError',\n [\n qq'StartTag',\n qq'z',\n {\n qq'x' => qq'<'\n }\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #174)
292     # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'StartTag',\n qq'z',\n {\n qq'x' => qq'<'\n }\n ]\n ];\n" (< in attribute value: qq'<z x=<')
293     # Got 1 extra line at line 3:
294     # + " qq'ParseError',\n"
295 wakaba 1.286 ok 175
296     ok 176
297     not ok 177
298     # Test 177 got: "$VAR1 = [\n qq'ParseError',\n qq'ParseError',\n [\n qq'StartTag',\n qq'z',\n {\n qq'=' => qq''\n }\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #177)
299 wakaba 1.247 # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'StartTag',\n qq'z',\n {\n qq'=' => qq''\n }\n ]\n ];\n" (== attribute: qq'<z ==>')
300     # Got 1 extra line at line 3:
301     # + " qq'ParseError',\n"
302 wakaba 1.28 ok 178
303 wakaba 1.33 ok 179
304 wakaba 1.34 ok 180
305 wakaba 1.38 ok 181
306     ok 182
307 wakaba 1.43 ok 183
308     ok 184
309     ok 185
310     ok 186
311     ok 187
312     ok 188
313 wakaba 1.240 ok 189
314     ok 190
315 wakaba 1.43 ok 191
316     ok 192
317     ok 193
318     ok 194
319     ok 195
320     ok 196
321     ok 197
322 wakaba 1.96 ok 198
323     ok 199
324 wakaba 1.286 ok 200
325 wakaba 1.96 ok 201
326 wakaba 1.130 ok 202
327 wakaba 1.43 ok 203
328     ok 204
329     ok 205
330     ok 206
331     ok 207
332     ok 208
333     ok 209
334     ok 210
335     ok 211
336     ok 212
337     ok 213
338     ok 214
339 wakaba 1.240 ok 215
340     ok 216
341 wakaba 1.43 ok 217
342     ok 218
343     ok 219
344     ok 220
345 wakaba 1.141 ok 221
346 wakaba 1.286 ok 222
347 wakaba 1.298 not ok 223
348     # Test 223 got: "$VAR1 = [\n [\n qq'DOCTYPE',\n qq'html',\n qq'AbC',\n qq'XyZ',\n 1\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #223)
349     # Expected: "$VAR1 = [\n [\n qq'DOCTYPE',\n qq'HtMl',\n qq'AbC',\n qq'XyZ',\n 1\n ]\n ];\n" (Doctype public case-sensitivity (1): qq'<!DoCtYpE HtMl PuBlIc "AbC" "XyZ">')
350     # Line 4 is changed:
351     # - " qq'HtMl',\n"
352     # + " qq'html',\n"
353     not ok 224
354     # Test 224 got: "$VAR1 = [\n [\n qq'DOCTYPE',\n qq'html',\n qq'aBc',\n qq'xYz',\n 1\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #224)
355     # Expected: "$VAR1 = [\n [\n qq'DOCTYPE',\n qq'hTmL',\n qq'aBc',\n qq'xYz',\n 1\n ]\n ];\n" (Doctype public case-sensitivity (2): qq'<!dOcTyPe hTmL pUbLiC "aBc" "xYz">')
356     # Line 4 is changed:
357     # - " qq'hTmL',\n"
358     # + " qq'html',\n"
359     not ok 225
360     # Test 225 got: "$VAR1 = [\n [\n qq'DOCTYPE',\n qq'html',\n undef,\n qq'XyZ',\n 1\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #225)
361     # Expected: "$VAR1 = [\n [\n qq'DOCTYPE',\n qq'HtMl',\n undef,\n qq'XyZ',\n 1\n ]\n ];\n" (Doctype system case-sensitivity (1): qq'<!DoCtYpE HtMl SyStEm "XyZ">')
362     # Line 4 is changed:
363     # - " qq'HtMl',\n"
364     # + " qq'html',\n"
365     not ok 226
366     # Test 226 got: "$VAR1 = [\n [\n qq'DOCTYPE',\n qq'html',\n undef,\n qq'xYz',\n 1\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #226)
367     # Expected: "$VAR1 = [\n [\n qq'DOCTYPE',\n qq'hTmL',\n undef,\n qq'xYz',\n 1\n ]\n ];\n" (Doctype system case-sensitivity (2): qq'<!dOcTyPe hTmL sYsTeM "xYz">')
368     # Line 4 is changed:
369     # - " qq'hTmL',\n"
370     # + " qq'html',\n"
371 wakaba 1.286 not ok 227
372     # Test 227 got: "$VAR1 = [\n qq'ParseError',\n qq'ParseError',\n [\n qq'Comment',\n qq'doc'\n ],\n [\n qq'Character',\n qq'\\x{FFFD}'\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #227)
373 wakaba 1.130 # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'Comment',\n qq'doc'\n ],\n qq'ParseError',\n [\n qq'Character',\n qq'\\x{FFFD}'\n ]\n ];\n" (U+0000 in lookahead region after non-matching character: qq'<!doc>\x{00}')
374     # Got 1 extra line at line 3:
375     # + " qq'ParseError',\n"
376     # Line 8 is missing:
377     # - " qq'ParseError',\n"
378 wakaba 1.43 ok 228
379     ok 229
380     ok 230
381     ok 231
382     ok 232
383     ok 233
384     ok 234
385     ok 235
386 wakaba 1.141 ok 236
387 wakaba 1.43 ok 237
388     ok 238
389     ok 239
390     ok 240
391     ok 241
392 wakaba 1.287 ok 242
393 wakaba 1.43 ok 243
394 wakaba 1.287 ok 244
395 wakaba 1.286 # t/tokenizer/contentModelFlags.test
396 wakaba 1.43 ok 245
397     ok 246
398     ok 247
399     ok 248
400 wakaba 1.141 ok 249
401 wakaba 1.43 ok 250
402     ok 251
403     ok 252
404     ok 253
405     ok 254
406     ok 255
407 wakaba 1.141 ok 256
408 wakaba 1.43 ok 257
409 wakaba 1.286 # t/tokenizer/escapeFlag.test
410 wakaba 1.43 ok 258
411     ok 259
412     ok 260
413     ok 261
414     ok 262
415 wakaba 1.206 ok 263
416 wakaba 1.43 ok 264
417     ok 265
418     ok 266
419 wakaba 1.286 # t/tokenizer/entities.test
420 wakaba 1.43 ok 267
421     ok 268
422     ok 269
423     ok 270
424     ok 271
425     ok 272
426     ok 273
427     ok 274
428     ok 275
429     ok 276
430     ok 277
431     ok 278
432     ok 279
433     ok 280
434     ok 281
435     ok 282
436     ok 283
437     ok 284
438     ok 285
439     ok 286
440     ok 287
441     ok 288
442     ok 289
443     ok 290
444     ok 291
445     ok 292
446     ok 293
447     ok 294
448     ok 295
449     ok 296
450     ok 297
451     ok 298
452     ok 299
453     ok 300
454     ok 301
455     ok 302
456     ok 303
457     ok 304
458     ok 305
459     ok 306
460     ok 307
461     ok 308
462     ok 309
463     ok 310
464     ok 311
465     ok 312
466     ok 313
467     ok 314
468     ok 315
469     ok 316
470     ok 317
471     ok 318
472     ok 319
473     ok 320
474     ok 321
475     ok 322
476     ok 323
477     ok 324
478     ok 325
479     ok 326
480     ok 327
481     ok 328
482     ok 329
483     ok 330
484     ok 331
485     ok 332
486     ok 333
487     ok 334
488     ok 335
489     ok 336
490     ok 337
491 wakaba 1.59 ok 338
492     ok 339
493     ok 340
494     ok 341
495     ok 342
496     ok 343
497     ok 344
498     ok 345
499     ok 346
500     ok 347
501 wakaba 1.62 ok 348
502     ok 349
503     ok 350
504     ok 351
505     ok 352
506     ok 353
507     ok 354
508     ok 355
509     ok 356
510     ok 357
511     ok 358
512     ok 359
513 wakaba 1.96 ok 360
514     ok 361
515     ok 362
516     ok 363
517 wakaba 1.129 ok 364
518     ok 365
519     ok 366
520     ok 367
521     ok 368
522     ok 369
523     ok 370
524     ok 371
525     ok 372
526     ok 373
527     ok 374
528     ok 375
529     ok 376
530     ok 377
531     ok 378
532     ok 379
533     ok 380
534     ok 381
535     ok 382
536     ok 383
537     ok 384
538     ok 385
539     ok 386
540     ok 387
541     ok 388
542     ok 389
543     ok 390
544     ok 391
545     ok 392
546     ok 393
547     ok 394
548     ok 395
549     ok 396
550 wakaba 1.130 ok 397
551     ok 398
552     ok 399
553     ok 400
554     ok 401
555     ok 402
556     ok 403
557     ok 404
558     ok 405
559     ok 406
560     ok 407
561     ok 408
562     ok 409
563     ok 410
564     ok 411
565     ok 412
566     ok 413
567     ok 414
568     ok 415
569     ok 416
570 wakaba 1.132 ok 417
571     ok 418
572     ok 419
573     ok 420
574 wakaba 1.136 ok 421
575     ok 422
576     ok 423
577     ok 424
578     ok 425
579     ok 426
580     ok 427
581     ok 428
582     ok 429
583     ok 430
584     ok 431
585     ok 432
586     ok 433
587     ok 434
588 wakaba 1.205 ok 435
589 wakaba 1.136 ok 436
590     ok 437
591     ok 438
592 wakaba 1.205 ok 439
593 wakaba 1.136 ok 440
594     ok 441
595     ok 442
596 wakaba 1.205 ok 443
597 wakaba 1.136 ok 444
598     ok 445
599 wakaba 1.205 ok 446
600 wakaba 1.136 ok 447
601     ok 448
602     ok 449
603     ok 450
604     ok 451
605     ok 452
606     ok 453
607     ok 454
608     ok 455
609     ok 456
610     ok 457
611     ok 458
612     ok 459
613     ok 460
614     ok 461
615     ok 462
616     ok 463
617     ok 464
618     ok 465
619     ok 466
620     ok 467
621     ok 468
622     ok 469
623     ok 470
624     ok 471
625 wakaba 1.141 ok 472
626 wakaba 1.195 ok 473
627     ok 474
628     ok 475
629     ok 476
630     ok 477
631 wakaba 1.205 ok 478
632     ok 479
633     ok 480
634     ok 481
635     ok 482
636     ok 483
637     ok 484
638     ok 485
639     ok 486
640     ok 487
641     ok 488
642     ok 489
643     ok 490
644     ok 491
645     ok 492
646     ok 493
647     ok 494
648     ok 495
649     ok 496
650     ok 497
651     ok 498
652     ok 499
653     ok 500
654     ok 501
655     ok 502
656     ok 503
657     ok 504
658     ok 505
659     ok 506
660     ok 507
661     ok 508
662     ok 509
663     ok 510
664     ok 511
665     ok 512
666     ok 513
667     ok 514
668     ok 515
669     ok 516
670     ok 517
671     ok 518
672     ok 519
673     ok 520
674     ok 521
675     ok 522
676     ok 523
677     ok 524
678     ok 525
679     ok 526
680     ok 527
681     ok 528
682     ok 529
683     ok 530
684     ok 531
685     ok 532
686     ok 533
687     ok 534
688     ok 535
689     ok 536
690     ok 537
691     ok 538
692     ok 539
693 wakaba 1.210 ok 540
694 wakaba 1.205 ok 541
695     ok 542
696     ok 543
697     ok 544
698     ok 545
699     ok 546
700     ok 547
701     ok 548
702     ok 549
703     ok 550
704     ok 551
705     ok 552
706     ok 553
707     ok 554
708     ok 555
709     ok 556
710     ok 557
711     ok 558
712     ok 559
713     ok 560
714     ok 561
715     ok 562
716     ok 563
717     ok 564
718     ok 565
719     ok 566
720     ok 567
721     ok 568
722     ok 569
723     ok 570
724     ok 571
725     ok 572
726     ok 573
727     ok 574
728     ok 575
729     ok 576
730     ok 577
731     ok 578
732     ok 579
733     ok 580
734     ok 581
735     ok 582
736     ok 583
737     ok 584
738     ok 585
739     ok 586
740     ok 587
741     ok 588
742     ok 589
743     ok 590
744     ok 591
745     ok 592
746     ok 593
747     ok 594
748     ok 595
749     ok 596
750     ok 597
751     ok 598
752     ok 599
753     ok 600
754     ok 601
755     ok 602
756     ok 603
757     ok 604
758     ok 605
759     ok 606
760     ok 607
761     ok 608
762     ok 609
763     ok 610
764     ok 611
765     ok 612
766     ok 613
767     ok 614
768     ok 615
769     ok 616
770     ok 617
771     ok 618
772     ok 619
773     ok 620
774     ok 621
775     ok 622
776     ok 623
777     ok 624
778     ok 625
779     ok 626
780     ok 627
781     ok 628
782     ok 629
783     ok 630
784     ok 631
785     ok 632
786     ok 633
787     ok 634
788     ok 635
789     ok 636
790     ok 637
791     ok 638
792     ok 639
793     ok 640
794     ok 641
795     ok 642
796     ok 643
797     ok 644
798     ok 645
799     ok 646
800     ok 647
801     ok 648
802     ok 649
803     ok 650
804     ok 651
805     ok 652
806     ok 653
807     ok 654
808     ok 655
809     ok 656
810     ok 657
811     ok 658
812     ok 659
813     ok 660
814     ok 661
815     ok 662
816     ok 663
817     ok 664
818     ok 665
819     ok 666
820     ok 667
821     ok 668
822     ok 669
823     ok 670
824     ok 671
825     ok 672
826     ok 673
827     ok 674
828     ok 675
829     ok 676
830     ok 677
831     ok 678
832     ok 679
833     ok 680
834     ok 681
835     ok 682
836     ok 683
837     ok 684
838     ok 685
839     ok 686
840     ok 687
841     ok 688
842     ok 689
843     ok 690
844     ok 691
845     ok 692
846     ok 693
847     ok 694
848     ok 695
849     ok 696
850     ok 697
851     ok 698
852     ok 699
853     ok 700
854     ok 701
855     ok 702
856     ok 703
857     ok 704
858     ok 705
859     ok 706
860     ok 707
861     ok 708
862     ok 709
863     ok 710
864     ok 711
865     ok 712
866     ok 713
867     ok 714
868     ok 715
869     ok 716
870     ok 717
871     ok 718
872     ok 719
873     ok 720
874     ok 721
875     ok 722
876     ok 723
877     ok 724
878     ok 725
879     ok 726
880     ok 727
881     ok 728
882     ok 729
883     ok 730
884     ok 731
885     ok 732
886     ok 733
887     ok 734
888     ok 735
889     ok 736
890     ok 737
891     ok 738
892     ok 739
893     ok 740
894     ok 741
895     ok 742
896     ok 743
897     ok 744
898     ok 745
899     ok 746
900     ok 747
901     ok 748
902     ok 749
903     ok 750
904     ok 751
905     ok 752
906     ok 753
907     ok 754
908     ok 755
909     ok 756
910     ok 757
911     ok 758
912     ok 759
913     ok 760
914     ok 761
915     ok 762
916     ok 763
917     ok 764
918     ok 765
919     ok 766
920     ok 767
921     ok 768
922     ok 769
923     ok 770
924     ok 771
925     ok 772
926     ok 773
927     ok 774
928     ok 775
929     ok 776
930     ok 777
931     ok 778
932     ok 779
933     ok 780
934     ok 781
935     ok 782
936     ok 783
937     ok 784
938     ok 785
939     ok 786
940     ok 787
941     ok 788
942     ok 789
943     ok 790
944     ok 791
945     ok 792
946     ok 793
947     ok 794
948     ok 795
949     ok 796
950     ok 797
951     ok 798
952     ok 799
953     ok 800
954     ok 801
955     ok 802
956     ok 803
957     ok 804
958     ok 805
959     ok 806
960     ok 807
961     ok 808
962     ok 809
963     ok 810
964     ok 811
965     ok 812
966     ok 813
967     ok 814
968     ok 815
969     ok 816
970     ok 817
971     ok 818
972     ok 819
973     ok 820
974     ok 821
975     ok 822
976     ok 823
977     ok 824
978     ok 825
979     ok 826
980     ok 827
981     ok 828
982     ok 829
983     ok 830
984     ok 831
985     ok 832
986     ok 833
987     ok 834
988     ok 835
989     ok 836
990     ok 837
991     ok 838
992     ok 839
993     ok 840
994     ok 841
995     ok 842
996     ok 843
997     ok 844
998     ok 845
999 wakaba 1.286 ok 846
1000     ok 847
1001     ok 848
1002     ok 849
1003     ok 850
1004 wakaba 1.205 # t/tokenizer/xmlViolation.test
1005 wakaba 1.286 not ok 851
1006     # Test 851 got: "$VAR1 = [\n qq'ParseError',\n [\n qq'Character',\n qq'a\\x{FFFF}b'\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #851)
1007 wakaba 1.206 # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'Character',\n qq'a\\x{FFFD}b'\n ]\n ];\n" (Non-XML character: qq'a\x{FFFF}b')
1008     # Line 5 is changed:
1009     # - " qq'a\\x{FFFD}b'\n"
1010     # + " qq'a\\x{FFFF}b'\n"
1011 wakaba 1.286 not ok 852
1012     # Test 852 got: "$VAR1 = [\n [\n qq'Character',\n qq'a\\x{0C}b'\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #852)
1013 wakaba 1.206 # Expected: "$VAR1 = [\n [\n qq'Character',\n qq'a b'\n ]\n ];\n" (Non-XML space: qq'a\x{0C}b')
1014     # Line 4 is changed:
1015     # - " qq'a b'\n"
1016     # + " qq'a\\x{0C}b'\n"
1017 wakaba 1.286 not ok 853
1018 wakaba 1.302 # Test 853 got: "$VAR1 = [\n qq'ParseError',\n [\n qq'Comment',\n qq' foo -- bar '\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #853)
1019 wakaba 1.206 # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'Comment',\n qq' foo - - bar '\n ]\n ];\n" (Double hyphen in comment: qq'<!-- foo -- bar -->')
1020 wakaba 1.302 # Line 5 is changed:
1021 wakaba 1.206 # - " qq' foo - - bar '\n"
1022     # + " qq' foo -- bar '\n"
1023 wakaba 1.286 ok 854
1024 wakaba 1.205 # t/tokenizer-test-1.test
1025     ok 855
1026     ok 856
1027     ok 857
1028     ok 858
1029     ok 859
1030     ok 860
1031     ok 861
1032     ok 862
1033     ok 863
1034     ok 864
1035     ok 865
1036     ok 866
1037     ok 867
1038     ok 868
1039     ok 869
1040     ok 870
1041     ok 871
1042     ok 872
1043     ok 873
1044     ok 874
1045     ok 875
1046     ok 876
1047     ok 877
1048     ok 878
1049     ok 879
1050     ok 880
1051     ok 881
1052     ok 882
1053     ok 883
1054     ok 884
1055     ok 885
1056     ok 886
1057     ok 887
1058     ok 888
1059     ok 889
1060     ok 890
1061     ok 891
1062     ok 892
1063     ok 893
1064     ok 894
1065     ok 895
1066     ok 896
1067     ok 897
1068     ok 898
1069     ok 899
1070     ok 900
1071     ok 901
1072     ok 902
1073     ok 903
1074     ok 904
1075     ok 905
1076     ok 906
1077     ok 907
1078     ok 908
1079     ok 909
1080     ok 910
1081     ok 911
1082     ok 912
1083     ok 913
1084     ok 914
1085     ok 915
1086     ok 916
1087     ok 917
1088     ok 918
1089     ok 919
1090     ok 920
1091     ok 921
1092     ok 922
1093     ok 923
1094     ok 924
1095     ok 925
1096 wakaba 1.298 ok 926
1097     ok 927
1098     not ok 928
1099     # Test 928 got: "$VAR1 = [\n [\n qq'Comment',\n qq'--x'\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #928)
1100 wakaba 1.296 # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'Comment',\n qq'--x'\n ]\n ];\n" (<!----x-->: qq'<!----x-->')
1101     # Line 2 is missing:
1102     # - " qq'ParseError',\n"
1103 wakaba 1.205 ok 929
1104     ok 930
1105     ok 931
1106     ok 932
1107     ok 933
1108     ok 934
1109     ok 935
1110     ok 936
1111     ok 937
1112 wakaba 1.281 ok 938
1113     ok 939
1114     ok 940
1115     ok 941
1116     ok 942
1117     ok 943
1118     ok 944
1119     ok 945
1120 wakaba 1.285 ok 946
1121 wakaba 1.205 ok 947
1122     ok 948
1123     ok 949
1124     ok 950
1125     ok 951
1126     ok 952
1127     ok 953
1128     ok 954
1129     ok 955
1130     ok 956
1131     ok 957
1132     ok 958
1133     ok 959
1134     ok 960
1135     ok 961
1136     ok 962
1137 wakaba 1.286 ok 963
1138     ok 964
1139 wakaba 1.290 ok 965
1140     ok 966
1141     ok 967
1142 wakaba 1.298 ok 968
1143     ok 969
1144     not ok 970
1145     # Test 970 got: "$VAR1 = [\n qq'ParseError',\n qq'ParseError',\n [\n qq'Character',\n qq'\\x{FFFD}\\x{DFFF}'\n ]\n ];\n" (t/HTML-tokenizer.t at line 205 fail #970)
1146 wakaba 1.285 # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'Character',\n qq'\\x{FFFD}'\n ],\n qq'ParseError',\n [\n qq'Character',\n qq'\\x{DFFF}'\n ]\n ];\n" (surrogate character reference: qq'&#xD800;\x{DFFF}')
1147     # Lines 3-3 are missing:
1148     # - " [\n"
1149     # - " qq'Character',\n"
1150     # - " qq'\\x{FFFD}'\n"
1151     # - " ],\n"
1152     # Line 6 is changed:
1153     # - " qq'\\x{DFFF}'\n"
1154     # + " qq'\\x{FFFD}\\x{DFFF}'\n"
1155 wakaba 1.205 ok 971
1156     ok 972
1157     ok 973
1158     ok 974
1159     ok 975
1160     ok 976
1161     ok 977
1162     ok 978
1163     ok 979
1164     ok 980
1165     ok 981
1166     ok 982
1167     ok 983
1168     ok 984
1169     ok 985
1170     ok 986
1171     ok 987
1172     ok 988
1173     ok 989
1174     ok 990
1175     ok 991
1176     ok 992
1177     ok 993
1178     ok 994
1179     ok 995
1180     ok 996
1181     ok 997
1182     ok 998
1183     ok 999
1184     ok 1000
1185     ok 1001
1186     ok 1002
1187     ok 1003
1188     ok 1004
1189     ok 1005
1190     ok 1006
1191     ok 1007
1192     ok 1008
1193     ok 1009
1194     ok 1010
1195     ok 1011
1196     ok 1012
1197     ok 1013
1198     ok 1014
1199     ok 1015
1200     ok 1016
1201     ok 1017
1202     ok 1018
1203 wakaba 1.206 ok 1019
1204     ok 1020
1205     ok 1021
1206     ok 1022
1207     ok 1023
1208     ok 1024
1209     ok 1025
1210 wakaba 1.240 ok 1026
1211 wakaba 1.206 ok 1027
1212     ok 1028
1213     ok 1029
1214 wakaba 1.240 ok 1030
1215 wakaba 1.206 ok 1031
1216     ok 1032
1217     ok 1033
1218 wakaba 1.240 ok 1034
1219 wakaba 1.206 ok 1035
1220     ok 1036
1221 wakaba 1.240 ok 1037
1222 wakaba 1.205 ok 1038
1223     ok 1039
1224 wakaba 1.298 ok 1040
1225     ok 1041
1226 wakaba 1.299 ok 1042
1227 wakaba 1.298 ok 1043
1228 wakaba 1.299 ok 1044
1229 wakaba 1.205 ok 1045
1230     ok 1046
1231     ok 1047
1232     ok 1048
1233     ok 1049
1234     ok 1050
1235     ok 1051
1236     ok 1052
1237     ok 1053
1238     ok 1054
1239     ok 1055
1240     ok 1056
1241     ok 1057
1242     ok 1058
1243     ok 1059
1244     ok 1060
1245     ok 1061
1246 wakaba 1.206 ok 1062
1247     ok 1063
1248     ok 1064
1249     ok 1065
1250     ok 1066
1251     ok 1067
1252     ok 1068
1253 wakaba 1.227 ok 1069
1254     ok 1070
1255     ok 1071
1256     ok 1072
1257     ok 1073
1258 wakaba 1.247 ok 1074
1259     ok 1075
1260     ok 1076
1261     ok 1077
1262     ok 1078
1263     ok 1079
1264     ok 1080
1265 wakaba 1.281 ok 1081
1266     ok 1082
1267     ok 1083
1268     ok 1084
1269     ok 1085
1270     ok 1086
1271     ok 1087
1272     ok 1088
1273     ok 1089
1274     ok 1090
1275     ok 1091
1276     ok 1092
1277     ok 1093
1278     ok 1094
1279     ok 1095
1280     ok 1096
1281     ok 1097
1282 wakaba 1.285 ok 1098
1283     ok 1099
1284     ok 1100
1285     ok 1101
1286     ok 1102
1287     ok 1103
1288     ok 1104
1289     ok 1105
1290 wakaba 1.304 not ok 1106
1291     # Test 1106 got: "$VAR1 = [\n qq'ParseError'\n ];\n" (t/HTML-tokenizer.t at line 205 fail #1106)
1292     # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'StartTag',\n qq'br',\n {}\n ]\n ];\n" (slash in tag after a space: qq'<br / >')
1293     # Line 2 is changed:
1294     # - " qq'ParseError',\n"
1295     # + " qq'ParseError'\n"
1296     # Lines 3-3 are missing:
1297     # - " [\n"
1298     # - " qq'StartTag',\n"
1299     # - " qq'br',\n"
1300     # - " {}\n"
1301     # - " ]\n"
1302 wakaba 1.285 ok 1107
1303     ok 1108
1304     ok 1109
1305 wakaba 1.304 not ok 1110
1306     # Test 1110 got: "$VAR1 = [\n qq'ParseError'\n ];\n" (t/HTML-tokenizer.t at line 205 fail #1110)
1307     # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'StartTag',\n qq'br',\n {\n qq'class' => qq''\n }\n ]\n ];\n" (slash in tag after attribute name: qq'<br class/ >')
1308     # Line 2 is changed:
1309     # - " qq'ParseError',\n"
1310     # + " qq'ParseError'\n"
1311     # Lines 3-3 are missing:
1312     # - " [\n"
1313     # - " qq'StartTag',\n"
1314     # - " qq'br',\n"
1315     # - " {\n"
1316     # - " qq'class' => qq''\n"
1317     # - " }\n"
1318     # - " ]\n"
1319 wakaba 1.285 ok 1111
1320     ok 1112
1321     ok 1113
1322     ok 1114
1323     ok 1115
1324     ok 1116
1325     ok 1117
1326     ok 1118
1327     ok 1119
1328     ok 1120
1329     ok 1121
1330     ok 1122
1331     ok 1123
1332     ok 1124
1333     ok 1125
1334     ok 1126
1335     ok 1127
1336 wakaba 1.304 not ok 1128
1337     # Test 1128 got: "$VAR1 = [\n qq'ParseError'\n ];\n" (t/HTML-tokenizer.t at line 205 fail #1128)
1338     # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'EndTag',\n qq'p'\n ]\n ];\n" (end tag attribute (before attribute, >): qq'</p class="" >')
1339     # Line 2 is changed:
1340     # - " qq'ParseError',\n"
1341     # + " qq'ParseError'\n"
1342     # Lines 3-3 are missing:
1343     # - " [\n"
1344     # - " qq'EndTag',\n"
1345     # - " qq'p'\n"
1346     # - " ]\n"
1347 wakaba 1.286 ok 1129
1348 wakaba 1.304 not ok 1130
1349     # Test 1130 got: "$VAR1 = [];\n" (t/HTML-tokenizer.t at line 205 fail #1130)
1350     # Expected: "$VAR1 = [\n [\n qq'EndTag',\n qq'p'\n ]\n ];\n" (end tag not closed (before attribute): qq'</p >')
1351     # Line 1 is changed:
1352     # - "$VAR1 = [\n"
1353     # + "$VAR1 = [];\n"
1354     # Lines 2-2 are missing:
1355     # - " [\n"
1356     # - " qq'EndTag',\n"
1357     # - " qq'p'\n"
1358     # - " ]\n"
1359     # - " ];\n"
1360 wakaba 1.290 ok 1131
1361     ok 1132
1362 wakaba 1.293 ok 1133
1363     ok 1134
1364 wakaba 1.298 ok 1135
1365     ok 1136
1366 wakaba 1.301 ok 1137
1367     ok 1138
1368     ok 1139
1369     ok 1140
1370 wakaba 1.302 ok 1141
1371 wakaba 1.304 not ok 1142
1372     # Test 1142 got: "$VAR1 = [\n qq'ParseError'\n ];\n" (t/HTML-tokenizer.t at line 205 fail #1142)
1373     # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'EndTag',\n qq'p'\n ]\n ];\n" (end tag attribute (after attribute value double quoted): qq'</p class="a" >')
1374     # Line 2 is changed:
1375     # - " qq'ParseError',\n"
1376     # + " qq'ParseError'\n"
1377     # Lines 3-3 are missing:
1378     # - " [\n"
1379     # - " qq'EndTag',\n"
1380     # - " qq'p'\n"
1381     # - " ]\n"
1382     not ok 1143
1383     # Test 1143 got: "$VAR1 = [\n qq'ParseError'\n ];\n" (t/HTML-tokenizer.t at line 205 fail #1143)
1384     # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'EndTag',\n qq'p'\n ]\n ];\n" (end tag attribute (after attribute value single quoted): qq'</p class=\x{27}a\x{27} >')
1385     # Line 2 is changed:
1386     # - " qq'ParseError',\n"
1387     # + " qq'ParseError'\n"
1388     # Lines 3-3 are missing:
1389     # - " [\n"
1390     # - " qq'EndTag',\n"
1391     # - " qq'p'\n"
1392     # - " ]\n"
1393 wakaba 1.302 ok 1144
1394     ok 1145
1395     ok 1146
1396     ok 1147
1397     ok 1148
1398     ok 1149
1399     ok 1150
