++ whatpm/t/ChangeLog	16 Jul 2007 14:28:33 -0000

2007-07-16  Wakaba  <wakaba@suika.fam.cx>

	* content-model-1.dat, content-model-2.dat: Some test data have
	been updated due to new warnings.

++ whatpm/Whatpm/ChangeLog	16 Jul 2007 14:01:47 -0000

2007-07-16  Wakaba  <wakaba@suika.fam.cx>

	* ContentChecker.pm: Drop wrong |level => 'error'| specification
	from "in HTML:xml:lang" error.  Character position is now the
	last part of the error type in the URI error description.
	Report "unsupported" status for language tags, media queries,
	script codes, and style sheets.
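For context, the ContentChecker entry above concerns how validation results are classified when they are reported back to the caller. The following is only a rough sketch of what a caller's error handler might look like; the method name, the named arguments, and the 'unsupported' level value are assumptions made for illustration, not taken from this log:

  use strict;
  use warnings;
  use Whatpm::ContentChecker;

  # Hypothetical error handler: the %arg keys and level values below
  # are assumed, not confirmed by this test log.
  my $onerror = sub {
    my %arg = @_;
    if (defined $arg{level} and $arg{level} eq 'unsupported') {
      # Per the ChangeLog, language tags, media queries, script codes,
      # and style sheets may now be reported with "unsupported" status.
      warn "unsupported: $arg{type}\n";
    } elsif (defined $arg{level}) {
      warn "$arg{level}: $arg{type}\n";  # e.g. a warning level
    } else {
      warn "error: $arg{type}\n";        # no level: a conformance error
    }
  };

  # Whatpm::ContentChecker->check_element ($element, $onerror);
  #   ($element: an element node obtained from a DOM implementation.)

The TAP output below is the t/HTML-tokenizer.t run recorded alongside these changes.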
1..337
# Running under perl version 5.008007 for linux
# Current time local: Mon Jul 16 23:26:22 2007
# Current time GMT: Mon Jul 16 14:26:22 2007
# Using Test.pm version 1.25
# t/tokenizer/test1.test
ok 1
ok 2
ok 3
ok 4
ok 5
ok 6
ok 7
ok 8
ok 9
ok 10
ok 11
ok 12
ok 13
ok 14
ok 15
ok 16
ok 17
ok 18
ok 19
ok 20
ok 21
ok 22
ok 23
ok 24
ok 25
ok 26
ok 27
ok 28
ok 29
ok 30
ok 31
ok 32
ok 33
ok 34
ok 35
ok 36
ok 37
ok 38
ok 39
ok 40
ok 41
ok 42
# t/tokenizer/test2.test
not ok 43
# Test 43 got: "$VAR1 = [\n qq'ParseError',\n qq'ParseError',\n [\n qq'DOCTYPE',\n undef,\n undef,\n undef,\n 0\n ]\n ];\n" (t/HTML-tokenizer.t at line 153 fail #43)
# Expected: "$VAR1 = [\n qq'ParseError',\n qq'ParseError',\n [\n qq'DOCTYPE',\n qq'',\n undef,\n undef,\n 0\n ]\n ];\n" (DOCTYPE without name: <!DOCTYPE>)
# Line 6 is changed:
# - " qq'',\n"
# + " undef,\n"
# t/HTML-tokenizer.t line 153 is: ok $parser_dump, $expected_dump,
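The comparison at t/HTML-tokenizer.t line 153, quoted above, uses Test.pm's two-argument ok(), which checks its first argument (the observed dump) against its second (the expected dump) for string equality. A minimal self-contained sketch of the same style of check, using the token lists from the test 43 diagnostics (the qq''-quoted dump format in the log comes from the harness's own Data::Dumper configuration, which is not reproduced here):

  use strict;
  use warnings;
  use Test; BEGIN { plan tests => 1 }
  use Data::Dumper;

  # Token lists from the test 43 diagnostics above: the tokenizer now
  # leaves the DOCTYPE name undef for <!DOCTYPE>, while the stored
  # test data still expects an empty-string name.
  my $got      = ['ParseError', 'ParseError',
                  ['DOCTYPE', undef, undef, undef, 0]];
  my $expected = ['ParseError', 'ParseError',
                  ['DOCTYPE', '', undef, undef, 0]];

  my $parser_dump   = Dumper $got;
  my $expected_dump = Dumper $expected;

  # Two-argument ok(): got, expected.  This reports "not ok 1" with a
  # got/expected diagnostic, just as the log shows for test 43.
  ok $parser_dump, $expected_dump;

Comparing whole Dumper strings makes each failure diagnostic a line diff over the serialized token list, which is what the "Line 6 is changed" output above is showing.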
ok 44
ok 45
ok 46
ok 47
ok 48
ok 49
ok 50
ok 51
ok 52
ok 53
ok 54
ok 55
ok 56
ok 57
ok 58
ok 59
ok 60
ok 61
ok 62
ok 63
ok 64
ok 65
ok 66
ok 67
ok 68
ok 69
ok 70
ok 71
ok 72
ok 73
ok 74
# t/tokenizer/test3.test
ok 75
ok 76
ok 77
ok 78
ok 79
ok 80
ok 81
ok 82
ok 83
ok 84
ok 85
ok 86
ok 87
ok 88
ok 89
ok 90
ok 91
ok 92
ok 93
ok 94
ok 95
ok 96
ok 97
ok 98
ok 99
ok 100
ok 101
not ok 102
# Test 102 got: "$VAR1 = [\n qq'ParseError',\n [\n qq'DOCTYPE',\n undef,\n undef,\n undef,\n 0\n ]\n ];\n" (t/HTML-tokenizer.t at line 153 fail #102)
# Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'DOCTYPE',\n qq'',\n undef,\n undef,\n 0\n ]\n ];\n" (<!doctype >: <!doctype >)
# Line 5 is changed:
# - " qq'',\n"
# + " undef,\n"
not ok 103
# Test 103 got: "$VAR1 = [\n qq'ParseError',\n [\n qq'DOCTYPE',\n undef,\n undef,\n undef,\n 0\n ]\n ];\n" (t/HTML-tokenizer.t at line 153 fail #103)
# Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'DOCTYPE',\n qq'',\n undef,\n undef,\n 0\n ]\n ];\n" (<!doctype : <!doctype )
# Line 5 is changed:
# - " qq'',\n"
# + " undef,\n"
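Tests 43, 102, and 103 all fail for the same reason: when ">" or end-of-file is reached before any DOCTYPE name character, the tokenizer now emits the DOCTYPE token with its name still undefined rather than set to the empty string, and the stored expectations predate that change. A hypothetical fragment of such a state handler (field layout inferred from the dumps above; the trailing 0 presumably encodes the token's correctness/quirks flag; this is not the actual Whatpm code):

  use strict;
  use warnings;

  # Hypothetical "before DOCTYPE name" state handler, illustrating the
  # behavior behind tests 43, 102, and 103.
  sub before_doctype_name_state {
    my ($self, $char) = @_;
    if (not defined $char or $char eq '>') {
      $self->parse_error;
      # The name field stays undef; it is never initialized to ''.
      return {type => 'DOCTYPE', name => undef,
              public_id => undef, system_id => undef, correct => 0};
    }
    # ... any other character starts the DOCTYPE name ...
    return;
  }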
ok 104
ok 105
ok 106
ok 107
ok 108
ok 109
ok 110
ok 111
ok 112
ok 113
ok 114
ok 115
ok 116
ok 117
ok 118
ok 119
ok 120
ok 121
ok 122
ok 123
ok 124
ok 125
ok 126
ok 127
ok 128
ok 129
ok 130
ok 131
ok 132
ok 133
ok 134
ok 135
ok 136
ok 137
ok 138
ok 139
ok 140
ok 141
ok 142
ok 143
ok 144
ok 145
ok 146
ok 147
ok 148
ok 149
ok 150
ok 151
ok 152
ok 153
ok 154
ok 155
ok 156
ok 157
ok 158
ok 159
ok 160
ok 161
ok 162
ok 163
ok 164
ok 165
# t/tokenizer/test4.test
ok 166
ok 167
ok 168
ok 169
ok 170
ok 171
ok 172
ok 173
ok 174
ok 175
ok 176
ok 177
ok 178
ok 179
ok 180
ok 181
ok 182
ok 183
ok 184
ok 185
ok 186
ok 187
ok 188
ok 189
ok 190
ok 191
ok 192
ok 193
ok 194
ok 195
ok 196
ok 197
ok 198
not ok 199
# Test 199 got: "$VAR1 = [\n qq'ParseError',\n qq'ParseError',\n [\n qq'Comment',\n qq'doc'\n ],\n [\n qq'Character',\n qq'\\x{FFFD}'\n ]\n ];\n" (t/HTML-tokenizer.t at line 153 fail #199)
# Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'Comment',\n qq'doc'\n ],\n qq'ParseError',\n [\n qq'Character',\n qq'\\x{FFFD}'\n ]\n ];\n" (U+0000 in lookahead region after non-matching character: <!doc>
226 | # Expected: "$VAR1 = [\n qq'ParseError',\n [\n qq'Comment',\n qq'doc'\n ],\n qq'ParseError',\n [\n qq'Character',\n qq'\\x{FFFD}'\n ]\n ];\n" (U+0000 in lookahead region after non-matching character: <!doc> |