1 |
#!/usr/bin/perl |
#!/usr/bin/perl |
2 |
|
# -d:DProf |
3 |
use strict; |
use strict; |
4 |
|
|
5 |
use lib qw[/home/httpd/html/www/markup/html/whatpm |
use lib qw[/home/httpd/html/www/markup/html/whatpm |
6 |
/home/wakaba/work/manakai/lib |
/home/wakaba/work/manakai2/lib |
7 |
/home/wakaba/public_html/-temp/wiki/lib]; |
/home/httpd/html/regexp/lib |
8 |
|
]; |
9 |
use CGI::Carp qw[fatalsToBrowser]; |
use CGI::Carp qw[fatalsToBrowser]; |
|
use Time::HiRes qw/time/; |
|
10 |
|
|
11 |
use SuikaWiki::Input::HTTP; ## TODO: Use some better CGI module |
require WebHACC::Input; |
12 |
|
|
13 |
my $http = SuikaWiki::Input::HTTP->new; |
{ |
14 |
|
require Message::CGI::HTTP; |
15 |
|
my $http = Message::CGI::HTTP->new; |
16 |
|
|
17 |
|
require WebHACC::Output; |
18 |
|
my $out = WebHACC::Output->new; |
19 |
|
$out->handle (*STDOUT); |
20 |
|
$out->set_utf8; |
21 |
|
|
22 |
## TODO: _charset_ |
if ($http->get_meta_variable ('PATH_INFO') ne '/') { |
23 |
|
$out->http_error (404); |
|
my @mode = split m#/#, scalar $http->meta_variable ('PATH_INFO'), -1; |
|
|
shift @mode if @mode and $mode[0] == ''; |
|
|
## TODO: decode unreserved characters |
|
|
|
|
|
my $s = $http->parameter ('s'); |
|
|
if (length $s > 1000_000) { |
|
|
print STDOUT "Status: 400 Document Too Long\nContent-Type: text/plain; charset=us-ascii\n\nToo long"; |
|
24 |
exit; |
exit; |
25 |
} |
} |
26 |
my $char_length = length $s; |
|
27 |
my %time; |
## TODO: We need real conneg support... |
28 |
my $time1; |
my $primary_language = 'en'; |
29 |
my $time2; |
if ($ENV{HTTP_ACCEPT_LANGUAGE} =~ /ja/) { |
30 |
|
$primary_language = 'ja'; |
31 |
require Message::DOM::DOMImplementation; |
} |
32 |
my $dom = Message::DOM::DOMImplementation->____new; |
$out->load_text_catalog ($primary_language); |
|
# $| = 1; |
|
|
my $doc; |
|
|
my $el; |
|
|
|
|
|
if (@mode == 3 and $mode[0] eq 'html' and |
|
|
($mode[2] eq 'html' or $mode[2] eq 'test')) { |
|
|
print STDOUT "Content-Type: text/plain; charset=utf-8\n\n"; |
|
|
|
|
|
require Encode; |
|
|
require Whatpm::HTML; |
|
|
|
|
|
$time1 = time; |
|
|
$s = Encode::decode ('utf-8', $s); |
|
|
$time2 = time; |
|
|
$time{decode} = $time2 - $time1; |
|
33 |
|
|
34 |
|
$out->set_flush; |
35 |
|
$out->http_header; |
36 |
|
$out->html_header; |
37 |
|
$out->unset_flush; |
38 |
|
|
39 |
|
$out->generate_input_section ($http); |
40 |
|
|
41 |
|
my $u = $http->get_parameter ('uri'); |
42 |
|
my $s = $http->get_parameter ('s'); |
43 |
|
if ((not defined $u or not length $u) and |
44 |
|
(not defined $s or not length $s)) { |
45 |
|
exit; |
46 |
|
} |
47 |
|
|
48 |
print STDOUT "#errors\n"; |
require WebHACC::Result; |
49 |
|
my $result = WebHACC::Result->new; |
50 |
|
$result->output ($out); |
51 |
|
|
52 |
my $onerror = sub { |
require WebHACC::Input; |
53 |
my (%opt) = @_; |
my $input = WebHACC::Input->get_document ($http => $result => $out); |
|
print STDOUT "$opt{line},$opt{column},$opt{type}\n"; |
|
|
}; |
|
|
|
|
|
$doc = $dom->create_document; |
|
|
$time1 = time; |
|
|
if (length $mode[1]) { |
|
|
$el = $doc->create_element_ns |
|
|
('http://www.w3.org/1999/xhtml', [undef, $mode[1]]); |
|
|
Whatpm::HTML->set_inner_html ($el, $s, $onerror); |
|
|
} else { |
|
|
Whatpm::HTML->parse_string ($s => $doc, $onerror); |
|
|
} |
|
|
$time2 = time; |
|
|
$time{parse} = $time2 - $time1; |
|
54 |
|
|
55 |
print "#document\n"; |
check_and_print ($input => $result => $out); |
56 |
|
|
57 |
|
$out->nav_list; |
58 |
|
|
|
my $out; |
|
|
if ($mode[2] eq 'html') { |
|
|
$time1 = time; |
|
|
$out = Whatpm::HTML->get_inner_html ($el || $doc); |
|
|
$time2 = time; |
|
|
$time{serialize_html} = $time2 - $time1; |
|
|
} else { # test |
|
|
$time1 = time; |
|
|
$out = test_serialize ($el || $doc); |
|
|
$time2 = time; |
|
|
$time{serialize_test} = $time2 - $time1; |
|
|
} |
|
|
print STDOUT Encode::encode ('utf-8', $$out); |
|
|
print STDOUT "\n"; |
|
|
} elsif (@mode == 3 and $mode[0] eq 'xhtml' and |
|
|
($mode[2] eq 'html' or $mode[2] eq 'test')) { |
|
|
print STDOUT "Content-Type: text/plain; charset=utf-8\n\n"; |
|
|
|
|
|
require Message::DOM::XMLParserTemp; |
|
|
print STDOUT "#errors\n"; |
|
|
|
|
|
my $onerror = sub { |
|
|
my $err = shift; |
|
|
print STDOUT $err->location->line_number, ","; |
|
|
print STDOUT $err->location->column_number, ","; |
|
|
print STDOUT $err->text, "\n"; |
|
|
return 1; |
|
|
}; |
|
|
|
|
|
open my $fh, '<', \$s; |
|
|
my $time1 = time; |
|
|
$doc = Message::DOM::XMLParserTemp->parse_byte_stream |
|
|
($fh => $dom, $onerror, charset => 'utf-8'); |
|
|
my $time2 = time; |
|
|
$time{parse_xml} = $time2 - $time1; |
|
|
|
|
|
print "#document\n"; |
|
|
|
|
|
my $out; |
|
|
if ($mode[2] eq 'html') { |
|
|
## TODO: Use XHTML serializer |
|
|
#$out = Whatpm::HTML->get_inner_html ($doc); |
|
|
} else { # test |
|
|
$time1 = time; |
|
|
$out = test_serialize ($doc); |
|
|
$time2 = time; |
|
|
$time{serialize_test} = $time2 - $time1; |
|
|
} |
|
|
print STDOUT Encode::encode ('utf-8', $$out); |
|
|
print STDOUT "\n"; |
|
|
} else { |
|
|
print STDOUT "Status: 404 Not Found\nContent-Type: text/plain; charset=us-ascii\n\n404"; |
|
59 |
exit; |
exit; |
60 |
} |
} |
61 |
|
|
62 |
if ($http->parameter ('dom5')) { |
sub check_and_print ($$$) { |
63 |
require Whatpm::ContentChecker; |
my ($input, $result, $out) = @_; |
64 |
my $onerror = sub { |
my $original_input = $out->input; |
65 |
my %opt = @_; |
$out->input ($input); |
66 |
print STDOUT get_node_path ($opt{node}) . ';' . $opt{type} . "\n"; |
|
67 |
}; |
$input->generate_info_section ($result); |
68 |
print STDOUT "#domerrors\n"; |
|
69 |
$time1 = time; |
$input->generate_transfer_sections ($result); |
70 |
if ($el) { |
|
71 |
Whatpm::ContentChecker->check_element ($el, $onerror); |
unless (defined $input->{s}) { |
72 |
} else { |
## NOTE: This is an error of the implementation. |
73 |
Whatpm::ContentChecker->check_document ($doc, $onerror); |
$result->layer_uncertain ('transfer'); |
74 |
} |
$result->generate_result_section; |
75 |
$time2 = time; |
|
76 |
$time{check} = $time2 - $time1; |
$out->input ($original_input); |
77 |
} |
return; |
78 |
|
} |
79 |
print STDOUT "#log\n"; |
|
80 |
for (qw/decode parse parse_xml serialize_html serialize_xml serialize_test |
my $checker_class = { |
81 |
check/) { |
'text/cache-manifest' => 'WebHACC::Language::CacheManifest', |
82 |
next unless defined $time{$_}; |
'text/css' => 'WebHACC::Language::CSS', |
83 |
print STDOUT { |
'text/x-css-inline' => 'WebHACC::Language::CSSInline', |
84 |
decode => 'bytes->chars', |
'text/html' => 'WebHACC::Language::HTML', |
85 |
parse => 'html5(chars)->dom5', |
'text/x-h2h' => 'WebHACC::Language::H2H', |
86 |
parse_xml => 'xml1(chars)->dom5', |
'text/x-regexp-js' => 'WebHACC::Language::RegExpJS', |
87 |
serialize_html => 'dom5->html5(char)', |
'text/x-webidl' => 'WebHACC::Language::WebIDL', |
88 |
serialize_xml => 'dom5->xml1(char)', |
|
89 |
serialize_test => 'dom5->test(char)', |
'text/xml' => 'WebHACC::Language::XML', |
90 |
check => 'dom5 check', |
'application/atom+xml' => 'WebHACC::Language::XML', |
91 |
}->{$_}; |
'application/rss+xml' => 'WebHACC::Language::XML', |
92 |
print STDOUT "\t", $time{$_}, "s\n"; |
'image/svg+xml' => 'WebHACC::Language::XML', |
93 |
open my $file, '>>', ".manakai-$_.txt" or die ".manakai-$_.txt: $!"; |
'application/xhtml+xml' => 'WebHACC::Language::XML', |
94 |
print $file $char_length, "\t", $time{$_}, "\n"; |
'application/xml' => 'WebHACC::Language::XML', |
95 |
|
## TODO: Should we make all XML MIME Types fall |
96 |
|
## into this category? |
97 |
|
|
98 |
|
## NOTE: This type has different model from normal XML types. |
99 |
|
'application/rdf+xml' => 'WebHACC::Language::XML', |
100 |
|
}->{$input->{media_type}} || 'WebHACC::Language::Default'; |
101 |
|
|
102 |
|
eval qq{ require $checker_class } or die "$0: Loading $checker_class: $@"; |
103 |
|
my $checker = $checker_class->new; |
104 |
|
$checker->input ($input); |
105 |
|
$checker->output ($out); |
106 |
|
$checker->result ($result); |
107 |
|
|
108 |
|
## TODO: A cache manifest MUST be text/cache-manifest |
109 |
|
## TODO: WebIDL media type "text/x-webidl" |
110 |
|
|
111 |
|
$checker->generate_syntax_error_section; |
112 |
|
$checker->generate_source_string_section; |
113 |
|
|
114 |
|
my @subdoc; |
115 |
|
$checker->onsubdoc (sub { |
116 |
|
push @subdoc, shift; |
117 |
|
}); |
118 |
|
|
119 |
|
$checker->generate_structure_dump_section; |
120 |
|
$checker->generate_structure_error_section; |
121 |
|
$checker->generate_additional_sections; |
122 |
|
|
123 |
|
my $id_prefix = 0; |
124 |
|
for my $_subinput (@subdoc) { |
125 |
|
my $subinput = WebHACC::Input::Subdocument->new (++$id_prefix); |
126 |
|
$subinput->{$_} = $_subinput->{$_} for keys %$_subinput; |
127 |
|
$subinput->{base_uri} = $subinput->{container_node}->base_uri |
128 |
|
unless defined $subinput->{base_uri}; |
129 |
|
$subinput->{parent_input} = $input; |
130 |
|
|
131 |
|
my $subresult = WebHACC::Result->new; |
132 |
|
$subresult->output ($out); |
133 |
|
$subresult->parent_result ($result); |
134 |
|
|
135 |
|
$subinput->start_section ($subresult); |
136 |
|
check_and_print ($subinput => $subresult => $out); |
137 |
|
$subinput->end_section ($subresult); |
138 |
} |
} |
139 |
|
|
140 |
exit; |
$result->generate_result_section; |
141 |
|
|
142 |
## Serializes the subtree under the given node in the line-per-node
## "tree dump" format used by the HTML parser test suites: each node is
## emitted on its own line prefixed by "| " and indented one extra space
## per depth level.  Returns a reference to the serialized string.
## (NOTE(review): this span previously contained stray lines from a bad
## merge — `$out->input (...)` and a duplicate `} # check_and_print` —
## which broke the sub; they have been removed.)
sub test_serialize ($) {
  my $node = shift;
  my $r = '';

  ## Worklist of [node, indent-string] pairs; children are unshifted so
  ## the traversal is depth-first in document order.
  my @node = map { [$_, ''] } @{$node->child_nodes};
  while (@node) {
    my $child = shift @node;
    my $nt = $child->[0]->node_type;
    if ($nt == $child->[0]->ELEMENT_NODE) {
      $r .= '| ' . $child->[1] . '<' . $child->[0]->tag_name . ">\x0A"; ## ISSUE: case?

      ## Attributes are dumped one per line, sorted by name for a
      ## deterministic output.
      for my $attr (sort {$a->[0] cmp $b->[0]} map { [$_->name, $_->value] }
                    @{$child->[0]->attributes}) {
        $r .= '| ' . $child->[1] . ' ' . $attr->[0] . '="'; ## ISSUE: case?
        $r .= $attr->[1] . '"' . "\x0A";
      }

      unshift @node,
          map { [$_, $child->[1] . ' '] } @{$child->[0]->child_nodes};
    } elsif ($nt == $child->[0]->TEXT_NODE) {
      $r .= '| ' . $child->[1] . '"' . $child->[0]->data . '"' . "\x0A";
    } elsif ($nt == $child->[0]->CDATA_SECTION_NODE) {
      $r .= '| ' . $child->[1] . '<![CDATA[' . $child->[0]->data . "]]>\x0A";
    } elsif ($nt == $child->[0]->COMMENT_NODE) {
      $r .= '| ' . $child->[1] . '<!-- ' . $child->[0]->data . " -->\x0A";
    } elsif ($nt == $child->[0]->DOCUMENT_TYPE_NODE) {
      $r .= '| ' . $child->[1] . '<!DOCTYPE ' . $child->[0]->name . ">\x0A";
    } elsif ($nt == $child->[0]->PROCESSING_INSTRUCTION_NODE) {
      $r .= '| ' . $child->[1] . '<?' . $child->[0]->target . ' ' .
          $child->[0]->data . "?>\x0A";
    } else {
      $r .= '| ' . $child->[1] . $child->[0]->node_type . "\x0A"; # error
    }
  }

  return \$r;
} # test_serialize
|
|
|
|
|
## Builds a simple slash-separated path from the document root down to
## the given node, for use in error messages: element nodes contribute
## their local name, attribute nodes "@name", text nodes their quoted
## data, document nodes an empty segment (producing the leading "/"),
## and anything else "#<node_type>".
sub get_node_path ($) {
  my $current = shift;
  my @segments;
  while (defined $current) {
    my $type = $current->node_type;
    my ($label, $next);
    if ($type == 2) {
      ## Attribute nodes hang off their owner element, not a parent.
      $label = '@' . $current->manakai_local_name;
      $next = $current->owner_element;
    } else {
      $label = $type == 1 ? $current->manakai_local_name
             : $type == 3 ? '"' . $current->data . '"'
             : $type == 9 ? ''
             : '#' . $type;
      $next = $current->parent_node;
    }
    unshift @segments, $label;
    $current = $next;
  }
  return join '/', @segments;
} # get_node_path
|
144 |
|
|
145 |
=head1 AUTHOR

Wakaba <w@suika.fam.cx>

=head1 LICENSE

Copyright 2007-2008 Wakaba <w@suika.fam.cx>

This library is free software; you can redistribute it
and/or modify it under the same terms as Perl itself.