use strict;

use lib qw[/home/httpd/html/www/markup/html/whatpm
           /home/wakaba/work/manakai2/lib];
use CGI::Carp qw[fatalsToBrowser];

require WebHACC::Input;
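
## Main driver: read the CGI request, prepare the report page, fetch
## the document to be checked, run the appropriate conformance
## checker, and print the result summary.  The helper subroutines
## (check_and_print, get_input_document) and the WDCC::LWPUA user
## agent subclass are defined below.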
{
  require Message::CGI::HTTP;
  my $http = Message::CGI::HTTP->new;

  require WebHACC::Output;
  my $out = WebHACC::Output->new;
  $out->handle (*STDOUT);
  $out->set_utf8;
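
  ## Extra path components are not supported; anything other than
  ## PATH_INFO "/" is a 404.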
  if ($http->get_meta_variable ('PATH_INFO') ne '/') {
    $out->http_error (404);
    exit;
  }

  ## TODO: We need real conneg support...
  my $primary_language = 'en';
  if ($ENV{HTTP_ACCEPT_LANGUAGE} =~ /ja/) {
    $primary_language = 'ja';
  }
  $out->load_text_catalog ($primary_language);
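
  ## The HTTP header and the top of the report page are written with
  ## flushing enabled (set_flush/unset_flush), presumably so that they
  ## reach the client before the possibly slow fetch and check below.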
  $out->set_flush;
  $out->http_header;
  $out->html_header;
  $out->unset_flush;

  my $input = get_input_document ($http);
  $out->input ($input);

  require WebHACC::Result;
  my $result = WebHACC::Result->new;
  $result->output ($out);
  $result->{conforming_min} = 1;
  $result->{conforming_max} = 1;

  $out->html ('<script src="../cc-script.js"></script>');

  check_and_print ($input => $result => $out);

  $result->generate_result_section;

  $out->nav_list;

  exit;
}
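
## check_and_print ($input, $result, $out)
##
## Generate the report sections for one input document: input and
## transfer information, syntax errors, the decoded source, the parsed
## structure and its conformance errors, and any additional
## checker-specific sections.  Subdocuments reported by the checker
## are then checked recursively.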
sub check_and_print ($$$) {
  my ($input, $result, $out) = @_;
  my $original_input = $out->input;
  $out->input ($input);

  $input->generate_info_section ($result);

  $input->generate_transfer_sections ($result);

  unless (defined $input->{s}) {
    $result->{conforming_min} = 0;
    return;
  }
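
  ## Choose a checker module based on the sniffed media type; types
  ## not listed here fall back to WebHACC::Language::Default.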
  my $checker_class = {
    'text/cache-manifest' => 'WebHACC::Language::CacheManifest',
    'text/css' => 'WebHACC::Language::CSS',
    'text/html' => 'WebHACC::Language::HTML',
    'text/x-webidl' => 'WebHACC::Language::WebIDL',

    'text/xml' => 'WebHACC::Language::XML',
    'application/atom+xml' => 'WebHACC::Language::XML',
    'application/rss+xml' => 'WebHACC::Language::XML',
    'image/svg+xml' => 'WebHACC::Language::XML',
    'application/xhtml+xml' => 'WebHACC::Language::XML',
    'application/xml' => 'WebHACC::Language::XML',
    ## TODO: Should we make all XML MIME Types fall
    ## into this category?

    ## NOTE: This type has different model from normal XML types.
    'application/rdf+xml' => 'WebHACC::Language::XML',
  }->{$input->{media_type}} || 'WebHACC::Language::Default';

  eval qq{ require $checker_class } or die "$0: Loading $checker_class: $@";
  my $checker = $checker_class->new;
  $checker->input ($input);
  $checker->output ($out);
  $checker->result ($result);

  ## TODO: A cache manifest MUST be text/cache-manifest
  ## TODO: WebIDL media type "text/x-webidl"

  $checker->generate_syntax_error_section;
  $checker->generate_source_string_section;

  my @subdoc;
  $checker->onsubdoc (sub {
    push @subdoc, shift;
  });

  $checker->generate_structure_dump_section;
  $checker->generate_structure_error_section;
  $checker->generate_additional_sections;
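
  ## Subdocuments collected through the onsubdoc callback above are
  ## wrapped in WebHACC::Input::Subdocument objects (the counter is
  ## used as an id prefix) and checked recursively, each in its own
  ## report section.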
  my $id_prefix = 0;
  for my $_subinput (@subdoc) {
    my $subinput = WebHACC::Input::Subdocument->new (++$id_prefix);
    $subinput->{$_} = $_subinput->{$_} for keys %$_subinput;
    $subinput->{base_uri} = $subinput->{container_node}->base_uri
        unless defined $subinput->{base_uri};
    $subinput->{parent_input} = $input;

    $subinput->start_section ($result);
    check_and_print ($subinput => $result => $out);
    $subinput->end_section ($result);
  }

  $out->input ($original_input);
} # check_and_print
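
## get_input_document ($http)
##
## Build the WebHACC::Input object to be checked.  If the |uri|
## parameter is given, the document is fetched over HTTP, subject to
## the scheme and host restrictions below; otherwise the source is
## taken directly from the |s| and |_charset_| parameters.  The
## |charset| and |i| parameters can override the detected charset and
## media type; |e| is stored as the input's |inner_html_element|.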
sub get_input_document ($) {
  my $http = shift;

  require Message::DOM::DOMImplementation;
  my $dom = Message::DOM::DOMImplementation->new;

  require Encode;
  my $request_uri = Encode::decode ('utf-8', $http->get_parameter ('uri'));
  my $r = WebHACC::Input->new;
  if (defined $request_uri and length $request_uri) {
    my $uri = $dom->create_uri_reference ($request_uri);
    unless ({
             http => 1,
            }->{lc $uri->uri_scheme}) {
      $r = WebHACC::Input::Error->new;
      $r->{uri} = $request_uri;
      $r->{request_uri} = $request_uri;
      $r->{error_status_text} = 'URL scheme not allowed';
    }
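
    ## Fetching is restricted to hosts that pass the rules below;
    ## loopback, private, and other special-purpose addresses are
    ## denied, presumably so that the checker cannot be used to probe
    ## the local network.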
    require Message::Util::HostPermit;
    my $host_permit = new Message::Util::HostPermit;
    $host_permit->add_rule (<<EOH);
Allow host=suika port=80
Deny host=suika
Allow host=suika.fam.cx port=80
Deny host=suika.fam.cx
Deny host=localhost
Deny host=*.localdomain
Deny ipv4=0.0.0.0/8
Deny ipv4=10.0.0.0/8
Deny ipv4=127.0.0.0/8
Deny ipv4=169.254.0.0/16
Deny ipv4=172.0.0.0/11
Deny ipv4=192.0.2.0/24
Deny ipv4=192.88.99.0/24
Deny ipv4=192.168.0.0/16
Deny ipv4=198.18.0.0/15
Deny ipv4=224.0.0.0/4
Deny ipv4=255.255.255.255/32
Deny ipv6=0::0/0
Allow host=*
EOH

    unless ($host_permit->check ($uri->uri_host, $uri->uri_port || 80)) {
      my $r = WebHACC::Input::Error->new;
      $r->{uri} = $request_uri;
      $r->{request_uri} = $request_uri;
      $r->{error_status_text} = 'Connection to the host is forbidden';
      return $r;
    }
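
    ## Fetch the document with a restricted user agent.  WDCC::LWPUA
    ## (defined at the end of this file) applies the same scheme and
    ## host checks to any redirect it is asked to follow.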
    require LWP::UserAgent;
    my $ua = WDCC::LWPUA->new;
    $ua->{wdcc_dom} = $dom;
    $ua->{wdcc_host_permit} = $host_permit;
    $ua->agent ('Mozilla'); ## TODO: for now.
    $ua->parse_head (0);
    $ua->protocols_allowed ([qw/http/]);
    $ua->max_size (1000_000);
    my $req = HTTP::Request->new (GET => $request_uri);
    $req->header ('Accept-Encoding' => 'identity, *; q=0');
    my $res = $ua->request ($req);
    ## TODO: 401 sets |is_success| true.
    if ($res->is_success or $http->get_parameter ('error-page')) {
      $r->{base_uri} = $res->base; ## NOTE: It does check |Content-Base|, |Content-Location|, and <base>. ## TODO: Use our own code!
      $r->{uri} = $res->request->uri;
      $r->{request_uri} = $request_uri;

      ## TODO: More strict parsing...
      my $ct = $res->header ('Content-Type');
      if (defined $ct and $ct =~ /;\s*charset\s*=\s*"?([^\s;"]+)"?/i) {
        $r->{charset} = lc $1;
        $r->{charset} =~ tr/\\//d;
        $r->{official_charset} = $r->{charset};
      }

      my $input_charset = $http->get_parameter ('charset');
      if (defined $input_charset and length $input_charset) {
        $r->{charset_overridden}
            = (not defined $r->{charset} or $r->{charset} ne $input_charset);
        $r->{charset} = $input_charset;
      }

      ## TODO: Support for HTTP Content-Encoding

      $r->{s} = ''.$res->content;

      require Whatpm::ContentType;
      ($r->{official_type}, $r->{media_type})
          = Whatpm::ContentType->get_sniffed_type
              (get_file_head => sub {
                 return substr $r->{s}, 0, shift;
               },
               http_content_type_byte => $ct,
               has_http_content_encoding =>
                   defined $res->header ('Content-Encoding'),
               supported_image_types => {});
    } else {
      $r->{uri} = $res->request->uri;
      $r->{request_uri} = $request_uri;
      $r->{error_status_text} = $res->status_line;
    }

    $r->{header_field} = [];
    $res->scan (sub {
      push @{$r->{header_field}}, [$_[0], $_[1]];
    });
    $r->{header_status_code} = $res->code;
    $r->{header_status_text} = $res->message;
  } else {
    $r->{s} = ''.$http->get_parameter ('s');
    $r->{uri} = q<thismessage:/>;
    $r->{request_uri} = q<thismessage:/>;
    $r->{base_uri} = q<thismessage:/>;
    $r->{charset} = ''.$http->get_parameter ('_charset_');
    $r->{charset} =~ s/\s+//g;
    $r->{charset} = 'utf-8' if $r->{charset} eq '';
    $r->{official_charset} = $r->{charset};
    $r->{header_field} = [];

    require Whatpm::ContentType;
    ($r->{official_type}, $r->{media_type})
        = Whatpm::ContentType->get_sniffed_type
            (get_file_head => sub {
               return substr $r->{s}, 0, shift;
             },
             http_content_type_byte => undef,
             has_http_content_encoding => 0,
             supported_image_types => {});
  }
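
  ## The |i| request parameter lets the user override the sniffed
  ## media type; the override is recorded in |media_type_overridden|.
  ## When no type can be determined for direct input, text/html is
  ## assumed.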
  my $input_format = $http->get_parameter ('i');
  if (defined $input_format and length $input_format) {
    $r->{media_type_overridden}
        = (not defined $r->{media_type} or $input_format ne $r->{media_type});
    $r->{media_type} = $input_format;
  }
  if (defined $r->{s} and not defined $r->{media_type}) {
    $r->{media_type} = 'text/html';
    $r->{media_type_overridden} = 1;
  }

  if ($r->{media_type} eq 'text/xml') {
    unless (defined $r->{charset}) {
      $r->{charset} = 'us-ascii';
      $r->{official_charset} = $r->{charset};
    } elsif ($r->{charset_overridden} and $r->{charset} eq 'us-ascii') {
      $r->{charset_overridden} = 0;
    }
  }

  if (length $r->{s} > 1000_000) {
    $r->{error_status_text} = 'Entity-body too large';
    delete $r->{s};
    return $r;
  }

  $r->{inner_html_element} = $http->get_parameter ('e');

  return $r;
} # get_input_document
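
## LWP::UserAgent subclass used by get_input_document: redirects are
## followed only when the target is an http: URL on a host allowed by
## the same Message::Util::HostPermit rules as the original request.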
package WDCC::LWPUA;
BEGIN { push our @ISA, 'LWP::UserAgent'; }

sub redirect_ok {
  my $ua = shift;
  unless ($ua->SUPER::redirect_ok (@_)) {
    return 0;
  }

  my $uris = $_[1]->header ('Location');
  return 0 unless $uris;
  my $uri = $ua->{wdcc_dom}->create_uri_reference ($uris);
  unless ({
           http => 1,
          }->{lc $uri->uri_scheme}) {
    return 0;
  }
  unless ($ua->{wdcc_host_permit}->check ($uri->uri_host, $uri->uri_port || 80)) {
    return 0;
  }
  return 1;
} # redirect_ok

=head1 AUTHOR

Wakaba <w@suika.fam.cx>.

=head1 LICENSE

Copyright 2007-2008 Wakaba <w@suika.fam.cx>

This library is free software; you can redistribute it
and/or modify it under the same terms as Perl itself.