| 1 |
use strict;
use warnings;

## File name conventions for the two data directories and the lock
## files that guard them.
my $data_suffix = q[.dat];
my $data_dir_name = q[data/];
my $data2_dir_name = q[data2/];
my $data2_suffix = q[.dat];
my $lock_suffix = q[.lock];

## SEE ALSO: |Makefile|.
my $fallback_file_name = $data2_dir_name . 'fallback' . $data2_suffix;

## SEE ALSO: |Makefile|.
| 23 |
|
|
| 24 |
## Return the MD5 hex digest of the normalized form of the given
## string.  The string is UTF-8 encoded first, because
## |Digest::MD5::md5_hex| dies when handed a string containing wide
## (non-byte) characters.
sub get_hash ($) {
  require Digest::MD5;
  require Encode;
  return Digest::MD5::md5_hex (Encode::encode ('utf8', normalize ($_[0])));
} # get_hash
| 29 |
|
|
| 30 |
sub create_pattern1 ($) { |
sub create_pattern1 ($) { |
| 40 |
return $s; |
return $s; |
| 41 |
} # replace_pattern2 |
} # replace_pattern2 |
| 42 |
|
|
|
## Parse one data file into the two given hash references.
##
##   $data_file_name - path of the UTF-8 encoded data file to read.
##   $exact_data     - hashref receiving normalized |#en| text => |#ja| text.
##   $pattern_data   - hashref receiving compiled pattern => |#ja| text.
##
## The file is a sequence of chunks separated by blank lines, each
## chunk starting with a |#en| or |#pattern| header line followed by a
## |#ja| section.  Chunks with any other header are silently skipped.
sub load_data_file ($$$) {
  my ($data_file_name, $exact_data, $pattern_data) = @_;

  ## |:encoding(utf8)| rather than the lax |:utf8| layer, so that
  ## malformed byte sequences are reported instead of silently
  ## accepted; matches the |:encoding(utf8)| layer used for writing.
  open my $data_file, '<:encoding(utf8)', $data_file_name
      or die "$0: $data_file_name: $!";
  my $data = do { local $/ = undef; <$data_file> };
  $data =~ s/\x0D?\x0A/\n/g;  # Normalize CRLF / LF line endings.

  for (split /\n\n+(?=#)/, $data) {
    ## $ja is undef when a chunk has no |#ja| section; the original
    ## stored undef in that case and so do we.
    my ($en, $ja) = split /\n#ja\n/, $_;
    if ($en =~ s/^#en\n//) {
      $exact_data->{normalize ($en)} = $ja;
    } elsif ($en =~ s/^#pattern\n//) {
      $pattern_data->{create_pattern1 (normalize ($en))} = $ja;
    }
  }
} # load_data_file
|
|
|
|
|
## Invoke $code once for every usable data file in $data_dir_name,
## in sorted file-name order.  The callback is called as
## $code->($full_path, $leaf_name); $_ is aliased to the leaf name,
## as with the original implementation.
##
## Skipped: hidden files (leading "."), editable-but-unused files
## (leading "_"), and anything without the $data_suffix extension.
sub for_each_data_file ($) {
  my ($code) = @_;

  opendir my $data_dir, $data_dir_name or die "$0: $data_dir_name: $!";
  my @file_names = grep { not /^[._]/ } sort readdir $data_dir;
  closedir $data_dir;

  for (@file_names) {
    my $data_file_name = $data_dir_name . $_;
    $code->($data_file_name, $_)
        if $data_file_name =~ /\Q$data_suffix\E$/;
  }
} # for_each_data_file
|
|
|
|
| 43 |
sub read_data_file ($) { |
sub read_data_file ($) { |
| 44 |
my $file_name = shift; |
my $file_name = shift; |
| 45 |
if (-f $file_name) { |
if (-f $file_name) { |
| 56 |
|
|
| 57 |
require Data::Dumper; |
require Data::Dumper; |
| 58 |
local $Data::Dumper::Sortkeys = 1; |
local $Data::Dumper::Sortkeys = 1; |
| 59 |
|
local $Data::Dumper::Useqq = 1; |
| 60 |
|
local *Data::Dumper::qquote = sub { |
| 61 |
|
my $s = shift; |
| 62 |
|
$s =~ s/([\x27\x5C])/sprintf '\x%02X', ord $1/ge; |
| 63 |
|
return q<qq'> . $s . q<'>; |
| 64 |
|
}; # Data::Dumper::qquote |
| 65 |
|
|
| 66 |
my $had_file = -f $file_name; |
my $had_file = -f $file_name; |
| 67 |
open my $file, '>:encoding(utf8)', $file_name or die "$0: $file_name: $!"; |
open my $file, '>:encoding(utf8)', $file_name or die "$0: $file_name: $!"; |
| 68 |
|
print $file "use utf8;\n"; |
| 69 |
print $file Data::Dumper::Dumper ($data); |
print $file Data::Dumper::Dumper ($data); |
| 70 |
close $file; |
close $file; |
| 71 |
unless ($had_file) { |
unless ($had_file) { |
| 164 |
return $Entry; |
return $Entry; |
| 165 |
} # get_all_entries |
} # get_all_entries |
| 166 |
|
|
| 167 |
|
## Invoke $code for each entry-set data file under $data2_dir_name.
##
##   $code       - callback, invoked as $code->($file_name, $entry_set).
##   $on_the_fly - if true, files not already cached in $Entry are read
##                 fresh and NOT cached; otherwise the loaded set is
##                 cached in $Entry for subsequent calls.
##
## The fallback file is excluded.  Files are visited in sorted order
## so iteration is deterministic, consistent with |for_each_data_file|
## (plain |readdir| order is filesystem dependent).
sub for_each_entry_set ($;$) {
  my $code = shift;
  my $on_the_fly = shift;

  opendir my $dir, $data2_dir_name or die "$0: $data2_dir_name: $!";
  for (sort { $a cmp $b } readdir $dir) {
    next unless /\Q$data2_suffix\E$/;
    my $file_name = $data2_dir_name . $_;
    next if $file_name eq $fallback_file_name;

    if ($Entry->{$file_name}) {
      $code->($file_name, $Entry->{$file_name});
    } elsif ($on_the_fly) {
      $code->($file_name, read_data_file ($file_name));
    } else {
      $Entry->{$file_name} = read_data_file ($file_name);
      $code->($file_name, $Entry->{$file_name});
    }
  }
} # for_each_entry_set
| 187 |
|
|
| 188 |
## Lazily loaded fallback entry set; presumably keyed by entry hash
## (see |set_fallback_entry|) — loaded on first use.
my $FallbackEntry;
| 189 |
sub get_fallback_entry ($) { |
sub get_fallback_entry ($) { |
| 190 |
my $hash = shift; |
my $hash = shift; |
| 201 |
unless (defined $entry->{en}) { |
unless (defined $entry->{en}) { |
| 202 |
$entry = get_fallback_entry ($hash); |
$entry = get_fallback_entry ($hash); |
| 203 |
} |
} |
| 204 |
$entry->{tags} ||= [] if defined $entry->{en}; |
$entry->{tags} ||= []; |
| 205 |
$entry->{isPattern} = 1 if $is_pattern; |
$entry->{isPattern} = 1 if $is_pattern; |
| 206 |
|
|
| 207 |
return $entry; |
return $entry; |
| 215 |
$FallbackEntry->{$hash} = $value; |
$FallbackEntry->{$hash} = $value; |
| 216 |
} # set_fallback_entry |
} # set_fallback_entry |
| 217 |
|
|
| 218 |
## Return the fallback entry set, loading it from
## $fallback_file_name on first use and caching it in $FallbackEntry
## thereafter.
sub get_fallback_entries () {
  unless (defined $FallbackEntry) {
    $FallbackEntry = read_data_file ($fallback_file_name);
  }
  return $FallbackEntry;
} # get_fallback_entries
| 225 |
|
|
| 226 |
sub save_fallback_entries () { |
sub save_fallback_entries () { |
| 227 |
write_data_file ($fallback_file_name => $FallbackEntry) |
write_data_file ($fallback_file_name => $FallbackEntry) |
| 233 |
return map {tr/\x0D\x0A//d; $_} <$file>; |
return map {tr/\x0D\x0A//d; $_} <$file>; |
| 234 |
} # get_modified_hashes |
} # get_modified_hashes |
| 235 |
|
|
|
## Discard all recorded modified hashes by truncating the patch file.
sub clear_modified_hashes () {
  ## Opening with |>| truncates the file; previously open/close
  ## failures were silently ignored — now they die, consistent with
  ## every other file operation in this script.
  open my $file, '>', $patch_file_name
      or die "$0: $patch_file_name: $!";
  close $file or die "$0: $patch_file_name: $!";
} # clear_modified_hashes
|
|
|
|
| 236 |
sub htescape ($) { |
sub htescape ($) { |
| 237 |
my $s = shift; |
my $s = shift; |
| 238 |
$s =~ s/&/&/g; |
$s =~ s/&/&/g; |
| 248 |
|
|
| 249 |
1;

## Author: Wakaba <w@suika.fam.cx>.
## License: Copyright 2008 Wakaba. You are granted a license to use,
## reproduce and create derivative works of this script.
## $Date$