use strict;

my $data_dir_name = q[data/];
my $data2_dir_name = q[data2/];
my $data2_suffix = q[.dat];
my $lock_suffix = q[.lock];

## SEE ALSO: |Makefile|.
my $fallback_file_name = $data2_dir_name . 'fallback' . $data2_suffix;

## SEE ALSO: |Makefile|.
my $patch_file_name = $data2_dir_name . 'modified.txt';

our $UseCVS //= 1;

sub normalize ($) {
  my $s = shift;
  $s =~ s/\s+/ /g;
  $s =~ s/^ //;
  $s =~ s/ $//g;
  return $s;
} # normalize

sub get_hash ($) {
  require Digest::MD5;
  require Encode;
  return Digest::MD5::md5_hex (Encode::encode ('utf8', normalize ($_[0])));
} # get_hash
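
## Usage sketch (added comment; the strings below are illustrative):
## keys are Digest::MD5 hex digests of the whitespace-normalized input
## (encoded with Encode's 'utf8'), so differently spaced variants of the
## same text share one entry:
##   get_hash ("Not  supported\n") eq get_hash ('Not supported');  # true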

sub create_pattern1 ($) {
  my $s = quotemeta shift;
  $s =~ s/\\\*/(.+)/g;
  return $s;
} # create_pattern1

sub replace_pattern2 ($@) {
  my $s = shift;
  my @arg = @_;
  $s =~ s/\$(\d+)/$arg[$1 - 1]/g;
  return $s;
} # replace_pattern2
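
## Usage sketch (added comment; the strings below are illustrative):
## create_pattern1 turns a "*" wildcard into a capturing regexp source,
## and replace_pattern2 substitutes the captured values back into a
## template via "$1", "$2", ...:
##   my $re = create_pattern1 ('Unknown element *');  # "Unknown\ element\ (.+)"
##   if ('Unknown element foo' =~ /^$re$/) {
##     replace_pattern2 ('Element $1 is not known', $1);
##       # returns "Element foo is not known"
##   }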

sub read_data_file ($) {
  my $file_name = shift;
  if (-f $file_name) {
    warn "Loading $file_name...\n";
    return do $file_name;
  } else {
    warn "File $file_name not found\n";
    return {};
  }
} # read_data_file

sub write_data_file ($$) {
  my ($file_name, $data) = @_;

  require Data::Dumper;
  local $Data::Dumper::Sortkeys = 1;

  my $had_file = -f $file_name;
  open my $file, '>:encoding(utf8)', $file_name or die "$0: $file_name: $!";
  print $file Data::Dumper::Dumper ($data);
  close $file;
  unless ($had_file) {
    system_ ('cvs', 'add', $file_name) if $UseCVS;
  }
} # write_data_file

sub hash_to_file_name ($) {
  return $data2_dir_name . substr ($_[0], 0, 2) . $data2_suffix;
} # hash_to_file_name
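
## Note (added comment): entries are sharded by the first two hex digits
## of the hash, e.g. a hash beginning with "3f" lives in "data2/3f.dat",
## while the shared fallback data is kept in "data2/fallback.dat".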

my $Entry = {};
my $ModifiedHash = {};

sub get_entry ($) {
  my $hash = shift;

  my $file_name = hash_to_file_name ($hash);
  unless ($Entry->{$file_name}) {
    $Entry->{$file_name} = read_data_file ($file_name);
  }

  if ($Entry->{$file_name}->{exact}->{$hash}) {
    return (0, $Entry->{$file_name}->{exact}->{$hash});
  } elsif ($Entry->{$file_name}->{pattern}->{$hash}) {
    return (1, $Entry->{$file_name}->{pattern}->{$hash});
  } else {
    return (undef, undef);
  }
} # get_entry

sub set_entry ($$$) {
  my ($hash, $is_pattern, $value) = @_;

  my $file_name = hash_to_file_name ($hash);
  unless ($Entry->{$file_name}) {
    $Entry->{$file_name} = read_data_file ($file_name);
  }

  unless ($value) {
    delete $Entry->{$file_name}->{exact}->{$hash};
    delete $Entry->{$file_name}->{pattern}->{$hash};
  } elsif ($is_pattern) {
    delete $Entry->{$file_name}->{exact}->{$hash};
    $Entry->{$file_name}->{pattern}->{$hash} = $value;
  } else {
    $Entry->{$file_name}->{exact}->{$hash} = $value;
    delete $Entry->{$file_name}->{pattern}->{$hash};
  }
  $Entry->{$file_name}->{modified} = 1;
  $ModifiedHash->{$hash} = 1;
} # set_entry
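
## Usage sketch (added comment; the values are illustrative): a hash maps
## to at most one of the "exact" and "pattern" tables; set_entry moves it
## to the requested table (or deletes it when the value is false) and
## marks both the shard file and the hash itself as modified:
##   my $hash = get_hash ('Unknown element *');
##   set_entry ($hash, 1, {en => 'Element $1 is not known'});
##   my ($is_pattern, $entry) = get_entry ($hash);  # (1, {en => ...})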

use Fcntl ':flock';
my $Lock;

sub lock_entry ($) {
  if ($Lock) {
    die "$0: lock_entry: Another entry is locked";
  }

  my $hash = shift;
  my $file_name = hash_to_file_name ($hash) . $lock_suffix;
  open $Lock, '>', $file_name or die "$0: $file_name: $!";
  flock $Lock, LOCK_EX;
} # lock_entry
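
## Note (added comment): the lock is an exclusive flock on a per-shard
## "*.lock" file; the handle stays in $Lock and is only released when the
## handle is closed or the process exits, and since no unlock routine is
## provided here, at most one entry can be locked per process.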

sub commit_entries ($) {
  for my $file_name (keys %{$Entry}) {
    if ($Entry->{$file_name}->{modified}) {
      delete $Entry->{$file_name}->{modified};
      write_data_file ($file_name => $Entry->{$file_name});
    }
  }

  open my $file, '>>', $patch_file_name or die "$0: $patch_file_name: $!";
  for (keys %$ModifiedHash) {
    print $file "$_\n";
  }
  close $file;

  my $msg = shift // $0;
  system_ ('cvs', 'commit', -m => $msg, $data2_dir_name) if $UseCVS;
} # commit_entries
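
## Typical write sequence (added comment; a sketch, not part of the
## original interface documentation, and the commit message is made up):
##   lock_entry ($hash);                   # serialize writers for this shard
##   set_entry ($hash, 0, {en => 'text'}); # stage the change in memory
##   commit_entries ('update data2');      # write shards, log hashes, cvs commit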

sub get_all_entries () {
  opendir my $dir, $data2_dir_name or die "$0: $data2_dir_name: $!";
  for (readdir $dir) {
    next unless /\Q$data2_suffix\E$/;
    my $file_name = $data2_dir_name . $_;
    next if $Entry->{$file_name};

    $Entry->{$file_name} = read_data_file ($file_name);
  }

  return $Entry;
} # get_all_entries

sub for_each_entry_set ($;$) {
  my $code = shift;
  my $on_the_fly = shift;

  opendir my $dir, $data2_dir_name or die "$0: $data2_dir_name: $!";
  for (readdir $dir) {
    next unless /\Q$data2_suffix\E$/;
    my $file_name = $data2_dir_name . $_;
    next if $file_name eq $fallback_file_name;

    if ($Entry->{$file_name}) {
      $code->($file_name, $Entry->{$file_name});
    } elsif ($on_the_fly) {
      $code->($file_name, read_data_file ($file_name));
    } else {
      $Entry->{$file_name} = read_data_file ($file_name);
      $code->($file_name, $Entry->{$file_name});
    }
  }
} # for_each_entry_set

my $FallbackEntry;
sub get_fallback_entry ($) {
  my $hash = shift;
  unless (defined $FallbackEntry) {
    $FallbackEntry = read_data_file ($fallback_file_name);
  }
  return $FallbackEntry->{$hash} // {};
} # get_fallback_entry

sub get_entry_or_fallback_entry ($) {
  my $hash = shift;

  my ($is_pattern, $entry) = get_entry ($hash);
  unless (defined $entry->{en}) {
    $entry = get_fallback_entry ($hash);
  }
  $entry->{tags} ||= [];
  $entry->{isPattern} = 1 if $is_pattern;

  return $entry;
} # get_entry_or_fallback_entry
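
## Note (added comment): the stored entry wins only when it has a defined
## "en" field; otherwise the shared fallback entry for the same hash is
## returned.  Either way the result gets a "tags" arrayref, and
## "isPattern" is set when the value came from the pattern table.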

sub set_fallback_entry ($$) {
  my ($hash, $value) = @_;
  unless (defined $FallbackEntry) {
    $FallbackEntry = read_data_file ($fallback_file_name);
  }
  $FallbackEntry->{$hash} = $value;
} # set_fallback_entry

sub get_fallback_entries () {
  unless (defined $FallbackEntry) {
    $FallbackEntry = read_data_file ($fallback_file_name);
  }

  return $FallbackEntry;
} # get_fallback_entries

sub save_fallback_entries () {
  write_data_file ($fallback_file_name => $FallbackEntry)
      if defined $FallbackEntry;
} # save_fallback_entries

sub get_modified_hashes () {
  open my $file, '<', $patch_file_name or die "$0: $patch_file_name: $!";
  return map {tr/\x0D\x0A//d; $_} <$file>;
} # get_modified_hashes

sub htescape ($) {
  my $s = shift;
  $s =~ s/&/&amp;/g;
  $s =~ s/</&lt;/g;
  $s =~ s/"/&quot;/g;
  return $s;
} # htescape

sub system_ (@) {
  (system join (' ', map {quotemeta $_} @_) . " > /dev/null") == 0
      or die "$0: $?";
} # system_

1;

## Author: Wakaba <w@suika.fam.cx>.
## License: Copyright 2008 Wakaba. You are granted a license to use,
## reproduce and create derivative works of this script.
## $Date: 2008/10/27 04:52:39 $