#!/usr/bin/perl
package IkiWiki::Plugin::amazon_s3;
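# This plugin mirrors the rendered wiki to an Amazon S3 bucket: it wraps
# IkiWiki's writefile and prune (see the IkiWiki package section near the
# end of this file) so that every file written to the destdir is also
# uploaded to S3, and every pruned file is deleted from S3.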

use warnings;
no warnings 'redefine';
use strict;
use IkiWiki 3.00;
use IkiWiki::Render;
use Net::Amazon::S3;

# Store references to real subs before overriding them.
our %subs;
BEGIN {
	foreach my $sub (qw{IkiWiki::writefile IkiWiki::prune}) {
		$subs{$sub}=\&$sub;
	}
};

sub import {
	hook(type => "getopt", id => "amazon_s3", call => \&getopt);
	hook(type => "getsetup", id => "amazon_s3", call => \&getsetup);
	hook(type => "checkconfig", id => "amazon_s3", call => \&checkconfig);
}

sub getopt () {
	eval q{use Getopt::Long};
	error($@) if $@;
	Getopt::Long::Configure('pass_through');
	GetOptions("delete-bucket" => sub {
		my $bucket=getbucket();
		debug(gettext("deleting bucket.."));
		my $resp = $bucket->list_all or die $bucket->err . ": " . $bucket->errstr;
		foreach my $key (@{$resp->{keys}}) {
			debug("\t".$key->{key});
			$bucket->delete_key($key->{key}) or die $bucket->err . ": " . $bucket->errstr;
		}
		$bucket->delete_bucket or die $bucket->err . ": " . $bucket->errstr;
		debug(gettext("done"));
		exit(0);
	});
}
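
# The --delete-bucket option registered above deletes every key in the
# bucket and then the bucket itself. It would typically be passed to an
# ikiwiki run that loads this plugin, for example (illustrative command
# line; the setup file name is an assumption):
#   ikiwiki --setup wiki.setup --delete-bucket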

sub getsetup () {
	return
		plugin => {
			safe => 0,
			rebuild => 0,
		},
		amazon_s3_key_id => {
			type => "string",
			example => "XXXXXXXXXXXXXXXXXXXX",
			description => "public access key id",
			safe => 1,
			rebuild => 0,
		},
		amazon_s3_key_file => {
			type => "string",
			example => "$ENV{HOME}/.s3_key",
			description => "file holding secret key (must not be readable by others!)",
			safe => 0, # ikiwiki reads this file
			rebuild => 0,
		},
		amazon_s3_bucket => {
			type => "string",
			example => "mywiki",
			description => "globally unique name of bucket to store wiki in",
			safe => 1,
			rebuild => 1,
		},
		amazon_s3_prefix => {
			type => "string",
			example => "wiki/",
			description => "a prefix to prepend to each page name",
			safe => 1,
			rebuild => 1,
		},
		amazon_s3_location => {
			type => "string",
			example => "EU",
			description => "which S3 datacenter to use (leave blank for default)",
			safe => 1,
			rebuild => 1,
		},
		amazon_s3_dupindex => {
			type => "boolean",
			example => 0,
			description => "store each index file twice? (allows urls ending in \"/index.html\" and \"/\")",
			safe => 1,
			rebuild => 1,
		},
}
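
# Illustrative setup-file fragment built from the examples above (values
# are placeholders, not working credentials; surrounding setup-file syntax
# omitted):
#	amazon_s3_key_id => 'XXXXXXXXXXXXXXXXXXXX',
#	amazon_s3_key_file => "$ENV{HOME}/.s3_key",
#	amazon_s3_bucket => 'mywiki',
#	amazon_s3_prefix => 'wiki/',
#	amazon_s3_location => 'EU',
#	amazon_s3_dupindex => 0,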

sub checkconfig {
	foreach my $field (qw{amazon_s3_key_id amazon_s3_key_file
			amazon_s3_bucket}) {
		if (! exists $config{$field} || ! defined $config{$field}) {
			error(sprintf(gettext("Must specify %s"), $field));
		}
	}
	if (! exists $config{amazon_s3_prefix} ||
	    ! defined $config{amazon_s3_prefix}) {
		$config{amazon_s3_prefix}="wiki/";
	}
}

my $bucket;
sub getbucket {
	return $bucket if defined $bucket;

	open(IN, "<", $config{amazon_s3_key_file}) || error($config{amazon_s3_key_file}.": ".$!);
	my $key=<IN>;
	chomp $key;
	close IN;

	my $s3=Net::Amazon::S3->new({
		aws_access_key_id => $config{amazon_s3_key_id},
		aws_secret_access_key => $key,
		retry => 1,
	});

	# make sure the bucket exists
	if (exists $config{amazon_s3_location}) {
		$bucket=$s3->add_bucket({
			bucket => $config{amazon_s3_bucket},
			location_constraint => $config{amazon_s3_location},
		});
	}
	else {
		$bucket=$s3->add_bucket({
			bucket => $config{amazon_s3_bucket},
		});
	}

	if (! $bucket) {
		# Try to use existing bucket.
		$bucket=$s3->bucket($config{amazon_s3_bucket});
	}
	if (! $bucket) {
		error(gettext("Failed to create S3 bucket: ").
			$s3->err.": ".$s3->errstr."\n");
	}

	return $bucket;
}

# Given a file, return any S3 keys associated with it.
sub file2keys ($) {
	my $file=shift;

	my @keys;
	if ($file =~ /^\Q$config{destdir}\/\E(.*)/) {
		push @keys, $config{amazon_s3_prefix}.$1;

		# Munge foo/index.html to foo/
		if ($keys[0]=~/(^|.*\/)index.$config{htmlext}$/) {
			# A duplicate might need to be stored under the
			# unmunged name too.
			if (!$config{usedirs} || $config{amazon_s3_dupindex}) {
				push @keys, $1;
			}
			else {
				@keys=($1);
			}
		}
	}
	return @keys;
}
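
# Example of the mapping (assumed values: destdir "/srv/wiki",
# amazon_s3_prefix "wiki/", htmlext "html", usedirs enabled):
#   "/srv/wiki/style.css"      -> ("wiki/style.css")
#   "/srv/wiki/foo/index.html" -> ("wiki/foo/"), or with
#   amazon_s3_dupindex set     -> ("wiki/foo/index.html", "wiki/foo/")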

package IkiWiki;
use File::MimeInfo;
use Encode;
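
# The subs below are compiled into the IkiWiki package and, thanks to the
# "no warnings 'redefine'" above, replace the core writefile and prune.
# The originals were saved in %IkiWiki::Plugin::amazon_s3::subs and are
# called from inside each wrapper.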

# This is a wrapper around the real writefile.
sub writefile ($$$;$$) {
	my $file=shift;
	my $destdir=shift;
	my $content=shift;
	my $binary=shift;
	my $writer=shift;

	# First, write the file to disk.
	my $ret=$IkiWiki::Plugin::amazon_s3::subs{'IkiWiki::writefile'}->($file, $destdir, $content, $binary, $writer);

	my @keys=IkiWiki::Plugin::amazon_s3::file2keys("$destdir/$file");

	# Store the data in S3.
	if (@keys) {
		my $bucket=IkiWiki::Plugin::amazon_s3::getbucket();

		# The http layer tries to downgrade utf-8
		# content, but that can fail (see
		# http://rt.cpan.org/Ticket/Display.html?id=35710),
		# so force convert it to bytes.
		$content=encode_utf8($content) if defined $content;

		my %opts=(
			acl_short => 'public-read',
			content_type => mimetype("$destdir/$file"),
		);

		# If there are multiple keys to write, data is sent
		# multiple times.
		# TODO: investigate using the new copy operation.
		#       (It may not be robust enough.)
		foreach my $key (@keys) {
			my $res;
			if (! $writer) {
				$res=$bucket->add_key($key, $content, \%opts);
			}
			else {
				# This test for empty files is a workaround
				# for this bug:
				# http://rt.cpan.org//Ticket/Display.html?id=35731
				if (-z "$destdir/$file") {
					$res=$bucket->add_key($key, "", \%opts);
				}
				else {
					# read back in the file that the writer emitted
					$res=$bucket->add_key_filename($key, "$destdir/$file", \%opts);
				}
			}
			if (! $res) {
				error(gettext("Failed to save file to S3: ").
					$bucket->err.": ".$bucket->errstr."\n");
			}
		}
	}

	return $ret;
}

# This is a wrapper around the real prune.
sub prune ($) {
	my $file=shift;

	my @keys=IkiWiki::Plugin::amazon_s3::file2keys($file);

	# Prune files out of S3 too.
	if (@keys) {
		my $bucket=IkiWiki::Plugin::amazon_s3::getbucket();

		foreach my $key (@keys) {
			my $res=$bucket->delete_key($key);
			if (! $res) {
				error(gettext("Failed to delete file from S3: ").
					$bucket->err.": ".$bucket->errstr."\n");
			}
		}
	}

	return $IkiWiki::Plugin::amazon_s3::subs{'IkiWiki::prune'}->($file);
}

1