diff --git a/akamai-tools/purge-one-hour b/akamai-tools/purge-one-hour
deleted file mode 100755
index c1dc3b2..0000000
--- a/akamai-tools/purge-one-hour
+++ /dev/null
@@ -1,109 +0,0 @@
-#! /usr/bin/perl
-#
-# script to purge the Akamai cache.
-#
-# Notes:
-#
-# - we limit the purging to files newer than an hour
-# - there must be a file ~openssl/.edgerc with our Akamai credentials
-# - the Akamai supplied program 'akamai-purge' must be installed in
-#   /usr/local/bin
-
-use strict;
-use warnings;
-
-use Cwd qw(:DEFAULT abs_path);
-use File::Basename;
-
-# Find all .html files that include a .inc file, and create a map
-my %inc2html = ();
-
-my $debug = $ENV{DEBUG};
-my $dryrun = $ENV{DRYRUN};
-
-if (scalar @ARGV != 2 || $ARGV[0] eq '-h') {
-    my $prog = basename($0);
-    print STDERR <<_____;
-usage: $prog BASEDIR BASEURL
-
-where BASEDIR is the base directory where original web files are scanned for,
-and BASEURL is the corresponding base URL.
-_____
-    exit($ARGV[0] eq '-h' ? 0 : 1);
-}
-
-my $basedir = shift @ARGV;      # For example, /var/www/openssl
-my $baseurl = shift @ARGV;      # For example, https://www.openssl.org
-
-# This ensures that we have an absolute base directory, and that it's
-# normalised (i.e. doesn't have duplicated or ending slashes)
-my $tmp = abs_path($basedir) or die "$basedir: $!\n";
-$basedir = $tmp;
-
-foreach ( `find $basedir -type f -name '*.html'` ) {
-    chomp;
-    my $file = $_;
-    my ($dn, $fn) = $_ =~ m/^(?:(.*)\/)?([^\/]*)$/;
-    my @incs = ();
-
-    open HTML, $_;
-    foreach ( <HTML> ) {
-        if (/<!--\s*#include\s+virtual="([^"]*)"\s*-->/) {
-            my $vf = $1;
-            $vf = ($vf =~ m|^/|) ? "$basedir$vf" : "$dn/$vf";
-            push @incs, "$vf";
-        }
-    }
-    close HTML;
-
-    foreach ( @incs ) {
-        push @{$inc2html{$_}}, $file;
-    }
-}
-
-if ($debug) {
-    for ( sort keys %inc2html ) {
-        print STDERR "DEBUG: $_ => ", join(", ", @{$inc2html{$_}}), "\n";
-    }
-}
-
-# Find all files younger than an hour
-# Discard those in .git/ and bin/
-# Discard any .ht*
-# For any virtually included file, use the corresponding .html file instead
-# For each remaining file, turn it into a valid URL
-# For any valid index file, duplicate into two URLs without the file,
-# one with an ending slash and one without.
-my %files = ();
-
-foreach ( `find $basedir -type f -mtime -2` ) {
-    chomp;
-    next if /^\Q$basedir\E\/(\.git|bin)/;
-    next if /\/\.ht\w+$/;
-    my $x = $_;
-    my @files = defined $inc2html{$x} ? @{$inc2html{$x}} : ( $x );
-    foreach ( @files ) {
-        s/^\Q$basedir\E\//$baseurl\//;
-        $files{$_} = 1;
-        if ( /^(.*)\/index.(html|cgi|pl|php|xhtml|htm)$/ ) {
-            $files{"$1/"} = $files{"$1"} = 1;
-        }
-    }
-}
-
-# Finally, output the result to the akamai-purge program
-my @files = sort keys %files;
-while ( @files ) {
-    my $count = 500;    # Try not to overwhelm Akamai
-    if ( $dryrun || open PURGE, '| /usr/local/bin/akamai-purge invalidate' ) {
-        printf STDERR
-            "DEBUG: Invoking '/usr/local/bin/akamai-purge invalidate' with:\n"
-            if $debug;
-        while ( @files && $count-- > 0 ) {
-            my $file = pop @files;
-            print STDERR " ", $file, "\n" if $debug;
-            print PURGE $file, "\n" unless $dryrun;
-        }
-        close PURGE unless $dryrun;
-    }
-}
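
For reference, the central technique in the deleted script is the include-to-page map: HTML pages are scanned for SSI directives, and each included file is mapped back to the pages that embed it, so that a change to a .inc file purges the rendered pages rather than the include itself. Below is a minimal standalone sketch of that step, assuming the same <!--#include virtual="..."--> directive form the script matched; it is a reconstruction for illustration, not the deleted code, and it substitutes File::Find for the script's shell `find` call. The default directory is a placeholder.

#!/usr/bin/perl
# Sketch: build the include => pages map used by the purge step above.
use strict;
use warnings;
use File::Find;

my $basedir = shift // '.';   # web root; illustrative default
my %inc2html;                 # include path => pages that embed it

find(sub {
    return unless -f && /\.html$/;
    my $page = $File::Find::name;
    my $dir  = $File::Find::dir;
    open my $fh, '<', $_ or return;
    while (my $line = <$fh>) {
        while ($line =~ /<!--\s*#include\s+virtual="([^"]*)"\s*-->/g) {
            # Absolute include paths resolve against the web root,
            # relative ones against the including page's directory,
            # mirroring the $vf handling in the deleted script.
            my $inc = $1 =~ m{^/} ? "$basedir$1" : "$dir/$1";
            push @{ $inc2html{$inc} }, $page;
        }
    }
    close $fh;
}, $basedir);

# Report the map; the purge step would substitute these pages for any
# recently modified include before handing URLs to akamai-purge.
for my $inc (sort keys %inc2html) {
    print "$inc => @{ $inc2html{$inc} }\n";
}

Invoked as, say, `perl map-includes.pl /var/www/openssl` (script name hypothetical), this prints the same mapping the deleted script emitted under DEBUG.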