diff -Nru apt-cacher-1.7.16/apt-cacher apt-cacher-1.7.20.1~18.04.sav0/apt-cacher
--- apt-cacher-1.7.16/apt-cacher 2017-11-08 09:14:29.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/apt-cacher 2019-03-24 17:11:53.000000000 +0000
@@ -37,6 +37,7 @@
use IO::Socket::INET;
use IO::Select;
use IO::Interface::Simple;
+use IO::Interactive ();
use HTTP::Request;
use HTTP::Response;
use HTTP::Date ();
@@ -125,7 +126,10 @@
EOM
}
- die "$0: Version $version\n" if $show_version;
+ if ($show_version) {
+ print STDERR "$0: Version $version\n";
+ exit;
+ }
# Sanity check
die "Chroot directory $chroot invalid: $!" if $chroot && !-d $chroot;
@@ -196,7 +200,6 @@
kill(-15, $$);
}
}
- exit(0);
}
sub reload_config {
@@ -334,7 +337,13 @@
last if ($ret = shift @seg);
}
- $uri->path_segments(@seg ? @seg : undef);
+ # undef here causes errors on perl 5.20:
+ #
+ # Use of uninitialized value $_ in substitution (s///) at /usr/share/perl5/URI/_generic.pm line 107.
+ # Use of uninitialized value $_ in substitution (s///) at /usr/share/perl5/URI/_generic.pm line 107.
+ # Use of uninitialized value $_ in substitution (s///) at /usr/share/perl5/URI/_generic.pm line 109.
+ # Use of uninitialized value $arg[0] in join or string at /usr/share/perl5/URI/_generic.pm line 111.
+ $uri->path_segments(@seg ? @seg : '' );
return $ret;
}
@@ -974,16 +983,26 @@
($range_begin >= $explen)){
# $range_end > $explen is OK. See RFC 7233 Section 2.1
info_message("Invalid range: $rangereq (cached length $explen)");
- sendrsp(HTTP::Response->new(416, "Invalid or unsatisfiable range: $rangereq", ['Content-Range' => "bytes */$explen"]));
- return;
+ for ($response->request->header('User-Agent') =~ /APT.+\(([0-9.]+)\)$/i) {
+ my @ua_ver = split /\./;
+ # 416 response handling is broken in Apt before 1.1
+ if ($ua_ver[0] >= 2 or $ua_ver[0] == 1 && $ua_ver[1] >= 1) {
+ sendrsp(HTTP::Response->new(416, "Invalid or unsatisfiable range: $rangereq", ['Content-Range' => "bytes */$explen"]));
+ return;
+ }
+ info_message("Not sending 416 to broken user agent " .$response->request->header('User-Agent'));
+ # Just go on to return complete file.
+ }
+ }
+ else {
+ $cfg->{debug} && debug_message("Range bytes: $range_begin-$range_end/$explen");
+ $response->header('Content-Range' => "bytes $range_begin-$range_end/$explen");
+ $response->code(206);
+ $response->message('Partial Content');
+ $response->content_length($range_end - $range_begin + 1); # Size of Partial Content
+ $curlen = $range_begin;
+ $explen = $range_end + 1;
}
- $cfg->{debug} && debug_message("Range bytes: $range_begin-$range_end/$explen");
- $response->header('Content-Range' => "bytes $range_begin-$range_end/$explen");
- $response->code(206);
- $response->message('Partial Content');
- $response->content_length($range_end - $range_begin + 1); # Size of Partial Content
- $curlen = $range_begin;
- $explen = $range_end + 1;
}
}
@@ -1143,8 +1162,8 @@
Usage:
- Edit /etc/apt/apt.conf to include the configuration
- Acquire::http::proxy=http://$hosturl
+Edit /etc/apt/apt.conf or add a configuration fragment under /etc/apt/apt.conf.d/ containing the following:
+ Acquire::http::Proxy "http://$hosturl";
Alternatively, edit /etc/apt/sources.list so all your HTTP sources are prepended
with the address of your apt-cacher machine and the port, like this:
deb http://example.debian.org/debian unstable main contrib non-free
@@ -1223,7 +1242,7 @@
chomp $message;
if (!defined $erlog_fh) {
- print STDERR "$message\n"; # Better than nothing
+ print STDERR "$message\n" if IO::Interactive::is_interactive(*STDERR); # Better than nothing if we have a tty
return;
}
flock($erlog_fh, LOCK_EX);
@@ -1237,7 +1256,7 @@
my ($msg) = @_;
write_error_log("error [$$]: $msg");
sendrsp(HTTP::Response->new(502, 'apt-cacher internal error (died)', ['Connection' => 'close'])) if $con;
- return;
+ exit 1;
}
# Stuff to append debug messages to the error log.
@@ -2222,11 +2241,9 @@
# Output data as soon as we print it
local $| = 1;
-# Catch early errors so that we don't leak them to the client in CGI/inetd mode
-my $buffer;
-open(my $stderr, '>&', STDERR) or die $!;
-close STDERR; # Must close first
-open(STDERR, '>', \$buffer) or die $!;
+# Install signal handlers to capture error messages
+local $SIG{__WARN__} = sub {write_error_log("warn [$$]: " . shift)};
+local $SIG{__DIE__} = sub {die_handler(shift)};
# Read config and command line, setup variables
setup();
@@ -2234,17 +2251,6 @@
setup_ownership();
open_log_files();
-# Install signal handlers to capture error messages
-local $SIG{__WARN__} = sub {write_error_log("warn [$$]: " . shift)};
-local $SIG{__DIE__} = sub {die_handler(shift)};
-
-# Replay early errors
-if (defined $buffer) {
- open (my $fh, '<', \$buffer) or die $!;
- warn($_) while <$fh>;
- undef $buffer;
-}
-
#Signal Handlers
local $SIG{CHLD} = 'IGNORE';
local $SIG{TERM} = sub {$cfg->{debug} && debug_message('received SIGTERM, terminating'); exit};
@@ -2257,9 +2263,6 @@
# Daemon mode
unless ($mode) {
- # Restore STDERR
- close STDERR; # Must close first
- open(STDERR, '>&', $stderr) or die $!;
$listeners=IO::Select->new;
for my $daemon_addr (cfg_split($cfg->{daemon_addr})) {
diff -Nru apt-cacher-1.7.16/apt-proxy-to-apt-cacher apt-cacher-1.7.20.1~18.04.sav0/apt-proxy-to-apt-cacher
--- apt-cacher-1.7.16/apt-proxy-to-apt-cacher 2005-08-13 13:15:11.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/apt-proxy-to-apt-cacher 2019-03-24 11:10:27.000000000 +0000
@@ -156,7 +156,6 @@
$answer= ;
if($answer eq "y\n") {
system "/etc/init.d/apt-proxy stop";
- system "echo AUTOSTART=1 >> /etc/default/apt-cacher";
system "/etc/init.d/apt-cacher restart";
}
diff -Nru apt-cacher-1.7.16/config/apt-cacher.conf apt-cacher-1.7.20.1~18.04.sav0/config/apt-cacher.conf
--- apt-cacher-1.7.16/config/apt-cacher.conf 2017-10-12 09:54:01.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/config/apt-cacher.conf 2019-03-24 11:10:27.000000000 +0000
@@ -194,7 +194,7 @@
# fetch upgrade information. As the naming scheme is unpredictable, new release
# names need to be added to this list.
#
-#ubuntu_release_names = dapper, edgy, feisty, gutsy, hardy, intrepid, jaunty, karmic, lucid, maverick, natty, oneiric, precise, quantal, raring, saucy, trusty, utopic, vivid, wily, xenial, yakkety, zesty
+#ubuntu_release_names = dapper, edgy, feisty, gutsy, hardy, intrepid, jaunty, karmic, lucid, maverick, natty, oneiric, precise, quantal, raring, saucy, trusty, utopic, vivid, wily, xenial, yakkety, zesty, artful, bionic, cosmic, disco
### HOUSEKEEPING ###
diff -Nru apt-cacher-1.7.16/config/apt-cacher.default apt-cacher-1.7.20.1~18.04.sav0/config/apt-cacher.default
--- apt-cacher-1.7.16/config/apt-cacher.default 2017-10-12 09:54:01.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/config/apt-cacher.default 1970-01-01 00:00:00.000000000 +0000
@@ -1,10 +0,0 @@
-# apt-cacher daemon startup configuration file
-
-# Set to 1 to run apt-cacher as a standalone daemon, set to 0 if you are going
-# to run apt-cacher from /etc/inetd or in CGI mode (deprecated). Alternatively,
-# invoking "dpkg-reconfigure apt-cacher" should do the work for you.
-#
-AUTOSTART=0
-
-# extra settings to override the ones in apt-cacher.conf
-# EXTRAOPT=" daemon_port=3142 limit=3M "
diff -Nru apt-cacher-1.7.16/config/apt-cacher.default.md5sum apt-cacher-1.7.20.1~18.04.sav0/config/apt-cacher.default.md5sum
--- apt-cacher-1.7.16/config/apt-cacher.default.md5sum 2017-10-12 09:54:01.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/config/apt-cacher.default.md5sum 1970-01-01 00:00:00.000000000 +0000
@@ -1,4 +0,0 @@
-afc7a4b065275465c1eeb5a09c985bde AUTOSTART=0
-f269a1c735ae47d7068db3ba5641a08b AUTOSTART=1
-1207bbf54d26ab191dbac80fe336dc48 pre 1.7: AUTOSTART=0
-046661f9e728b783ea90738769219d71 pre 1.7: AUTOSTART=1
diff -Nru apt-cacher-1.7.16/debian/apt-cacher.8 apt-cacher-1.7.20.1~18.04.sav0/debian/apt-cacher.8
--- apt-cacher-1.7.16/debian/apt-cacher.8 2017-10-12 09:54:01.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/debian/apt-cacher.8 2019-03-24 11:10:27.000000000 +0000
@@ -85,26 +85,18 @@
Apt\-cacher can be installed in various ways on the server. The recommended way
is by running the program as a daemon. This should give the best performance and
the lowest overall memory usage.
-
-.SH Daemon Mode
-.SS Stand\-alone Daemon:
-Edit the file /etc/default/apt\-cacher and change AUTOSTART=1, then run (as
-root)
-.IP
-/etc/init.d/apt\-cacher start
.PP
-to start the daemon.
-.SS Inetd Daemon:
-Edit /etc/inetd.conf and add the line
-.IP
-3142 stream tcp nowait www\-data /usr/sbin/apt\-cacher apt\-cacher \-i
-.PP
-Restart or send SIGHUP to inetd after saving the file. This is a good method if
-you do not wish the daemon to be loaded all the time.
+The easiest method of configuration is to use dpkg-reconfigure(8) in which case
+debconf(1) will do the work for you.
+.SH Daemon Modes
+Choose between stand\-alone daemon mode
+where the daemon runs continuously and inetd mode where the daemon is run as
+required by the inetd(8) superserver.
.PP
In either daemon mode, clients can access the server using
.B http://apt\-cacher.server:port/
-.PP
+.SS Stand\-alone Daemon:
+.SS Inetd Daemon:
NOTE: in inetd mode access control checks are not performed and the
allowed_hosts and denied_hosts options have no effect. Access controls for inetd
can be implemented using using inetd or tcpd wrapper. See inetd.conf(5) and
@@ -540,7 +532,7 @@
.B request_empty_lines [5]
The number of empty lines tolerated before an incoming connection is closed.
.TP
-.B request_timeout [10]
+.B request_timeout [30]
Maximum time in seconds that will be waited for a incoming request before
closing the connection.
.SH CLIENT CONFIGURATION
diff -Nru apt-cacher-1.7.16/debian/apt-cacher.default apt-cacher-1.7.20.1~18.04.sav0/debian/apt-cacher.default
--- apt-cacher-1.7.16/debian/apt-cacher.default 1970-01-01 00:00:00.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/debian/apt-cacher.default 2019-03-24 11:10:27.000000000 +0000
@@ -0,0 +1,4 @@
+# apt-cacher daemon startup configuration file
+
+# extra settings to override the ones in apt-cacher.conf
+# EXTRAOPT=" daemon_port=3142 limit=3M "
diff -Nru apt-cacher-1.7.16/debian/apt-cacher.init apt-cacher-1.7.20.1~18.04.sav0/debian/apt-cacher.init
--- apt-cacher-1.7.16/debian/apt-cacher.init 2017-09-01 10:26:40.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/debian/apt-cacher.init 2019-03-24 11:10:27.000000000 +0000
@@ -7,7 +7,7 @@
# Required-Stop: $remote_fs
# Default-Start: 2 3 4 5
# Default-Stop: 0 1 6
-# Short-Description: apt-cacher package caching proxy daemon
+# Short-Description: apt-cacher package caching proxy daemon
# Description: The apt-cacher service is used to cache packages for a system or LAN
### END INIT INFO
@@ -18,11 +18,13 @@
RUNDIR=/var/run/$NAME
PIDFILE=$RUNDIR/$NAME.pid
SCRIPTNAME=/etc/init.d/$NAME
+CONFIG_FILES="/etc/$NAME/$NAME.conf $(run-parts --list /etc/$NAME/conf.d)"
+
# Gracefully exit if the package has been removed.
test -x $DAEMON || exit 0
-# Read config file if it is present.
+# Read default file if it is present.
if [ -r /etc/default/$NAME ]
then
. /etc/default/$NAME
@@ -30,42 +32,53 @@
. /lib/lsb/init-functions
+get_config() {
+ echo $EXTRAOPT | tr -s ' ' '\n' | \
+ sed -n "s/^\s*$1\s*=//p" $CONFIG_FILES - | \
+ tail -1 | tr -d '[:blank:]'
+}
+
#
# Function that starts the daemon/service.
#
d_start() {
-
- if test "$AUTOSTART" = 1 ; then
- start-stop-daemon --start --quiet \
- --exec $DAEMON -- -R 3 -d -p $PIDFILE $EXTRAOPT && \
- echo "$NAME."
- else
- echo "Not started (AUTOSTART not enabled in /etc/default/$NAME)";
-
- # apt-cacher needs $RUNDIR, but is not able to create it in inetd or CGI mode
- if test ! -d "$RUNDIR"; then
- mkdir -m 755 "$RUNDIR"
- CONFIG_FILES="/etc/$NAME/$NAME.conf $(run-parts --list /etc/$NAME/conf.d)"
- RUN_AS_USER=$(sed -n 's/^\s*user\s*=//p' $CONFIG_FILES | tail -1 | tr -d '[:blank:]')
- RUN_AS_GROUP=$(sed -n 's/^\s*group\s*=//p' $CONFIG_FILES | tail -1 | tr -d '[:blank:]')
- [ "$RUN_AS_USER" ] && chown $RUN_AS_USER "$RUNDIR"
- [ "$RUN_AS_GROUP" ] && chgrp $RUN_AS_GROUP "$RUNDIR"
- fi
-
- fi
+ start-stop-daemon --start --quiet \
+ --exec $DAEMON -- -R 3 -d -p $PIDFILE $EXTRAOPT && \
+ echo "$NAME."
}
#
# Function that stops the daemon/service.
#
d_stop() {
- start-stop-daemon --stop --quiet --retry=TERM/10/KILL/5 --pidfile $PIDFILE \
- --name $NAME
+ start-stop-daemon --stop --quiet --retry=TERM/10/KILL/5 --pidfile $PIDFILE \
+ --name $NAME
- # Also stop any running libcurl backend
- /usr/share/apt-cacher/libcurl.pl EXIT
+ # Also stop any running libcurl backend
+ /usr/share/apt-cacher/libcurl.pl EXIT
}
+# apt-cacher needs $RUNDIR, but is not able to create it in inetd
+# or CGI mode, so ensure it exists
+if test ! -d $RUNDIR; then
+ mkdir -m 755 $RUNDIR
+ RUN_AS_USER=$(get_config user)
+ RUN_AS_GROUP=$(get_config group)
+ [ "$RUN_AS_USER" ] && chown "$RUN_AS_USER" $RUNDIR
+ [ "$RUN_AS_GROUP" ] && chgrp "$RUN_AS_GROUP" $RUNDIR
+fi
+
+# Nothing more to do if apt-cacher run on same port from
+# /etc/inetd.conf
+RUN_ON_PORT=$(get_config daemon_port)
+if [ -f /etc/inetd.conf ] && \
+ grep -E -q "^${RUN_ON_PORT:=3142}\s+stream\s+tcp\s+nowait\s+www-data\s+/usr/sbin/apt-cacher\s+apt-cacher\s+-i" /etc/inetd.conf
+then
+ echo "$DESC on port $RUN_ON_PORT handled by /etc/inetd.conf."
+ exit 0
+fi
+
+
case "$1" in
start)
echo -n "Starting $DESC: "
@@ -84,11 +97,11 @@
;;
force-reload|reload)
echo -n "Reloading configuration of $DESC: "
- test -f $PIDFILE && pid=`cat $PIDFILE`
+ test -f $PIDFILE && pid=$(cat $PIDFILE)
if test -z "$pid" ; then
echo "$NAME not running."
else
- kill -HUP $pid && echo "done!."
+ kill -HUP "$pid" && echo "done!."
fi
;;
status)
diff -Nru apt-cacher-1.7.16/debian/apt-cacher.NEWS apt-cacher-1.7.20.1~18.04.sav0/debian/apt-cacher.NEWS
--- apt-cacher-1.7.16/debian/apt-cacher.NEWS 2017-11-08 09:14:29.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/debian/apt-cacher.NEWS 2019-03-24 17:11:53.000000000 +0000
@@ -1,3 +1,12 @@
+apt-cacher (1.7.17) unstable; urgency=medium
+
+ * Use of AUTOSTART=1|0 in /etc/default/apt-cacher is no longer permitted
+ by Debian Policy 4.1.3. On upgrade, unmodified versions of this file
+ will be overwritten, but if you have made other changes to it, you
+ will need to remove references to AUTOSTART manually.
+
+ -- Mark Hindley Tue, 13 Mar 2018 10:12:09 +0000
+
apt-cacher (1.7.10) unstable; urgency=low
* Option http_proxy_auth is now deprecated. It is now possible to
diff -Nru apt-cacher-1.7.16/debian/changelog apt-cacher-1.7.20.1~18.04.sav0/debian/changelog
--- apt-cacher-1.7.16/debian/changelog 2017-11-08 09:16:20.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/debian/changelog 2021-05-07 16:30:15.000000000 +0000
@@ -1,3 +1,62 @@
+apt-cacher (1.7.20.1~18.04.sav0) bionic; urgency=medium
+
+ * Backport to Bionic
+
+ -- Rob Savoury Fri, 07 May 2021 09:30:15 -0700
+
+apt-cacher (1.7.20.1) unstable; urgency=medium
+
+ * Avoid unnecessary dpkg prompting when upgrading from lenny/squeeze
+ (closes: #905178).
+
+ -- Mark Hindley Sun, 24 Mar 2019 17:11:53 +0000
+
+apt-cacher (1.7.20) unstable; urgency=medium
+
+ * Add recent Ubuntu release names to commented config file example.
+ * Increase default request_timeout to 30 seconds.
+ * Update control description to mention Devuan.
+ * Update to Standards version 4.3.0 (no changes).
+ * Work around broken APT versions (< 1.1) which don't understand 416
+ responses.
+ * Add Ubuntu 19.04 release name, disco.
+ * Update to debhelper compat 10.
+
+ -- Mark Hindley Fri, 04 Jan 2019 10:45:52 +0000
+
+apt-cacher (1.7.19) unstable; urgency=medium
+
+ * Update to Standards Version 4.2.1 (no changes).
+ * When upgrading remove unmodified ucf controlled
+ /etc/default/apt-cacher to prevent dpkg from prompting unnecessarily.
+ (closes: #905178).
+
+ -- Mark Hindley Fri, 31 Aug 2018 10:26:03 +0100
+
+apt-cacher (1.7.18) unstable; urgency=medium
+
+ * Improve handling of early errors:
+ - print them to STDERR only if it is attached to a terminal.
+ - Depend on libio-interactive-perl.
+ * Successful exit when printing version.
+ * Bump Standards Version to 4.1.4 (no changes).
+ * Add Ubuntu 18.10 codename cosmic.
+
+ -- Mark Hindley Wed, 13 Jun 2018 16:50:44 +0100
+
+apt-cacher (1.7.17) unstable; urgency=medium
+
+ * Work around URI::path_segments() uninitialized value error on recent
+ versions (1.64 at least).
+ * Fix example configuration (closes: #888962).
+ * Upgrade to Standards version 4.1.3 which now prohibits AUTOSTART=1|0
+ /etc/default/apt-cacher. To comply:
+ - initscript determines inetd mode by looking for entry in
+ /etc/inetd.conf
+ - no longer manage /etc/default/apt-cacher with ucf
+
+ -- Mark Hindley Fri, 23 Mar 2018 16:14:00 +0000
+
apt-cacher (1.7.16) unstable; urgency=medium
* Update to Standards version 4.1.1 (no changes).
diff -Nru apt-cacher-1.7.16/debian/compat apt-cacher-1.7.20.1~18.04.sav0/debian/compat
--- apt-cacher-1.7.16/debian/compat 2017-10-12 09:54:01.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/debian/compat 2019-03-24 11:10:27.000000000 +0000
@@ -1 +1 @@
-9
+10
diff -Nru apt-cacher-1.7.16/debian/control apt-cacher-1.7.20.1~18.04.sav0/debian/control
--- apt-cacher-1.7.16/debian/control 2017-11-08 09:14:37.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/debian/control 2019-03-24 11:10:27.000000000 +0000
@@ -3,16 +3,16 @@
Priority: optional
Maintainer: Mark Hindley
Uploaders: Eduard Bloch
-Build-Depends: debhelper (>= 8.1.0~), po-debconf
-Standards-Version: 4.1.1
+Build-Depends: debhelper (>= 10), po-debconf
+Standards-Version: 4.3.0
Package: apt-cacher
Architecture: all
Pre-Depends: ${misc:Pre-Depends}
-Depends: ${perl:Depends}, ${misc:Depends}, libwww-curl-perl (>=4.00), libwww-perl, ed, libio-interface-perl, libfilesys-df-perl, libnetaddr-ip-perl, lsb-base (>= 3.2-14), update-inetd, libsys-syscall-perl, ucf (>= 0.28), libipc-shareable-perl, libdpkg-perl
+Depends: ${perl:Depends}, ${misc:Depends}, libwww-curl-perl (>=4.00), libwww-perl, ed, libio-interface-perl, libio-interactive-perl, libfilesys-df-perl, libnetaddr-ip-perl, lsb-base (>= 3.2-14), update-inetd, libsys-syscall-perl, libipc-shareable-perl, libdpkg-perl
Recommends: libberkeleydb-perl (>=0.34), libio-compress-lzma-perl
Suggests: libio-socket-inet6-perl, libfreezethaw-perl
-Description: Caching proxy server for Debian/Ubuntu software repositories
+Description: Caching proxy server for Debian/Ubuntu/Devuan software repositories
Apt-cacher performs caching of files requested by apt-get (or other APT clients
such as aptitude or synaptic). Apt-cacher can also proxy Debian Bugs SOAP
requests for apt-listbugs. It is most useful for local area networks with
@@ -32,8 +32,8 @@
The package includes utilities to clean the cache (removing obsolete package
files), generate usage reports and import existing package files. Optional
features include a file checksum verification framework, IPv6 support, FTP and
- HTTPS (proxying only) support as well as the simultaneous caching of different
- repositories (e.g Debian and Ubuntu).
+ HTTPS (proxying only) support as well as the simultaneous caching of
+ repositories from different distributions.
.
Apt-cacher can be used as a replacement for apt-proxy, with no need to modify
client's /etc/apt/sources.list files (and even reusing its config and cached
diff -Nru apt-cacher-1.7.16/debian/dirs apt-cacher-1.7.20.1~18.04.sav0/debian/dirs
--- apt-cacher-1.7.16/debian/dirs 2016-07-01 08:39:40.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/debian/dirs 2019-03-24 11:10:27.000000000 +0000
@@ -1,6 +1,5 @@
usr/sbin
usr/share/apt-cacher
-usr/share/apt-cacher/default
usr/share/apt-cacher/lib
usr/share/apt-cacher/lib/Linux
etc/apt-cacher
diff -Nru apt-cacher-1.7.16/debian/postinst apt-cacher-1.7.20.1~18.04.sav0/debian/postinst
--- apt-cacher-1.7.16/debian/postinst 2016-12-16 10:34:41.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/debian/postinst 2019-03-24 11:10:27.000000000 +0000
@@ -28,7 +28,7 @@
. /usr/share/debconf/confmodule
case "$1" in
- reconfigure|configure)
+ configure|reconfigure)
# Handle upgrade from checksumming.conf
@@ -41,9 +41,9 @@
echo "Importing checksum config from $CONFDIR/checksumming.conf"
if grep -E -q "^[#[:space:]]*checksum=" $CONFFILE ; then
- TEMP_FILE=`mktemp -p /tmp`
- sed 's/^[#[:space:]]*checksum=.*$/checksum=1/' $CONFFILE > $TEMP_FILE
- mv $TEMP_FILE $CONFFILE
+ TEMP_FILE=$(mktemp -p /tmp)
+ sed 's/^[#[:space:]]*checksum=.*$/checksum=1/' $CONFFILE > "$TEMP_FILE"
+ mv "$TEMP_FILE" $CONFFILE
chmod 0644 $CONFFILE
else
echo "checksum=1" >> $CONFFILE
@@ -56,29 +56,20 @@
echo "Running apt-cacher's install script..."
/usr/share/apt-cacher/install.pl
- defaultfile='/etc/default/apt-cacher'
-
- ucf --debconf-ok /usr/share/apt-cacher/default/apt-cacher $defaultfile
- ucfr apt-cacher $defaultfile
-
db_get apt-cacher/mode
case "$RET" in
daemon)
echo "Setup apt-cacher running as standalone daemon."
update-inetd --remove "3142\s.+/usr/sbin/apt-cacher" # PCRE
- sed -i 's/^[#[:space:]]*AUTOSTART=.*$/AUTOSTART=1/' $defaultfile # POSIX.2 RE
;;
inetd)
echo "Setup apt-cacher running from /etc/inetd.conf."
update-inetd --add "3142\tstream\ttcp\tnowait\twww-data\t/usr/sbin/apt-cacher\tapt-cacher\t-i"
- sed -i 's/^[#[:space:]]*AUTOSTART=.*$/AUTOSTART=0/' $defaultfile # POSIX.2 RE
;;
manual)
- # Disable inetd and daemon
- update-inetd --remove "3142\s.+/usr/sbin/apt-cacher" # PCRE
- sed -i 's/^[#[:space:]]*AUTOSTART=.*$/AUTOSTART=0/' $defaultfile # POSIX.2 RE
- cat <&2
@@ -109,7 +99,4 @@
#DEBHELPER#
-db_stop
exit 0
-
-
diff -Nru apt-cacher-1.7.16/debian/postrm apt-cacher-1.7.20.1~18.04.sav0/debian/postrm
--- apt-cacher-1.7.16/debian/postrm 2016-07-01 08:39:40.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/debian/postrm 2018-08-31 09:19:24.000000000 +0000
@@ -4,22 +4,6 @@
case "$1" in
purge)
- defaultfile='/etc/default/apt-cacher'
- # we mimic dpkg as closely as possible, so we remove configuration
- # files with dpkg and ucf backup extensions too:
- ### Some of the following is from Tore Anderson:
- for ext in '~' '%' .bak .dpkg-tmp .dpkg-new .dpkg-old .dpkg-dist .ucf-new .ucf-old .ucf-dist ''; do
- rm -f $defaultfile$ext
- done
-
- # and finally clear it out from the ucf database
- if which ucf >/dev/null; then
- ucf --purge $defaultfile
- fi
- if which ucfr >/dev/null; then
- ucfr --purge apt-cacher $defaultfile
- fi
-
rm -rf /var/cache/apt-cacher /var/log/apt-cacher
;;
diff -Nru apt-cacher-1.7.16/debian/preinst apt-cacher-1.7.20.1~18.04.sav0/debian/preinst
--- apt-cacher-1.7.16/debian/preinst 1970-01-01 00:00:00.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/debian/preinst 2019-03-24 17:11:53.000000000 +0000
@@ -0,0 +1,67 @@
+#! /bin/sh
+# preinst script for apt-cacher
+#
+# see: dh_installdeb(1)
+
+set -e
+
+case "$1" in
+ install|upgrade)
+ if dpkg --compare-versions "$2" ge "1.7.6" && \
+ dpkg --compare-versions "$2" le "1.7.16" ; then
+ # We reverted to dpkg handling of conffile /etc/default/apt-cacher
+ # in 1.7.17 and don't need ucf anymore (introduced in version
+ # 1.7.6). Remove it from the ucf database unconditionally.
+
+ defaultfile='/etc/default/apt-cacher'
+ if which ucf >/dev/null; then
+ echo "Purging obsolete ucf control of conffile $defaultfile."
+ ucf --purge $defaultfile
+ fi
+ if which ucfr >/dev/null; then
+ ucfr --purge apt-cacher $defaultfile
+ fi
+
+ # If ucf kept the squeeze or lenny version of defaultfile, revert
+ # postinst changes to the AUTOSTART setting to prevent dpkg from
+ # prompting.
+ if echo "046661f9e728b783ea90738769219d71 $defaultfile" | md5sum -c >/dev/null 2>&1; then
+ echo "Reverting maintscript changes to squeeze/lenny version of $defaultfile"
+ sed -i s/AUTOSTART=1/AUTOSTART=0/ $defaultfile
+ else
+ # Remove any unmodified ucf version of defaultfile to prevent dpkg
+ # from prompting.
+
+ # Test MD5SUMS are:
+ # 1.7.6: AUTOSTART=0
+ # 1.7.6: AUTOSTART=1
+ # 1.7.14: AUTOSTART=0
+ # 1.7.14: AUTOSTART=1
+ for test_md5sum in afc7a4b065275465c1eeb5a09c985bde \
+ f269a1c735ae47d7068db3ba5641a08b \
+ 58440d3f69d8775a54c9c9e482eb90fb \
+ f7ad90e6cfe327cfab7b16b9206cc56c
+ do
+ if echo "$test_md5sum $defaultfile" | md5sum -c >/dev/null 2>&1; then
+ echo "Removing unmodified ucf controlled version of $defaultfile"
+ rm $defaultfile
+ break
+ fi
+ done
+ fi
+ fi ;;
+
+ abort-upgrade)
+ ;;
+
+ *)
+ echo "preinst called with unknown argument \`$1'" >&2
+ exit 1
+ ;;
+esac
+
+#DEBHELPER#
+
+exit 0
+
+
diff -Nru apt-cacher-1.7.16/debian/rules apt-cacher-1.7.20.1~18.04.sav0/debian/rules
--- apt-cacher-1.7.16/debian/rules 2017-04-24 18:27:50.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/debian/rules 2019-03-24 11:10:27.000000000 +0000
@@ -46,8 +46,6 @@
install -m644 config/apt-cacher.conf debian/apt-cacher/etc/apt-cacher/
install -m644 config/apache.conf debian/apt-cacher/etc/apt-cacher/
- install -m644 config/apt-cacher.default debian/apt-cacher/usr/share/apt-cacher/default/apt-cacher
- install -m644 config/apt-cacher.default.md5sum debian/apt-cacher/usr/share/apt-cacher/default/apt-cacher.md5sum
install -m755 apt-cacher-cgi.pl debian/apt-cacher/usr/share/apt-cacher/
install -m755 apt-cacher debian/apt-cacher/usr/share/apt-cacher/
install -m755 apt-proxy-to-apt-cacher debian/apt-cacher/usr/share/apt-cacher/
diff -Nru apt-cacher-1.7.16/lib/apt-cacher.pl apt-cacher-1.7.20.1~18.04.sav0/lib/apt-cacher.pl
--- apt-cacher-1.7.16/lib/apt-cacher.pl 2017-11-08 09:14:29.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/lib/apt-cacher.pl 2019-03-24 17:11:53.000000000 +0000
@@ -57,7 +57,7 @@
limit_global => 0,
log_dir => '/var/log/apt-cacher',
request_empty_lines => 5,
- request_timeout => 10,
+ request_timeout => 30,
return_buffer_size => 1048576, # 1Mb
reverse_path_map => 1,
supported_archs => join(', ', qw(
@@ -116,6 +116,8 @@
zesty
artful
bionic
+ cosmic
+ disco
)),
user => $>,
diff -Nru apt-cacher-1.7.16/test/benchmark.pl apt-cacher-1.7.20.1~18.04.sav0/test/benchmark.pl
--- apt-cacher-1.7.16/test/benchmark.pl 2017-05-09 08:38:13.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/test/benchmark.pl 1970-01-01 00:00:00.000000000 +0000
@@ -1,191 +0,0 @@
-#!/usr/bin/perl
-
-use strict;
-use warnings;
-
-use Benchmark qw(:all);
-
-use FindBin qw($Bin);
-use lib "$Bin/../lib";
-use File::Spec;
-use IO::Uncompress::AnyUncompress qw($AnyUncompressError);
-
-require('apt-cacher.pl');
-
-our $cfg = read_config('/etc/apt-cacher/apt-cacher.conf');
-
-private_config();
-
- my %hash_length = map { $_->[1] => $_->[0] } @{$cfg->{_algorithms}};
- my $algorithms_regexp = join ('|', map { "[0-9a-f]{$_->[1]}" } @{$cfg->{_algorithms}});
- my $packages_regexp = join('|', map { "(?'alg'\U$_->[0])" . ($_->[0] eq 'md5' ? 'sum': '') . ":\\s+(?'hash'[a-z0-9]{$_->[1]})" } @{$cfg->{_algorithms}});
- $_ = qr/$_/ foreach $algorithms_regexp, $packages_regexp;
-
-
-my ($name) = @ARGV;
-open(my $fh, '<', $name) || die "Open $name failed: $!";
-
-cmpthese(-10, {
- 'v1.7-devel' =>
- sub {
-
- seek($fh,0,0) || die "Seek failed: $!";
-
- my $raw = IO::Uncompress::AnyUncompress->new($fh)
- or die "Decompression failed: $AnyUncompressError\n";
-
- # Name is just the cached filename without path
- $name = (File::Spec->splitpath($name))[2];
-
- # Determine namespace
- my $namespace;
- if ($namespace = get_namespace(get_original_url($name)) || '') { # Default empty, not undef
- $namespace .= '/';
- }
-
- my ($indexbase) = ($name =~ /([^\/]+_)(?:Index|(?:In)?Release)$/);
- $indexbase = '' unless $indexbase; # Empty by default (for Sources)
-
- my %hash_length = (32 => 'md5', 40 => 'sha1', 64 => 'sha256');
- my ($skip,%data);
- while (<$raw>) {
- last if $AnyUncompressError;
- chomp;
- if (/^SHA\d+-Patches:/) {
- $skip = 0;
- } elsif (/^SHA\d+-[a-zA-Z]+:/) {
- # This flag prevents us bothering with unnecessary sections
- # (History|Current|Download) of diff_Index files
- $skip = 1;
- } elsif (/^\s+([a-z0-9]{32,64})\s+(\d+)\s(\S+)$/) { # diff_Index/Release/Sources
- next if $skip;
- my $hexdigest=$1;
- my $size=$2;
- my $file=$indexbase.$3;
-
- $file=~s!/!_!g; # substitute any separators in indexed filename
-
- if ($name =~ /Index$/) {
- $file.=".gz";
- } elsif ($name =~ /_Sources(?:\.(?:x|g)z|\.bz2)?$/) {
- # Prepend namespace, if set
- $file = $namespace . $file;
- }
- $data{$file}{size} = $size;
- { # Select algorithm based on hex length
- my $len = length($hexdigest);
- if (exists $hash_length{$len}) {
- $data{$file}{$hash_length{$len}}=$hexdigest;
- } else {
- warn "Unrecognised algorithm length: $len. Ignoring.";
- }
- }
- } elsif (/^MD5sum:\s+([a-z0-9]{32})$/) { # Packages
- $data{md5}=$1;
- } elsif (/^SHA1:\s+([a-z0-9]{40})$/) {
- $data{sha1}=$1;
- } elsif (/^SHA256:\s+([a-z0-9]{64})$/) {
- $data{sha256}=$1;
- } elsif (/^Size:\s+([0-9]+)$/) {
- $data{size}=$1;
- } elsif (/^Filename:\s+.*?([^\/]+)$/) { # Non-greedy quantifier essential
- # Prepend namespace, if set
- $data{file} = $namespace . $1;
- }
- } continue {
- # diff_Index and Release files have no empty line at the end, so also
- # test eof() for them
- if (!length || $raw->eof()) { # End of record/file
- if (exists $data{file}) {
- # From Packages. Convert to hash of hashes with filename as key
- foreach (qw(size md5 sha1 sha256)) {
- $data{$data{file}}{$_} = $data{$_};
- delete $data{$_};
- }
- delete $data{file};
- }
-
- undef %data; # Reset
- }
- };
- },
- 'hash_algorithms' =>
- sub {
-
- seek($fh,0,0) || die "Seek failed: $!";
-
- my $raw = IO::Uncompress::AnyUncompress->new($fh)
- or die "Decompression failed: $AnyUncompressError\n";
-
- # Name is just the cached filename without path
- $name = (File::Spec->splitpath($name))[2];
-
- # Determine namespace
- my $namespace;
- if ($namespace = get_namespace(get_original_url($name)) || '') { # Default empty, not undef
- $namespace .= '/';
- }
-
- my ($indexbase) = ($name =~ /([^\/]+_)(?:Index|(?:In)?Release)$/);
- $indexbase = '' unless $indexbase; # Empty by default (for Sources)
-
- my ($skip,%data);
- while (<$raw>) {
- last if $AnyUncompressError;
- chomp;
- if (/^SHA\d+-Patches:/) {
- $skip = 0;
- } elsif (/^SHA\d+-[a-zA-Z]+:/) {
- # This flag prevents us bothering with unnecessary sections
- # (History|Current|Download) of diff_Index files
- $skip = 1;
- } elsif (/^\s+($algorithms_regexp)\s+(\d+)\s(\S+)$/o) { # diff_Index/Release/Sources
- next if $skip;
- my $hexdigest=$1;
- my $size=$2;
- my $file=$indexbase.$3;
-
- $file=~s!/!_!g; # substitute any separators in indexed filename
-
- if ($name =~ /Index$/) {
- $file.=".gz";
- } elsif ($name =~ /_Sources(?:\.(?:x|g)z|\.bz2)?$/) {
- # Prepend namespace, if set
- $file = $namespace . $file;
- }
- $data{$file}{size} = $size;
- { # Select algorithm based on hex length
- my $len = length($hexdigest);
- if (exists $hash_length{$len}) {
- $data{$file}{$hash_length{$len}}=$hexdigest;
- } else {
- warn "Unrecognised algorithm length: $len. Ignoring.";
- }
- }
- } elsif (/^$packages_regexp$/o) { # Packages
- $data{lc $+{alg}}=$+{hash};
- } elsif (/^Size:\s+([0-9]+)$/) {
- $data{size}=$1;
- } elsif (/^Filename:\s+.*?([^\/]+)$/) { # Non-greedy quantifier essential
- # Prepend namespace, if set
- $data{file} = $namespace . $1;
- }
- } continue {
- # diff_Index and Release files have no empty line at the end, so also
- # test eof() for them
- if (!length || $raw->eof()) { # End of record/file
- if (exists $data{file}) {
- # From Packages. Convert to hash of hashes with filename as key
- foreach (qw(size), map {$_->[0]} @{$cfg->{_algorithms}}) {
- next unless exists $data{$_};
- $data{$data{file}}{$_} = $data{$_};
- delete $data{$_};
- }
- delete $data{file};
- }
-
- undef %data; # Reset
- }
- } ;
- }
- });
diff -Nru apt-cacher-1.7.16/test/bug.pl apt-cacher-1.7.20.1~18.04.sav0/test/bug.pl
--- apt-cacher-1.7.16/test/bug.pl 2011-11-07 10:49:11.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/test/bug.pl 1970-01-01 00:00:00.000000000 +0000
@@ -1,42 +0,0 @@
-#!/usr/bin/perl
-use strict;
-use warnings;
-
-use WWW::Curl::Easy;
-use WWW::Curl::Share;
-
-my $count = 5;
-while ($count--) {
- if (my $pid = fork) {
- &doloop;
- }
- else {
- &doloop;
- }
-}
-exit;
-
-INIT {
- my $curlsh = new WWW::Curl::Share;
- $curlsh->setopt(CURLSHOPT_SHARE, CURL_LOCK_DATA_DNS);
- print "Share is $curlsh\n";
-
- sub doloop
- {
- my $curl;
-
- unless ($curl) {
- $curl = new WWW::Curl::Easy;
- $curl->setopt(CURLOPT_SHARE, $curlsh);
-
- $curl->setopt(CURLOPT_VERBOSE, 1);
- $curl->setopt(CURLOPT_DNS_CACHE_TIMEOUT, -1);
- $curl->setopt(CURLOPT_CONNECT_ONLY, 1);
- $curl->setopt(CURLOPT_FORBID_REUSE, 1);
- $curl->setopt(CURLOPT_URL, 'http://ftp.us.debian.org');
- }
- $curl->perform;
- }
-}
-
-
diff -Nru apt-cacher-1.7.16/test/curlftp.pl apt-cacher-1.7.20.1~18.04.sav0/test/curlftp.pl
--- apt-cacher-1.7.16/test/curlftp.pl 2011-11-07 10:49:11.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/test/curlftp.pl 1970-01-01 00:00:00.000000000 +0000
@@ -1,17 +0,0 @@
-#!/usr/bin/perl
-
-use strict;
-use warnings;
-
-use WWW::Curl::Easy;
-
-my $curl = WWW::Curl::Easy->new;
-$curl->setopt(CURLOPT_VERBOSE, 1);
-$curl->setopt(CURLOPT_FILETIME, 1);
-$curl->setopt(CURLOPT_FTP_FILEMETHOD, 2); # CURLFTPMETHOD_NOCWD
-$curl->setopt(CURLOPT_FTPPORT, '-');
-$curl->setopt(CURLOPT_FTP_USE_EPSV, 1);
-$curl->setopt(CURLOPT_URL, 'ftp://ftp.uk.debian.org/debian/dists/stable/Release.gpg');
-
-$curl->perform;
-
diff -Nru apt-cacher-1.7.16/test/curlmulti.pl apt-cacher-1.7.20.1~18.04.sav0/test/curlmulti.pl
--- apt-cacher-1.7.16/test/curlmulti.pl 2011-11-07 10:49:11.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/test/curlmulti.pl 1970-01-01 00:00:00.000000000 +0000
@@ -1,73 +0,0 @@
-#!/usr/bin/perl
-use strict;
-use warnings;
-
-use WWW::Curl::Easy;
-use WWW::Curl::Multi;
-
-
-pipe(my $libcurl, my $daemon)|| die $!;
-
-unless (my $libcurlpid = fork) {
- # Child -- libcurl thread
- close $daemon;
-
- my %easy;
- my $curlm = WWW::Curl::Multi->new;
- my $curl_id = $$; # This should be a handle unique id.
- my $active_handles = 0;
-
- # Loop requests
- while (<$libcurl>) {
- print "Got request $_\n";
- my $curl = new WWW::Curl::Easy;
- $easy{$curl_id} = $curl; # Register handle
- $curl->setopt(CURLOPT_PRIVATE,$curl_id); # Assign Multi ID
- # do the usual configuration on the handle
-
- $curl->setopt(CURLOPT_VERBOSE, 1);
- $curl->setopt(CURLOPT_DNS_CACHE_TIMEOUT, -1);
- $curl->setopt(CURLOPT_NOBODY, 1);
-
- $curl->setopt(CURLOPT_URL, $_);
-
- # Add easy handles to multi
- $curlm->add_handle($curl);
- $active_handles++;
-
- while (my $active_transfers = $curlm->perform) {
- if ($active_transfers != $active_handles) {
- while (my ($id,$return_value) = $curlm->info_read) {
- if ($id) {
- $active_handles--;
- my $actual_easy_handle = $easy{$id};
- # do the usual result/error checking routine here
- # ...
- # letting the curl handle get garbage collected, or we leak memory.
- delete $easy{$id};
- }
- }
- }
- }
- }
- exit;
-}
-
-close $libcurl;
-if (my $pid = fork) {
- &doloop;
-}
-else {
- &doloop;
-}
-
-sub doloop
- {
- my $n=4;
- while ($n--) {
- print "$$: Sending request\n";
- print $daemon "http://ftp.us.debian.org\n";
- }
- }
-
-
diff -Nru apt-cacher-1.7.16/test/curlshare.pl apt-cacher-1.7.20.1~18.04.sav0/test/curlshare.pl
--- apt-cacher-1.7.16/test/curlshare.pl 2011-11-07 10:49:11.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/test/curlshare.pl 1970-01-01 00:00:00.000000000 +0000
@@ -1,73 +0,0 @@
-#!/usr/bin/perl
-use strict;
-use warnings;
-
-use WWW::Curl::Easy;
-use WWW::Curl::Multi;
-
-
-pipe(my $libcurl, my $daemon)|| die $!;
-
-unless (my $libcurlpid = fork) {
- # Child -- libcurl thread
- close $daemon;
-
- my %easy;
- my $curlm = WWW::Curl::Multi->new;
- my $curl_id = $$; # This should be a handle unique id.
- my $active_handles = 0;
-
- # Loop requests
- while (<$libcurl>) {
- print "Got request $_\n";
- my $curl = new WWW::Curl::Easy;
- $easy{$curl_id} = $curl; # Register handle
- $curl->setopt(CURLOPT_PRIVATE,$curl_id); # Assign Multi ID
- # do the usual configuration on the handle
-
- $curl->setopt(CURLOPT_VERBOSE, 1);
- $curl->setopt(CURLOPT_DNS_CACHE_TIMEOUT, -1);
- $curl->setopt(CURLOPT_NOBODY, 1);
-
- $curl->setopt(CURLOPT_URL, $_);
-
- # Add easy handles to multi
- $curlm->add_handle($curl);
- $active_handles++;
-
- while (my $active_transfers = $curlm->perform) {
- if ($active_transfers != $active_handles) {
- while (my ($id,$return_value) = $curlm->info_read) {
- if ($id) {
- $active_handles--;
- my $actual_easy_handle = $easy{$id};
- # do the usual result/error checking routine here
- # ...
- # letting the curl handle get garbage collected, or we leak memory.
- delete $easy{$id};
- }
- }
- }
- }
- }
- exit;
-}
-
-close $libcurl;
-if (my $pid = fork) {
- &doloop;
-}
-else {
- &doloop;
-}
-
-sub doloop
- {
- my $n=4;
- while ($n--) {
- print "$$: Sending request\n";
- print $daemon "http://ftp.us.debian.org\n";
- }
- }
-
-
diff -Nru apt-cacher-1.7.16/test/db-cache.pl apt-cacher-1.7.20.1~18.04.sav0/test/db-cache.pl
--- apt-cacher-1.7.16/test/db-cache.pl 2012-05-03 10:47:36.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/test/db-cache.pl 1970-01-01 00:00:00.000000000 +0000
@@ -1,78 +0,0 @@
-#!/usr/bin/perl
-
-use strict;
-use warnings;
-
-
-use BerkeleyDB;
-
-our $cfg;
-my $count=2;
-
-sub sig_handler {
- warn "Got SIG@_. Exiting gracefully!\n" if $cfg->{debug};
- exit 1;
-}
-
-sub db {
- for ('INT', 'TERM', 'PIPE', 'QUIT', 'HUP', 'SEGV') {
- $SIG{$_} = \&sig_handler unless $SIG{$_};
- }
-
- my $env = new BerkeleyDB::Env
- -Home => '/tmp',
- -Flags => DB_CREATE | DB_INIT_MPOOL | DB_INIT_CDB,
- -ErrFile => *STDERR,
- -ThreadCount => 64,
- -ErrPrefix => "[$$]:"
- or die $BerkeleyDB::Error;
-
- $env->set_isalive();
- if ($env->failchk == DB_RUNRECOVERY) {
- warn "Failed thread detected.\n";
- }
-
- my $dbh = new BerkeleyDB::Btree
- -Filename => '/tmp/test.db',
- -Flags => DB_CREATE,
- -Env => $env
- or die $BerkeleyDB::Error;
-
- return $dbh;
-}
-
-sub fetch_store {
- my $cpid;
- my $dbh=db();
- warn "[$$]: Init DB in fetch_store\n";
-
-
-
- return 1;
-}
-
-
-sub return_file {
- my $dbh=$_[0];
- warn "[$$]: Init DB in return_file\n";
- $dbh->db_put('child', $$) && die $!;
- return 1;
-}
-
-warn "[$$]: Parent\n";
-
-while ($count--) {
- my $pid = fork;
- die $! unless defined $pid;
- if ($pid == 0) {
- my $write = "$$: $count";
- warn "[$$]: Write $write\n";
- db()->db_put('child', $write) && die $!;
- exit;
- }
- else {
- waitpid $pid, 0;
- db()->db_get('child', my $read) && die $!;
- warn "[$$]: Read $read\n";
- }
-}
diff -Nru apt-cacher-1.7.16/test/db_compact.pl apt-cacher-1.7.20.1~18.04.sav0/test/db_compact.pl
--- apt-cacher-1.7.16/test/db_compact.pl 2016-06-15 06:34:11.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/test/db_compact.pl 1970-01-01 00:00:00.000000000 +0000
@@ -1,89 +0,0 @@
-#! perl
-
-use strict;
-use warnings;
-
-use FindBin ();
-use lib "$FindBin::Bin/../lib";
-
-my $configfile = '/etc/apt-cacher/apt-cacher.conf';
-my $verbose=1;
-
-# Include the library for the config file parser
-require('apt-cacher.pl');
-# Read in the config file and set the necessary variables
-
-# $cfg needs to be global for setup_ownership
-our $cfg = eval{ read_config($configfile) };
-
-# not sure what to do if we can't read the config file...
-die "Could not read configuration file '$configfile': $@" if $@;
-
-private_config();
-
-# check whether we're actually meant to clean the cache
-if ( $cfg->{clean_cache} ne 1 ) {
- printmsg("Maintenance disallowed by configuration item clean_cache\n");
- exit 0;
-}
-
-check_install(); # Before we give up rights
-
-# change uid and gid if root and another user/group configured
-if (($cfg->{user} && $cfg->{user} !~ 'root' && !$> )
- || ($cfg->{group} && $cfg->{group} !~ 'root' && !$) =~ /^0/)){
- printmsg("Invoked as root, changing to $cfg->{user}:$cfg->{group} and re-execing.\n");
- setup_ownership($cfg);
- # Rexec to ensure /proc/self/fd ownerships correct which are needed for red
- # patching with pdiffs
- exec($0, @ARGV) or die "Unable to rexec $0: $!\n";
-}
-# Output data as soon as we print it
-local $| = 1;
-
-load_checksum(); # Will disable checksum if BerkeleyDB not available
-
-sub printmsg {
- my @args = @_;
- my $ret;
- $ret = print @args if $verbose;
- return $ret;
-}
-
-sub db_compact {
- my ($dbh) = @_; #db();
- printmsg "Waiting for CDS lock...\n";
- my $db_lock = $dbh->cds_lock();
- printmsg "Compacting checksum database....\n";
- my ($status, %results) = @{_db_compact($dbh)};
- if ($status) {
- printmsg "db_compact failed: $status\n";
- }
- else {
- printmsg " Compacted ". $results{compact_pages_free} ." pages\n Freed ". $results{compact_pages_truncated} ." pages\n";
- }
- return;
-}
-
-sub get_cursor {
- my ($dbh,$write)=@_;
- my $cursor = $dbh->db_cursor($write?BerkeleyDB->DB_WRITECURSOR:undef) or die $BerkeleyDB::Error;
- return $cursor;
-}
-
-if ($cfg->{checksum} and my $db=db()) {
-
- printmsg "Removing expired entries from checksum database\n";
-
- my $do_compact;
- {
- my $cursor = get_cursor($db,1);
- my ($filename,$data)=('','');
- while (cursor_next($cursor, \$filename, \$data) == 0)
- {
- $do_compact++;
- }
- }
- printmsg "$do_compact\n";
- db_compact($db) if $do_compact;
-}
diff -Nru apt-cacher-1.7.16/test/db_obj.pl apt-cacher-1.7.20.1~18.04.sav0/test/db_obj.pl
--- apt-cacher-1.7.16/test/db_obj.pl 2016-05-24 06:04:03.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/test/db_obj.pl 1970-01-01 00:00:00.000000000 +0000
@@ -1,17 +0,0 @@
-#! perl
-
-use strict;
-use warnings;
-
-use FindBin qw($Bin);
-use lib "$Bin/../lib";
-
-use AptCacherDB;
-
-require('apt-cacher.pl');
-
-our $cfg = read_config('/etc/apt-cacher/apt-cacher.conf');
-
-my $db=AptCacherDB->new();
-
-sleep 1;
diff -Nru apt-cacher-1.7.16/test/db.pl apt-cacher-1.7.20.1~18.04.sav0/test/db.pl
--- apt-cacher-1.7.16/test/db.pl 2011-11-07 10:49:11.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/test/db.pl 1970-01-01 00:00:00.000000000 +0000
@@ -1,80 +0,0 @@
-#!/usr/bin/perl
-
-use strict;
-use warnings;
-
-
-use BerkeleyDB;
-
-our $cfg;
-my $count=2;
-
-sub sig_handler {
- warn "Got SIG@_. Exiting gracefully!\n" if $cfg->{debug};
- exit 1;
-}
-
-sub db {
- for ('INT', 'TERM', 'PIPE', 'QUIT', 'HUP', 'SEGV') {
- $SIG{$_} = \&sig_handler unless $SIG{$_};
- }
-
- my $env = new BerkeleyDB::Env
- -Home => '/tmp',
- -Flags => DB_CREATE | DB_INIT_MPOOL | DB_INIT_CDB,
- -ErrFile => *STDERR,
- -ThreadCount => 64,
- -ErrPrefix => "[$$]:"
- or die $BerkeleyDB::Error;
-
- $env->set_isalive();
- if ($env->failchk == DB_RUNRECOVERY) {
- warn "Failed thread detected.\n";
- }
-
- $SIG{ALRM} = sub {
- $env->failchk;
- alarm 1;
- };
- alarm 1;
-
- my $dbh = new BerkeleyDB::Btree
- -Filename => '/tmp/test.db',
- -Flags => DB_CREATE,
- -Env => $env
- or die $BerkeleyDB::Error;
- undef $env;
- return $dbh;
-}
-
-sub fetch_store {
- my $cpid;
- my $dbh=db();
- warn "[$$]: Init DB in fetch_store\n";
-
- $dbh->db_put('test', 0) && die $!;
- $dbh->db_get('child', $cpid) && die $!;
- return 1;
-}
-
-
-sub return_file {
- my $dbh=$_[0];
- warn "[$$]: Init DB in return_file\n";
- $dbh->db_put('child', $$) && die $!;
- return 1;
-}
-
-while ($count--) {
- my $pid = fork;
- die $! unless defined $pid;
- if ($pid == 0) {
- fetch_store();
- exit(0);
- }
- # use %db here
- warn "[$$]: Child fetcher process $pid\n";
- if (return_file(db())) {
- sleep 1;
- }
-}
diff -Nru apt-cacher-1.7.16/test/db_tie.pl apt-cacher-1.7.20.1~18.04.sav0/test/db_tie.pl
--- apt-cacher-1.7.16/test/db_tie.pl 2016-06-17 08:59:25.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/test/db_tie.pl 1970-01-01 00:00:00.000000000 +0000
@@ -1,45 +0,0 @@
-#! perl
-
-use strict;
-use warnings;
-
-use FindBin ();
-use lib "$FindBin::Bin/../lib";
-
-my $configfile = '/etc/apt-cacher/apt-cacher.conf';
-my $verbose=1;
-
-# Include the library for the config file parser
-require('apt-cacher.pl');
-# Read in the config file and set the necessary variables
-
-# $cfg needs to be global for setup_ownership
-our $cfg = eval{ read_config($configfile) };
-
-# not sure what to do if we can't read the config file...
-die "Could not read configuration file '$configfile': $@" if $@;
-
-private_config();
-
-check_install(); # Before we give up rights
-
-# change uid and gid if root and another user/group configured
-if (($cfg->{user} && $cfg->{user} !~ 'root' && !$> )
- || ($cfg->{group} && $cfg->{group} !~ 'root' && !$) =~ /^0/)){
- printmsg("Invoked as root, changing to $cfg->{user}:$cfg->{group} and re-execing.\n");
- setup_ownership($cfg);
- # Rexec to ensure /proc/self/fd ownerships correct which are needed for red
- # patching with pdiffs
- exec($0, @ARGV) or die "Unable to rexec $0: $!\n";
-}
-# Output data as soon as we print it
-local $| = 1;
-
-load_checksum(); # Will disable checksum if BerkeleyDB not available
-
-tie my %db, "BerkeleyDB::Btree",
- -Filename => '/var/cache/apt-cacher/sums.db',
- -Env => db()->Env
- or die "cannot open database: $BerkeleyDB::Error\n";
-
-sleep 1;
diff -Nru apt-cacher-1.7.16/test/dns.pl apt-cacher-1.7.20.1~18.04.sav0/test/dns.pl
--- apt-cacher-1.7.16/test/dns.pl 2011-11-07 10:49:11.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/test/dns.pl 1970-01-01 00:00:00.000000000 +0000
@@ -1,18 +0,0 @@
-#!/usr/local/bin/perl -- # -*-Perl-*-
-
-use Net::DNS;
-
-while (1) {
- my $res = Net::DNS::Resolver->new;
- my $query = $res->search("ftp.us.debian.org");
-
- if ($query) {
- foreach my $rr ($query->answer) {
- next unless $rr->type eq "A";
- print $rr->address, "\n";
- last;
- }
- } else {
- warn "query failed: ", $res->errorstring, "\n";
- }
-}
diff -Nru apt-cacher-1.7.16/test/extract_optimise.pl apt-cacher-1.7.20.1~18.04.sav0/test/extract_optimise.pl
--- apt-cacher-1.7.16/test/extract_optimise.pl 2017-05-09 08:38:13.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/test/extract_optimise.pl 1970-01-01 00:00:00.000000000 +0000
@@ -1,30 +0,0 @@
-#!/usr/bin/perl
-
-use strict;
-use warnings;
-
-use FindBin qw($Bin);
-use lib "$Bin/../lib";
-
-require('apt-cacher.pl');
-
-our $cfg = read_config('/etc/apt-cacher/apt-cacher.conf');
-
-private_config();
-
-my %h;
-
-extract_sums($_, undef, \%h) foreach @ARGV;
-
-exit;
-print "$_\n" foreach keys %h;
-exit;
-
-
-foreach my $file (keys %h) {
- print "$file:\n";
- my $href = hashify(\$h{$file});
- foreach (keys %$href) {
- print " $_: $href->{$_}\n" if $href->{$_};
- }
-}
diff -Nru apt-cacher-1.7.16/test/extract_sums.pl apt-cacher-1.7.20.1~18.04.sav0/test/extract_sums.pl
--- apt-cacher-1.7.16/test/extract_sums.pl 2011-11-07 10:49:11.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/test/extract_sums.pl 1970-01-01 00:00:00.000000000 +0000
@@ -1,26 +0,0 @@
-#!/usr/bin/perl
-
-use strict;
-use warnings;
-
-use lib '/usr/src/apt-cacher/src';
-
-require 'lib/apt-cacher.pl';
-
-our $cfg = read_config('/etc/apt-cacher/apt-cacher.conf');
-
-my %h;
-
-extract_sums('/var/cache/apt-cacher/packages/debian_dists_experimental_main_binary-i386_Packages.diff_Index', \%h);
-extract_sums('/var/cache/apt-cacher/packages/titan:9999_debian_dists_unstable_contrib_binary-i386_Packages.bz2', \%h);
-extract_sums('/var/cache/apt-cacher/packages/titan:9999_debian_dists_stable_contrib_source_Sources.bz2', \%h);
-extract_sums('/var/cache/apt-cacher/packages/titan:9999_debian_dists_stable_Release', \%h);
-extract_sums('/var/cache/apt-cacher/packages/titan_~mark_debian_Packages.gz', \%h);
-
-foreach my $file (keys %h) {
- print "$file:\n";
- my $href = hashify(\$h{$file});
- foreach (keys %$href) {
- print " $_: $href->{$_}\n" if $href->{$_};
- }
-}
diff -Nru apt-cacher-1.7.16/test/fork-flock.pl apt-cacher-1.7.20.1~18.04.sav0/test/fork-flock.pl
--- apt-cacher-1.7.16/test/fork-flock.pl 2012-01-20 13:18:43.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/test/fork-flock.pl 1970-01-01 00:00:00.000000000 +0000
@@ -1,30 +0,0 @@
-#!/usr/bin/perl
-
-use strict;
-use warnings;
-
-use Fcntl qw':flock';
-use IO::Handle;
-
-open(my $tmpfile, "+>", undef) or die $!;
-
-flock $tmpfile, LOCK_EX or die $!;
-
-defined (my $cpid = fork) or die "Fork failed: $!";
-
-if ($cpid){
- #open my $newfile, '<', '/dev/fd/' . $tmpfile->fileno or die $!; # Works, but not completely portable
- open my $newfile, '<', '/dev/fd/' . $tmpfile->fileno or die $!;
- # open my $newfile, '+>&' . $tmpfile->fileno or die $!;
- undef $tmpfile;
- print "$$: parent reopen\n";
-
- flock $newfile, LOCK_SH or die $!;
- print "$$: parent lock\n";
-}
-else {
- sleep 2;
- print "$$: child unlock\n";
- flock $tmpfile, LOCK_UN or die $!;
- print "$$: child exiting\n";
-}
diff -Nru apt-cacher-1.7.16/test/ftp.pl apt-cacher-1.7.20.1~18.04.sav0/test/ftp.pl
--- apt-cacher-1.7.16/test/ftp.pl 2011-11-07 10:49:11.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/test/ftp.pl 1970-01-01 00:00:00.000000000 +0000
@@ -1,31 +0,0 @@
-#! /usr/bin/perl
-
-use strict;
-use warnings;
-use WWW::Curl::Easy;
-use WWW::Curl::Multi;
-
-
- my $curl=WWW::Curl::Easy->new;
- my $multi=WWW::Curl::Multi->new;
- $curl->setopt(CURLOPT_VERBOSE, 1);
- $curl->setopt(CURLOPT_URL, 'ftp://ftp.uk.debian.org/debian/dists/stable/Release.gpg');
- $curl->setopt(CURLOPT_FTPPORT, '-');
- open my $fh, ">/dev/tty"||die $!;
- $curl->setopt(CURLOPT_WRITEHEADER, $fh);
- $curl->setopt(CURLOPT_HEADERFUNCTION, \&callback);
- sub callback {
- my ($chunk,$fh)=@_;
- print $fh $chunk;
- return length($chunk);
- }
-
- $multi->add_handle($curl);
-
- while ($multi->perform){};
-
- close $fh;
-
-while (1) {
-}
-
diff -Nru apt-cacher-1.7.16/test/load.pl apt-cacher-1.7.20.1~18.04.sav0/test/load.pl
--- apt-cacher-1.7.16/test/load.pl 2011-11-07 10:49:11.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/test/load.pl 1970-01-01 00:00:00.000000000 +0000
@@ -1,26 +0,0 @@
-#!/usr/bin/perl
-
-use strict;
-use warnings;
-
-use POSIX ":sys_wait_h";
-use File::Temp;
-
-for (my $count = 5; $count; $count--) {
- defined(my $pid = fork) || die "Fork failed: $!";
- if ($pid) {
- sleep(rand 60);
- next;
- }
- else {
- my $dir = File::Temp::tempdir(CLEANUP => 1);
- print "$$: Running new debootstrap\n";
- exec("fakechroot /usr/sbin/debootstrap --download-only --variant=fakechroot stable $dir http://localhost:3142/titan:9999/debian > /dev/null");
- die "exec() failed: $!";
- }
-}
-
-
-sleep(10) while waitpid(-1, WNOHANG) > 0;
-
-print "Done!\n"
diff -Nru apt-cacher-1.7.16/test/mmap.pl apt-cacher-1.7.20.1~18.04.sav0/test/mmap.pl
--- apt-cacher-1.7.16/test/mmap.pl 2011-12-28 15:53:34.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/test/mmap.pl 1970-01-01 00:00:00.000000000 +0000
@@ -1,57 +0,0 @@
-#!/usr/bin/perl
-
-use strict;
-use warnings;
-
-use Benchmark qw(:all);
-
-
-my $file = '/var/log/syslog';
-
-cmpthese(-1, {
- 'PerlIO' =>
- sub {
- open(my $fh, '<', $file) || die $1;
- while (<$fh>) {
- /^t/;
- }
- },
-
- 'Pragma mmap' =>
- sub {
- use open IO => ':mmap';
- open(my $fh, '<', $file) || die $1;
- while (<$fh>) {
- /^t/;
- }
- },
-
- 'Pragma mmap repeat' =>
- sub {
- use open IO => 'mmap';
- open(my $fh, '<', $file) || die $1;
-# print "$_\n" foreach PerlIO::get_layers($fh);
- while (<$fh>) {
- /^t/;
- }
- },
-
- 'mmap' =>
- sub {
- open(my $fh, '<:mmap', $file) || die $1;
-# print "$_\n" foreach PerlIO::get_layers($fh);
- while (<$fh>) {
- /^t/;
- }
- },
-
- 'PerlIO repeat' =>
- sub {
- open(my $fh, '<', $file) || die $1;
-# print "$_\n" foreach PerlIO::get_layers($fh);
- while (<$fh>) {
- /^t/;
- }
- },
-
- });
diff -Nru apt-cacher-1.7.16/test/multi.pl apt-cacher-1.7.20.1~18.04.sav0/test/multi.pl
--- apt-cacher-1.7.16/test/multi.pl 2011-11-07 10:49:11.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/test/multi.pl 1970-01-01 00:00:00.000000000 +0000
@@ -1,41 +0,0 @@
-#!/usr/bin/perl
-use strict;
-use warnings;
-
-use WWW::Curl::Easy;
-use WWW::Curl::Multi;
-
-my $count = 5;
-while ($count--) {
- if (my $pid = fork) {
- &doloop;
- }
- else {
- &doloop;
- }
-}
-exit;
-
-INIT {
- my $curlm = new WWW::Curl::Multi;
-
-
- sub doloop
- {
- my $curl;
-
- unless ($curl) {
- $curl = new WWW::Curl::Easy;
- $curlm->add_handle($curl);
-
- $curl->setopt(CURLOPT_VERBOSE, 1);
- $curl->setopt(CURLOPT_DNS_CACHE_TIMEOUT, -1);
- $curl->setopt(CURLOPT_CONNECT_ONLY, 1);
- $curl->setopt(CURLOPT_FORBID_REUSE, 1);
- $curl->setopt(CURLOPT_URL, 'http://ftp.us.debian.org');
- }
- $curl->perform;
- }
-}
-
-
diff -Nru apt-cacher-1.7.16/test/namespace.pl apt-cacher-1.7.20.1~18.04.sav0/test/namespace.pl
--- apt-cacher-1.7.16/test/namespace.pl 2016-06-07 09:32:21.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/test/namespace.pl 1970-01-01 00:00:00.000000000 +0000
@@ -1,13 +0,0 @@
-#! perl
-
-use strict;
-use warnings;
-
-use FindBin qw($Bin);
-use lib "$Bin/../lib";
-
-require('apt-cacher.pl');
-
-our $cfg = read_config('/etc/apt-cacher/apt-cacher.conf');
-
-print get_namespace(get_original_url('debian_dists_stable-updates_main_source_Sources.gz')) . "\n";
diff -Nru apt-cacher-1.7.16/test/object.pl apt-cacher-1.7.20.1~18.04.sav0/test/object.pl
--- apt-cacher-1.7.16/test/object.pl 2012-02-28 15:01:27.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/test/object.pl 1970-01-01 00:00:00.000000000 +0000
@@ -1,17 +0,0 @@
-#!/usr/bin/perl
-
-use strict;
-use warnings;
-
-use File::Spec;
-use lib (File::Spec->splitpath($0))[1] .'../lib';
-use CachedRequest;
-use HTTP::Request;
-
-require('/usr/src/apt-cacher/src/lib/apt-cacher.pl');
-
-our $cfg = read_config('/etc/apt-cacher/apt-cacher.conf');
-
-my $r = CachedRequest->new(HTTP::Request->new('GET' => 'http://debian/dists/stable/Release'));
-
-sleep 1;
diff -Nru apt-cacher-1.7.16/test/regex.pl apt-cacher-1.7.20.1~18.04.sav0/test/regex.pl
--- apt-cacher-1.7.16/test/regex.pl 2017-05-09 08:38:13.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/test/regex.pl 1970-01-01 00:00:00.000000000 +0000
@@ -1,29 +0,0 @@
-#! perl
-
-use strict;
-use warnings;
-
-use Benchmark qw(:all);
-
-my $interp = join '|', qw(a..z);
-my $compiled = qr/$interp/;
-
-cmpthese(-1, {
- 'Plain' =>
- sub {
- 'test' =~ /$interp/;
- },
- 'OnDemand' =>
- sub {
- $interp = qr/$interp/ unless ref $interp;
- 'test' =~ /$interp/;
- },
- 'PreCompile' =>
- sub {
- 'test' =~ /$compiled/;
- },
- 'Once' =>
- sub {
- 'test' =~ /$interp/o;
- }
- });
diff -Nru apt-cacher-1.7.16/test/sendfile.pl apt-cacher-1.7.20.1~18.04.sav0/test/sendfile.pl
--- apt-cacher-1.7.16/test/sendfile.pl 2011-11-07 10:49:11.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/test/sendfile.pl 1970-01-01 00:00:00.000000000 +0000
@@ -1,10 +0,0 @@
-#!/usr/bin/perl
-
-use strict;
-use warnings;
-
-use Sys::Syscall ':sendfile';
-
-open(my $fh, '<', \"test\n") || die $!;
-my $sent = sendfile(*STDOUT, $fh, 100)|| die $!;
-print "sent $sent\n";
diff -Nru apt-cacher-1.7.16/test/shm.pl apt-cacher-1.7.20.1~18.04.sav0/test/shm.pl
--- apt-cacher-1.7.16/test/shm.pl 2011-11-07 10:49:11.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/test/shm.pl 1970-01-01 00:00:00.000000000 +0000
@@ -1,55 +0,0 @@
-#!/usr/bin/perl
-use strict;
-use warnings;
-
-use WWW::Curl::Easy;
-use WWW::Curl::Share;
-use Storable qw(freeze thaw);
-use Data::Dumper;
-use IPC::ShareLite;
-
-
-#my $count = 5;
-#while ($count--) {
-# if (my $pid = fork) {
-# &doloop;
-# }
-# else {
- &doloop;
-# }
-#}
-exit;
-
-INIT {
- my $curlsh = new WWW::Curl::Share;
- $curlsh->setopt(CURLSHOPT_SHARE, CURL_LOCK_DATA_DNS);
- print "Share is $curlsh\n";
-
- # put it in IPC
-
- my $shm = new IPC::ShareLite( -key => 1971,
- -create => 'yes',
- -destroy => 'yes' ) or die $!;
-
- $shm->store( Data::Dumper->Dump([$curlsh],['curlsh']));
-
- sub doloop
- {
- my $curl;
-# unless ($curl) {
- $curl = new WWW::Curl::Easy;
- print "Thawed share is ".($shm->fetch)."\n";
- eval($shm->fetch);
-# $curl->setopt(CURLOPT_SHARE, (thaw($shm->fetch))[0]);
-
- $curl->setopt(CURLOPT_VERBOSE, 1);
- $curl->setopt(CURLOPT_DNS_CACHE_TIMEOUT, -1);
- $curl->setopt(CURLOPT_CONNECT_ONLY, 1);
- $curl->setopt(CURLOPT_FORBID_REUSE, 1);
- $curl->setopt(CURLOPT_URL, 'http://ftp.us.debian.org');
-# }
- $curl->perform;
- }
-}
-
-
diff -Nru apt-cacher-1.7.16/test/signal.pl apt-cacher-1.7.20.1~18.04.sav0/test/signal.pl
--- apt-cacher-1.7.16/test/signal.pl 2011-11-07 10:49:11.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/test/signal.pl 1970-01-01 00:00:00.000000000 +0000
@@ -1,11 +0,0 @@
-#!/usr/bin/perl
-
-use warnings;
-use strict;
-
-
-$SIG{SEGV} = sub {print "Got signal\n";};
-
-while (1){
- sleep 100;
-}
diff -Nru apt-cacher-1.7.16/test/socket.pl apt-cacher-1.7.20.1~18.04.sav0/test/socket.pl
--- apt-cacher-1.7.16/test/socket.pl 2011-11-07 10:49:11.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/test/socket.pl 1970-01-01 00:00:00.000000000 +0000
@@ -1,33 +0,0 @@
-use strict;
-use warnings;
-use Socket;
-use IO::Socket;
-use FreezeThaw qw(freeze thaw);
-use HTTP::Response;
-
-socketpair(CHILD, PARENT, AF_UNIX, SOCK_STREAM, PF_UNSPEC) or die "socketpair: $!";
-
-
-$SIG{CHLD} = 'IGNORE';
-
-CHILD->autoflush(1);
-PARENT->autoflush(1);
-
-if (my $pid = fork) {
- close PARENT;
-# print CHILD "Parent Pid $$ is sending\n";
- print CHILD freeze(HTTP::Response->new(200, 'Message for code'));
- close CHILD;
- waitpid($pid,0);
-} else {
- die "cannot fork: $!" unless defined $pid;
- close CHILD;
- while (my $line = ) {
- chomp $line;
- print "Child Pid $$ just read this: `$line'\n";
- my $obj = (thaw($line))[0];
- print $obj->message."\n"
- }
- close PARENT;
- exit;
-}
diff -Nru apt-cacher-1.7.16/test/thread.pl apt-cacher-1.7.20.1~18.04.sav0/test/thread.pl
--- apt-cacher-1.7.16/test/thread.pl 2011-11-07 10:49:11.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/test/thread.pl 1970-01-01 00:00:00.000000000 +0000
@@ -1,47 +0,0 @@
-#!/usr/bin/perl
-use strict;
-use warnings;
-
-use WWW::Curl::Easy;
-use WWW::Curl::Share;
-use threads;
-use threads::shared;
-
-
-my $count = 5;
-while ($count--) {
-threads->new(\&doloop); next;
- if (my $pid = fork) {
- &doloop;
- }
- else {
- &doloop;
- }
-}
-
-
-exit;
-
-INIT {
- my $curlsh = &share(new WWW::Curl::Share);
- $curlsh->setopt(CURLSHOPT_SHARE, CURL_LOCK_DATA_DNS);
-
- sub doloop
- {
- my $curl;
- unless ($curl) {
- $curl = new WWW::Curl::Easy;
- $curl->setopt(CURLOPT_SHARE, $curlsh);
-
- $curl->setopt(CURLOPT_VERBOSE, 1);
- $curl->setopt(CURLOPT_DNS_CACHE_TIMEOUT, -1);
- $curl->setopt(CURLOPT_CONNECT_ONLY, 1);
- $curl->setopt(CURLOPT_FORBID_REUSE, 1);
- $curl->setopt(CURLOPT_URL, 'http://ftp.us.debian.org');
- }
- print "Share is $curlsh\n";
- $curl->perform;
- }
-}
-
-
diff -Nru apt-cacher-1.7.16/test/verify.pl apt-cacher-1.7.20.1~18.04.sav0/test/verify.pl
--- apt-cacher-1.7.16/test/verify.pl 2011-11-07 10:49:11.000000000 +0000
+++ apt-cacher-1.7.20.1~18.04.sav0/test/verify.pl 1970-01-01 00:00:00.000000000 +0000
@@ -1,15 +0,0 @@
-#! /usr/bin/perl
-
-use BerkeleyDB;
-
-while (1) {
-
- alarm(1);
- eval {
- local $SIG{ALRM} = sub { die "Timeout\n";};
- my $status = BerkeleyDB::db_verify
- -Filename => '/var/cache/apt-cacher/sums.db';
- print "Returned ".$status." \n";
- };
- print "$@";
-}
|