#!/usr/bin/perl
#
# Brad Fitzpatrick, bradfitz@bradfitz.com
#
# recursively suck down part (or all) of a FTP site served by Windows NT
#
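#
# usage: set the hostname below, cd into the local directory that should
# hold the mirror, and run it.  files already at their full size are
# skipped; partial downloads are resumed.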

use strict;
use warnings;
use Cwd;
use Net::FTP;

# anonymous login; the password is just the conventional contact string
my $ftp = Net::FTP->new("some.hostname.here")
    or die "Can't connect: $@";
$ftp->login("anonymous", "\@myperlscript")
    or die "Can't log in: " . $ftp->message;
$ftp->binary();

# local mirror root: the directory the script was started from, with any
# trailing slash stripped so "$SOURCE$dir" joins cleanly
my $SOURCE = cwd;
$SOURCE =~ s/\/$//;

# mirror everything below the FTP root into the current directory
recurse_dir("/");
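
$ftp->quit;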

sub recurse_dir
{
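    # Mirror one remote directory ($dir is absolute, with a trailing slash)
    # into the matching local directory, then recurse into its subdirectories.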
    my $dir = shift;
    my $localdir = $SOURCE . $dir;

    # create the matching local directory (it may already exist) and
    # chdir into it so downloads land in the right place
    mkdir $localdir, 0750 unless -d $localdir;
    chdir $localdir or die "Can't chdir to $localdir: $!";
    my $cwd = cwd;
    print "Dir is $cwd\n";

    ## get the local file sizes: one shelled-out "ls -l" is much faster
    ## than a stat() per file on big directories
    system("ls -l > .dirlist.txt");
    open(my $dirl, "<", ".dirlist.txt")
        or die "Can't read .dirlist.txt: $!";
    my %filesize;
    while (my $line = <$dirl>) {
        chomp $line;
        # ls -l fields: perms links owner group size month day time name
        my @fields = split(/\s+/, $line, 9);
        next unless defined $fields[8];   # skip the "total NN" header line
        $filesize{$fields[8]} = $fields[4];
    }
    close($dirl);
    unlink(".dirlist.txt");

    print "DIR: $dir\n";
    $ftp->cwd($dir);
    my @lines = $ftp->dir();
    my @dirs;
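    # walk the listing: queue subdirectories for later, fetch files now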
    foreach my $line (@lines)
    {
        # NT listing format: "MM-DD-YY  HH:MMAM/PM  <DIR>|size  name"
        # (names containing spaces are not handled by these patterns)
        next unless $line =~ /^.+\s+(\S+?)$/;
        my $file = $1;
        if ($line =~ /<DIR>/) {
            push @dirs, "$dir$file/";
            next;
        }
        next unless $line =~ /^.+?\s(\d+) \S+$/;
        my $size = $1;

        # a stat() per file was too slow; use the sizes from the ls -l pass
        my $localsize = $filesize{$file} || 0;

        print "     $file ($localsize/$size bytes)\n";
        next if ($localsize >= $size);    # already fully downloaded

        # the third argument to get() is a byte offset, so a partial
        # local copy is resumed rather than re-fetched from the start
        $ftp->get($file, $file, $localsize)
            or warn "Failed to get $file: " . $ftp->message;
    }
    # descend into subdirectories after this directory's files are done
    foreach (@dirs)
    {
        recurse_dir($_);
    }
}