# accelerate.pl

 
 1  #!/usr/bin/perl -w
 2
 3  $url = $ARGV[0] || die "Specify the URL";
 4
 5  $header = `HEAD $url`;
 6
 7  # Now that we have the HTTP response header, extract the Content-Length field -
 8  # I've noticed that some servers return a dummy length, so there is a provision
 9  # for the length to be specified as a command line argument
10  $header=~m/content-length\s*:\s*(\d+)/i;
11  $total_length=$ARGV[1] || $1;
12
# This is the core function - it downloads one slice of the file.
# $url          - The location of the file
# $offset       - The offset within the file from where it must be downloaded
# $length       - The number of bytes to be downloaded
# $file         - The name of the file into which the contents must be dumped
# Runs in a forked child and exits once the transfer completes.
sub download {
    my ($url, $offset, $length, $file) = @_;

    print "$offset $length $file\n";
    my $end_offset = $offset + $length - 1;

    # List-form open runs lwp-request's GET without a shell, so metacharacters
    # in $url (or $file) cannot be used for command injection - the original
    # interpolated both straight into backticks with a shell redirect.
    open my $get_fh, '-|', 'GET', '-H', "Range: bytes=$offset-$end_offset", $url
        or die "cannot run GET: $!";
    open my $out_fh, '>', $file
        or die "cannot open $file: $!";
    binmode $get_fh;
    binmode $out_fh;

    # Copy in 64 KiB chunks rather than slurping the whole part into memory.
    local $/ = \65536;
    while (my $chunk = <$get_fh>) {
        print {$out_fh} $chunk;
    }

    close $get_fh;
    close $out_fh or die "cannot close $file: $!";
    exit;
}
27
28  # The idea is to spawn 10 concurrent downloads
29
30  $NUMBER_OF_PARTS=10;
31  $part_size=int $total_length/$NUMBER_OF_PARTS;
32  $offset=0;
33
34  print "$part_size\n";
35
36  for $i(1..$NUMBER_OF_PARTS){
37          if($i == $NUMBER_OF_PARTS){
38                  $part_size+=($total_length%10);
39          }
40          &download($url,$offset,$part_size,"part$i") if (fork==0);
41          $offset+=$part_size;
42  }
43
44  1 until (wait==-1);