#2 Implement -g
Merged 8 years ago by tibbs. Opened 8 years ago by zbyszek.
zbyszek/spectool master into master

file modified
+54 -62
@@ -4,7 +4,10 @@
  import glob
  import re
  import sys
+ import shutil
+ import os.path
  from subprocess import CalledProcessError, PIPE, Popen, TimeoutExpired
+ from urllib import request

  # Python conversion of spectool.
  # Spectool has two functions:
@@ -22,6 +25,8 @@
  PROTOCOLS = ['ftp', 'http', 'https']
  dbprint = None

+ USER_AGENT = 'spectool/' + VERSION
+

  # Sure wish I had Python 3.5's subprocess.run(), so here's a hacked one.
  class CompletedProcess(object):
@@ -81,7 +86,7 @@
  def error(message, exception=None):
      print(message, file=sys.stderr)
      if exception is not None:
-         print(e, file=sys.stderr)
+         print(exception, file=sys.stderr)
      sys.exit(1)

  class Spec(object):
@@ -134,7 +139,6 @@
              self.patches.append(val)
              self.patchnums.append(int(m.group('pnum') or 0))

-
  class SelectionError(Exception):
      pass

@@ -308,78 +312,66 @@
      return expand_sourcedir_macro(spec)


- def is_downloadable(url):
-     """Check that string is a valid URL of a protocol which CURL can handle."""
-     return False
-
-
- def download_files(spec, opts, selected):
-     """
-     Fetch the sources.
-
-     Here's the relevant perl code:
-         if (retrievable ($url)) {
-                 my $path = File::Spec->catfile($where, $url =~ m|([^/]+)$|);
-                 print "Getting $url to $path\n";
-                 if (-e $path) {
-                         if ($force) {
-                                 if (! unlink $path) {
-                                         warn("Could not unlink $path, skipping download: $!\n");
-                                         return 1;
-                                 }
-                         } else {
-                                 warn("$path already exists, skipping download\n");
-                                 return 0;
-                         }
-                 }
-                 # Note: -k/--insecure is intentionally not here; add it to
-                 #       $CURLRC if you want it.
-                 my @cmd = (qw (curl --fail --remote-time --location
-                                --output), $path,
-                            '--user-agent', "spectool/$VERSION");
-                 push(@cmd, '--config', $CURLRC) if (-e $CURLRC);
-                 push(@cmd, $url);
-                 print "--> @cmd\n" if ($verbose > 1);
-                 if (! $dryrun) {
-                         system @cmd;
-                         return $? == -1 ? 127 : $? >> 8;
-                 } else {
-                         print "dry run: @cmd\n";
-                 }
-         } else {
-                 warn "Couldn't fetch $url: missing/unsupported URL\n" if ($verbose);
-         }
-         return 0;
-     """
-     dir = get_download_location(spec, opts)
-     # urls = []
-
-     # Iterate over sources
-
-     print(dir)
-
-
- def listfiles(spec, opts, selected):
+ def generate_asset_list(spec, opts, selected):
      if opts.allsources or opts.all:
-         for source, num in sorted(zip(spec.sources, spec.sourcenums), key=operator.itemgetter(1)):
-             print("Source{}: {}".format(num, source))
+         for num, source in sorted(zip(spec.sourcenums, spec.sources)):
+             yield 'Source', num, source

      if opts.allpatches or opts.all:
-         for patch, num in sorted(zip(spec.patches, spec.patchnums), key=operator.itemgetter(1)):
-             print("Patch{}: {}".format(num, patch))
+         for num, patch in sorted(zip(spec.patchnums, spec.patches)):
+             yield 'Patch', num, patch

      for source in opts.sourcelist:
          if source in spec.sourcenums:
-             print("Source{}: {}".format(source, spec.sources[source]))
+             yield 'Source', source, spec.sources[source]
          else:
-             print('No source item {}.'.format(source))
+             yield 'Error', source, 'No source item {}'.format(source)

      for patch in opts.patchlist:
          if patch in spec.patchnums:
-             print("Patch{}: {}".format(patch, spec.patches[patch]))
+             yield 'Patch', patch, spec.patches[patch]
          else:
-             print('No patch item {}.'.format(patch))
+             yield 'Error', patch, 'No patch item {}'.format(patch)

+ def listfiles(spec, opts, selected):
+     for typ, num, asset in generate_asset_list(spec, opts, selected):
+         if typ == 'Error':
+             print(asset)
+         else:
+             print('{}{}: {}'.format(typ, num, asset))
+
+
+ def is_downloadable(url):
+     """Check that string is a valid URL of a protocol which we can handle."""
+     return url.split('://')[0] in {'http', 'https', 'ftp'}
+
+ def path_download_name(url):
+     return url.split('/')[-1]
+
+ def download_file(url, dest):
+     req = request.Request(url, headers={'User-Agent': USER_AGENT})
+     with request.urlopen(req) as inp:
+         with open(dest, 'wb') as out:
+             shutil.copyfileobj(inp, out)
+
+ def download_files(spec, opts, selected):
+     """
+     Fetch the sources.
+     """
+     dir = get_download_location(spec, opts)
+     for typ, num, asset in generate_asset_list(spec, opts, selected):
+         if typ == 'Error':
+             raise IndexError(asset)
+         if not is_downloadable(asset):
+             print('{}{}: {} cannot be downloaded'.format(typ, num, asset))
+         else:
+             dest = os.path.join(dir, path_download_name(asset))
+             if not opts.force and os.path.exists(dest):
+                 print('{}{}: {} already exists'.format(typ, num, dest))
+             else:
+                 print('{}{}: {} → {}'.format(typ, num, asset, dest))
+                 if not opts.dryrun:
+                     download_file(asset, dest)

  def show_parsed_data(spec, opts):
      print("Parsed these tags:")

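With this change, the listing options and the new -g download path both consume the same generate_asset_list() generator. Below is a minimal sketch of that flow: the generator is abridged from the patch (the per-number opts.sourcelist/opts.patchlist handling is omitted), and the SimpleNamespace objects are purely hypothetical stand-ins for the parsed Spec and the command-line options.

from types import SimpleNamespace

def generate_asset_list(spec, opts, selected):
    # Abridged from the patch: yield ('Source' or 'Patch', number, value) triples.
    if opts.allsources or opts.all:
        for num, source in sorted(zip(spec.sourcenums, spec.sources)):
            yield 'Source', num, source
    if opts.allpatches or opts.all:
        for num, patch in sorted(zip(spec.patchnums, spec.patches)):
            yield 'Patch', num, patch

# Hypothetical stand-ins; the real objects come from Spec() and the option parser.
spec = SimpleNamespace(sources=['https://example.com/foo-1.0.tar.gz'],
                       sourcenums=[0],
                       patches=['foo-fix-build.patch'],
                       patchnums=[1])
opts = SimpleNamespace(all=True, allsources=False, allpatches=False)

# listfiles() does essentially this with the triples:
for typ, num, asset in generate_asset_list(spec, opts, selected=None):
    print('{}{}: {}'.format(typ, num, asset))
# Source0: https://example.com/foo-1.0.tar.gz
# Patch1: foo-fix-build.patch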

Nothing fancy; this implements -g in the most straightforward way. It works for the few spec files I tested it on.
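For reference, the download side can be exercised on its own with a sketch like the one below. The helper bodies are taken from the patch; the URL, destination directory and USER_AGENT value are hypothetical placeholders (in spectool itself they come from the spec's Source/Patch tags, get_download_location() and the VERSION string).

import os.path
import shutil
from urllib import request

USER_AGENT = 'spectool/test'  # stand-in for the real 'spectool/' + VERSION

def is_downloadable(url):
    # Same check as the patch: only plain ftp/http/https URLs are fetched.
    return url.split('://')[0] in {'http', 'https', 'ftp'}

def path_download_name(url):
    # The local file name is the last component of the URL path.
    return url.split('/')[-1]

def download_file(url, dest):
    # Stream the response body to disk, sending a spectool User-Agent header.
    req = request.Request(url, headers={'User-Agent': USER_AGENT})
    with request.urlopen(req) as inp:
        with open(dest, 'wb') as out:
            shutil.copyfileobj(inp, out)

url = 'https://example.com/foo-1.0.tar.gz'   # hypothetical Source0 URL
if is_downloadable(url):
    dest = os.path.join('.', path_download_name(url))
    if not os.path.exists(dest):             # mirrors the not-opts.force branch
        download_file(url, dest)

download_files() in the patch wraps the same steps with the force/dry-run handling and the download directory returned by get_download_location().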

Pull-Request has been rebased 8 years ago

Pull-Request has been merged by tibbs 8 years ago