pylint and pep8 fixes - amprolla - devuan's apt repo merger
git clone git://parazyd.org/amprolla.git
Log
Files
Refs
README
LICENSE
---
commit 5d279f837f4bfd6d393d01cd8b8f03289d1fbf28
parent acdff2408cb6a5fc0bdbbdb89b68ee46bc75f55d
Author: parazyd 
Date:   Sun, 17 Dec 2017 02:14:17 +0100

pylint and pep8 fixes

Diffstat:
  M amprolla_merge.py                   |       6 +++---
  M amprolla_merge_contents.py          |       2 +-
  M amprolla_update.py                  |      18 +++++++++---------
  M lib/package.py                      |      16 ++++++++--------
  M lib/parse.py                        |      40 ++++++++++++++++----------------
  M lib/release.py                      |      55 ++++++++++++++++---------------

6 files changed, 70 insertions(+), 67 deletions(-)
---
diff --git a/amprolla_merge.py b/amprolla_merge.py
@@ -12,8 +12,8 @@ from time import time
 
 import lib.globalvars as globalvars
 from lib.config import (aliases, arches, banpkgs, categories, cpunm, mergedir,
-                        mergesubdir, pkgfiles, repos, repo_order, signrelease,
-                        spooldir, srcfiles, suites, skips)
+                        mergesubdir, pkgfiles, repos, repo_order, spooldir,
+                        srcfiles, suites, skips)
 from lib.lock import check_lock, free_lock
 from lib.package import (load_packages_file, merge_packages_many,
                          write_packages)
@@ -143,7 +143,7 @@ def gen_release(suite):
                             join(spooldir, repos['devuan']['dists']))
 
     print('Writing Release')
-    write_release(oldrfl, newrfl, filelist, rootdir, sign=signrelease)
+    write_release(oldrfl, newrfl, filelist, rootdir)
 
 
 def main_merge(packages_file):
diff --git a/amprolla_merge_contents.py b/amprolla_merge_contents.py
@@ -12,11 +12,11 @@ from os.path import dirname, join, isfile
 from time import time
 
 import lib.globalvars as globalvars
-from amprolla_merge import prepare_merge_dict
 from lib.config import (arches, categories, cpunm, mergedir, mergesubdir,
                         repos, spooldir)
 from lib.lock import check_lock, free_lock
 from lib.log import die, info
+from amprolla_merge import prepare_merge_dict
 
 
 def merge_contents(filelist):
diff --git a/amprolla_update.py b/amprolla_update.py
@@ -11,12 +11,12 @@ from time import time
 import requests
 
 import lib.globalvars as globalvars
-from amprolla_merge import gen_release, merge, prepare_merge_dict
 from lib.config import aliases, cpunm, repos, repo_order, spooldir, skips
 from lib.lock import check_lock, free_lock
 from lib.log import info, warn, die
 from lib.parse import compare_dict, get_date, get_time, parse_release
 from lib.net import download
+from amprolla_merge import gen_release, merge, prepare_merge_dict
 
 
 def remote_is_newer(remote, local):
@@ -48,27 +48,27 @@ def perform_update(suite, paths):
     needsmerge = {}
     needsmerge['downloads'] = []  # all files that have to be downloaded
     regenrelease = False
-    c = 0
+    cnt = 0
     for i in repo_order:
         # i = repository name
         needsmerge[i] = {}
         needsmerge[i]['mergelist'] = []
 
-        if paths[c]:
+        if paths[cnt]:
             info('Working on %s repo' % i)
-            remote_path = paths[c].replace(spooldir, repos[i]['host'])
+            remote_path = paths[cnt].replace(spooldir, repos[i]['host'])
             try:
                 remote_rel = requests.get(join(remote_path, 'Release'))
             except requests.exceptions.ConnectionError as err:
                 warn('Caught exception: "%s". Retrying...' % err)
                 return perform_update(suite, paths)
 
-            local_rel_text = open(join(paths[c], 'Release')).read()
+            local_rel_text = open(join(paths[cnt], 'Release')).read()
 
             diffs = {}
             if remote_is_newer(remote_rel.text, local_rel_text):
                 download((join(remote_path, 'Release'),
-                          join(paths[c], 'Release')))
+                          join(paths[cnt], 'Release')))
                 regenrelease = True
 
                 diffs = compare_dict(parse_release(remote_rel.text),
@@ -78,12 +78,12 @@ def perform_update(suite, paths):
                 for k in diffs:
                     if k.endswith('Packages.gz') or k.endswith('Sources.gz'):
                         needsmerge[i]['mergelist'].append(k)
-                    rmt = join(paths[c].replace(spooldir, repos[i]['host']), k)
-                    loc = join(paths[c], k)
+                    rmt = join(paths[cnt].replace(spooldir, repos[i]['host']), k)
+                    loc = join(paths[cnt], k)
                     dlf = (rmt, loc)
                     needsmerge['downloads'].append(dlf)
 
-        c += 1
+        cnt += 1
         # break
 
     # download what needs to be downloaded
diff --git a/lib/package.py b/lib/package.py
@@ -27,10 +27,10 @@ def write_packages(packages, filename, sort=True, sources=False):
     bsnm = 'Packages.gz'
     if sources:
         bsnm = 'Sources.gz'
-    rl = filename.replace(bsnm, 'Release')
-    sprl = rl.replace(mergedir, join(spooldir, 'devuan'))
-    if not isfile(rl) and isfile(sprl):
-        copyfile(sprl, rl)
+    rel = filename.replace(bsnm, 'Release')
+    sprl = rel.replace(mergedir, join(spooldir, 'devuan'))
+    if not isfile(rel) and isfile(sprl):
+        copyfile(sprl, rel)
 
     gzf = gzip_open(filename, 'w')
     xzf = lzma_open(filename.replace('.gz', '.xz'), 'w')
@@ -48,10 +48,10 @@ def write_packages(packages, filename, sort=True, sources=False):
     for pkg_name, pkg_contents in pkg_items:
         for key in keylist:
             if key in pkg_contents:
-                s = '%s: %s\n' % (key, pkg_contents[key])
-                gzf.write(s.encode('utf-8'))
-                xzf.write(s.encode('utf-8'))
-                # f.write(s.encode('utf-8'))
+                sin = '%s: %s\n' % (key, pkg_contents[key])
+                gzf.write(sin.encode('utf-8'))
+                xzf.write(sin.encode('utf-8'))
+                # f.write(sin.encode('utf-8'))
         gzf.write(b'\n')
         xzf.write(b'\n')
         # f.write(b'\n')
diff --git a/lib/parse.py b/lib/parse.py
@@ -69,9 +69,9 @@ def parse_release_head(reltext):
         elif line.startswith(splitter):
             md5sum = True
         else:
-            k = line.split(': ')[0]
-            v = line.split(': ')[1]
-            metadata[k] = v
+            key = line.split(': ')[0]
+            val = line.split(': ')[1]
+            metadata[key] = val
 
     return metadata
 
@@ -92,9 +92,9 @@ def parse_package(entry):
         else:
             pkgs[key] = value
 
-            v = line.split(':', 1)
-            key = v[0]
-            value = v[1][1:]
+            val = line.split(':', 1)
+            key = val[0]
+            value = val[1][1:]
 
     if key:
         pkgs[key] = value
@@ -133,41 +133,41 @@ def parse_dependencies(dependencies):
 
         {'lib6': '(>= 2.4)', 'libdbus-1-3': '(>= 1.0.2)', 'foo': None}
     """
-    r = {}
+    ret = {}
 
     for pkg_plus_version in dependencies.split(', '):
-        v = pkg_plus_version.split(' ', 1)
-        name = v[0]
+        ver = pkg_plus_version.split(' ', 1)
+        name = ver[0]
 
         # If we get passed an empty string, the name is '', and we just
         # outright stop
         if not name:
             return {}
 
-        if len(v) == 2:
-            version = v[1]
-            r[name] = version
+        if len(ver) == 2:
+            version = ver[1]
+            ret[name] = version
         else:
-            r[name] = None
+            ret[name] = None
 
-    return r
+    return ret
 
 
-def compare_dict(d1, d2):
+def compare_dict(dic1, dic2):
     """
     Compares two dicts
     Takes two dicts and returns a dict of tuples with the differences.
 
     Example input:
 
-        d1={'foo': 'bar'}, 22={'foo': 'baz'}
+        dic1={'foo': 'bar'}, dic2={'foo': 'baz'}
 
     Example output:
 
         {'foo': ('bar', 'baz')}
     """
-    d1_keys = set(d1.keys())
-    d2_keys = set(d2.keys())
+    d1_keys = set(dic1.keys())
+    d2_keys = set(dic2.keys())
     intersect_keys = d1_keys.intersection(d2_keys)
-    modified = {o: (d1[o], d2[o]) for o in intersect_keys if d1[o] != d2[o]}
-    return modified
+    mod = {o: (dic1[o], dic2[o]) for o in intersect_keys if dic1[o] != dic2[o]}
+    return mod
diff --git a/lib/release.py b/lib/release.py
@@ -4,7 +4,7 @@
 Release file functions and helpers
 """
 
-from datetime import datetime, timedelta
+from datetime import datetime  # , timedelta
 from gzip import decompress as gzip_decomp
 from lzma import compress as lzma_comp
 from os.path import getsize, isfile
@@ -12,7 +12,7 @@ from subprocess import Popen
 
 import lib.globalvars as globalvars
 from lib.config import (checksums, distrolabel, gpgdir, release_aliases,
-                        release_keys, signingkey)
+                        release_keys, signingkey, signrelease)
 from lib.log import info
 from lib.parse import parse_release_head, parse_release
 
@@ -31,23 +31,23 @@ def rewrite_release_head(headers):
     return headers
 
 
-def write_release(oldrel, newrel, filelist, r, sign=True, rewrite=True):
+def write_release(oldrel, newrel, filelist, rmstr, rewrite=True):
     """
     Generates a valid Release file
     if sign=False: do not use gnupg to sign the file
     if rewrite=True: rewrite the Release headers as defined in the config
 
-    Arguments taken: oldrel, newrel, filelist, r
+    Arguments taken: oldrel, newrel, filelist, rmstr
         * location of the old Release file (used to take metadata)
         * location where to write the new Release file
         * list of files to make checksums
         * string to remove from the path of the hashed file
     """
-    t1 = datetime.utcnow()
-    # t2 = datetime.utcnow() + timedelta(days=7)
+    time1 = datetime.utcnow()
+    # time2 = datetime.utcnow() + timedelta(days=7)
 
-    prettyt1 = t1.strftime('%a, %d %b %Y %H:%M:%S UTC')
-    # prettyt2 = t2.strftime('%a, %d %b %Y %H:%M:%S UTC')
+    prettyt1 = time1.strftime('%a, %d %b %Y %H:%M:%S UTC')
+    # prettyt2 = time2.strftime('%a, %d %b %Y %H:%M:%S UTC')
 
     # this holds our local data in case we don't want to rehash files
     local_rel = open(newrel).read()
@@ -69,7 +69,7 @@ def write_release(oldrel, newrel, filelist, r, sign=True, rewrite=True):
             new.write('%s: %s\n' % (k, rel_cont[k]))
 
     if globalvars.rehash:
-        rehash_release(filelist, new, r)
+        rehash_release(filelist, new, rmstr)
     else:
         info('Reusing old checksums')
         for csum in checksums:
@@ -79,32 +79,35 @@ def write_release(oldrel, newrel, filelist, r, sign=True, rewrite=True):
 
     new.close()
 
-    if sign:
+    if signrelease:
         sign_release(newrel)
 
 
-def rehash_release(_filelist, fd, r):
+def rehash_release(_filelist, fdesc, rmstr):
     """
     Calculates checksums of a given filelist and writes them to the given
-    file descriptor. Takes r as the third argument, which is a string to
+    file descriptor. Takes rmstr as the third argument, which is a string to
     remove from the path of the hashed file when writing it to a file.
     """
     info('Hashing checksums')
     for csum in checksums:
-        fd.write('%s:\n' % csum['name'])
-        for f in _filelist:
-            if isfile(f):
-                cont = open(f, 'rb').read()
-                fd.write(' %s %8s %s\n' % (csum['f'](cont).hexdigest(),
-                                           getsize(f), f.replace(r+'/', '')))
-            elif f.endswith('.xz') and isfile(f.replace('.xz', '.gz')):
-                xzstr = lzma_comp(open(f.replace('.xz', '.gz'), 'rb').read())
-                fd.write(' %s %8s %s\n' % (csum['f'](xzstr).hexdigest(),
-                                           len(xzstr), f.replace(r+'/', '')))
-            elif not f.endswith('.gz') and isfile(f+'.gz'):
-                uncomp = gzip_decomp(open(f+'.gz', 'rb').read())
-                fd.write(' %s %8s %s\n' % (csum['f'](uncomp).hexdigest(),
-                                           len(uncomp), f.replace(r+'/', '')))
+        fdesc.write('%s:\n' % csum['name'])
+        for i in _filelist:
+            if isfile(i):
+                cont = open(i, 'rb').read()
+                fdesc.write(' %s %8s %s\n' % (csum['f'](cont).hexdigest(),
+                                              getsize(i),
+                                              i.replace(rmstr+'/', '')))
+            elif i.endswith('.xz') and isfile(i.replace('.xz', '.gz')):
+                xzstr = lzma_comp(open(i.replace('.xz', '.gz'), 'rb').read())
+                fdesc.write(' %s %8s %s\n' % (csum['f'](xzstr).hexdigest(),
+                                              len(xzstr),
+                                              i.replace(rmstr+'/', '')))
+            elif not i.endswith('.gz') and isfile(i+'.gz'):
+                uncomp = gzip_decomp(open(i+'.gz', 'rb').read())
+                fdesc.write(' %s %8s %s\n' % (csum['f'](uncomp).hexdigest(),
+                                              len(uncomp),
+                                              i.replace(rmstr+'/', '')))
     return