amprolla

devuan's apt repo merger
git clone git://parazyd.org/amprolla.git

commit 57ac2b2a17fbeb08fc845bbb0b275d22c568892f
parent eea1227c2aa304cfcceb9de2a836026a6f646ff2
Author: parazyd <parazyd@dyne.org>
Date:   Mon, 24 Jul 2017 11:55:02 +0200

styling fixes

Diffstat:
M amprolla_merge_contents.py | 6 +++---
M lib/config.def.py          | 3 ++-
M lib/log.py                 | 6 +++---
M lib/net.py                 | 20 ++++++++++----------
M lib/package.py             | 2 +-
M lib/release.py             | 5 -----
6 files changed, 19 insertions(+), 23 deletions(-)

diff --git a/amprolla_merge_contents.py b/amprolla_merge_contents.py
@@ -52,9 +52,9 @@ def write_contents(pkgs, filename):
     gzf = gzip_open(filename, 'w')
 
     for pkg, files in sorted(pkgs.items()):
-        for f in files:
-            ln = "%s %s\n" % (f, pkg)
-            gzf.write(ln.encode('utf-8'))
+        for file in files:
+            line = "%s %s\n" % (file, pkg)
+            gzf.write(line.encode('utf-8'))
 
     gzf.write(b'\n')
     gzf.close()
diff --git a/lib/config.def.py b/lib/config.def.py
@@ -4,7 +4,8 @@
 amprolla configuration file
 """
 
-from hashlib import md5, sha1, sha256
+# from hashlib import md5, sha1, sha256
+from hashlib import sha256
 
 cpunm = 4  # number of cpus you want to use for multiprocessing
 logdir = './log'
diff --git a/lib/log.py b/lib/log.py
@@ -51,6 +51,6 @@ def logtofile(filename, text, redo=False):
     makedirs(logdir, exist_ok=True)
     if redo:
         remove(join(logdir, filename))
-    lf = open(join(logdir, filename), 'a')
-    lf.write(text)
-    lf.close()
+    lfile = open(join(logdir, filename), 'a')
+    lfile.write(text)
+    lfile.close()
diff --git a/lib/net.py b/lib/net.py
@@ -8,7 +8,7 @@ from os import makedirs
 from os.path import dirname
 
 import requests
-from lib.log import die, info, warn
+from lib.log import info, warn
 
 
 def download(uris):
@@ -20,21 +20,21 @@
         info("dl: %s" % url)
         try:
-            r = requests.get(url, stream=True, timeout=20)
+            rfile = requests.get(url, stream=True, timeout=20)
         except (requests.exceptions.ConnectionError,
-                requests.exceptions.ReadTimeout) as e:
-            warn('Caught exception: "%s". Retrying...' % e)
+                requests.exceptions.ReadTimeout) as err:
+            warn('Caught exception: "%s". Retrying...' % err)
             return download(uris)
 
-        if r.status_code != 200:
-            warn('%s failed: %d' % (url, r.status_code))
+        if rfile.status_code != 200:
+            warn('%s failed: %d' % (url, rfile.status_code))
             return
 
         makedirs(dirname(path), exist_ok=True)
 
-        f = open(path, 'wb')
+        lfile = open(path, 'wb')
         # chunk_size {sh,c}ould be more on gbit servers
-        for chunk in r.iter_content(chunk_size=1024):
+        for chunk in rfile.iter_content(chunk_size=1024):
             if chunk:
-                f.write(chunk)
+                lfile.write(chunk)
                 # f.flush()
-        f.close()
+        lfile.close()
diff --git a/lib/package.py b/lib/package.py
@@ -7,7 +7,7 @@ Package merging functions and helpers
 from os import makedirs
 from os.path import dirname, isfile, join
 from gzip import open as gzip_open
-from lzma import open as lzma_open
+# from lzma import open as lzma_open
 from shutil import copyfile
 
 import lib.globalvars as globalvars
diff --git a/lib/release.py b/lib/release.py
@@ -77,11 +77,6 @@ def write_release(oldrel, newrel, filelist, r, sign=True, rewrite=True):
             uncomp = gzip_decomp(open(f+'.gz', 'rb').read())
             new.write(' %s %8s %s\n' % (csum['f'](uncomp).hexdigest(),
                                         len(uncomp), f.replace(r+'/', '')))
-            # elif basename(f).startswith('Contents') and isfile(f+'.gz'):
-            #     uncomp = gzip_decomp(open(f+'.gz', 'rb').read())
-            #     new.write(' %s %8s %s\n' % (csum['f'](uncomp).hexdigest(),
-            #                                 len(uncomp), f.replace(r+'/', '')))
-
     new.close()
 
     if sign:
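
A note on the lib/net.py hunk above: download() retries a failed fetch by calling itself (return download(uris)), so a mirror that stays unreachable keeps recursing. The sketch below is not part of this commit; it shows the same fetch-and-stream logic written as a bounded loop. MAX_RETRIES and download_one are illustrative names, not amprolla API.

# Minimal sketch of an iterative, capped retry; assumes requests is
# installed. Mirrors the streaming logic of lib/net.py's download().
from os import makedirs
from os.path import dirname

import requests

MAX_RETRIES = 5  # hypothetical cap; the committed code retries indefinitely


def download_one(url, path, retries=MAX_RETRIES):
    """Fetch url to path, retrying on connection errors and timeouts."""
    for _ in range(retries):
        try:
            resp = requests.get(url, stream=True, timeout=20)
        except (requests.exceptions.ConnectionError,
                requests.exceptions.ReadTimeout) as err:
            print('Caught exception: "%s". Retrying...' % err)
            continue
        if resp.status_code != 200:
            print('%s failed: %d' % (url, resp.status_code))
            return False
        makedirs(dirname(path), exist_ok=True)
        with open(path, 'wb') as out:
            # stream in 1 KiB chunks, as lib/net.py does
            for chunk in resp.iter_content(chunk_size=1024):
                if chunk:
                    out.write(chunk)
        return True
    return False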