commit 1d85a8d1cdd5794fdd6362254d0953c85ca2cc2a
parent f94e64b215e8bec044a897b61d14f0aba97721fe
Author: parazyd <parazyd@dyne.org>
Date:   Thu,  8 Jun 2017 00:34:01 +0200
cleanup; remove old cruft
Diffstat:
8 files changed, 28 insertions(+), 59 deletions(-)
diff --git a/amprolla_init.py b/amprolla_init.py
@@ -10,7 +10,7 @@ from os.path import join
 from multiprocessing import Pool
 from time import time
 
-from lib.config import repos, suites, aliases, spooldir, mainrepofiles
+from lib.config import aliases, cpunm, mainrepofiles, repos, spooldir, suites
 from lib.net import download
 from lib.parse import parse_release
 
@@ -21,9 +21,10 @@ def pop_dirs(repo):
     directory structure.
     Returns a list of tuples holding the remote and local locations
     of the files
+
     Example:
-        (http://auto.mirror.devuan.org/devuan/dists/jessie/main/binary-armhf/Packages.gz,
-         ./spool/devuan/dists/unstable/contrib/binary-armhf/Packages.gz)
+    (http://deb.debian.org/debian/dists/jessie/main/binary-all/Packages.gz,
+     ./spool/debian/dists/jessie/main/binary-all/Packages.gz)
     """
     repodata = repos[repo]
 
@@ -51,7 +52,7 @@ def pop_dirs(repo):
 
 def main():
     """
-    Loops through all repositories, and downloads their *Release* files, along
+    Loops through all repositories and downloads their Release files, along
     with all the files listed within those Release files.
     """
     for dist in repos:
@@ -62,7 +63,7 @@ def main():
             for file in mainrepofiles:
                 urls = (join(url[0], file), join(url[1], file))
                 tpl.append(urls)
-            dlpool = Pool(4)
+            dlpool = Pool(cpunm)
             dlpool.map(download, tpl)
             dlpool.close()
 
@@ -73,7 +74,7 @@ def main():
                 # if k.endswith('/binary-armhf/Packages.gz'):
                 urls = (join(url[0], k), join(url[1], k))
                 tpl.append(urls)
-            dlpool = Pool(4)
+            dlpool = Pool(cpunm)
             dlpool.map(download, tpl)
             dlpool.close()
 
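The hunks above replace the hardcoded Pool(4) with the new cpunm setting and hand download() a list of (remote, local) tuples built by pop_dirs(). A minimal sketch of that fan-out, using a stand-in download() and a local cpunm instead of the real lib.config and lib.net imports:

    from multiprocessing import Pool

    cpunm = 4  # stands in for lib.config's new cpunm setting


    def download(uris):
        # stand-in for lib.net.download(); only prints what it would fetch
        url, path = uris
        print('would fetch %s -> %s' % (url, path))


    if __name__ == '__main__':
        tpl = [('http://deb.debian.org/debian/dists/jessie/Release',
                './spool/debian/dists/jessie/Release')]
        dlpool = Pool(cpunm)
        dlpool.map(download, tpl)
        dlpool.close()
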
diff --git a/amprolla_merge.py b/amprolla_merge.py
@@ -8,13 +8,13 @@ Amprolla main module
 from os.path import basename, join
 from multiprocessing import Pool
 from time import time
-# from pprint import pprint
 
-from lib.package import (write_packages, load_packages_file,
-                         merge_packages_many)
-from lib.config import (aliases, banpkgs, repo_order, repos, spooldir, suites,
-                        mergedir, mergesubdir, pkgfiles, srcfiles, categories,
-                        arches)
+
+from lib.config import (aliases, arches, banpkgs, categories, cpunm, mergedir,
+                        mergesubdir, pkgfiles, repos, repo_order, spooldir,
+                        srcfiles, suites)
+from lib.package import (load_packages_file, merge_packages_many,
+                         write_packages)
 from lib.release import write_release
 
 
@@ -61,7 +61,7 @@ def devuan_rewrite(pkg, repo_name):
                                                   repos[repo_name]['name'])
     if 'Directory' in pkg:
         pkg['Directory'] = pkg['Directory'].replace('pool/', 'pool/%s/' %
-                                                  repos[repo_name]['name'])
+                                                    repos[repo_name]['name'])
 
     return pkg
 
@@ -171,8 +171,7 @@ def main():
 
             pkg.append(join(j, i, mrgfile))
 
-    # pprint(pkg)
-    mrgpool = Pool(4)  # Set it to the number of CPUs you want to use
+    mrgpool = Pool(cpunm)
     mrgpool.map(main_merge, pkg)
     mrgpool.close()
 
@@ -180,9 +179,8 @@ def main():
     for i in suites:
         for j in suites[i]:
             rel_list.append(j)
-            # gen_release(j)
 
-    relpool = Pool(4)  # Set it to the number of CPUs you want to use
+    relpool = Pool(cpunm)
     relpool.map(gen_release, rel_list)
     relpool.close()
 
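The re-indented continuation above belongs to devuan_rewrite()'s pool/ prefix rewrite. In isolation, the Directory field is rewritten like this ('DEBIAN' is only an assumed value of repos[repo_name]['name']):

    pkg = {'Directory': 'pool/main/h/hello'}
    repo_name = 'DEBIAN'  # assumed example of repos[repo_name]['name']
    pkg['Directory'] = pkg['Directory'].replace('pool/', 'pool/%s/' % repo_name)
    print(pkg['Directory'])  # pool/DEBIAN/main/h/hello
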
diff --git a/amprolla_update.py b/amprolla_update.py
@@ -10,9 +10,9 @@ from multiprocessing import Pool
 from time import time
 import requests
 
-from amprolla_merge import prepare_merge_dict, gen_release, merge
-from lib.config import repos, spooldir, repo_order, aliases
-from lib.parse import parse_release, get_time, get_date, compare_dict
+from amprolla_merge import gen_release, merge, prepare_merge_dict
+from lib.config import aliases, cpunm, repos, repo_order, spooldir
+from lib.parse import compare_dict, get_date, get_time, parse_release
 from lib.net import download
 
 
@@ -77,7 +77,7 @@ def perform_update(suite, paths):
     # download what needs to be downloaded
     if needsmerge['downloads']:
         print('Downloading updates...')
-        dlpool = Pool(4)
+        dlpool = Pool(cpunm)
         dlpool.map(download, needsmerge['downloads'])
 
     # create union of our Packages.gz and Sources.gz files we will merge
@@ -112,7 +112,7 @@ def perform_update(suite, paths):
     # perform the actual merge
     if merge_list:
         print('Merging files...')
-        mrgpool = Pool(4)
+        mrgpool = Pool(cpunm)
         mrgpool.map(merge, merge_list)
 
     # generate Release files if we got any new files
diff --git a/lib/config.def.py b/lib/config.def.py
@@ -6,6 +6,7 @@ amprolla configuration file
 
 from hashlib import md5, sha1, sha256
 
+cpunm = 4  # number of CPUs to use for multiprocessing
 spooldir = './spool'
 signingkey = 'CA608125'
 mergedir = './merged'
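The new cpunm knob is what the Pool(4) call sites above now read. If it should follow the machine rather than stay fixed, one possible variant (not part of this commit) is:

    from multiprocessing import cpu_count

    cpunm = cpu_count()  # e.g. 4 on a quad-core machine
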
diff --git a/lib/net.py b/lib/net.py
@@ -17,7 +17,9 @@ def download(uris):
     url = uris[0]
     path = uris[1]
     print("downloading: %s\nto: %s" % (url, path))
+
     r = requests.get(url, stream=True)
+
     if r.status_code == 404:
         warn("download of %s failed: not found!" % url)
         return
@@ -32,5 +34,3 @@ def download(uris):
             f.write(chunk)
             # f.flush()
     f.close()
-    print("\033[1;32m .  done\033[0m")
-    return
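download() streams the response body straight to disk; the hunk above only drops the trailing "done" print and the bare return. A self-contained sketch of the same streaming pattern, with a with-block instead of the explicit close(), a placeholder URL/path, and an assumed chunk size:

    import requests

    url = 'http://deb.debian.org/debian/dists/jessie/Release'  # placeholder
    r = requests.get(url, stream=True)
    if r.status_code != 404:
        with open('/tmp/Release', 'wb') as f:
            for chunk in r.iter_content(chunk_size=1024):
                f.write(chunk)
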
diff --git a/lib/package.py b/lib/package.py
@@ -10,8 +10,8 @@ from gzip import open as gzip_open
 from lzma import open as lzma_open
 from shutil import copyfile
 
-from lib.parse import (parse_packages, parse_dependencies)
-from lib.config import packages_keys, sources_keys, mergedir, spooldir
+from lib.config import mergedir, packages_keys, sources_keys, spooldir
+from lib.parse import parse_dependencies, parse_packages
 
 
 def write_packages(packages, filename, sort=True, sources=False):
diff --git a/lib/parse.py b/lib/parse.py
@@ -50,10 +50,7 @@ def parse_release(reltext):
 
 
 def parse_release_head(reltext):
-    """
-    Parses the header of the release file to grab potentially needed
-    metadata
-    """
+    """Parses the header of the release file to grab needed metadata"""
     metadata = {}
 
     contents = reltext.split('\n')
@@ -72,23 +69,6 @@ def parse_release_head(reltext):
     return metadata
 
 
-def parse_release_re(reltext):
-    """
-    Parses a Release file using regular expressions and returns a dict
-    of the files we keed
-    key = filename, value = sha256 checksum
-    """
-    _hash = {}
-    match = re.search('SHA256:+', reltext)
-    if match:
-        line = reltext[match.start():-1]
-        for i in line.split('\n'):
-            if i == 'SHA256:' or i == '\n':  # XXX: hack
-                continue
-            _hash[(i.split()[2])] = i.split()[0]
-        return _hash
-
-
 def parse_package(entry):
     """ Parses a single Packages entry """
     pkgs = {}
@@ -113,17 +93,6 @@ def parse_package(entry):
     return pkgs
 
 
-PACKAGES_REGEX = re.compile('([A-Za-z0-9\-]+): ')
-def parse_package_re(entry):
-    """ Parses a single Packages entry """
-    contents = PACKAGES_REGEX.split(entry)[1:]  # Throw away the first ''
-
-    keys = contents[::2]
-    vals = map(lambda x: x.strip(), contents[1::2])
-
-    return dict(zip(keys, vals))
-
-
 def parse_packages(pkgtext):
     """
     Parses our package file contents into a hashmap
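parse_package() turns one "Key: Value" stanza into a dict, which is why the regex-based parse_package_re() above could be dropped. A rough illustration of that key/value shape, ignoring multi-line fields such as Description:

    entry = ('Package: hello\n'
             'Version: 2.10-1\n'
             'Architecture: amd64')

    pkg = {}
    for line in entry.split('\n'):
        key, _, val = line.partition(': ')
        pkg[key] = val
    print(pkg)  # {'Package': 'hello', 'Version': '2.10-1', 'Architecture': 'amd64'}
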
diff --git a/lib/release.py b/lib/release.py
@@ -8,7 +8,7 @@ from datetime import datetime, timedelta
 from os.path import getsize, isfile
 import gnupg
 
-from lib.config import release_keys, checksums, signingkey
+from lib.config import checksums, release_keys, signingkey
 from lib.parse import parse_release_head
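write_release() pulls in gnupg alongside the configured signingkey. A hedged sketch of how a merged Release file could be detach-signed with python-gnupg; the paths are placeholders and this is not necessarily write_release()'s exact logic:

    import gnupg

    gpg = gnupg.GPG()
    with open('./merged/dists/jessie/Release', 'rb') as rel:
        gpg.sign_file(rel, keyid='CA608125', detach=True,
                      output='./merged/dists/jessie/Release.gpg')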