commit f0914de43726ef280ecde1203421c5721dd22507
parent e07a9378a40812ccdfcbb742b8eea1a2905717a7
Author: parazyd <parazyd@dyne.org>
Date:   Wed,  7 Jun 2017 15:10:45 +0200
finalize incremental updates
Diffstat:
2 files changed, 59 insertions(+), 33 deletions(-)
diff --git a/amprolla_merge.py b/amprolla_merge.py
@@ -66,6 +66,8 @@ def devuan_rewrite(pkg, repo_name):
 def merge(packages_list):
     """
     Merges the Packages/Sources files given in the package list
+
+    e.g. ['path/to/devuan/Packages.gz', None, 'path/to/debian/Packages.gz']
+    (one entry per repo in repo_order; None when a repo lacks the file)
     """
     all_repos = []
     print('Loading packages: %s' % packages_list)
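The docstring example above shows the shape merge() expects: one path per repo
in repo_order, with None standing in for a repo that does not carry the file
for that suite. A minimal sketch of such a call, with purely illustrative
paths (the real ones are built from spooldir and lib.config):

    from amprolla_merge import merge

    packages_list = [
        'spool/devuan/dists/jessie/main/binary-amd64/Packages.gz',
        None,  # this repo has no Packages.gz for the suite
        'spool/debian/dists/jessie/main/binary-amd64/Packages.gz',
    ]
    merge(packages_list)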
diff --git a/amprolla_update.py b/amprolla_update.py
@@ -6,19 +6,19 @@ Perform incremental updates
 """
 
 from os.path import join
-import requests
 from multiprocessing import Pool
+import requests
 
-from amprolla_init import pop_dirs
-from amprolla_merge import prepare_merge_dict
-from lib.config import repos, spooldir, repo_order
+from amprolla_merge import prepare_merge_dict, gen_release, merge
+from lib.config import repos, spooldir, repo_order, aliases
 from lib.parse import parse_release, get_time, get_date, compare_dict
 from lib.net import download
 
-from pprint import pprint
-
 
 def remote_is_newer(remote, local):
+    """
+    Checks if a remote Release file holds a newer date, and returns True if so
+    """
     rem_date = get_date(remote)
     loc_date = get_date(local)
 
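The rest of remote_is_newer() falls outside this hunk; per the docstring it
comes down to comparing the two Date fields extracted by get_date(). A
self-contained sketch of that comparison, assuming the RFC 2822 style date
found in Release files:

    from datetime import datetime

    DATE_FMT = '%a, %d %b %Y %H:%M:%S %Z'  # assumed Release 'Date:' format

    def is_newer(remote_date: str, local_date: str) -> bool:
        # True when the remote timestamp is strictly newer than the local one
        rem = datetime.strptime(remote_date, DATE_FMT)
        loc = datetime.strptime(local_date, DATE_FMT)
        return rem > loc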
@@ -33,7 +33,9 @@ def remote_is_newer(remote, local):
 
 
 def perform_update(suite, paths):
-    print('==================================================')
+    """
+    Performs an incremental update and merge of a given suite
+    """
     print('Checking for updates in %s' % suite)
     print(paths)
 
@@ -49,33 +51,33 @@ def perform_update(suite, paths):
             print('Working on %s repo' % i)
             remote_path = paths[c].replace(spooldir, repos[i]['host'])
             remote_rel = requests.get(join(remote_path, 'Release'))
-            remote_rel_text = remote_rel.text
-            tup = (remote_rel, join(paths[c], 'Release'))
-            download(tup)
 
             local_rel_text = open(join(paths[c], 'Release')).read()
 
-            if remote_is_newer(remote_rel_text, local_rel_text):
-                remote_parsed = parse_release(remote_rel_text)
-                local_parsed = parse_release(local_rel_text)
-                diffs = compare_dict(remote_parsed, local_parsed)
-                if diffs:
-                    for k in diffs:
-                        if k.endswith('Packages.gz') or k.endswith('Sources.gz'):
-                            needsmerge[i]['mergelist'].append(k)
-                        rmt = join(paths[c].replace(spooldir, repos[i]['host']), k)
-                        loc = join(paths[c], k)
-                        dlf = (rmt, loc)
-                        needsmerge['downloads'].append(dlf)
+            diffs = {}
+            if remote_is_newer(remote_rel.text, local_rel_text):
+                download((join(remote_path, 'Release'),
+                          join(paths[c], 'Release')))
+
+                diffs = compare_dict(parse_release(remote_rel.text),
+                                     parse_release(local_rel_text))
+            if diffs:
+                for k in diffs:
+                    if k.endswith('Packages.gz') or k.endswith('Sources.gz'):
+                        needsmerge[i]['mergelist'].append(k)
+                    rmt = join(paths[c].replace(spooldir, repos[i]['host']), k)
+                    loc = join(paths[c], k)
+                    dlf = (rmt, loc)
+                    needsmerge['downloads'].append(dlf)
 
         c += 1
         # break
 
     # download what needs to be downloaded
-    print('Downloading updates...')
-    dlpool = Pool(4)
-    dlpool.map(download, needsmerge['downloads'])
-    dlpool.close
+    if needsmerge['downloads']:
+        print('Downloading updates...')
+        dlpool = Pool(4)
+        dlpool.map(download, needsmerge['downloads'])
+        dlpool.close()
 
     # create union of our Packages.gz and Sources.gz files we will merge
     uni = []
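compare_dict() comes from lib.parse and is not part of this diff; the way its
result is consumed above suggests it returns the Release entries whose values
differ between the remote and local copies. A rough, hypothetical stand-in:

    def changed_entries(remote: dict, local: dict) -> dict:
        # keep entries whose checksum/size changed (or newly appeared) remotely
        return {k: v for k, v in remote.items() if local.get(k) != v}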
@@ -83,17 +85,39 @@ def perform_update(suite, paths):
         uni.append(needsmerge[i]['mergelist'])
     updpkg_list = set().union(*uni)
 
+    # make a list of package lists to feed into merge()
+    merge_list = []
+    for i in updpkg_list:
+        pkgs = []
+        for j in repo_order:
+            sui = suite
+            # resolve this repo's alias for the suite, if any
+            if repos[j]['aliases']:
+                if suite in aliases[repos[j]['name']]:
+                    sui = aliases[repos[j]['name']][suite]
+                elif repos[j]['skipmissing']:
+                    sui = None
+                skips = ['jessie-security', 'ascii-security']  # hack: not in the plain debian repo
+                if j == 'debian' and suite in skips:
+                    sui = None
+
+            if sui:
+                pkgs.append(join(spooldir, repos[j]['dists'], sui, i))
+            else:
+                pkgs.append(None)
+
+        merge_list.append(pkgs)
+
     # perform the actual merge
-    if updpkg_list:
+    if merge_list:
         print('Merging files...')
         mrgpool = Pool(4)
-        mrgpool.map(merge, updpkg_list)
-        mrgpool.close()
-
-    print('Generating Release...')
-    gen_release(suite)
+        mrgpool.map(merge, merge_list)
+        mrgpool.close()
 
-    print('==================================================')
+    # generate release files
+    if needsmerge['downloads']:
+        print('Generating Release...')
+        gen_release(suite)
 
 
 def main():
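The merge_list loop added above is the heart of this commit: for every updated
Packages.gz/Sources.gz it builds one list shaped like the merge() docstring
example, resolving per-repo suite aliases and inserting None where a repo is
skipped. A self-contained sketch of that resolution step, leaving out the
jessie-security/ascii-security special case; the repo and alias data below are
hypothetical, only the lookup logic mirrors the diff:

    def resolve_suite(repo: dict, suite: str, aliases: dict):
        # return the repo-specific suite name, or None to skip this repo
        sui = suite
        if repo['aliases']:
            if suite in aliases[repo['name']]:
                sui = aliases[repo['name']][suite]
            elif repo['skipmissing']:
                sui = None
        return sui

    repo = {'name': 'DEBIAN', 'aliases': True, 'skipmissing': False}
    aliases = {'DEBIAN': {'jessie': 'jessie'}}
    print(resolve_suite(repo, 'jessie', aliases))    # -> 'jessie'
    print(resolve_suite(repo, 'unstable', aliases))  # -> 'unstable' (falls through)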