commit f37b80ecce7862b04cebe6a80f673c3c577bb780
parent 447efaed180df30074f5722963ebf28e91911efd
Author: parazyd <parazyd@dyne.org>
Date: Tue, 6 Jun 2017 00:43:03 +0200
multiprocess initial download
Diffstat:
2 files changed, 24 insertions(+), 16 deletions(-)
diff --git a/amprolla_init.py b/amprolla_init.py
@@ -7,6 +7,7 @@ the spooldir, along with all the files hashed inside the Release files
"""
from os.path import join
+from multiprocessing import Pool
import lib.config as config
from lib.net import download
@@ -56,20 +57,24 @@ def main():
for dist in config.repos:
dlurls = pop_dirs(dist)
for url in dlurls:
+ tpl = []
for file in config.mainrepofiles:
- remote = join(url[0], file)
- local = join(url[1], file)
- download(remote, local)
+ uu = (join(url[0], file), join(url[1], file))
+ tpl.append(uu)
+ p = Pool(4)
+ p.map(download, tpl)
+ p.close()
release_contents = open(join(url[1], 'Release')).read()
release_contents = parse_release(release_contents)
- # for k in release_contents.keys():
+ tpl = []
for k in release_contents:
# if k.endswith('/binary-armhf/Packages.gz'):
- # if k.endswith('Packages.gz'):
- remote = join(url[0], k)
- local = join(url[1], k)
- download(remote, local)
+ uu = (join(url[0], k), join(url[1], k))
+ tpl.append(uu)
+ p = Pool(4)
+ p.map(download, tpl)
+ p.close()
if __name__ == '__main__':
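The pattern the hunk above introduces, condensed into a standalone sketch: collect one (remote, local) tuple per file and hand the list to multiprocessing.Pool.map, which runs the tuple-taking download() from lib/net.py across four worker processes. The fetch_all() wrapper, the workers parameter and the join() call are illustrative additions, not part of the patch (the patch creates a fresh Pool(4) per URL and only calls close()):

    from multiprocessing import Pool
    from os.path import join

    from lib.net import download   # tuple-taking download() from this commit

    def fetch_all(baseurl, destdir, filenames, workers=4):
        # One (remote, local) tuple per file; Pool.map passes each tuple as
        # the single argument that download() now expects.
        jobs = [(join(baseurl, f), join(destdir, f)) for f in filenames]
        pool = Pool(workers)
        pool.map(download, jobs)   # blocks until every file has been fetched
        pool.close()
        pool.join()

Because map() blocks until all jobs have returned, the Release file is already on disk when the subsequent parse_release() call reads it.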
diff --git a/lib/net.py b/lib/net.py
@@ -10,10 +10,12 @@ import requests
from .log import die, warn
-def download(url, path):
+def download(uris):
"""
- Downloads a file by providing it an url and a write path
+ Downloads a file, given a (url, path) tuple
"""
+ url = uris[0]
+ path = uris[1]
print("downloading: %s\nto: %s" % (url, path))
r = requests.get(url, stream=True)
if r.status_code == 404:
@@ -23,11 +25,12 @@ def download(url, path):
die("download of %s failed" % url)
os.makedirs(os.path.dirname(path), exist_ok=True)
- with open(path, "wb") as f:
- # chunk_size {sh,c}ould be more on gbit servers
- for chunk in r.iter_content(chunk_size=1024):
- if chunk:
- f.write(chunk)
- # f.flush()
+ f = open(path, 'wb')
+ # chunk_size {sh,c}ould be more on gbit servers
+ for chunk in r.iter_content(chunk_size=1024):
+ if chunk:
+ f.write(chunk)
+ # f.flush()
+ f.close()
print("\033[1;32m . done\033[0m")
return
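Pool.map hands exactly one argument to the mapped callable, which is why download() now takes a single (url, path) tuple instead of two parameters. A minimal usage sketch; the URLs and spool paths below are made up, the real pairs are built in amprolla_init.py from the repo configuration:

    from multiprocessing import Pool
    from lib.net import download

    pairs = [
        ('http://example.org/dists/stable/Release',
         'spool/dists/stable/Release'),
        ('http://example.org/dists/stable/Release.gpg',
         'spool/dists/stable/Release.gpg'),
    ]

    pool = Pool(4)
    pool.map(download, pairs)   # each worker call receives one (url, path) tuple
    pool.close()
    pool.join()

Pool.starmap (Python 3.3+) would unpack the tuples and let download(url, path) keep its two-parameter signature; packing the pair into a single argument, as done here, also works with plain map().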