X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=dak%2Fprocess_unchecked.py;h=123fd9f37e7e6f7316300f8b98c90452b12a086c;hb=d16e2f9f99f018d3672e77f86a3adf264844590f;hp=04afb7b30065648d3cd3cdf633778915477cb766;hpb=aa83ebb15882823869b109d370e24ef3efd730f8;p=dak.git

diff --git a/dak/process_unchecked.py b/dak/process_unchecked.py
index 04afb7b3..123fd9f3 100755
--- a/dak/process_unchecked.py
+++ b/dak/process_unchecked.py
@@ -630,11 +630,11 @@ def check_files():
 
         # Check the md5sum & size against existing files (if any)
         files[f]["pool name"] = utils.poolify (changes["source"], files[f]["component"])
-        files_id = database.get_files_id(files[f]["pool name"] + f, files[f]["size"], files[f]["md5sum"], files[f]["sha1sum"], files[f]["sha256sum"], files[f]["location id"])
+        files_id = database.get_files_id(files[f]["pool name"] + f, files[f]["size"], files[f]["md5sum"], files[f]["location id"])
         if files_id == -1:
             reject("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
         elif files_id == -2:
-            reject("md5sum, sha1sum, sha256sum and/or size mismatch on existing copy of %s." % (f))
+            reject("md5sum and/or size mismatch on existing copy of %s." % (f))
         files[f]["files id"] = files_id
 
         # Check for packages that have moved from one component to another
@@ -919,111 +919,13 @@ def check_urgency ():
 
 ################################################################################
 
 def check_hashes ():
-    # Make sure we recognise the format of the Files: field
-    format = changes.get("format", "0.0").split(".",1)
-    if len(format) == 2:
-        format = int(format[0]), int(format[1])
-    else:
-        format = int(float(format[0])), 0
-
-    check_hash(".changes", files, "md5sum", apt_pkg.md5sum)
-    check_hash(".dsc", dsc_files, "md5sum", apt_pkg.md5sum)
-
-    # (hashname, function, originate)
-    # If originate is true, we have to calculate it because
-    # the changes file version is too early for it to be
-    # included
-    hashes = [("sha1", apt_pkg.sha1sum, False),
-              ("sha256", apt_pkg.sha256sum, False)]
-
-    if format <= (1,8):
-        hashes["sha1"] = True
-        hashes["sha256"] = True
-
-    for x in changes:
-        if x.startswith("checksum-"):
-            h = x.split("-",1)[1]
-            if h not in dict(hashes):
-                reject("Unsupported checksum field in .changes" % (h))
-
-    for x in dsc:
-        if x.startswith("checksum-"):
-            h = x.split("-",1)[1]
-            if h not in dict(hashes):
-                reject("Unsupported checksum field in .dsc" % (h))
-
-    for h,f,o in hashes:
-        try:
-            fs = utils.build_file_list(changes, 0, "checksums-%s" % h, h)
-            if o:
-                create_hash(fs, h, f, files)
-            else:
-                check_hash(".changes %s" % (h), fs, h, f, files)
-        except NoFilesFieldError:
-            reject("No Checksums-%s: field in .changes" % (h))
-        except UnknownFormatError, format:
-            reject("%s: unknown format of .changes" % (format))
-        except ParseChangesError, line:
-            reject("parse error for Checksums-%s in .changes, can't grok: %s." % (h, line))
-
-        if "source" not in changes["architecture"]: continue
-
-        try:
-            fs = utils.build_file_list(dsc, 1, "checksums-%s" % h, h)
-            if o:
-                create_hash(fs, h, f, dsc_files)
-            else:
-                check_hash(".dsc %s" % (h), fs, h, f, dsc_files)
-        except UnknownFormatError, format:
-            reject("%s: unknown format of .dsc" % (format))
-        except NoFilesFieldError:
-            reject("No Checksums-%s: field in .dsc" % (h))
-        except ParseChangesError, line:
-            reject("parse error for Checksums-%s in .dsc, can't grok: %s." % (h, line))
-
-################################################################################
-
-def create_hash (lfiles, key, testfn, basedict = None):
-    for f in lfiles.keys():
-        try:
-            file_handle = utils.open_file(f)
-        except CantOpenError:
-            continue
-
-        # Check hash
-        basedict[f]['%ssum' % key] = testfn(file_handle)
-        file_handle.close()
-
-
-################################################################################
-
-def check_hash (where, lfiles, key, testfn, basedict = None):
-    if basedict:
-        for f in basedict.keys():
-            if f not in lfiles:
-                reject("%s: no %s checksum" % (f, key))
-
-    for f in lfiles.keys():
-        if basedict and f not in basedict:
-            reject("%s: extraneous entry in %s checksums" % (f, key))
-
-        try:
-            file_handle = utils.open_file(f)
-        except CantOpenError:
-            continue
+    utils.check_hash(".changes", files, "md5sum", apt_pkg.md5sum)
+    utils.check_hash(".dsc", dsc_files, "md5sum", apt_pkg.md5sum)
 
-        # Check hash
-        if testfn(file_handle) != lfiles[f][key]:
-            reject("%s: %s check failed." % (f, key))
-        file_handle.close()
-        # Store the hashes for later use
-        basedict[f]['%ssum' % key] = lfiles[f][key]
-        # Check size
-        actual_size = os.stat(f)[stat.ST_SIZE]
-        size = int(lfiles[f]["size"])
-        if size != actual_size:
-            reject("%s: actual file size (%s) does not match size (%s) in %s"
-                   % (f, actual_size, size, where))
+    # This is stupid API, but it'll have to do for now until
+    # we actually have proper abstraction
+    for m in utils.ensure_hashes(changes, dsc, files, dsc_files):
+        reject(m)
 
 ################################################################################
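
Note on the new calling convention (an editorial sketch, not part of the patch above): the helpers this hunk starts calling, utils.check_hash() and utils.ensure_hashes(), live in daklib/utils.py, which this diff does not show. The Python below is only a guess at the general shape of ensure_hashes() as implied by the removed code and the new caller: it collects problems as strings and leaves the call to reject() to process_unchecked.py. hashlib stands in for apt_pkg's hash functions, and _collect_hash_problems() is an invented helper name; none of this is dak's actual utils.py code.

    # Sketch only: invented names and hashlib in place of apt_pkg; the real
    # implementation is in daklib/utils.py and may differ.
    import hashlib
    import os

    def _collect_hash_problems(where, lfiles, key, hashfn):
        # Compare each file's recorded checksum and size with what is on
        # disk, returning reject messages instead of acting on them.
        messages = []
        for f in lfiles.keys():
            try:
                with open(f, "rb") as fh:
                    data = fh.read()
            except IOError:
                continue  # unreadable files are reported by other checks
            expected = lfiles[f].get(key)
            if expected is not None and hashfn(data).hexdigest() != expected:
                messages.append("%s: %s check failed." % (f, key))
            actual_size = os.stat(f).st_size
            if actual_size != int(lfiles[f]["size"]):
                messages.append("%s: actual file size (%s) does not match size (%s) in %s"
                                % (f, actual_size, lfiles[f]["size"], where))
        return messages

    def ensure_hashes(changes, dsc, files, dsc_files):
        # Flag unsupported Checksums-* fields, then verify every supported
        # hash for both file lists; all problems come back as a flat list
        # of strings for the caller to feed to reject().
        supported = ("md5", "sha1", "sha256")
        messages = []
        for field in list(changes.keys()) + list(dsc.keys()):
            if field.lower().startswith("checksums-"):
                algo = field.split("-", 1)[1].lower()
                if algo not in supported:
                    messages.append("Unsupported checksum field %s" % algo)
        for algo, hashfn in (("md5", hashlib.md5),
                             ("sha1", hashlib.sha1),
                             ("sha256", hashlib.sha256)):
            messages += _collect_hash_problems(".changes %s" % algo, files,
                                               "%ssum" % algo, hashfn)
            messages += _collect_hash_problems(".dsc %s" % algo, dsc_files,
                                               "%ssum" % algo, hashfn)
        return messages

The caller side then stays exactly as in the hunk above:

    for m in utils.ensure_hashes(changes, dsc, files, dsc_files):
        reject(m)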