X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=dak%2Fprocess_unchecked.py;h=04afb7b30065648d3cd3cdf633778915477cb766;hb=aa83ebb15882823869b109d370e24ef3efd730f8;hp=34d1739f7665a025f26581a4a24c19a2d838a44e;hpb=c9e12bb14c839b31d48db95406b4497bb4cdffde;p=dak.git

diff --git a/dak/process_unchecked.py b/dak/process_unchecked.py
index 34d1739f..04afb7b3 100755
--- a/dak/process_unchecked.py
+++ b/dak/process_unchecked.py
@@ -630,11 +630,11 @@ def check_files():
 
             # Check the md5sum & size against existing files (if any)
             files[f]["pool name"] = utils.poolify (changes["source"], files[f]["component"])
-            files_id = database.get_files_id(files[f]["pool name"] + f, files[f]["size"], files[f]["md5sum"], files[f]["location id"])
+            files_id = database.get_files_id(files[f]["pool name"] + f, files[f]["size"], files[f]["md5sum"], files[f]["sha1sum"], files[f]["sha256sum"], files[f]["location id"])
             if files_id == -1:
                 reject("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
             elif files_id == -2:
-                reject("md5sum and/or size mismatch on existing copy of %s." % (f))
+                reject("md5sum, sha1sum, sha256sum and/or size mismatch on existing copy of %s." % (f))
             files[f]["files id"] = files_id
 
     # Check for packages that have moved from one component to another
@@ -700,6 +700,9 @@ def check_dsc():
     except NoFilesFieldError:
         reject("%s: no Files: field." % (dsc_filename))
         return 0
+    except UnknownFormatError, format:
+        reject("%s: unknown format '%s'." % (dsc_filename, format))
+        return 0
     except ParseChangesError, line:
         reject("%s: parse error, can't grok: %s." % (dsc_filename, line))
         return 0
@@ -774,6 +777,8 @@
             files[orig_tar_gz] = {}
             files[orig_tar_gz]["size"] = os.stat(orig_tar_gz)[stat.ST_SIZE]
             files[orig_tar_gz]["md5sum"] = dsc_files[orig_tar_gz]["md5sum"]
+            files[orig_tar_gz]["sha1sum"] = dsc_files[orig_tar_gz]["sha1sum"]
+            files[orig_tar_gz]["sha256sum"] = dsc_files[orig_tar_gz]["sha256sum"]
             files[orig_tar_gz]["section"] = files[dsc_filename]["section"]
             files[orig_tar_gz]["priority"] = files[dsc_filename]["priority"]
             files[orig_tar_gz]["component"] = files[dsc_filename]["component"]
@@ -906,10 +911,10 @@ def check_urgency ():
     if changes["architecture"].has_key("source"):
         if not changes.has_key("urgency"):
             changes["urgency"] = Cnf["Urgency::Default"]
+        changes["urgency"] = changes["urgency"].lower()
         if changes["urgency"] not in Cnf.ValueList("Urgency::Valid"):
            reject("%s is not a valid urgency; it will be treated as %s by testing."
                  % (changes["urgency"], Cnf["Urgency::Default"]), "Warning: ")
             changes["urgency"] = Cnf["Urgency::Default"]
-        changes["urgency"] = changes["urgency"].lower()
 
 ################################################################################
@@ -924,11 +929,16 @@ def check_hashes ():
     check_hash(".changes", files, "md5sum", apt_pkg.md5sum)
     check_hash(".dsc", dsc_files, "md5sum", apt_pkg.md5sum)
 
-    if format >= (1,8):
-        hashes = [("sha1", apt_pkg.sha1sum),
-                  ("sha256", apt_pkg.sha256sum)]
-    else:
-        hashes = []
+    # (hashname, function, originate)
+    # If originate is true, we have to calculate it because
+    # the changes file version is too early for it to be
+    # included
+    hashes = [("sha1", apt_pkg.sha1sum, False),
+              ("sha256", apt_pkg.sha256sum, False)]
+
+    if format <= (1,8):
+        hashes["sha1"] = True
+        hashes["sha256"] = True
 
     for x in changes:
         if x.startswith("checksum-"):
@@ -942,12 +952,17 @@
             if h not in dict(hashes):
                 reject("Unsupported checksum field in .dsc" % (h))
 
-    for h,f in hashes:
+    for h,f,o in hashes:
         try:
             fs = utils.build_file_list(changes, 0, "checksums-%s" % h, h)
-            check_hash(".changes %s" % (h), fs, h, f, files)
+            if o:
+                create_hash(fs, h, f, files)
+            else:
+                check_hash(".changes %s" % (h), fs, h, f, files)
         except NoFilesFieldError:
             reject("No Checksums-%s: field in .changes" % (h))
+        except UnknownFormatError, format:
+            reject("%s: unknown format of .changes" % (format))
         except ParseChangesError, line:
             reject("parse error for Checksums-%s in .changes, can't grok: %s." % (h, line))
 
@@ -955,7 +970,12 @@
 
         try:
             fs = utils.build_file_list(dsc, 1, "checksums-%s" % h, h)
-            check_hash(".dsc %s" % (h), fs, h, f, dsc_files)
+            if o:
+                create_hash(fs, h, f, dsc_files)
+            else:
+                check_hash(".dsc %s" % (h), fs, h, f, dsc_files)
+        except UnknownFormatError, format:
+            reject("%s: unknown format of .dsc" % (format))
         except NoFilesFieldError:
             reject("No Checksums-%s: field in .dsc" % (h))
         except ParseChangesError, line:
@@ -963,6 +983,20 @@
 
 ################################################################################
 
+def create_hash (lfiles, key, testfn, basedict = None):
+    for f in lfiles.keys():
+        try:
+            file_handle = utils.open_file(f)
+        except CantOpenError:
+            continue
+
+        # Check hash
+        basedict[f]['%ssum' % key] = testfn(file_handle)
+        file_handle.close()
+
+
+################################################################################
+
 def check_hash (where, lfiles, key, testfn, basedict = None):
     if basedict:
         for f in basedict.keys():
@@ -982,6 +1016,8 @@
         if testfn(file_handle) != lfiles[f][key]:
             reject("%s: %s check failed." % (f, key))
         file_handle.close()
+        # Store the hashes for later use
+        basedict[f]['%ssum' % key] = lfiles[f][key]
         # Check size
         actual_size = os.stat(f)[stat.ST_SIZE]
         size = int(lfiles[f]["size"])
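
Editorial note on the check_hashes() hunk above: the new `hashes` value is
built as a list of (hashname, function, originate) tuples, but the
`format <= (1,8)` branch then does `hashes["sha1"] = True`, indexing the list
with a string; as written this raises TypeError the first time an old-format
.changes file is processed. Judging from the comment, the intent is to flip
the originate flag on both entries so the loop below calls create_hash()
(compute and store the hash) instead of check_hash() (verify it). Below is a
minimal sketch of that presumed intent, not the committed code; it reuses the
apt_pkg hash helpers from the patch, and the `format` value is a hypothetical
stand-in for the parsed Format: version of the .changes file:

    import apt_pkg

    # Sketch only, not the committed code.  "format" stands in for the
    # parsed Format: version of the .changes file; (1, 7) is just an
    # example value.
    format = (1, 7)

    # Each entry is (hashname, function, originate).  originate=True
    # means the .changes format predates Checksums-* fields, so the
    # hash must be computed locally rather than verified.
    hashes = [("sha1", apt_pkg.sha1sum, False),
              ("sha256", apt_pkg.sha256sum, False)]

    if format <= (1, 8):
        # Rebuild the list with originate=True rather than indexing it
        # like a dictionary.
        hashes = [(name, func, True) for (name, func, _) in hashes]

Two nearby context lines also inherit problems from the 3-tuple change:
`dict(hashes)` raises ValueError once the entries are 3-tuples, and
`reject("Unsupported checksum field in .dsc" % (h))` has no %s placeholder
for its argument. Note as well that create_hash() and check_hash() declare
`basedict = None` but write to `basedict[f]` unconditionally, so both rely on
callers always passing a dictionary, as the two call sites in check_hashes()
do.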