# Check the md5sum & size against existing files (if any)
files[f]["pool name"] = utils.poolify (changes["source"], files[f]["component"])
- files_id = database.get_files_id(files[f]["pool name"] + f, files[f]["size"], files[f]["md5sum"], files[f]["sha1sum"], files[f]["sha256sum"], files[f]["location id"])
+ files_id = database.get_files_id(files[f]["pool name"] + f, files[f]["size"], files[f]["md5sum"], files[f]["location id"])
if files_id == -1:
reject("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
elif files_id == -2:
- reject("md5sum, sha1sum, sha256sum and/or size mismatch on existing copy of %s." % (f))
+ reject("md5sum and/or size mismatch on existing copy of %s." % (f))
files[f]["files id"] = files_id
# Check for packages that have moved from one component to another
files[orig_tar_gz] = {}
files[orig_tar_gz]["size"] = os.stat(orig_tar_gz)[stat.ST_SIZE]
files[orig_tar_gz]["md5sum"] = dsc_files[orig_tar_gz]["md5sum"]
- files[orig_tar_gz]["sha1sum"] = dsc_files[orig_tar_gz]["sha1sum"]
- files[orig_tar_gz]["sha256sum"] = dsc_files[orig_tar_gz]["sha256sum"]
files[orig_tar_gz]["section"] = files[dsc_filename]["section"]
files[orig_tar_gz]["priority"] = files[dsc_filename]["priority"]
files[orig_tar_gz]["component"] = files[dsc_filename]["component"]
else:
    format = int(float(format[0])), 0
- utils.check_hash(".changes", files, "md5sum", apt_pkg.md5sum)
- utils.check_hash(".dsc", dsc_files, "md5sum", apt_pkg.md5sum)
+ check_hash(".changes", files, "md5sum", apt_pkg.md5sum)
+ check_hash(".dsc", dsc_files, "md5sum", apt_pkg.md5sum)
- # This is stupid API, but it'll have to do for now until
- # we actually have proper abstraction
- for m in utils.ensure_hashes(Upload):
- reject(m)
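+ # Only format 1.8 (or newer) .changes files are expected to carry the
+ # extra Checksums-* fields; older formats get md5sums only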
+ if format >= (1,8):
+     hashes = [("sha1", apt_pkg.sha1sum),
+               ("sha256", apt_pkg.sha256sum)]
+ else:
+     hashes = []
+
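+ # Make sure the .changes doesn't advertise a checksum algorithm we
+ # don't know how to verify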
+ for x in changes:
+     if x.startswith("checksums-"):
+         h = x.split("-",1)[1]
+         if h not in dict(hashes):
+             reject("Unsupported checksum field in .changes [%s]" % (h))
+
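+ # ...and likewise for the .dsc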
+ for x in dsc:
+     if x.startswith("checksums-"):
+         h = x.split("-",1)[1]
+         if h not in dict(hashes):
+             reject("Unsupported checksum field in .dsc [%s]" % (h))
+
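+ # Parse each advertised Checksums-* list and verify it against the
+ # files on disk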
+ for h,f in hashes:
+     try:
+         fs = utils.build_file_list(changes, 0, "checksums-%s" % h, h)
+         check_hash(".changes %s" % (h), fs, h, f, files)
+     except NoFilesFieldError:
+         reject("No Checksums-%s: field in .changes" % (h))
+     except UnknownFormatError, format:
+         reject("%s: unknown format of .changes" % (format))
+     except ParseChangesError, line:
+         reject("parse error for Checksums-%s in .changes, can't grok: %s." % (h, line))
+
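+     # The .dsc is only relevant for uploads that include source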
+ if "source" not in changes["architecture"]: continue
+
+ try:
+ fs = utils.build_file_list(dsc, 1, "checksums-%s" % h, h)
+ check_hash(".dsc %s" % (h), fs, h, f, dsc_files)
+ except UnknownFormatError, format:
+ reject("%s: unknown format of .dsc" % (format))
+ except NoFilesFieldError:
+ reject("No Checksums-%s: field in .dsc" % (h))
+ except ParseChangesError, line:
+ reject("parse error for Checksums-%s in .dsc, can't grok: %s." % (h, line))
+
+################################################################################
+
+def check_hash (where, lfiles, key, testfn, basedict = None):
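+     # Check the hash (computed by testfn) and the size of every file in
+     # lfiles against the values recorded under lfiles[f][key]; if basedict
+     # is given, also require that the two sets of filenames agree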
+     if basedict:
+         for f in basedict.keys():
+             if f not in lfiles:
+                 reject("%s: no %s checksum" % (f, key))
+
+     for f in lfiles.keys():
+         if basedict and f not in basedict:
+             reject("%s: extraneous entry in %s checksums" % (f, key))
+
+         try:
+             file_handle = utils.open_file(f)
+         except CantOpenError:
+             continue
+
+         # Check hash
+         if testfn(file_handle) != lfiles[f][key]:
+             reject("%s: %s check failed." % (f, key))
+         file_handle.close()
+         # Check size
+         actual_size = os.stat(f)[stat.ST_SIZE]
+         size = int(lfiles[f]["size"])
+         if size != actual_size:
+             reject("%s: actual file size (%s) does not match size (%s) in %s"
+                    % (f, actual_size, size, where))
################################################################################