# Check the md5sum & size against existing files (if any)
files[f]["pool name"] = utils.poolify (changes["source"], files[f]["component"])
- files_id = database.get_files_id(files[f]["pool name"] + f, files[f]["size"], files[f]["md5sum"], files[f]["location id"])
+ files_id = database.get_files_id(files[f]["pool name"] + f, files[f]["size"], files[f]["md5sum"], files[f]["sha1sum"], files[f]["sha256sum"], files[f]["location id"])
if files_id == -1:
reject("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
elif files_id == -2:
- reject("md5sum and/or size mismatch on existing copy of %s." % (f))
+ reject("md5sum, sha1sum, sha256sum and/or size mismatch on existing copy of %s." % (f))
files[f]["files id"] = files_id
# Check for packages that have moved from one component to another
################################################################################
def check_hashes ():
- utils.check_hash(".changes", files, "md5sum", apt_pkg.md5sum)
- utils.check_hash(".dsc", dsc_files, "md5sum", apt_pkg.md5sum)
+ # Make sure we recognise the format of the Files: field
+ format = changes.get("format", "0.0").split(".",1)
+ if len(format) == 2:
+ format = int(format[0]), int(format[1])
+ else:
+ format = int(float(format[0])), 0
+
+ check_hash(".changes", files, "md5sum", apt_pkg.md5sum)
+ check_hash(".dsc", dsc_files, "md5sum", apt_pkg.md5sum)
+
+ # (hashname, function, originate)
+ # If originate is true, we have to calculate it because
+ # the changes file version is too early for it to be
+ # included
+ hashes = [("sha1", apt_pkg.sha1sum, False),
+ ("sha256", apt_pkg.sha256sum, False)]
+
+ if format <= (1,8):
+ hashes["sha1"] = True
+ hashes["sha256"] = True
+
+ for x in changes:
+ if x.startswith("checksum-"):
+ h = x.split("-",1)[1]
+ if h not in dict(hashes):
+ reject("Unsupported checksum field in .changes" % (h))
+
+ for x in dsc:
+ if x.startswith("checksum-"):
+ h = x.split("-",1)[1]
+ if h not in dict(hashes):
+ reject("Unsupported checksum field in .dsc" % (h))
+
+ for h,f,o in hashes:
+ try:
+ fs = utils.build_file_list(changes, 0, "checksums-%s" % h, h)
+ if o:
+ create_hash(fs, h, f, files)
+ else:
+ check_hash(".changes %s" % (h), fs, h, f, files)
+ except NoFilesFieldError:
+ reject("No Checksums-%s: field in .changes" % (h))
+ except UnknownFormatError, format:
+ reject("%s: unknown format of .changes" % (format))
+ except ParseChangesError, line:
+ reject("parse error for Checksums-%s in .changes, can't grok: %s." % (h, line))
+
+ if "source" not in changes["architecture"]: continue
+
+ try:
+ fs = utils.build_file_list(dsc, 1, "checksums-%s" % h, h)
+ if o:
+ create_hash(fs, h, f, dsc_files)
+ else:
+ check_hash(".dsc %s" % (h), fs, h, f, dsc_files)
+ except UnknownFormatError, format:
+ reject("%s: unknown format of .dsc" % (format))
+ except NoFilesFieldError:
+ reject("No Checksums-%s: field in .dsc" % (h))
+ except ParseChangesError, line:
+ reject("parse error for Checksums-%s in .dsc, can't grok: %s." % (h, line))
+
+################################################################################
+
def create_hash (lfiles, key, testfn, basedict = None):
    """Calculate the <key> hash of every file listed in lfiles.

    For each readable file f, computes testfn(open file) and stores it
    into basedict[f]['<key>sum'] (e.g. 'sha1sum').  Files that cannot
    be opened are silently skipped — missing files are reported by
    other checks.  Returns nothing.
    """
    for f in lfiles.keys():
        try:
            file_handle = utils.open_file(f)
        except CantOpenError:
            # Missing/unreadable files are rejected elsewhere.
            continue

        try:
            # BUGFIX: basedict defaults to None but was dereferenced
            # unconditionally; only store when a destination was given
            # and actually has an entry for this file.
            if basedict is not None and f in basedict:
                basedict[f]['%ssum' % key] = testfn(file_handle)
        finally:
            # Always release the file handle, even if testfn raises.
            file_handle.close()
+################################################################################
+
def check_hash (where, lfiles, key, testfn, basedict = None):
    """Check the <key> hash and size of each file listed in lfiles.

    where    -- human-readable origin of the list (e.g. ".changes sha1"),
                used in reject messages
    lfiles   -- dict of filename -> {key: expected-hash, "size": size}
    key      -- hash field name (e.g. "md5sum", "sha1")
    testfn   -- function computing the hash from an open file object
    basedict -- optional master file dict; when given, it is cross-checked
                against lfiles and the verified hashes are stored into it
                as basedict[f]['<key>sum']

    All mismatches are reported via reject(); returns nothing.
    """
    if basedict:
        for f in basedict.keys():
            if f not in lfiles:
                reject("%s: no %s checksum" % (f, key))

    for f in lfiles.keys():
        if basedict and f not in basedict:
            reject("%s: extraneous entry in %s checksums" % (f, key))

        try:
            file_handle = utils.open_file(f)
        except CantOpenError:
            # Missing files are rejected by other checks.
            continue

        # Check hash
        if testfn(file_handle) != lfiles[f][key]:
            reject("%s: %s check failed." % (f, key))
        file_handle.close()
        # Store the hashes for later use
        # BUGFIX: basedict defaults to None (and an extraneous f may not
        # be present in it); the original unguarded assignment crashed
        # whenever no basedict was supplied.
        if basedict is not None and f in basedict:
            basedict[f]['%ssum' % key] = lfiles[f][key]
        # Check size
        actual_size = os.stat(f)[stat.ST_SIZE]
        size = int(lfiles[f]["size"])
        if size != actual_size:
            reject("%s: actual file size (%s) does not match size (%s) in %s"
                   % (f, actual_size, size, where))
################################################################################