for f in files.keys():
print " %s:" % (f)
for i in [ "package", "version", "architecture", "type", "size",
- "md5sum", "component", "location id", "source package",
- "source version", "maintainer", "dbtype", "files id",
- "new", "section", "priority", "pool name" ]:
+ "md5sum", "sha1sum", "sha256sum", "component", "location id",
+ "source package", "source version", "maintainer", "dbtype",
+ "files id", "new", "section", "priority", "pool name" ]:
if files[f].has_key(i):
print " %s: %s" % (i.capitalize(), files[f][i])
del files[f][i]
dsc_component = files[file]["component"]
dsc_location_id = files[file]["location id"]
if not files[file].has_key("files id") or not files[file]["files id"]:
- files[file]["files id"] = database.set_files_id (filename, files[file]["size"], files[file]["md5sum"], dsc_location_id)
+ files[file]["files id"] = database.set_files_id (filename, files[file]["size"], files[file]["md5sum"], files[file]["sha1sum"], files[file]["sha256sum"], dsc_location_id)
projectB.query("INSERT INTO source (source, version, maintainer, changedby, file, install_date, sig_fpr) VALUES ('%s', '%s', %d, %d, %d, '%s', %s)"
% (package, version, maintainer_id, changedby_id, files[file]["files id"], install_date, fingerprint_id))
# files id is stored in dsc_files by check_dsc().
files_id = dsc_files[dsc_file].get("files id", None)
if files_id == None:
- files_id = database.get_files_id(filename, dsc_files[dsc_file]["size"], dsc_files[dsc_file]["md5sum"], dsc_location_id)
+ files_id = database.get_files_id(filename, dsc_files[dsc_file]["size"], dsc_files[dsc_file]["md5sum"], dsc_files[dsc_file]["sha1sum"], dsc_files[dsc_file]["sha256sum"], dsc_location_id)
# FIXME: needs to check for -1/-2 and or handle exception
if files_id == None:
- files_id = database.set_files_id (filename, dsc_files[dsc_file]["size"], dsc_files[dsc_file]["md5sum"], dsc_location_id)
+ files_id = database.set_files_id (filename, dsc_files[dsc_file]["size"], dsc_files[dsc_file]["md5sum"], dsc_files[dsc_file]["sha1sum"], dsc_files[dsc_file]["sha256sum"], dsc_location_id)
projectB.query("INSERT INTO dsc_files (source, file) VALUES (currval('source_id_seq'), %d)" % (files_id))
# Add the src_uploaders to the DB
#
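+ # If the orig tarball lives in a different (non-legacy) location,
+ # copy it into this component's pool, carrying its checksums over,
+ # and repoint dsc_files at the new files entry.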
if changes["architecture"].has_key("source") and orig_tar_id and \
orig_tar_location != "legacy" and orig_tar_location != dsc_location_id:
- q = projectB.query("SELECT l.path, f.filename, f.size, f.md5sum FROM files f, location l WHERE f.id = %s AND f.location = l.id" % (orig_tar_id))
+ q = projectB.query("SELECT l.path, f.filename, f.size, f.md5sum, f.sha1sum, f.sha256sum FROM files f, location l WHERE f.id = %s AND f.location = l.id" % (orig_tar_id))
ql = q.getresult()[0]
old_filename = ql[0] + ql[1]
file_size = ql[2]
file_md5sum = ql[3]
+ file_sha1sum = ql[4]
+ file_sha256sum = ql[5]
new_filename = utils.poolify(changes["source"], dsc_component) + os.path.basename(old_filename)
- new_files_id = database.get_files_id(new_filename, file_size, file_md5sum, dsc_location_id)
+ new_files_id = database.get_files_id(new_filename, file_size, file_md5sum, file_sha1sum, file_sha256sum, dsc_location_id)
if new_files_id == None:
utils.copy(old_filename, Cnf["Dir::Pool"] + new_filename)
- new_files_id = database.set_files_id(new_filename, file_size, file_md5sum, dsc_location_id)
+ new_files_id = database.set_files_id(new_filename, file_size, file_md5sum, file_sha1sum, file_sha256sum, dsc_location_id)
projectB.query("UPDATE dsc_files SET file = %s WHERE source = %s AND file = %s" % (new_files_id, source_id, orig_tar_id))
# Install the files into the pool
# Check the md5sum & size against existing files (if any)
files[f]["pool name"] = utils.poolify (changes["source"], files[f]["component"])
- files_id = database.get_files_id(files[f]["pool name"] + f, files[f]["size"], files[f]["md5sum"], files[f]["location id"])
+ files_id = database.get_files_id(files[f]["pool name"] + f, files[f]["size"], files[f]["md5sum"], files[f]["sha1sum"], files[f]["sha256sum"], files[f]["location id"])
if files_id == -1:
reject("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
elif files_id == -2:
- reject("md5sum and/or size mismatch on existing copy of %s." % (f))
+ reject("md5sum, sha1sum, sha256sum and/or size mismatch on existing copy of %s." % (f))
files[f]["files id"] = files_id
# Check for packages that have moved from one component to another
files[orig_tar_gz] = {}
files[orig_tar_gz]["size"] = os.stat(orig_tar_gz)[stat.ST_SIZE]
files[orig_tar_gz]["md5sum"] = dsc_files[orig_tar_gz]["md5sum"]
+ files[orig_tar_gz]["sha1sum"] = dsc_files[orig_tar_gz]["sha1sum"]
+ files[orig_tar_gz]["sha256sum"] = dsc_files[orig_tar_gz]["sha256sum"]
files[orig_tar_gz]["section"] = files[dsc_filename]["section"]
files[orig_tar_gz]["priority"] = files[dsc_filename]["priority"]
files[orig_tar_gz]["component"] = files[dsc_filename]["component"]
check_hash(".changes", files, "md5sum", apt_pkg.md5sum)
check_hash(".dsc", dsc_files, "md5sum", apt_pkg.md5sum)
- if format >= (1,8):
- hashes = [("sha1", apt_pkg.sha1sum),
- ("sha256", apt_pkg.sha256sum)]
- else:
- hashes = []
+ # (hashname, function, originate)
+ # If originate is true we must calculate the hash ourselves,
+ # because the .changes format predates that checksum field.
+ hashes = [("sha1", apt_pkg.sha1sum, False),
+ ("sha256", apt_pkg.sha256sum, False)]
+
+ if format <= (1,8):
+ hashes = [("sha1", apt_pkg.sha1sum, True),
+ ("sha256", apt_pkg.sha256sum, True)]
for x in changes:
if x.startswith("checksum-"):
if h not in dict(hashes):
reject("Unsupported checksum field in .dsc" % (h))
- for h,f in hashes:
+ for h,f,o in hashes:
try:
fs = utils.build_file_list(changes, 0, "checksums-%s" % h, h)
- check_hash(".changes %s" % (h), fs, h, f, files)
+ if o:
+ create_hash(fs, h, f, files)
+ else:
+ check_hash(".changes %s" % (h), fs, h, f, files)
except NoFilesFieldError:
reject("No Checksums-%s: field in .changes" % (h))
except UnknownFormatError, format:
reject("%s: unknown format of .changes" % (format))
try:
fs = utils.build_file_list(dsc, 1, "checksums-%s" % h, h)
- check_hash(".dsc %s" % (h), fs, h, f, dsc_files)
+ if o:
+ create_hash(fs, h, f, dsc_files)
+ else:
+ check_hash(".dsc %s" % (h), fs, h, f, dsc_files)
except UnknownFormatError, format:
reject("%s: unknown format of .dsc" % (format))
except NoFilesFieldError:
reject("No Checksums-%s: field in .dsc" % (h))
################################################################################
+def create_hash (lfiles, key, testfn, basedict = None):
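+ # Compute the <key>sum of every file in lfiles and store it in
+ # basedict, e.g. create_hash(fs, "sha1", apt_pkg.sha1sum, files)
+ # as called from the originate branch above.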
+ for f in lfiles.keys():
+ try:
+ file_handle = utils.open_file(f)
+ except CantOpenError:
+ continue
+
+ # Calculate the hash and store it for later use
+ if basedict and basedict.has_key(f):
+ basedict[f]['%ssum' % key] = testfn(file_handle)
+ file_handle.close()
+
+
+################################################################################
+
def check_hash (where, lfiles, key, testfn, basedict = None):
if basedict:
for f in basedict.keys():
if testfn(file_handle) != lfiles[f][key]:
reject("%s: %s check failed." % (f, key))
file_handle.close()
+ # Store the hashes for later use
+ basedict[f]['%ssum' % key] = lfiles[f][key]
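+ # (these stored sums are what get_files_id()/set_files_id()
+ # later compare against and write to the files table)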
# Check size
actual_size = os.stat(f)[stat.ST_SIZE]
size = int(lfiles[f]["size"])
################################################################################
-def get_files_id (filename, size, md5sum, location_id):
+def get_files_id (filename, size, md5sum, sha1sum, sha256sum, location_id):
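+ # Returns the files table id; None if the file is not yet in the
+ # database, -1 if multiple rows match, -2 if size or any checksum
+ # differs from the existing row (see the -1/-2 handling above).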
global files_id_cache
cache_key = "%s_%d" % (filename, location_id)
if files_id_cache.has_key(cache_key):
return files_id_cache[cache_key]
size = int(size)
- q = projectB.query("SELECT id, size, md5sum FROM files WHERE filename = '%s' AND location = %d" % (filename, location_id))
+ q = projectB.query("SELECT id, size, md5sum, sha1sum, sha256sum FROM files WHERE filename = '%s' AND location = %d" % (filename, location_id))
ql = q.getresult()
if ql:
if len(ql) != 1:
return -1
ql = ql[0]
orig_size = int(ql[1])
orig_md5sum = ql[2]
- if orig_size != size or orig_md5sum != md5sum:
+ orig_sha1sum = ql[3]
+ orig_sha256sum = ql[4]
+ if orig_size != size or orig_md5sum != md5sum or orig_sha1sum != sha1sum or orig_sha256sum != sha256sum:
return -2
files_id_cache[cache_key] = ql[0]
return files_id_cache[cache_key]
################################################################################
-def set_files_id (filename, size, md5sum, location_id):
+def set_files_id (filename, size, md5sum, sha1sum, sha256sum, location_id):
global files_id_cache
- projectB.query("INSERT INTO files (filename, size, md5sum, location) VALUES ('%s', %d, '%s', %d)" % (filename, long(size), md5sum, location_id))
+ projectB.query("INSERT INTO files (filename, size, md5sum, sha1sum, sha256sum, location) VALUES ('%s', %d, '%s', %d)" % (filename, long(size), md5sum, sha1sum, sha256sum location_id))
- return get_files_id (filename, size, md5sum, location_id)
+ return get_files_id (filename, size, md5sum, sha1sum, sha256sum, location_id)
### currval has issues with postgresql 7.1.3 when the table is big
### it was taking ~3 seconds to return on auric which is very Not
for file_entry in files.keys():
d_files[file_entry] = {}
for i in [ "package", "version", "architecture", "type", "size",
- "md5sum", "component", "location id", "source package",
- "source version", "maintainer", "dbtype", "files id",
- "new", "section", "priority", "othercomponents",
+ "md5sum", "sha1sum", "sha256sum", "component",
+ "location id", "source package", "source version",
+ "maintainer", "dbtype", "files id", "new",
+ "section", "priority", "othercomponents",
"pool name", "original component" ]:
if files[file_entry].has_key(i):
d_files[file_entry][i] = files[file_entry][i]
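+ # d_files now holds a trimmed copy of files, restricted to the
+ # whitelisted keys above (including the new sha1sum/sha256sum).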