import commands, errno, fcntl, os, re, shutil, stat, sys, time, tempfile, traceback
import apt_inst, apt_pkg
-import daklib.database as database
-import daklib.logging as logging
-import daklib.queue as queue
-import daklib.utils as utils
+from daklib import database
+from daklib import logging
+from daklib import queue
+from daklib import utils
from daklib.dak_exceptions import *
from types import *
# Parse the .changes field into a dictionary
try:
changes.update(utils.parse_changes(filename))
- except utils.cant_open_exc:
+ except CantOpenError:
reject("%s: can't read file." % (filename))
return 0
- except utils.changes_parse_error_exc, line:
+ except ParseChangesError, line:
reject("%s: parse error, can't grok: %s." % (filename, line))
return 0
# Parse the Files field from the .changes into another dictionary
try:
files.update(utils.build_file_list(changes))
- except utils.changes_parse_error_exc, line:
+ except ParseChangesError, line:
reject("%s: parse error, can't grok: %s." % (filename, line))
- except utils.nk_format_exc, format:
+ except UnknownFormatError, format:
reject("%s: unknown format '%s'." % (filename, format))
return 0
# Check the md5sum & size against existing files (if any)
files[f]["pool name"] = utils.poolify (changes["source"], files[f]["component"])
- files_id = database.get_files_id(files[f]["pool name"] + f, files[f]["size"], files[f]["md5sum"], files[f]["location id"])
+ files_id = database.get_files_id(files[f]["pool name"] + f, files[f]["size"], files[f]["md5sum"], files[f]["sha1sum"], files[f]["sha256sum"], files[f]["location id"])
if files_id == -1:
reject("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
elif files_id == -2:
- reject("md5sum and/or size mismatch on existing copy of %s." % (f))
+ reject("md5sum, sha1sum, sha256sum and/or size mismatch on existing copy of %s." % (f))
files[f]["files id"] = files_id
# Check for packages that have moved from one component to another
# Parse the .dsc file
try:
dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
- except utils.cant_open_exc:
+ except CantOpenError:
# if not -n copy_to_holding() will have done this for us...
if Options["No-Action"]:
reject("%s: can't read file." % (dsc_filename))
- except utils.changes_parse_error_exc, line:
+ except ParseChangesError, line:
reject("%s: parse error, can't grok: %s." % (dsc_filename, line))
- except utils.invalid_dsc_format_exc, line:
+ except InvalidDscError, line:
reject("%s: syntax error on line %s." % (dsc_filename, line))
# Build up the file list of files mentioned by the .dsc
try:
dsc_files.update(utils.build_file_list(dsc, is_a_dsc=1))
- except utils.no_files_exc:
+ except NoFilesFieldError:
reject("%s: no Files: field." % (dsc_filename))
return 0
- except utils.changes_parse_error_exc, line:
+ except UnknownFormatError, format:
+ reject("%s: unknown format '%s'." % (dsc_filename, format))
+ return 0
+ except ParseChangesError, line:
reject("%s: parse error, can't grok: %s." % (dsc_filename, line))
return 0
files[orig_tar_gz] = {}
files[orig_tar_gz]["size"] = os.stat(orig_tar_gz)[stat.ST_SIZE]
files[orig_tar_gz]["md5sum"] = dsc_files[orig_tar_gz]["md5sum"]
+ files[orig_tar_gz]["sha1sum"] = dsc_files[orig_tar_gz]["sha1sum"]
+ files[orig_tar_gz]["sha256sum"] = dsc_files[orig_tar_gz]["sha256sum"]
files[orig_tar_gz]["section"] = files[dsc_filename]["section"]
files[orig_tar_gz]["priority"] = files[dsc_filename]["priority"]
files[orig_tar_gz]["component"] = files[dsc_filename]["component"]
if changes["architecture"].has_key("source"):
if not changes.has_key("urgency"):
changes["urgency"] = Cnf["Urgency::Default"]
+ changes["urgency"] = changes["urgency"].lower()
if changes["urgency"] not in Cnf.ValueList("Urgency::Valid"):
reject("%s is not a valid urgency; it will be treated as %s by testing." % (changes["urgency"], Cnf["Urgency::Default"]), "Warning: ")
changes["urgency"] = Cnf["Urgency::Default"]
- changes["urgency"] = changes["urgency"].lower()
################################################################################
check_hash(".changes", files, "md5sum", apt_pkg.md5sum)
check_hash(".dsc", dsc_files, "md5sum", apt_pkg.md5sum)
- if format >= (1,8):
- hashes = [("sha1", apt_pkg.sha1sum),
- ("sha256", apt_pkg.sha256sum)]
- else:
- hashes = []
+ # (hashname, function, originate)
+ # If originate is true, we have to calculate it because
+ # the changes file version is too early for it to be
+ # included
+ hashes = [("sha1", apt_pkg.sha1sum, False),
+ ("sha256", apt_pkg.sha256sum, False)]
+
+    if format < (1,8):
+        # hashes is a list of tuples, not a dict -- rebuild every
+        # entry with originate switched on so the checksums absent
+        # from pre-1.8 changes files get calculated, not verified
+        hashes = [(hn, fn, True) for (hn, fn, o) in hashes]
for x in changes:
if x.startswith("checksum-"):
-            if h not in dict(hashes):
-                reject("Unsupported checksum field in .dsc" % (h))
+            if h not in [e[0] for e in hashes]:
+                reject("Unsupported checksum field in .changes: %s" % (h))
- for h,f in hashes:
+ for h,f,o in hashes:
try:
fs = utils.build_file_list(changes, 0, "checksums-%s" % h, h)
- check_hash(".changes %s" % (h), fs, h, f, files)
- except utils.no_files_exc:
+ if o:
+ create_hash(fs, h, f, files)
+ else:
+ check_hash(".changes %s" % (h), fs, h, f, files)
+ except NoFilesFieldError:
reject("No Checksums-%s: field in .changes" % (h))
- except utils.changes_parse_error_exc, line:
+ except UnknownFormatError, format:
+ reject("%s: unknown format of .changes" % (format))
+ except ParseChangesError, line:
reject("parse error for Checksums-%s in .changes, can't grok: %s." % (h, line))
if "source" not in changes["architecture"]: continue
try:
fs = utils.build_file_list(dsc, 1, "checksums-%s" % h, h)
- check_hash(".dsc %s" % (h), fs, h, f, dsc_files)
- except utils.no_files_exc:
+ if o:
+ create_hash(fs, h, f, dsc_files)
+ else:
+ check_hash(".dsc %s" % (h), fs, h, f, dsc_files)
+ except UnknownFormatError, format:
+ reject("%s: unknown format of .dsc" % (format))
+ except NoFilesFieldError:
reject("No Checksums-%s: field in .dsc" % (h))
- except utils.changes_parse_error_exc, line:
+ except ParseChangesError, line:
reject("parse error for Checksums-%s in .dsc, can't grok: %s." % (h, line))
################################################################################
+def create_hash (lfiles, key, testfn, basedict = None):
+ for f in lfiles.keys():
+ try:
+ file_handle = utils.open_file(f)
+ except CantOpenError:
+ continue
+
+        # Calculate the hash and record it for later use; guard the
+        # store so the basedict=None default cannot raise a TypeError
+        if basedict is not None:
+            basedict[f]['%ssum' % key] = testfn(file_handle)
+        file_handle.close()
+
+
+################################################################################
+
def check_hash (where, lfiles, key, testfn, basedict = None):
if basedict:
for f in basedict.keys():
try:
file_handle = utils.open_file(f)
- except utils.cant_open_exc:
+ except CantOpenError:
continue
# Check hash
if testfn(file_handle) != lfiles[f][key]:
reject("%s: %s check failed." % (f, key))
file_handle.close()
+ # Store the hashes for later use
+ basedict[f]['%ssum' % key] = lfiles[f][key]
# Check size
actual_size = os.stat(f)[stat.ST_SIZE]
size = int(lfiles[f]["size"])