from daklib.dbconn import *
from daklib import utils
-from daklib.regexes import re_issource
from daklib.config import Config
+from daklib.dak_exceptions import InvalidDscError, ChangesUnicodeError, CantOpenError
################################################################################
missing-overrides - check for missing overrides
source-in-one-dir - ensure the source for each package is in one directory
timestamps - check for future timestamps in .deb's
- tar-gz-in-dsc - ensure each .dsc lists a .tar.gz file
+ files-in-dsc - ensure each .dsc references appropriate Files
validate-indices - ensure files mentioned in Packages & Sources exist
files-not-symlinks - check files in the database aren't symlinks
validate-builddeps - validate build-dependencies of .dsc files in the archive
+ add-missing-source-checksums - add missing checksums for source packages
"""
sys.exit(exit_code)
if dirname.find('proposed-updates') != -1:
return
for name in filenames:
- filename = os.path.abspath(dirname+'/'+name)
- filename = filename.replace('potato-proposed-updates', 'proposed-updates')
+ filename = os.path.abspath(os.path.join(dirname,name))
if os.path.isfile(filename) and not os.path.islink(filename) and not db_files.has_key(filename) and not excluded.has_key(filename):
waste += os.stat(filename)[stat.ST_SIZE]
print "%s" % (filename)
db_files.clear()
for f in q.all():
- filename = os.path.abspath(f.location.path, f.filename)
+ filename = os.path.abspath(os.path.join(f.location.path, f.filename))
db_files[filename] = ""
if os.access(filename, os.R_OK) == 0:
if f.last_used:
print "Existent files not in db:"
- os.path.walk(cnf["Dir::Root"] + 'pool/', process_dir, None)
+ os.path.walk(os.path.join(cnf["Dir::Root"], 'pool/'), process_dir, None)
print
print "%s wasted..." % (utils.size_type(waste))
Parse every .dsc file in the archive and check for it's validity.
"""
- cnf = Config()
-
count = 0
- suite = 'unstable'
-
- for component in cnf.SubTree("Component").List():
- component = component.lower()
- list_filename = '%s%s_%s_source.list' % (cnf["Dir::Lists"], suite, component)
- list_file = utils.open_file(list_filename)
-
- for line in list_file.readlines():
- f = line[:-1]
- try:
- utils.parse_changes(f, signing_rules=1)
- except InvalidDscError, line:
- utils.warn("syntax error in .dsc file '%s', line %s." % (f, line))
- count += 1
- except ChangesUnicodeError:
- utils.warn("found invalid changes file, not properly utf-8 encoded")
- count += 1
+
+ for src in DBConn().session().query(DBSource).order_by(DBSource.source, DBSource.version):
+ f = src.poolfile.fullpath
+ try:
+ utils.parse_changes(f, signing_rules=1, dsc_file=1)
+ except InvalidDscError:
+ utils.warn("syntax error in .dsc file %s" % f)
+ count += 1
+ except ChangesUnicodeError:
+ utils.warn("found invalid dsc file (%s), not properly utf-8 encoded" % f)
+ count += 1
+ except CantOpenError:
+ utils.warn("missing dsc file (%s)" % f)
+ count += 1
+ except Exception as e:
+ utils.warn("miscellaneous error parsing dsc file (%s): %s" % (f, str(e)))
+ count += 1
if count:
utils.warn("Found %s invalid .dsc files." % (count))
print suite_name
print "-" * len(suite_name)
print
- suite = get_suite(suite)
- q = s.execute("""
+ suite = get_suite(suite_name)
+ q = session.execute("""
SELECT DISTINCT b.package FROM binaries b, bin_associations ba
WHERE b.id = ba.bin AND ba.suite = :suiteid AND NOT EXISTS
(SELECT 1 FROM override o WHERE o.suite = :suiteid AND o.package = b.package)"""
for j in q.fetchall():
print j[0]
- q = s.execute("""
+ q = session.execute("""
SELECT DISTINCT s.source FROM source s, src_associations sa
WHERE s.id = sa.source AND sa.suite = :suiteid AND NOT EXISTS
(SELECT 1 FROM override o WHERE o.suite = :suiteid and o.package = s.source)"""
################################################################################
-def check_missing_tar_gz_in_dsc():
+def check_files_in_dsc():
"""
- Ensure each .dsc lists a .tar.gz file
+ Ensure each .dsc lists appropriate files in its Files field (according
+ to the format announced in its Format field).
"""
count = 0
try:
# NB: don't enforce .dsc syntax
- dsc = utils.parse_changes(filename)
+ dsc = utils.parse_changes(filename, dsc_file=1)
except:
utils.fubar("error parsing .dsc file '%s'." % (filename))
- dsc_files = utils.build_file_list(dsc, is_a_dsc=1)
- has_tar = 0
-
- for f in dsc_files.keys():
- m = re_issource.match(f)
- if not m:
- utils.fubar("%s not recognised as source." % (f))
- ftype = m.group(3)
- if ftype == "orig.tar.gz" or ftype == "tar.gz":
- has_tar = 1
+ reasons = utils.check_dsc_files(filename, dsc)
+ for r in reasons:
+ utils.warn(r)
- if not has_tar:
- utils.warn("%s has no .tar.gz in the .dsc file." % (f))
+ if len(reasons) > 0:
count += 1
if count:
Ensure files mentioned in Packages & Sources exist
"""
for suite in [ "stable", "testing", "unstable" ]:
- for component in Cnf.ValueList("Suite::%s::Components" % (suite)):
- architectures = database.get_suite_architectures(suite)
- for arch in [ i.lower() for i in architectures ]:
+ for component in get_component_names():
+ architectures = get_suite_architectures(suite)
+ for arch in [ i.arch_string.lower() for i in architectures ]:
if arch == "source":
validate_sources(suite, component)
elif arch == "all":
if not name.endswith(".dsc"):
continue
filename = os.path.abspath(dirname+'/'+name)
- dsc = utils.parse_changes(filename)
+ dsc = utils.parse_changes(filename, dsc_file=1)
for field_name in [ "build-depends", "build-depends-indep" ]:
field = dsc.get(field_name)
if field:
def check_build_depends():
""" Validate build-dependencies of .dsc files in the archive """
+ cnf = Config()
os.path.walk(cnf["Dir::Root"], chk_bd_process_dir, None)
################################################################################
+# Query used by add_missing_source_checksums() below.  For every source
+# package that has no source_metadata row for :checksum_key yet, it builds
+# the multi-line field body (one " <checksum> <size> <basename>" line per
+# referenced file, sorted by basename; the .dsc itself is excluded via
+# "f.id != s.file") and inserts it under that key.  :checksum_type picks
+# which files column (md5sum / sha1sum / sha256sum) supplies the checksum.
+_add_missing_source_checksums_query = R"""
+INSERT INTO source_metadata
+  (src_id, key_id, value)
+SELECT
+  s.id,
+  :checksum_key,
+  E'\n' ||
+  (SELECT STRING_AGG(' ' || tmp.checksum || ' ' || tmp.size || ' ' || tmp.basename, E'\n' ORDER BY tmp.basename)
+   FROM
+     (SELECT
+        CASE :checksum_type
+          WHEN 'Files' THEN f.md5sum
+          WHEN 'Checksums-Sha1' THEN f.sha1sum
+          WHEN 'Checksums-Sha256' THEN f.sha256sum
+        END AS checksum,
+        f.size,
+        SUBSTRING(f.filename FROM E'/([^/]*)\\Z') AS basename
+      FROM files f JOIN dsc_files ON f.id = dsc_files.file
+      WHERE dsc_files.source = s.id AND f.id != s.file
+     ) AS tmp
+  )
+
+  FROM
+    source s
+  WHERE NOT EXISTS (SELECT 1 FROM source_metadata md WHERE md.src_id=s.id AND md.key_id = :checksum_key);
+"""
+
+def add_missing_source_checksums():
+    """ Add missing source checksums to source_metadata """
+    session = DBConn().session()
+    # One pass per .dsc checksum field.  The SQL only inserts rows that are
+    # still missing (NOT EXISTS guard), so re-running this mode is harmless.
+    for checksum in ['Files', 'Checksums-Sha1', 'Checksums-Sha256']:
+        # NOTE(review): get_or_set_metadatakey presumably creates the key row
+        # on first use — confirm against daklib.dbconn.
+        checksum_key = get_or_set_metadatakey(checksum, session).key_id
+        rows = session.execute(_add_missing_source_checksums_query,
+            {'checksum_key': checksum_key, 'checksum_type': checksum}).rowcount
+        if rows > 0:
+            print "Added {0} missing entries for {1}".format(rows, checksum)
+    # Single commit after all three passes.
+    session.commit()
+
+################################################################################
+
def main ():
global db_files, waste, excluded
check_source_in_one_dir()
elif mode == "timestamps":
check_timestamps()
- elif mode == "tar-gz-in-dsc":
- check_missing_tar_gz_in_dsc()
+ elif mode == "files-in-dsc":
+ check_files_in_dsc()
elif mode == "validate-indices":
check_indices_files_exist()
elif mode == "files-not-symlinks":
check_files_not_symlinks()
elif mode == "validate-builddeps":
check_build_depends()
+ elif mode == "add-missing-source-checksums":
+ add_missing_source_checksums()
else:
utils.warn("unknown mode '%s'" % (mode))
usage(1)