#!/usr/bin/env python
-# Various different sanity checks
-# Copyright (C) 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
+""" Various different sanity checks
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: (C) 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
+@license: GNU General Public License version 2 or later
+"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
################################################################################
-import commands, os, pg, stat, sys, time
-import apt_pkg, apt_inst
-import daklib.database as database
-import daklib.utils as utils
+import commands
+import os
+import stat
+import sys
+import time
+import apt_pkg
+import apt_inst
+
+from daklib.dbconn import *
+from daklib import utils
+from daklib.config import Config
+from daklib.dak_exceptions import InvalidDscError, ChangesUnicodeError, CantOpenError
################################################################################
-Cnf = None
-projectB = None
-db_files = {}
-waste = 0.0
-excluded = {}
+db_files = {} #: Cache of filenames as known by the database
+waste = 0.0 #: How many bytes are "wasted" by files not referenced in database
+excluded = {} #: List of files which are excluded from files check
current_file = None
future_files = {}
-current_time = time.time()
+current_time = time.time() #: now()
################################################################################
The following MODEs are available:
- md5sums - validate the md5sums stored in the database
+ checksums - validate the checksums stored in the database
files - check files in the database against what's in the archive
dsc-syntax - validate the syntax of .dsc files in the archive
missing-overrides - check for missing overrides
source-in-one-dir - ensure the source for each package is in one directory
timestamps - check for future timestamps in .deb's
- tar-gz-in-dsc - ensure each .dsc lists a .tar.gz file
+ files-in-dsc - ensure each .dsc references appropriate Files
validate-indices - ensure files mentioned in Packages & Sources exist
files-not-symlinks - check files in the database aren't symlinks
validate-builddeps - validate build-dependencies of .dsc files in the archive
+ add-missing-source-checksums - add missing checksums for source packages
"""
sys.exit(exit_code)
################################################################################
def process_dir (unused, dirname, filenames):
+ """
+ Process a directory and output every files name which is not listed already
+ in the C{filenames} or global C{excluded} dictionaries.
+
+ @type dirname: string
+ @param dirname: the directory to look at
+
+ @type filenames: dict
+ @param filenames: Known filenames to ignore
+ """
global waste, db_files, excluded
if dirname.find('/disks-') != -1 or dirname.find('upgrade-') != -1:
if dirname.find('proposed-updates') != -1:
return
for name in filenames:
- filename = os.path.abspath(dirname+'/'+name)
- filename = filename.replace('potato-proposed-updates', 'proposed-updates')
+ filename = os.path.abspath(os.path.join(dirname,name))
if os.path.isfile(filename) and not os.path.islink(filename) and not db_files.has_key(filename) and not excluded.has_key(filename):
waste += os.stat(filename)[stat.ST_SIZE]
print "%s" % (filename)
################################################################################
def check_files():
- global db_files
-
- print "Building list of database files..."
- q = projectB.query("SELECT l.path, f.filename, f.last_used FROM files f, location l WHERE f.location = l.id ORDER BY l.path, f.filename")
- ql = q.getresult()
-
- print "Missing files:"
- db_files.clear()
- for i in ql:
- filename = os.path.abspath(i[0] + i[1])
- db_files[filename] = ""
- if os.access(filename, os.R_OK) == 0:
- if i[2]:
- print "(last used: %s) %s" % (i[2], filename)
- else:
- print "%s" % (filename)
-
-
- filename = Cnf["Dir::Override"]+'override.unreferenced'
- if os.path.exists(filename):
- f = utils.open_file(filename)
- for filename in f.readlines():
- filename = filename[:-1]
- excluded[filename] = ""
-
- print "Existent files not in db:"
-
- os.path.walk(Cnf["Dir::Root"]+'pool/', process_dir, None)
-
- print
- print "%s wasted..." % (utils.size_type(waste))
+ """
+ Prepare the dictionary of existing filenames, then walk through the archive
+ pool/ directory to compare it.
+ """
+ cnf = Config()
+ session = DBConn().session()
+
+ query = """
+ SELECT archive.name, suite.suite_name, f.filename
+ FROM binaries b
+ JOIN bin_associations ba ON b.id = ba.bin
+ JOIN suite ON ba.suite = suite.id
+ JOIN archive ON suite.archive_id = archive.id
+ JOIN files f ON b.file = f.id
+ WHERE NOT EXISTS (SELECT 1 FROM files_archive_map af
+ WHERE af.archive_id = suite.archive_id
+ AND af.file_id = b.file)
+ ORDER BY archive.name, suite.suite_name, f.filename
+ """
+ for row in session.execute(query):
+ print "MISSING-ARCHIVE-FILE {0} {1} {2}".vformat(row)
+
+ query = """
+ SELECT archive.name, suite.suite_name, f.filename
+ FROM source s
+ JOIN src_associations sa ON s.id = sa.source
+ JOIN suite ON sa.suite = suite.id
+ JOIN archive ON suite.archive_id = archive.id
+ JOIN dsc_files df ON s.id = df.source
+ JOIN files f ON df.file = f.id
+ WHERE NOT EXISTS (SELECT 1 FROM files_archive_map af
+ WHERE af.archive_id = suite.archive_id
+ AND af.file_id = df.file)
+ ORDER BY archive.name, suite.suite_name, f.filename
+ """
+ for row in session.execute(query):
+ print "MISSING-ARCHIVE-FILE {0} {1} {2}".vformat(row)
+
+ archive_files = session.query(ArchiveFile) \
+ .join(ArchiveFile.archive).join(ArchiveFile.file) \
+ .order_by(Archive.archive_name, PoolFile.filename)
+
+ expected_files = set()
+ for af in archive_files:
+ path = af.path
+ expected_files.add(af.path)
+ if not os.path.exists(path):
+ print "MISSING-FILE {0} {1} {2}".format(af.archive.archive_name, af.file.filename, path)
+
+ archives = session.query(Archive).order_by(Archive.archive_name)
+
+ for a in archives:
+ top = os.path.join(a.path, 'pool')
+ for dirpath, dirnames, filenames in os.walk(top):
+ for fn in filenames:
+ path = os.path.join(dirpath, fn)
+ if path in expected_files:
+ continue
+ print "UNEXPECTED-FILE {0} {1}".format(a.archive_name, path)
################################################################################
def check_dscs():
+ """
+ Parse every .dsc file in the archive and check for it's validity.
+ """
+
count = 0
- suite = 'unstable'
- for component in Cnf.SubTree("Component").List():
- if component == "mixed":
- continue
- component = component.lower()
- list_filename = '%s%s_%s_source.list' % (Cnf["Dir::Lists"], suite, component)
- list_file = utils.open_file(list_filename)
- for line in list_file.readlines():
- f = line[:-1]
- try:
- utils.parse_changes(f, signing_rules=1)
- except utils.invalid_dsc_format_exc, line:
- utils.warn("syntax error in .dsc file '%s', line %s." % (f, line))
- count += 1
+
+ for src in DBConn().session().query(DBSource).order_by(DBSource.source, DBSource.version):
+ f = src.poolfile.fullpath
+ try:
+ utils.parse_changes(f, signing_rules=1, dsc_file=1)
+ except InvalidDscError:
+ utils.warn("syntax error in .dsc file %s" % f)
+ count += 1
+ except ChangesUnicodeError:
+ utils.warn("found invalid dsc file (%s), not properly utf-8 encoded" % f)
+ count += 1
+ except CantOpenError:
+ utils.warn("missing dsc file (%s)" % f)
+ count += 1
+ except Exception as e:
+ utils.warn("miscellaneous error parsing dsc file (%s): %s" % (f, str(e)))
+ count += 1
if count:
utils.warn("Found %s invalid .dsc files." % (count))
################################################################################
def check_override():
- for suite in [ "stable", "unstable" ]:
- print suite
- print "-"*len(suite)
+ """
+ Check for missing overrides in stable and unstable.
+ """
+ session = DBConn().session()
+
+ for suite_name in [ "stable", "unstable" ]:
+ print suite_name
+ print "-" * len(suite_name)
print
- suite_id = database.get_suite_id(suite)
- q = projectB.query("""
+ suite = get_suite(suite_name)
+ q = session.execute("""
SELECT DISTINCT b.package FROM binaries b, bin_associations ba
- WHERE b.id = ba.bin AND ba.suite = %s AND NOT EXISTS
- (SELECT 1 FROM override o WHERE o.suite = %s AND o.package = b.package)"""
- % (suite_id, suite_id))
- print q
- q = projectB.query("""
+ WHERE b.id = ba.bin AND ba.suite = :suiteid AND NOT EXISTS
+ (SELECT 1 FROM override o WHERE o.suite = :suiteid AND o.package = b.package)"""
+                           , {'suiteid': suite.suite_id})
+
+ for j in q.fetchall():
+ print j[0]
+
+ q = session.execute("""
SELECT DISTINCT s.source FROM source s, src_associations sa
- WHERE s.id = sa.source AND sa.suite = %s AND NOT EXISTS
- (SELECT 1 FROM override o WHERE o.suite = %s and o.package = s.source)"""
- % (suite_id, suite_id))
- print q
+ WHERE s.id = sa.source AND sa.suite = :suiteid AND NOT EXISTS
+ (SELECT 1 FROM override o WHERE o.suite = :suiteid and o.package = s.source)"""
+                           , {'suiteid': suite.suite_id})
+ for j in q.fetchall():
+ print j[0]
################################################################################
-# Ensure that the source files for any given package is all in one
-# directory so that 'apt-get source' works...
def check_source_in_one_dir():
+ """
+ Ensure that the source files for any given package is all in one
+ directory so that 'apt-get source' works...
+ """
+
# Not the most enterprising method, but hey...
broken_count = 0
- q = projectB.query("SELECT id FROM source;")
- for i in q.getresult():
- source_id = i[0]
- q2 = projectB.query("""
-SELECT l.path, f.filename FROM files f, dsc_files df, location l WHERE df.source = %s AND f.id = df.file AND l.id = f.location"""
- % (source_id))
+
+ session = DBConn().session()
+
+ q = session.query(DBSource)
+ for s in q.all():
first_path = ""
first_filename = ""
- broken = 0
- for j in q2.getresult():
- filename = j[0] + j[1]
+ broken = False
+
+ qf = session.query(PoolFile).join(Location).join(DSCFile).filter_by(source_id=s.source_id)
+ for f in qf.all():
+ # 0: path
+ # 1: filename
+ filename = os.path.join(f.location.path, f.filename)
path = os.path.dirname(filename)
+
if first_path == "":
first_path = path
first_filename = filename
elif first_path != path:
symlink = path + '/' + os.path.basename(first_filename)
if not os.path.exists(symlink):
- broken = 1
- print "WOAH, we got a live one here... %s [%s] {%s}" % (filename, source_id, symlink)
+ broken = True
+ print "WOAH, we got a live one here... %s [%s] {%s}" % (filename, s.source_id, symlink)
if broken:
broken_count += 1
+
print "Found %d source packages where the source is not all in one directory." % (broken_count)
################################################################################
-
-def check_md5sums():
+def check_checksums():
+ """
+ Validate all files
+ """
print "Getting file information from database..."
- q = projectB.query("SELECT l.path, f.filename, f.md5sum, f.size FROM files f, location l WHERE f.location = l.id")
- ql = q.getresult()
-
- print "Checking file md5sums & sizes..."
- for i in ql:
- filename = os.path.abspath(i[0] + i[1])
- db_md5sum = i[2]
- db_size = int(i[3])
+ q = DBConn().session().query(PoolFile)
+
+ print "Checking file checksums & sizes..."
+ for f in q:
+ filename = os.path.abspath(os.path.join(f.location.path, f.filename))
+
try:
- f = utils.open_file(filename)
+ fi = utils.open_file(filename)
except:
utils.warn("can't open '%s'." % (filename))
continue
- md5sum = apt_pkg.md5sum(f)
+
size = os.stat(filename)[stat.ST_SIZE]
- if md5sum != db_md5sum:
- utils.warn("**WARNING** md5sum mismatch for '%s' ('%s' [current] vs. '%s' [db])." % (filename, md5sum, db_md5sum))
- if size != db_size:
- utils.warn("**WARNING** size mismatch for '%s' ('%s' [current] vs. '%s' [db])." % (filename, size, db_size))
+ if size != f.filesize:
+ utils.warn("**WARNING** size mismatch for '%s' ('%s' [current] vs. '%s' [db])." % (filename, size, f.filesize))
+
+ md5sum = apt_pkg.md5sum(fi)
+ if md5sum != f.md5sum:
+ utils.warn("**WARNING** md5sum mismatch for '%s' ('%s' [current] vs. '%s' [db])." % (filename, md5sum, f.md5sum))
+
+ fi.seek(0)
+ sha1sum = apt_pkg.sha1sum(fi)
+ if sha1sum != f.sha1sum:
+ utils.warn("**WARNING** sha1sum mismatch for '%s' ('%s' [current] vs. '%s' [db])." % (filename, sha1sum, f.sha1sum))
+
+ fi.seek(0)
+ sha256sum = apt_pkg.sha256sum(fi)
+ if sha256sum != f.sha256sum:
+ utils.warn("**WARNING** sha256sum mismatch for '%s' ('%s' [current] vs. '%s' [db])." % (filename, sha256sum, f.sha256sum))
print "Done."
################################################################################
#
-# Check all files for timestamps in the future; common from hardware
-# (e.g. alpha) which have far-future dates as their default dates.
def Ent(Kind,Name,Link,Mode,UID,GID,Size,MTime,Major,Minor):
global future_files
print "%s: %s '%s','%s',%u,%u,%u,%u,%u,%u,%u" % (current_file, Kind,Name,Link,Mode,UID,GID,Size, MTime, Major, Minor)
def check_timestamps():
+ """
+ Check all files for timestamps in the future; common from hardware
+ (e.g. alpha) which have far-future dates as their default dates.
+ """
+
global current_file
- q = projectB.query("SELECT l.path, f.filename FROM files f, location l WHERE f.location = l.id AND f.filename ~ '.deb$'")
- ql = q.getresult()
+    q = DBConn().session().query(PoolFile).filter(PoolFile.filename.like('%.deb'))
+
db_files.clear()
count = 0
- for i in ql:
- filename = os.path.abspath(i[0] + i[1])
+
+ for pf in q.all():
+ filename = os.path.abspath(os.path.join(pf.location.path, pf.filename))
if os.access(filename, os.R_OK):
f = utils.open_file(filename)
current_file = filename
f.seek(0)
apt_inst.debExtract(f, Ent, "data.tar.gz")
count += 1
+
print "Checked %d files (out of %d)." % (count, len(db_files.keys()))
################################################################################
-def check_missing_tar_gz_in_dsc():
+def check_files_in_dsc():
+ """
+ Ensure each .dsc lists appropriate files in its Files field (according
+ to the format announced in its Format field).
+ """
count = 0
print "Building list of database files..."
- q = projectB.query("SELECT l.path, f.filename FROM files f, location l WHERE f.location = l.id AND f.filename ~ '.dsc$'")
- ql = q.getresult()
- if ql:
+    q = DBConn().session().query(PoolFile).filter(PoolFile.filename.like('%.dsc'))
+
+ if q.count() > 0:
print "Checking %d files..." % len(ql)
else:
print "No files to check."
- for i in ql:
- filename = os.path.abspath(i[0] + i[1])
+
+ for pf in q.all():
+        filename = os.path.abspath(os.path.join(pf.location.path, pf.filename))
+
try:
# NB: don't enforce .dsc syntax
- dsc = utils.parse_changes(filename)
+ dsc = utils.parse_changes(filename, dsc_file=1)
except:
utils.fubar("error parsing .dsc file '%s'." % (filename))
- dsc_files = utils.build_file_list(dsc, is_a_dsc=1)
- has_tar = 0
- for f in dsc_files.keys():
- m = utils.re_issource.match(f)
- if not m:
- utils.fubar("%s not recognised as source." % (f))
- ftype = m.group(3)
- if ftype == "orig.tar.gz" or ftype == "tar.gz":
- has_tar = 1
- if not has_tar:
- utils.warn("%s has no .tar.gz in the .dsc file." % (f))
+
+ reasons = utils.check_dsc_files(filename, dsc)
+ for r in reasons:
+ utils.warn(r)
+
+ if len(reasons) > 0:
count += 1
if count:
################################################################################
def validate_sources(suite, component):
+ """
+ Ensure files mentioned in Sources exist
+ """
filename = "%s/dists/%s/%s/source/Sources.gz" % (Cnf["Dir::Root"], suite, component)
print "Processing %s..." % (filename)
- # apt_pkg.ParseTagFile needs a real file handle and can't handle a GzipFile instance...
- temp_filename = utils.temp_filename()
+ # apt_pkg.TagFile needs a real file handle and can't handle a GzipFile instance...
+ (fd, temp_filename) = utils.temp_filename()
(result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_filename))
if (result != 0):
sys.stderr.write("Gunzip invocation failed!\n%s\n" % (output))
sys.exit(result)
sources = utils.open_file(temp_filename)
- Sources = apt_pkg.ParseTagFile(sources)
+ Sources = apt_pkg.TagFile(sources)
while Sources.Step():
source = Sources.Section.Find('Package')
directory = Sources.Section.Find('Directory')
########################################
def validate_packages(suite, component, architecture):
+ """
+ Ensure files mentioned in Packages exist
+ """
filename = "%s/dists/%s/%s/binary-%s/Packages.gz" \
% (Cnf["Dir::Root"], suite, component, architecture)
print "Processing %s..." % (filename)
- # apt_pkg.ParseTagFile needs a real file handle and can't handle a GzipFile instance...
- temp_filename = utils.temp_filename()
+ # apt_pkg.TagFile needs a real file handle and can't handle a GzipFile instance...
+ (fd, temp_filename) = utils.temp_filename()
(result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_filename))
if (result != 0):
sys.stderr.write("Gunzip invocation failed!\n%s\n" % (output))
sys.exit(result)
packages = utils.open_file(temp_filename)
- Packages = apt_pkg.ParseTagFile(packages)
+ Packages = apt_pkg.TagFile(packages)
while Packages.Step():
filename = "%s/%s" % (Cnf["Dir::Root"], Packages.Section.Find('Filename'))
if not os.path.exists(filename):
########################################
def check_indices_files_exist():
+ """
+ Ensure files mentioned in Packages & Sources exist
+ """
for suite in [ "stable", "testing", "unstable" ]:
- for component in Cnf.ValueList("Suite::%s::Components" % (suite)):
- architectures = Cnf.ValueList("Suite::%s::Architectures" % (suite))
- for arch in [ i.lower() for i in architectures ]:
+ for component in get_component_names():
+ architectures = get_suite_architectures(suite)
+ for arch in [ i.arch_string.lower() for i in architectures ]:
if arch == "source":
validate_sources(suite, component)
elif arch == "all":
################################################################################
def check_files_not_symlinks():
+ """
+ Check files in the database aren't symlinks
+ """
print "Building list of database files... ",
before = time.time()
- q = projectB.query("SELECT l.path, f.filename, f.id FROM files f, location l WHERE f.location = l.id")
- print "done. (%d seconds)" % (int(time.time()-before))
- q_files = q.getresult()
+    q = DBConn().session().query(PoolFile)
- for i in q_files:
- filename = os.path.normpath(i[0] + i[1])
+ for pf in q.all():
+ filename = os.path.abspath(os.path.join(pf.location.path, pf.filename))
if os.access(filename, os.R_OK) == 0:
utils.warn("%s: doesn't exist." % (filename))
else:
if not name.endswith(".dsc"):
continue
filename = os.path.abspath(dirname+'/'+name)
- dsc = utils.parse_changes(filename)
+ dsc = utils.parse_changes(filename, dsc_file=1)
for field_name in [ "build-depends", "build-depends-indep" ]:
field = dsc.get(field_name)
if field:
try:
- apt_pkg.ParseSrcDepends(field)
+ apt_pkg.parse_src_depends(field)
except:
print "E: [%s] %s: %s" % (filename, field_name, field)
pass
################################################################################
def check_build_depends():
- os.path.walk(Cnf["Dir::Root"], chk_bd_process_dir, None)
+ """ Validate build-dependencies of .dsc files in the archive """
+ cnf = Config()
+ os.path.walk(cnf["Dir::Root"], chk_bd_process_dir, None)
+
+################################################################################
+
+_add_missing_source_checksums_query = R"""
+INSERT INTO source_metadata
+ (src_id, key_id, value)
+SELECT
+ s.id,
+ :checksum_key,
+ E'\n' ||
+ (SELECT STRING_AGG(' ' || tmp.checksum || ' ' || tmp.size || ' ' || tmp.basename, E'\n' ORDER BY tmp.basename)
+ FROM
+ (SELECT
+ CASE :checksum_type
+ WHEN 'Files' THEN f.md5sum
+ WHEN 'Checksums-Sha1' THEN f.sha1sum
+ WHEN 'Checksums-Sha256' THEN f.sha256sum
+ END AS checksum,
+ f.size,
+ SUBSTRING(f.filename FROM E'/([^/]*)\\Z') AS basename
+ FROM files f JOIN dsc_files ON f.id = dsc_files.file
+ WHERE dsc_files.source = s.id AND f.id != s.file
+ ) AS tmp
+ )
+
+ FROM
+ source s
+ WHERE NOT EXISTS (SELECT 1 FROM source_metadata md WHERE md.src_id=s.id AND md.key_id = :checksum_key);
+"""
+
+def add_missing_source_checksums():
+ """ Add missing source checksums to source_metadata """
+ session = DBConn().session()
+ for checksum in ['Files', 'Checksums-Sha1', 'Checksums-Sha256']:
+ checksum_key = get_or_set_metadatakey(checksum, session).key_id
+ rows = session.execute(_add_missing_source_checksums_query,
+ {'checksum_key': checksum_key, 'checksum_type': checksum}).rowcount
+ if rows > 0:
+ print "Added {0} missing entries for {1}".format(rows, checksum)
+ session.commit()
################################################################################
def main ():
- global Cnf, projectB, db_files, waste, excluded
+ global db_files, waste, excluded
+
+ cnf = Config()
- Cnf = utils.get_conf()
Arguments = [('h',"help","Check-Archive::Options::Help")]
for i in [ "help" ]:
- if not Cnf.has_key("Check-Archive::Options::%s" % (i)):
- Cnf["Check-Archive::Options::%s" % (i)] = ""
+ if not cnf.has_key("Check-Archive::Options::%s" % (i)):
+ cnf["Check-Archive::Options::%s" % (i)] = ""
- args = apt_pkg.ParseCommandLine(Cnf, Arguments, sys.argv)
+ args = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
- Options = Cnf.SubTree("Check-Archive::Options")
+ Options = cnf.subtree("Check-Archive::Options")
if Options["Help"]:
usage()
usage(1)
mode = args[0].lower()
- projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
- database.init(Cnf, projectB)
+ # Initialize DB
+ DBConn()
- if mode == "md5sums":
- check_md5sums()
+ if mode == "checksums":
+ check_checksums()
elif mode == "files":
check_files()
elif mode == "dsc-syntax":
check_source_in_one_dir()
elif mode == "timestamps":
check_timestamps()
- elif mode == "tar-gz-in-dsc":
- check_missing_tar_gz_in_dsc()
+ elif mode == "files-in-dsc":
+ check_files_in_dsc()
elif mode == "validate-indices":
check_indices_files_exist()
elif mode == "files-not-symlinks":
check_files_not_symlinks()
elif mode == "validate-builddeps":
check_build_depends()
+ elif mode == "add-missing-source-checksums":
+ add_missing_source_checksums()
else:
utils.warn("unknown mode '%s'" % (mode))
usage(1)