validate-indices - ensure files mentioned in Packages & Sources exist
files-not-symlinks - check files in the database aren't symlinks
validate-builddeps - validate build-dependencies of .dsc files in the archive
+ add-missing-source-checksums - add missing checksums for source packages
"""
sys.exit(exit_code)
Prepare the dictionary of existing filenames, then walk through the archive
pool/ directory to compare it.
"""
- global db_files
-
cnf = Config()
+ session = DBConn().session()
- print "Building list of database files..."
- q = DBConn().session().query(PoolFile).join(Location).order_by('path', 'location')
-
- print "Missing files:"
- db_files.clear()
-
- for f in q.all():
- filename = os.path.abspath(os.path.join(f.location.path, f.filename))
- db_files[filename] = ""
- if os.access(filename, os.R_OK) == 0:
- if f.last_used:
- print "(last used: %s) %s" % (f.last_used, filename)
- else:
- print "%s" % (filename)
-
-
- filename = os.path.join(cnf["Dir::Override"], 'override.unreferenced')
- if os.path.exists(filename):
- f = utils.open_file(filename)
- for filename in f.readlines():
- filename = filename[:-1]
- excluded[filename] = ""
-
- print "Existent files not in db:"
-
- os.path.walk(os.path.join(cnf["Dir::Root"], 'pool/'), process_dir, None)
-
- print
- print "%s wasted..." % (utils.size_type(waste))
+ query = """
+ SELECT archive.name, suite.suite_name, f.filename
+ FROM binaries b
+ JOIN bin_associations ba ON b.id = ba.bin
+ JOIN suite ON ba.suite = suite.id
+ JOIN archive ON suite.archive_id = archive.id
+ JOIN files f ON b.file = f.id
+ WHERE NOT EXISTS (SELECT 1 FROM files_archive_map af
+ WHERE af.archive_id = suite.archive_id
+ AND af.file_id = b.file)
+ ORDER BY archive.name, suite.suite_name, f.filename
+ """
+ # Report binaries referenced by a suite whose pool file is not mapped
+ # into that suite's archive via files_archive_map.
+ for row in session.execute(query):
+ # str has no vformat() method (that is string.Formatter.vformat);
+ # unpack the 3-column result row into positional format() arguments.
+ print "MISSING-ARCHIVE-FILE {0} {1} {2}".format(*row)
+
+ query = """
+ SELECT archive.name, suite.suite_name, f.filename
+ FROM source s
+ JOIN src_associations sa ON s.id = sa.source
+ JOIN suite ON sa.suite = suite.id
+ JOIN archive ON suite.archive_id = archive.id
+ JOIN dsc_files df ON s.id = df.source
+ JOIN files f ON df.file = f.id
+ WHERE NOT EXISTS (SELECT 1 FROM files_archive_map af
+ WHERE af.archive_id = suite.archive_id
+ AND af.file_id = df.file)
+ ORDER BY archive.name, suite.suite_name, f.filename
+ """
+ # Same check for source packages: every .dsc constituent file must be
+ # mapped into the archive of each suite that ships the source.
+ for row in session.execute(query):
+ # str has no vformat() method; unpack the row tuple into format().
+ print "MISSING-ARCHIVE-FILE {0} {1} {2}".format(*row)
+
+ # Every pool file the database maps into some archive, in a stable order
+ # for deterministic output.
+ archive_files = session.query(ArchiveFile) \
+ .join(ArchiveFile.archive).join(ArchiveFile.file) \
+ .order_by(Archive.archive_name, PoolFile.filename)
+
+ # Collect the full set of on-disk paths the database expects, and report
+ # any mapped file that is missing from the filesystem.
+ expected_files = set()
+ for af in archive_files:
+ path = af.path
+ expected_files.add(af.path)
+ if not os.path.exists(path):
+ print "MISSING-FILE {0} {1} {2}".format(af.archive.archive_name, af.file.filename, path)
+
+ archives = session.query(Archive).order_by(Archive.archive_name)
+
+ # Walk each archive's pool/ tree and report files present on disk that
+ # the database does not know about.
+ for a in archives:
+ top = os.path.join(a.path, 'pool')
+ for dirpath, dirnames, filenames in os.walk(top):
+ for fn in filenames:
+ path = os.path.join(dirpath, fn)
+ if path in expected_files:
+ continue
+ print "UNEXPECTED-FILE {0} {1}".format(a.archive_name, path)
################################################################################
################################################################################
+# Backfill one checksum field (Files / Checksums-Sha1 / Checksums-Sha256)
+# into source_metadata for every source package that lacks a row for that
+# metadata key.  The generated value mirrors the .dsc layout: a leading
+# newline, then one " <checksum> <size> <basename>" line per constituent
+# file, sorted by basename, excluding the .dsc itself (f.id != s.file).
+# Raw string (R"...") keeps the \\Z in the SUBSTRING pattern intact.
+_add_missing_source_checksums_query = R"""
+INSERT INTO source_metadata
+ (src_id, key_id, value)
+SELECT
+ s.id,
+ :checksum_key,
+ E'\n' ||
+ (SELECT STRING_AGG(' ' || tmp.checksum || ' ' || tmp.size || ' ' || tmp.basename, E'\n' ORDER BY tmp.basename)
+ FROM
+ (SELECT
+ CASE :checksum_type
+ WHEN 'Files' THEN f.md5sum
+ WHEN 'Checksums-Sha1' THEN f.sha1sum
+ WHEN 'Checksums-Sha256' THEN f.sha256sum
+ END AS checksum,
+ f.size,
+ SUBSTRING(f.filename FROM E'/([^/]*)\\Z') AS basename
+ FROM files f JOIN dsc_files ON f.id = dsc_files.file
+ WHERE dsc_files.source = s.id AND f.id != s.file
+ ) AS tmp
+ )
+
+ FROM
+ source s
+ WHERE NOT EXISTS (SELECT 1 FROM source_metadata md WHERE md.src_id=s.id AND md.key_id = :checksum_key);
+"""
+
+def add_missing_source_checksums():
+ """Add missing Files/Checksums-Sha1/Checksums-Sha256 entries to source_metadata, then commit."""
+ session = DBConn().session()
+ # One INSERT pass per checksum field; the query is parameterized by the
+ # metadata key id and the field name selected in the CASE expression.
+ for checksum in ['Files', 'Checksums-Sha1', 'Checksums-Sha256']:
+ checksum_key = get_or_set_metadatakey(checksum, session).key_id
+ rows = session.execute(_add_missing_source_checksums_query,
+ {'checksum_key': checksum_key, 'checksum_type': checksum}).rowcount
+ if rows > 0:
+ print "Added {0} missing entries for {1}".format(rows, checksum)
+ # Single commit after all three passes so the backfill is atomic.
+ session.commit()
+
+################################################################################
+
def main ():
global db_files, waste, excluded
if not cnf.has_key("Check-Archive::Options::%s" % (i)):
cnf["Check-Archive::Options::%s" % (i)] = ""
- args = apt_pkg.ParseCommandLine(cnf.Cnf, Arguments, sys.argv)
+ args = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
- Options = cnf.SubTree("Check-Archive::Options")
+ Options = cnf.subtree("Check-Archive::Options")
if Options["Help"]:
usage()
check_files_not_symlinks()
elif mode == "validate-builddeps":
check_build_depends()
+ elif mode == "add-missing-source-checksums":
+ add_missing_source_checksums()
else:
utils.warn("unknown mode '%s'" % (mode))
usage(1)