X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=dak%2Fcheck_archive.py;h=926b4fb5009cf18a36ea34c8bb4f4165a7a58731;hb=17c5cab4eb8d5181ec7a81267a4e2e6b43c0fc65;hp=89c3dcbd82d690141e01bc2c05d981d8f5f99b72;hpb=6cc75beccd14c9b39621cb5894d67cec24750405;p=dak.git

diff --git a/dak/check_archive.py b/dak/check_archive.py
index 89c3dcbd..926b4fb5 100755
--- a/dak/check_archive.py
+++ b/dak/check_archive.py
@@ -72,6 +72,7 @@ The following MODEs are available:
   validate-indices   - ensure files mentioned in Packages & Sources exist
   files-not-symlinks - check files in the database aren't symlinks
   validate-builddeps - validate build-dependencies of .dsc files in the archive
+  add-missing-source-checksums - add missing checksums for source packages
 """
     sys.exit(exit_code)
 
@@ -108,39 +109,61 @@ def check_files():
     Prepare the dictionary of existing filenames, then walk through the
     archive pool/ directory to compare it.
     """
-    global db_files
-    cnf = Config()
+    session = DBConn().session()
 
-    print "Building list of database files..."
-    q = DBConn().session().query(PoolFile).join(Location).order_by('path', 'location')
-
-    print "Missing files:"
-    db_files.clear()
-
-    for f in q.all():
-        filename = os.path.abspath(os.path.join(f.location.path, f.filename))
-        db_files[filename] = ""
-        if os.access(filename, os.R_OK) == 0:
-            if f.last_used:
-                print "(last used: %s) %s" % (f.last_used, filename)
-            else:
-                print "%s" % (filename)
-
-
-    filename = os.path.join(cnf["Dir::Override"], 'override.unreferenced')
-    if os.path.exists(filename):
-        f = utils.open_file(filename)
-        for filename in f.readlines():
-            filename = filename[:-1]
-            excluded[filename] = ""
-
-    print "Existent files not in db:"
-
-    os.path.walk(os.path.join(cnf["Dir::Root"], 'pool/'), process_dir, None)
-
-    print
-    print "%s wasted..." % (utils.size_type(waste))
+    query = """
+      SELECT archive.name, suite.suite_name, f.filename
+        FROM binaries b
+        JOIN bin_associations ba ON b.id = ba.bin
+        JOIN suite ON ba.suite = suite.id
+        JOIN archive ON suite.archive_id = archive.id
+        JOIN files f ON b.file = f.id
+       WHERE NOT EXISTS (SELECT 1 FROM files_archive_map af
+                          WHERE af.archive_id = suite.archive_id
+                            AND af.file_id = b.file)
+       ORDER BY archive.name, suite.suite_name, f.filename
+    """
+    for row in session.execute(query):
+        print "MISSING-ARCHIVE-FILE {0} {1} {2}".format(*row)
+
+    query = """
+      SELECT archive.name, suite.suite_name, f.filename
+        FROM source s
+        JOIN src_associations sa ON s.id = sa.source
+        JOIN suite ON sa.suite = suite.id
+        JOIN archive ON suite.archive_id = archive.id
+        JOIN dsc_files df ON s.id = df.source
+        JOIN files f ON df.file = f.id
+       WHERE NOT EXISTS (SELECT 1 FROM files_archive_map af
+                          WHERE af.archive_id = suite.archive_id
+                            AND af.file_id = df.file)
+       ORDER BY archive.name, suite.suite_name, f.filename
+    """
+    for row in session.execute(query):
+        print "MISSING-ARCHIVE-FILE {0} {1} {2}".format(*row)
+
+    archive_files = session.query(ArchiveFile) \
+        .join(ArchiveFile.archive).join(ArchiveFile.file) \
+        .order_by(Archive.archive_name, PoolFile.filename)
+
+    expected_files = set()
+    for af in archive_files:
+        path = af.path
+        expected_files.add(af.path)
+        if not os.path.exists(path):
+            print "MISSING-FILE {0} {1} {2}".format(af.archive.archive_name, af.file.filename, path)
+
+    archives = session.query(Archive).order_by(Archive.archive_name)
+
+    for a in archives:
+        top = os.path.join(a.path, 'pool')
+        for dirpath, dirnames, filenames in os.walk(top):
+            for fn in filenames:
+                path = os.path.join(dirpath, fn)
+                if path in expected_files:
+                    continue
+                print "UNEXPECTED-FILE {0} {1}".format(a.archive_name, path)
 
 ################################################################################
 
@@ -251,7 +274,7 @@ def check_checksums():
     print "Checking file checksums & sizes..."
 
     for f in q:
-        filename = os.path.abspath(os.path.join(f.location.path, f.filename))
+        filename = f.fullpath
 
         try:
             fi = utils.open_file(filename)
@@ -360,18 +383,18 @@ def validate_sources(suite, component):
     """
     filename = "%s/dists/%s/%s/source/Sources.gz" % (Cnf["Dir::Root"], suite, component)
     print "Processing %s..." % (filename)
-    # apt_pkg.ParseTagFile needs a real file handle and can't handle a GzipFile instance...
+    # apt_pkg.TagFile needs a real file handle and can't handle a GzipFile instance...
     (fd, temp_filename) = utils.temp_filename()
     (result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_filename))
     if (result != 0):
         sys.stderr.write("Gunzip invocation failed!\n%s\n" % (output))
         sys.exit(result)
     sources = utils.open_file(temp_filename)
-    Sources = apt_pkg.ParseTagFile(sources)
-    while Sources.Step():
-        source = Sources.Section.Find('Package')
-        directory = Sources.Section.Find('Directory')
-        files = Sources.Section.Find('Files')
+    Sources = apt_pkg.TagFile(sources)
+    while Sources.step():
+        source = Sources.section.find('Package')
+        directory = Sources.section.find('Directory')
+        files = Sources.section.find('Files')
         for i in files.split('\n'):
             (md5, size, name) = i.split()
             filename = "%s/%s/%s" % (Cnf["Dir::Root"], directory, name)
@@ -402,16 +425,16 @@ def validate_packages(suite, component, architecture):
     filename = "%s/dists/%s/%s/binary-%s/Packages.gz" \
               % (Cnf["Dir::Root"], suite, component, architecture)
     print "Processing %s..." % (filename)
-    # apt_pkg.ParseTagFile needs a real file handle and can't handle a GzipFile instance...
+    # apt_pkg.TagFile needs a real file handle and can't handle a GzipFile instance...
     (fd, temp_filename) = utils.temp_filename()
     (result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_filename))
     if (result != 0):
         sys.stderr.write("Gunzip invocation failed!\n%s\n" % (output))
         sys.exit(result)
     packages = utils.open_file(temp_filename)
-    Packages = apt_pkg.ParseTagFile(packages)
-    while Packages.Step():
-        filename = "%s/%s" % (Cnf["Dir::Root"], Packages.Section.Find('Filename'))
+    Packages = apt_pkg.TagFile(packages)
+    while Packages.step():
+        filename = "%s/%s" % (Cnf["Dir::Root"], Packages.section.find('Filename'))
         if not os.path.exists(filename):
             print "W: %s missing." % (filename)
     packages.close()
@@ -464,7 +487,7 @@ def chk_bd_process_dir (unused, dirname, filenames):
             field = dsc.get(field_name)
             if field:
                 try:
-                    apt_pkg.ParseSrcDepends(field)
+                    apt_pkg.parse_src_depends(field)
                 except:
                     print "E: [%s] %s: %s" % (filename, field_name, field)
                     pass
@@ -478,6 +501,46 @@ def check_build_depends():
 
 ################################################################################
 
+_add_missing_source_checksums_query = R"""
+INSERT INTO source_metadata
+  (src_id, key_id, value)
+SELECT
+  s.id,
+  :checksum_key,
+  E'\n' ||
+    (SELECT STRING_AGG(' ' || tmp.checksum || ' ' || tmp.size || ' ' || tmp.basename, E'\n' ORDER BY tmp.basename)
+       FROM
+         (SELECT
+              CASE :checksum_type
+                WHEN 'Files' THEN f.md5sum
+                WHEN 'Checksums-Sha1' THEN f.sha1sum
+                WHEN 'Checksums-Sha256' THEN f.sha256sum
+              END AS checksum,
+              f.size,
+              SUBSTRING(f.filename FROM E'/([^/]*)\\Z') AS basename
+            FROM files f JOIN dsc_files ON f.id = dsc_files.file
+            WHERE dsc_files.source = s.id AND f.id != s.file
+         ) AS tmp
+    )
+
+  FROM
+    source s
+  WHERE NOT EXISTS (SELECT 1 FROM source_metadata md WHERE md.src_id=s.id AND md.key_id = :checksum_key);
+"""
+
+def add_missing_source_checksums():
+    """ Add missing source checksums to source_metadata """
+    session = DBConn().session()
+    for checksum in ['Files', 'Checksums-Sha1', 'Checksums-Sha256']:
+        checksum_key = get_or_set_metadatakey(checksum, session).key_id
+        rows = session.execute(_add_missing_source_checksums_query,
+                               {'checksum_key': checksum_key, 'checksum_type': checksum}).rowcount
+        if rows > 0:
+            print "Added {0} missing entries for {1}".format(rows, checksum)
+    session.commit()
+
+################################################################################
+
 def main ():
     global db_files, waste, excluded
 
@@ -488,9 +551,9 @@ def main ():
         if not cnf.has_key("Check-Archive::Options::%s" % (i)):
             cnf["Check-Archive::Options::%s" % (i)] = ""
 
-    args = apt_pkg.ParseCommandLine(cnf.Cnf, Arguments, sys.argv)
+    args = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
 
-    Options = cnf.SubTree("Check-Archive::Options")
+    Options = cnf.subtree("Check-Archive::Options")
 
     if Options["Help"]:
         usage()
@@ -525,6 +588,8 @@ def main ():
         check_files_not_symlinks()
     elif mode == "validate-builddeps":
         check_build_depends()
+    elif mode == "add-missing-source-checksums":
+        add_missing_source_checksums()
    else:
         utils.warn("unknown mode '%s'" % (mode))
         usage(1)
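Both the reworked check_files() and the new add_missing_source_checksums() lean on SQLAlchemy's textual-SQL interface: a raw query string is handed to session.execute(), named binds such as :checksum_key and :checksum_type are filled in from a dict, result rows are unpacked positionally into the format() calls, and .rowcount on a modifying statement drives the "Added ..." message. A minimal, self-contained sketch of that pattern, assuming only SQLAlchemy and an in-memory SQLite database (the demo table and the :min_id parameter are illustrative stand-ins, not part of dak's schema):

    # Sketch of the textual-SQL pattern used in the patch: named binds from a dict,
    # .rowcount on a modifying statement, and positional unpacking of result rows.
    from sqlalchemy import create_engine, text
    from sqlalchemy.orm import sessionmaker

    session = sessionmaker(bind=create_engine("sqlite://"))()  # in-memory stand-in for DBConn().session()
    session.execute(text("CREATE TABLE demo (id INTEGER, note TEXT)"))

    inserted = session.execute(text("INSERT INTO demo VALUES (1, 'keep'), (2, 'check')"))
    print "Added {0} rows".format(inserted.rowcount)           # same rowcount check as add_missing_source_checksums()

    for row in session.execute(text("SELECT id, note FROM demo WHERE id >= :min_id"), {'min_id': 1}):
        print "ROW {0} {1}".format(*row)                       # one value per selected column, as in the MISSING-* prints

Once the patch is applied, the new mode is selected like the existing ones, presumably as "dak check-archive add-missing-source-checksums", matching the entry added to the usage text and the dispatch at the end of main().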