################################################################################
+import errno
import os
import stat
import sys
################################################################################
def check_binaries(now_date, session):
- print "Checking for orphaned binary packages..."
+ Logger.log(["Checking for orphaned binary packages..."])
# Get the list of binary packages not in a suite and mark them for
# deletion.
########################################
def check_sources(now_date, session):
- print "Checking for orphaned source packages..."
+ Logger.log(["Checking for orphaned source packages..."])
# Get the list of source packages not in a suite and not used by
# any binaries.
- #### XXX: this should ignore cases where the files for the binary b
- #### have been marked for deletion (so the delay between bins go
- #### byebye and sources go byebye is 0 instead of StayOfExecution)
-
# Check for any sources which are marked for deletion but which
# are now used again.
- #### XXX: this should also handle deleted binaries specially (ie, not
- #### reinstate sources because of them
-
# TODO: the UPDATE part is the same as in check_binaries. Merge?
query = """
JOIN binaries b ON af_bin.file_id = b.file
WHERE b.source = df.source
AND af_bin.archive_id = af.archive_id
- AND af_bin.last_used > ad.delete_date)
+ AND (af_bin.last_used IS NULL OR af_bin.last_used > ad.delete_date))
OR EXISTS (SELECT 1 FROM extra_src_references esr
JOIN bin_associations ba ON esr.bin_id = ba.bin
JOIN binaries b ON ba.bin = b.id
# and then mark the file for deletion. This probably masks a bug somwhere
# else but is better than collecting cruft forever
- print "Checking for unused files..."
+ Logger.log(["Checking for unused files..."])
q = session.execute("""
UPDATE files_archive_map af
SET last_used = :last_used
# XXX: why doesn't this remove the files here as well? I don't think it
# buys anything keeping this separate
- print "Deleting from binaries table... "
+ Logger.log(["Deleting from binaries table... "])
q = session.execute("""
DELETE FROM binaries b
USING files f
count = 0
size = 0
- print "Cleaning out packages..."
+ Logger.log(["Cleaning out packages..."])
morguedir = cnf.get("Dir::Morgue", os.path.join("Dir::Pool", 'morgue'))
morguesubdir = cnf.get("Clean-Suites::MorgueSubDir", 'pool')
os.makedirs(dest)
# Delete from source
- print "Deleting from source table... "
+ Logger.log(["Deleting from source table..."])
q = session.execute("""
WITH
deleted_sources AS (
old_files = session.query(ArchiveFile).filter('files_archive_map.last_used <= (SELECT delete_date FROM archive_delete_date ad WHERE ad.archive_id = files_archive_map.archive_id)').join(Archive)
if max_delete is not None:
old_files = old_files.limit(max_delete)
- print "Limiting removals to %d" % max_delete
+ Logger.log(["Limiting removals to %d" % max_delete])
if archives is not None:
archive_ids = [ a.archive_id for a in archives ]
dest_filename = dest + '/' + os.path.basename(filename)
# If the destination file exists; try to find another filename to use
- if os.path.exists(dest_filename):
+ if os.path.lexists(dest_filename):
dest_filename = utils.find_next_free(dest_filename)
if not Options["No-Action"]:
if count > 0:
Logger.log(["total", count, utils.size_type(size)])
- print "Cleaned %d files, %s." % (count, utils.size_type(size))
# Delete entries in files no longer referenced by any archive
query = """
################################################################################
def clean_maintainers(now_date, session):
- print "Cleaning out unused Maintainer entries..."
+ Logger.log(["Cleaning out unused Maintainer entries..."])
# TODO Replace this whole thing with one SQL statement
q = session.execute("""
if count > 0:
Logger.log(["total", count])
- print "Cleared out %d maintainer entries." % (count)
################################################################################
def clean_fingerprints(now_date, session):
- print "Cleaning out unused fingerprint entries..."
+ Logger.log(["Cleaning out unused fingerprint entries..."])
# TODO Replace this whole thing with one SQL statement
q = session.execute("""
SELECT f.id, f.fingerprint FROM fingerprint f
WHERE f.keyring IS NULL
AND NOT EXISTS (SELECT 1 FROM binaries b WHERE b.sig_fpr = f.id)
- AND NOT EXISTS (SELECT 1 FROM source s WHERE s.sig_fpr = f.id)""")
+ AND NOT EXISTS (SELECT 1 FROM source s WHERE s.sig_fpr = f.id)
+ AND NOT EXISTS (SELECT 1 FROM acl_per_source aps WHERE aps.created_by_id = f.id)""")
count = 0
if count > 0:
Logger.log(["total", count])
- print "Cleared out %d fingerprint entries." % (count)
+
+################################################################################
+
+def clean_byhash(now_date, session):
+    """Remove expired by-hash files from the database and from disk.
+
+    Deletes hashfile rows whose grace period (the owning archive's
+    stayofexecution) has elapsed since they became unreferenced, then
+    unlinks the corresponding files unless running with No-Action.
+    """
+    cnf = Config()
+    # Optional extra path component between the suite name and its files
+    # (empty string when Dinstall::SuiteSuffix is unset).
+    suite_suffix = cnf.find("Dinstall::SuiteSuffix", "")
+
+    Logger.log(["Cleaning out unused by-hash files..."])
+
+    # Delete rows whose unreferenced timestamp plus the archive's
+    # stayofexecution (presumably an interval column — confirm schema)
+    # is already in the past.  RETURNING hands back the pieces needed
+    # to locate each file on disk for the unlink pass below.
+    q = session.execute("""
+        DELETE FROM hashfile h
+        USING suite s, archive a
+        WHERE s.id = h.suite_id
+        AND a.id = s.archive_id
+        AND h.unreferenced + a.stayofexecution < CURRENT_TIMESTAMP
+        RETURNING a.path, s.suite_name, h.path""")
+    # Row count is taken before iterating so it is reported even in
+    # No-Action mode, where the result set is never consumed.
+    count = q.rowcount
+
+    if not Options["No-Action"]:
+        for base, suite, path in q:
+            filename = os.path.join(base, 'dists', suite, suite_suffix, path)
+            try:
+                os.unlink(filename)
+            except OSError as exc:
+                # A missing file is logged but tolerated; any other
+                # OSError is re-raised.
+                if exc.errno != errno.ENOENT:
+                    raise
+                Logger.log(['database referred to non-existing file', filename])
+            else:
+                Logger.log(['delete hashfile', suite, path])
+        # Commit only after the files are actually gone, so a failed
+        # unlink leaves the database rows intact for a retry.
+        session.commit()
+
+    if count > 0:
+        Logger.log(["total", count])
################################################################################
Removes empty directories from pool directories.
"""
- print "Cleaning out empty directories..."
+ Logger.log(["Cleaning out empty directories..."])
count = 0
if Options["Help"]:
usage()
- Logger = daklog.Logger("clean-suites", debug=Options["No-Action"])
+ program = "clean-suites"
+ if Options['No-Action']:
+ program = "clean-suites (no action)"
+ Logger = daklog.Logger(program, debug=Options["No-Action"])
session = DBConn().session()
clean(now_date, archives, max_delete, session)
clean_maintainers(now_date, session)
clean_fingerprints(now_date, session)
+ clean_byhash(now_date, session)
clean_empty_directories(session)
session.rollback()