#!/usr/bin/env python
-""" Cleans up unassociated binary and source packages """
-# Copyright (C) 2000, 2001, 2002, 2003, 2006 James Troup <james@nocrew.org>
+""" Cleans up unassociated binary and source packages
+
+@contact: Debian FTPMaster <ftpmaster@debian.org>
+@copyright: 2000, 2001, 2002, 2003, 2006 James Troup <james@nocrew.org>
+@copyright: 2009 Mark Hymers <mhy@debian.org>
+@copyright: 2010 Joerg Jaspert <joerg@debian.org>
+@license: GNU General Public License version 2 or later
+"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
################################################################################
-import os, pg, stat, sys, time
+import errno
+import os
+import stat
+import sys
+import time
import apt_pkg
+from datetime import datetime, timedelta
+
+from daklib.config import Config
+from daklib.dbconn import *
from daklib import utils
+from daklib import daklog
################################################################################
-projectB = None
-Cnf = None
Options = None
-now_date = None; # mark newly "deleted" things as deleted "now"
-delete_date = None; # delete things marked "deleted" earler than this
-max_delete = None
+Logger = None
################################################################################
################################################################################
-def check_binaries():
- global delete_date, now_date
-
- print "Checking for orphaned binary packages..."
+def check_binaries(now_date, session):
+ Logger.log(["Checking for orphaned binary packages..."])
# Get the list of binary packages not in a suite and mark them for
# deletion.
- q = projectB.query("""
-SELECT b.file FROM binaries b, files f
- WHERE f.last_used IS NULL AND b.file = f.id
- AND NOT EXISTS (SELECT 1 FROM bin_associations ba WHERE ba.bin = b.id)""")
- ql = q.getresult()
-
- projectB.query("BEGIN WORK")
- for i in ql:
- file_id = i[0]
- projectB.query("UPDATE files SET last_used = '%s' WHERE id = %s AND last_used IS NULL" % (now_date, file_id))
- projectB.query("COMMIT WORK")
-
# Check for any binaries which are marked for eventual deletion
# but are now used again.
- q = projectB.query("""
-SELECT b.file FROM binaries b, files f
- WHERE f.last_used IS NOT NULL AND f.id = b.file
- AND EXISTS (SELECT 1 FROM bin_associations ba WHERE ba.bin = b.id)""")
- ql = q.getresult()
-
- projectB.query("BEGIN WORK")
- for i in ql:
- file_id = i[0]
- projectB.query("UPDATE files SET last_used = NULL WHERE id = %s" % (file_id))
- projectB.query("COMMIT WORK")
-########################################
+ query = """
+ WITH usage AS (
+ SELECT
+ af.archive_id AS archive_id,
+ af.file_id AS file_id,
+ af.component_id AS component_id,
+ BOOL_OR(EXISTS (SELECT 1 FROM bin_associations ba
+ JOIN suite s ON ba.suite = s.id
+ WHERE ba.bin = b.id
+ AND s.archive_id = af.archive_id))
+ AS in_use
+ FROM files_archive_map af
+ JOIN binaries b ON af.file_id = b.file
+ GROUP BY af.archive_id, af.file_id, af.component_id
+ )
+
+ UPDATE files_archive_map af
+ SET last_used = CASE WHEN usage.in_use THEN NULL ELSE :last_used END
+ FROM usage, files f, archive
+ WHERE af.archive_id = usage.archive_id AND af.file_id = usage.file_id AND af.component_id = usage.component_id
+ AND ((af.last_used IS NULL AND NOT usage.in_use) OR (af.last_used IS NOT NULL AND usage.in_use))
+ AND af.file_id = f.id
+ AND af.archive_id = archive.id
+ RETURNING archive.name, f.filename, af.last_used IS NULL"""
+
+ res = session.execute(query, {'last_used': now_date})
+ for i in res:
+ op = "set lastused"
+ if i[2]:
+ op = "unset lastused"
+ Logger.log([op, i[0], i[1]])
-def check_sources():
- global delete_date, now_date
+########################################
- print "Checking for orphaned source packages..."
+def check_sources(now_date, session):
+ Logger.log(["Checking for orphaned source packages..."])
# Get the list of source packages not in a suite and not used by
# any binaries.
- q = projectB.query("""
-SELECT s.id, s.file FROM source s, files f
- WHERE f.last_used IS NULL AND s.file = f.id
- AND NOT EXISTS (SELECT 1 FROM src_associations sa WHERE sa.source = s.id)
- AND NOT EXISTS (SELECT 1 FROM binaries b WHERE b.source = s.id)""")
-
- #### XXX: this should ignore cases where the files for the binary b
- #### have been marked for deletion (so the delay between bins go
- #### byebye and sources go byebye is 0 instead of StayOfExecution)
-
- ql = q.getresult()
-
- projectB.query("BEGIN WORK")
- for i in ql:
- source_id = i[0]
- dsc_file_id = i[1]
-
- # Mark the .dsc file for deletion
- projectB.query("UPDATE files SET last_used = '%s' WHERE id = %s AND last_used IS NULL" % (now_date, dsc_file_id))
- # Mark all other files references by .dsc too if they're not used by anyone else
- x = projectB.query("SELECT f.id FROM files f, dsc_files d WHERE d.source = %s AND d.file = f.id" % (source_id))
- for j in x.getresult():
- file_id = j[0]
- y = projectB.query("SELECT id FROM dsc_files d WHERE d.file = %s" % (file_id))
- if len(y.getresult()) == 1:
- projectB.query("UPDATE files SET last_used = '%s' WHERE id = %s AND last_used IS NULL" % (now_date, file_id))
- projectB.query("COMMIT WORK")
# Check for any sources which are marked for deletion but which
# are now used again.
- q = projectB.query("""
-SELECT f.id FROM source s, files f, dsc_files df
- WHERE f.last_used IS NOT NULL AND s.id = df.source AND df.file = f.id
- AND ((EXISTS (SELECT 1 FROM src_associations sa WHERE sa.source = s.id))
- OR (EXISTS (SELECT 1 FROM binaries b WHERE b.source = s.id)))""")
-
- #### XXX: this should also handle deleted binaries specially (ie, not
- #### reinstate sources because of them
-
- ql = q.getresult()
- # Could be done in SQL; but left this way for hysterical raisins
- # [and freedom to innovate don'cha know?]
- projectB.query("BEGIN WORK")
- for i in ql:
- file_id = i[0]
- projectB.query("UPDATE files SET last_used = NULL WHERE id = %s" % (file_id))
- projectB.query("COMMIT WORK")
+ # TODO: the UPDATE part is the same as in check_binaries. Merge?
+
+ query = """
+ WITH usage AS (
+ SELECT
+ af.archive_id AS archive_id,
+ af.file_id AS file_id,
+ af.component_id AS component_id,
+ BOOL_OR(EXISTS (SELECT 1 FROM src_associations sa
+ JOIN suite s ON sa.suite = s.id
+ WHERE sa.source = df.source
+ AND s.archive_id = af.archive_id)
+ OR EXISTS (SELECT 1 FROM files_archive_map af_bin
+ JOIN binaries b ON af_bin.file_id = b.file
+ WHERE b.source = df.source
+ AND af_bin.archive_id = af.archive_id
+ AND (af_bin.last_used IS NULL OR af_bin.last_used > ad.delete_date))
+ OR EXISTS (SELECT 1 FROM extra_src_references esr
+ JOIN bin_associations ba ON esr.bin_id = ba.bin
+ JOIN binaries b ON ba.bin = b.id
+ JOIN suite s ON ba.suite = s.id
+ WHERE esr.src_id = df.source
+ AND s.archive_id = af.archive_id))
+ AS in_use
+ FROM files_archive_map af
+ JOIN dsc_files df ON af.file_id = df.file
+ JOIN archive_delete_date ad ON af.archive_id = ad.archive_id
+ GROUP BY af.archive_id, af.file_id, af.component_id
+ )
+
+ UPDATE files_archive_map af
+ SET last_used = CASE WHEN usage.in_use THEN NULL ELSE :last_used END
+ FROM usage, files f, archive
+ WHERE af.archive_id = usage.archive_id AND af.file_id = usage.file_id AND af.component_id = usage.component_id
+ AND ((af.last_used IS NULL AND NOT usage.in_use) OR (af.last_used IS NOT NULL AND usage.in_use))
+ AND af.file_id = f.id
+ AND af.archive_id = archive.id
+
+ RETURNING archive.name, f.filename, af.last_used IS NULL
+ """
+
+ res = session.execute(query, {'last_used': now_date})
+ for i in res:
+ op = "set lastused"
+ if i[2]:
+ op = "unset lastused"
+ Logger.log([op, i[0], i[1]])
########################################
-def check_files():
- global delete_date, now_date
-
+def check_files(now_date, session):
# FIXME: this is evil; nothing should ever be in this state. if
- # they are, it's a bug and the files should not be auto-deleted.
-
- return
-
- print "Checking for unused files..."
- q = projectB.query("""
-SELECT id FROM files f
- WHERE NOT EXISTS (SELECT 1 FROM binaries b WHERE b.file = f.id)
- AND NOT EXISTS (SELECT 1 FROM dsc_files df WHERE df.file = f.id)""")
-
- projectB.query("BEGIN WORK")
- for i in q.getresult():
- file_id = i[0]
- projectB.query("UPDATE files SET last_used = '%s' WHERE id = %s" % (now_date, file_id))
- projectB.query("COMMIT WORK")
+ # they are, it's a bug.
+
+ # However, we've discovered it happens sometimes so we print a huge warning
+    # and then mark the file for deletion. This probably masks a bug somewhere
+ # else but is better than collecting cruft forever
+
+ Logger.log(["Checking for unused files..."])
+ q = session.execute("""
+ UPDATE files_archive_map af
+ SET last_used = :last_used
+ FROM files f, archive
+ WHERE af.file_id = f.id
+ AND af.archive_id = archive.id
+ AND NOT EXISTS (SELECT 1 FROM binaries b WHERE b.file = af.file_id)
+ AND NOT EXISTS (SELECT 1 FROM dsc_files df WHERE df.file = af.file_id)
+ AND af.last_used IS NULL
+ RETURNING archive.name, f.filename""", {'last_used': now_date})
+
+ for x in q:
+ utils.warn("orphaned file: {0}".format(x))
+ Logger.log(["set lastused", x[0], x[1], "ORPHANED FILE"])
-def clean_binaries():
- global delete_date, now_date
+ if not Options["No-Action"]:
+ session.commit()
+def clean_binaries(now_date, session):
# We do this here so that the binaries we remove will have their
# source also removed (if possible).
# XXX: why doesn't this remove the files here as well? I don't think it
# buys anything keeping this separate
- print "Cleaning binaries from the DB..."
- if not Options["No-Action"]:
- before = time.time()
- sys.stdout.write("[Deleting from binaries table... ")
- projectB.query("DELETE FROM binaries WHERE EXISTS (SELECT 1 FROM files WHERE binaries.file = files.id AND files.last_used <= '%s')" % (delete_date))
- sys.stdout.write("done. (%d seconds)]\n" % (int(time.time()-before)))
+
+ Logger.log(["Deleting from binaries table... "])
+ q = session.execute("""
+ DELETE FROM binaries b
+ USING files f
+ WHERE f.id = b.file
+ AND NOT EXISTS (SELECT 1 FROM files_archive_map af
+ JOIN archive_delete_date ad ON af.archive_id = ad.archive_id
+ WHERE af.file_id = b.file
+ AND (af.last_used IS NULL OR af.last_used > ad.delete_date))
+ RETURNING f.filename
+ """)
+ for b in q:
+ Logger.log(["delete binary", b[0]])
########################################
-def clean():
- global delete_date, now_date, max_delete
+def clean(now_date, archives, max_delete, session):
+ cnf = Config()
+
count = 0
size = 0
- print "Cleaning out packages..."
+ Logger.log(["Cleaning out packages..."])
+
+ morguedir = cnf.get("Dir::Morgue", os.path.join("Dir::Pool", 'morgue'))
+ morguesubdir = cnf.get("Clean-Suites::MorgueSubDir", 'pool')
- date = time.strftime("%Y-%m-%d")
- dest = Cnf["Dir::Morgue"] + '/' + Cnf["Clean-Suites::MorgueSubDir"] + '/' + date
- if not os.path.exists(dest):
- os.mkdir(dest)
+ # Build directory as morguedir/morguesubdir/year/month/day
+ dest = os.path.join(morguedir,
+ morguesubdir,
+ str(now_date.year),
+ '%.2d' % now_date.month,
+ '%.2d' % now_date.day)
+
+ if not Options["No-Action"] and not os.path.exists(dest):
+ os.makedirs(dest)
# Delete from source
+ Logger.log(["Deleting from source table..."])
+ q = session.execute("""
+ WITH
+ deleted_sources AS (
+ DELETE FROM source
+ USING files f
+ WHERE source.file = f.id
+ AND NOT EXISTS (SELECT 1 FROM files_archive_map af
+ JOIN archive_delete_date ad ON af.archive_id = ad.archive_id
+ WHERE af.file_id = source.file
+ AND (af.last_used IS NULL OR af.last_used > ad.delete_date))
+ RETURNING source.id AS id, f.filename AS filename
+ ),
+ deleted_dsc_files AS (
+ DELETE FROM dsc_files df WHERE df.source IN (SELECT id FROM deleted_sources)
+ RETURNING df.file AS file_id
+ ),
+ now_unused_source_files AS (
+ UPDATE files_archive_map af
+ SET last_used = '1977-03-13 13:37:42' -- Kill it now. We waited long enough before removing the .dsc.
+ WHERE af.file_id IN (SELECT file_id FROM deleted_dsc_files)
+ AND NOT EXISTS (SELECT 1 FROM dsc_files df WHERE df.file = af.file_id)
+ )
+ SELECT filename FROM deleted_sources""")
+ for s in q:
+ Logger.log(["delete source", s[0]])
+
if not Options["No-Action"]:
- before = time.time()
- sys.stdout.write("[Deleting from source table... ")
- projectB.query("DELETE FROM dsc_files WHERE EXISTS (SELECT 1 FROM source s, files f, dsc_files df WHERE f.last_used <= '%s' AND s.file = f.id AND s.id = df.source AND df.id = dsc_files.id)" % (delete_date))
- projectB.query("DELETE FROM src_uploaders WHERE EXISTS (SELECT 1 FROM source s, files f WHERE f.last_used <= '%s' AND s.file = f.id AND s.id = src_uploaders.source)" % (delete_date))
- projectB.query("DELETE FROM source WHERE EXISTS (SELECT 1 FROM files WHERE source.file = files.id AND files.last_used <= '%s')" % (delete_date))
- sys.stdout.write("done. (%d seconds)]\n" % (int(time.time()-before)))
+ session.commit()
# Delete files from the pool
- query = "SELECT l.path, f.filename FROM location l, files f WHERE f.last_used <= '%s' AND l.id = f.location" % (delete_date)
+ old_files = session.query(ArchiveFile).filter('files_archive_map.last_used <= (SELECT delete_date FROM archive_delete_date ad WHERE ad.archive_id = files_archive_map.archive_id)').join(Archive)
if max_delete is not None:
- query += " LIMIT %d" % max_delete
- sys.stdout.write("Limiting removals to %d\n" % max_delete)
+ old_files = old_files.limit(max_delete)
+ Logger.log(["Limiting removals to %d" % max_delete])
- q=projectB.query(query)
- for i in q.getresult():
- filename = i[0] + i[1]
+ if archives is not None:
+ archive_ids = [ a.archive_id for a in archives ]
+ old_files = old_files.filter(ArchiveFile.archive_id.in_(archive_ids))
+
+ for af in old_files:
+ filename = af.path
if not os.path.exists(filename):
- utils.warn("can not find '%s'." % (filename))
+ Logger.log(["database referred to non-existing file", af.path])
+ session.delete(af)
continue
+ Logger.log(["delete archive file", filename])
if os.path.isfile(filename):
if os.path.islink(filename):
count += 1
- if Options["No-Action"]:
- print "Removing symlink %s..." % (filename)
- else:
+ Logger.log(["delete symlink", filename])
+ if not Options["No-Action"]:
os.unlink(filename)
else:
size += os.stat(filename)[stat.ST_SIZE]
dest_filename = dest + '/' + os.path.basename(filename)
# If the destination file exists; try to find another filename to use
- if os.path.exists(dest_filename):
+ if os.path.lexists(dest_filename):
dest_filename = utils.find_next_free(dest_filename)
- if Options["No-Action"]:
- print "Cleaning %s -> %s ..." % (filename, dest_filename)
- else:
- utils.move(filename, dest_filename)
+ if not Options["No-Action"]:
+ if af.archive.use_morgue:
+ Logger.log(["move to morgue", filename, dest_filename])
+ utils.move(filename, dest_filename)
+ else:
+ Logger.log(["removed file", filename])
+ os.unlink(filename)
+
+ if not Options["No-Action"]:
+ session.delete(af)
+ session.commit()
+
else:
utils.fubar("%s is neither symlink nor file?!" % (filename))
- # Delete from the 'files' table
- if not Options["No-Action"]:
- before = time.time()
- sys.stdout.write("[Deleting from files table... ")
- projectB.query("DELETE FROM files WHERE last_used <= '%s'" % (delete_date))
- sys.stdout.write("done. (%d seconds)]\n" % (int(time.time()-before)))
if count > 0:
- sys.stderr.write("Cleaned %d files, %s.\n" % (count, utils.size_type(size)))
+ Logger.log(["total", count, utils.size_type(size)])
+
+ # Delete entries in files no longer referenced by any archive
+ query = """
+ DELETE FROM files f
+ WHERE NOT EXISTS (SELECT 1 FROM files_archive_map af WHERE af.file_id = f.id)
+ """
+ session.execute(query)
+
+ if not Options["No-Action"]:
+ session.commit()
################################################################################
-def clean_maintainers():
- print "Cleaning out unused Maintainer entries..."
+def clean_maintainers(now_date, session):
+ Logger.log(["Cleaning out unused Maintainer entries..."])
- q = projectB.query("""
-SELECT m.id FROM maintainer m
+ # TODO Replace this whole thing with one SQL statement
+ q = session.execute("""
+SELECT m.id, m.name FROM maintainer m
WHERE NOT EXISTS (SELECT 1 FROM binaries b WHERE b.maintainer = m.id)
AND NOT EXISTS (SELECT 1 FROM source s WHERE s.maintainer = m.id OR s.changedby = m.id)
AND NOT EXISTS (SELECT 1 FROM src_uploaders u WHERE u.maintainer = m.id)""")
- ql = q.getresult()
count = 0
- projectB.query("BEGIN WORK")
- for i in ql:
+
+ for i in q.fetchall():
maintainer_id = i[0]
+ Logger.log(["delete maintainer", i[1]])
if not Options["No-Action"]:
- projectB.query("DELETE FROM maintainer WHERE id = %s" % (maintainer_id))
- count += 1
- projectB.query("COMMIT WORK")
+ session.execute("DELETE FROM maintainer WHERE id = :maint", {'maint': maintainer_id})
+ count += 1
+
+ if not Options["No-Action"]:
+ session.commit()
if count > 0:
- sys.stderr.write("Cleared out %d maintainer entries.\n" % (count))
+ Logger.log(["total", count])
################################################################################
-def clean_fingerprints():
- print "Cleaning out unused fingerprint entries..."
+def clean_fingerprints(now_date, session):
+ Logger.log(["Cleaning out unused fingerprint entries..."])
- q = projectB.query("""
-SELECT f.id FROM fingerprint f
+ # TODO Replace this whole thing with one SQL statement
+ q = session.execute("""
+SELECT f.id, f.fingerprint FROM fingerprint f
WHERE f.keyring IS NULL
AND NOT EXISTS (SELECT 1 FROM binaries b WHERE b.sig_fpr = f.id)
- AND NOT EXISTS (SELECT 1 FROM source s WHERE s.sig_fpr = f.id)""")
- ql = q.getresult()
+ AND NOT EXISTS (SELECT 1 FROM source s WHERE s.sig_fpr = f.id)
+ AND NOT EXISTS (SELECT 1 FROM acl_per_source aps WHERE aps.created_by_id = f.id)""")
count = 0
- projectB.query("BEGIN WORK")
- for i in ql:
+
+ for i in q.fetchall():
fingerprint_id = i[0]
+ Logger.log(["delete fingerprint", i[1]])
if not Options["No-Action"]:
- projectB.query("DELETE FROM fingerprint WHERE id = %s" % (fingerprint_id))
- count += 1
- projectB.query("COMMIT WORK")
+ session.execute("DELETE FROM fingerprint WHERE id = :fpr", {'fpr': fingerprint_id})
+ count += 1
+
+ if not Options["No-Action"]:
+ session.commit()
if count > 0:
- sys.stderr.write("Cleared out %d fingerprint entries.\n" % (count))
+ Logger.log(["total", count])
################################################################################
-def clean_queue_build():
- global now_date
+def clean_byhash(now_date, session):
+ cnf = Config()
+ suite_suffix = cnf.find("Dinstall::SuiteSuffix", "")
+
+ Logger.log(["Cleaning out unused by-hash files..."])
- if not Cnf.ValueList("Dinstall::QueueBuildSuites") or Options["No-Action"]:
- return
+ q = session.execute("""
+ DELETE FROM hashfile h
+ USING suite s, archive a
+ WHERE s.id = h.suite_id
+ AND a.id = s.archive_id
+ AND h.unreferenced + a.stayofexecution < CURRENT_TIMESTAMP
+ RETURNING a.path, s.suite_name, h.path""")
+ count = q.rowcount
- print "Cleaning out queue build symlinks..."
+ if not Options["No-Action"]:
+ for base, suite, path in q:
+ filename = os.path.join(base, 'dists', suite, suite_suffix, path)
+ try:
+ os.unlink(filename)
+ except OSError as exc:
+ if exc.errno != errno.ENOENT:
+ raise
+ Logger.log(['database referred to non-existing file', filename])
+ else:
+ Logger.log(['delete hashfile', suite, path])
+ session.commit()
+
+ if count > 0:
+ Logger.log(["total", count])
+
+################################################################################
+
+def clean_empty_directories(session):
+ """
+ Removes empty directories from pool directories.
+ """
+
+ Logger.log(["Cleaning out empty directories..."])
- our_delete_date = time.strftime("%Y-%m-%d %H:%M", time.localtime(time.time()-int(Cnf["Clean-Suites::QueueBuildStayOfExecution"])))
count = 0
- q = projectB.query("SELECT filename FROM queue_build WHERE last_used <= '%s'" % (our_delete_date))
- for i in q.getresult():
- filename = i[0]
- if not os.path.exists(filename):
- utils.warn("%s (from queue_build) doesn't exist." % (filename))
- continue
- if not Cnf.FindB("Dinstall::SecurityQueueBuild") and not os.path.islink(filename):
- utils.fubar("%s (from queue_build) should be a symlink but isn't." % (filename))
- os.unlink(filename)
- count += 1
- projectB.query("DELETE FROM queue_build WHERE last_used <= '%s'" % (our_delete_date))
+ cursor = session.execute(
+ """SELECT DISTINCT(path) FROM archive"""
+ )
+ bases = [x[0] for x in cursor.fetchall()]
+
+ for base in bases:
+ for dirpath, dirnames, filenames in os.walk(base, topdown=False):
+ if not filenames and not dirnames:
+ to_remove = os.path.join(base, dirpath)
+ if not Options["No-Action"]:
+ Logger.log(["removing directory", to_remove])
+ os.removedirs(to_remove)
+ count += 1
if count:
- sys.stderr.write("Cleaned %d queue_build files.\n" % (count))
+ Logger.log(["total removed directories", count])
+
+################################################################################
+
+def set_archive_delete_dates(now_date, session):
+ session.execute("""
+ CREATE TEMPORARY TABLE archive_delete_date (
+ archive_id INT NOT NULL,
+ delete_date TIMESTAMP NOT NULL
+ )""")
+
+ session.execute("""
+ INSERT INTO archive_delete_date
+ (archive_id, delete_date)
+ SELECT
+ archive.id, :now_date - archive.stayofexecution
+ FROM archive""", {'now_date': now_date})
+
+ session.flush()
################################################################################
def main():
- global Cnf, Options, projectB, delete_date, now_date, max_delete
+ global Options, Logger
+
+ cnf = Config()
- Cnf = utils.get_conf()
for i in ["Help", "No-Action", "Maximum" ]:
- if not Cnf.has_key("Clean-Suites::Options::%s" % (i)):
- Cnf["Clean-Suites::Options::%s" % (i)] = ""
+ if not cnf.has_key("Clean-Suites::Options::%s" % (i)):
+ cnf["Clean-Suites::Options::%s" % (i)] = ""
Arguments = [('h',"help","Clean-Suites::Options::Help"),
+ ('a','archive','Clean-Suites::Options::Archive','HasArg'),
('n',"no-action","Clean-Suites::Options::No-Action"),
('m',"maximum","Clean-Suites::Options::Maximum", "HasArg")]
- apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
- Options = Cnf.SubTree("Clean-Suites::Options")
+ apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
+ Options = cnf.subtree("Clean-Suites::Options")
- if Cnf["Clean-Suites::Options::Maximum"] != "":
+ if cnf["Clean-Suites::Options::Maximum"] != "":
try:
# Only use Maximum if it's an integer
- max_delete = int(Cnf["Clean-Suites::Options::Maximum"])
+ max_delete = int(cnf["Clean-Suites::Options::Maximum"])
if max_delete < 1:
utils.fubar("If given, Maximum must be at least 1")
- except ValueError, e:
+ except ValueError as e:
utils.fubar("If given, Maximum must be an integer")
else:
max_delete = None
if Options["Help"]:
usage()
- projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
+ program = "clean-suites"
+ if Options['No-Action']:
+ program = "clean-suites (no action)"
+ Logger = daklog.Logger(program, debug=Options["No-Action"])
+
+ session = DBConn().session()
+
+ archives = None
+ if 'Archive' in Options:
+ archive_names = Options['Archive'].split(',')
+ archives = session.query(Archive).filter(Archive.archive_name.in_(archive_names)).all()
+ if len(archives) == 0:
+ utils.fubar('Unknown archive.')
+
+ now_date = datetime.now()
+
+ set_archive_delete_dates(now_date, session)
+
+ check_binaries(now_date, session)
+ clean_binaries(now_date, session)
+ check_sources(now_date, session)
+ check_files(now_date, session)
+ clean(now_date, archives, max_delete, session)
+ clean_maintainers(now_date, session)
+ clean_fingerprints(now_date, session)
+ clean_byhash(now_date, session)
+ clean_empty_directories(session)
- now_date = time.strftime("%Y-%m-%d %H:%M")
- delete_date = time.strftime("%Y-%m-%d %H:%M", time.localtime(time.time()-int(Cnf["Clean-Suites::StayOfExecution"])))
+ session.rollback()
- check_binaries()
- clean_binaries()
- check_sources()
- check_files()
- clean()
- clean_maintainers()
- clean_fingerprints()
- clean_queue_build()
+ Logger.close()
################################################################################