From 0b38b655a1f91653b66589e43880cea0d1b74157 Mon Sep 17 00:00:00 2001
From: Mark Hymers
Date: Sat, 31 Oct 2009 09:16:47 +0000
Subject: [PATCH] Try to sanitise changes and queue handling

Signed-off-by: Mark Hymers
---
 dak/clean_suites.py   |    2 +-
 dak/dakdb/update22.py |  225 ++++++++++++++++++++++++++++++++
 dak/update_db.py      |    2 +-
 daklib/dbconn.py      |  294 +++++++++++++++++++++++++-----------------
 4 files changed, 400 insertions(+), 123 deletions(-)
 create mode 100755 dak/dakdb/update22.py

diff --git a/dak/clean_suites.py b/dak/clean_suites.py
index 0d3b4732..99f0c8b4 100755
--- a/dak/clean_suites.py
+++ b/dak/clean_suites.py
@@ -338,7 +338,7 @@ def clean_queue_build(now_date, delete_date, max_delete, session):
     our_delete_date = now_date - timedelta(seconds = int(cnf["Clean-Suites::QueueBuildStayOfExecution"]))
 
     count = 0
-    for qf in session.query(QueueBuild).filter(QueueBuild.last_used <= our_delete_date):
+    for qf in session.query(BuildQueueFile).filter(BuildQueueFile.last_used <= our_delete_date):
         if not os.path.exists(qf.filename):
             utils.warn("%s (from queue_build) doesn't exist." % (qf.filename))
             continue
diff --git a/dak/dakdb/update22.py b/dak/dakdb/update22.py
new file mode 100755
index 00000000..758430c1
--- /dev/null
+++ b/dak/dakdb/update22.py
@@ -0,0 +1,225 @@
+#!/usr/bin/env python
+# coding=utf8
+
+"""
+Clean up queue SQL
+
+@contact: Debian FTP Master
+@copyright: 2009 Mark Hymers
+@license: GNU General Public License version 2 or later
+"""
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+
+################################################################################
+
+
+################################################################################
+
+import psycopg2
+import time
+import os
+import datetime
+import traceback
+
+from daklib.dak_exceptions import DBUpdateError
+from daklib.config import Config
+
+################################################################################
+
+def do_update(self):
+    print "Splitting up queues and fixing general design mistakes"
+
+    try:
+        c = self.db.cursor()
+
+        cnf = Config()
+
+        print "Adding build_queue table"
+        c.execute("""CREATE TABLE build_queue (
+                       id          SERIAL PRIMARY KEY,
+                       queue_name  TEXT NOT NULL UNIQUE,
+                       path        TEXT NOT NULL,
+                       copy_files  BOOL DEFAULT FALSE NOT NULL)""")
+
+        print "Adding policy_queue table"
+        c.execute("""CREATE TABLE policy_queue (
+                       id          SERIAL PRIMARY KEY,
+                       queue_name  TEXT NOT NULL UNIQUE,
+                       path        TEXT NOT NULL)""")
+
+        print "Copying queues"
+        queues = {}
+        c.execute("""SELECT queue.id, queue.queue_name, queue.path, queue.copy_pool_files FROM queue""")
+
+        for q in c.fetchall():
+            queues[q[0]] = q[1]
+            if q[1] in ['accepted', 'buildd']:
+                # Move to build_queue table
+                c.execute("""INSERT INTO build_queue (queue_name, path, copy_files)
+                                  VALUES ('%s', '%s', '%s')""" % (q[1], q[2], q[3]))
+
+            else:
+                # Move to policy_queue table
+                c.execute("""INSERT INTO policy_queue (queue_name, path)
+                                  VALUES ('%s', '%s')""" % (q[1], q[2]))
+
+        print "Fixing up build_queue_files"
+        c.execute("""ALTER TABLE queue_files DROP CONSTRAINT queue_files_queueid_fkey""")
+        c.execute("""ALTER TABLE queue_files RENAME TO build_queue_files""")
+        c.execute("""ALTER TABLE build_queue_files RENAME COLUMN queueid TO build_queue_id""")
+
+        c.execute("""UPDATE build_queue_files
+                        SET build_queue_id = (SELECT build_queue.id FROM build_queue
+                                               WHERE build_queue.queue_name =
+                                                (SELECT queue.queue_name FROM queue
+                                                  WHERE queue.id = build_queue_files.build_queue_id))""")
+
+        c.execute("""ALTER TABLE build_queue_files
+                       ADD CONSTRAINT build_queue_files_build_queue_id_fkey
+                       FOREIGN KEY (build_queue_id)
+                       REFERENCES build_queue(id)
+                       ON DELETE CASCADE""")
+
+        c.execute("""ALTER TABLE suite DROP CONSTRAINT suite_policy_queue_fkey""")
+
+        c.execute("""UPDATE suite
+                        SET policy_queue_id = (SELECT policy_queue.id FROM policy_queue
+                                                WHERE policy_queue.queue_name =
+                                                 (SELECT queue.queue_name FROM queue
+                                                   WHERE queue.id = suite.policy_queue_id))""")
+
+        c.execute("""ALTER TABLE suite
+                       ADD CONSTRAINT suite_policy_queue_fkey
+                       FOREIGN KEY (policy_queue_id)
+                       REFERENCES policy_queue (id)
+                       ON DELETE RESTRICT""")
+
+        c.execute("""ALTER TABLE known_changes DROP CONSTRAINT known_changes_approved_for_fkey""")
+        c.execute("""ALTER TABLE known_changes DROP CONSTRAINT known_changes_in_queue_fkey""")
+
+        c.execute("""UPDATE known_changes
+                        SET in_queue = (SELECT policy_queue.id FROM policy_queue
+                                         WHERE policy_queue.queue_name =
+                                          (SELECT queue.queue_name FROM queue
+                                            WHERE queue.id = known_changes.in_queue))""")
+
+        c.execute("""ALTER TABLE known_changes
+                       ADD CONSTRAINT known_changes_in_queue_fkey
+                       FOREIGN KEY (in_queue)
+                       REFERENCES policy_queue (id)
+                       ON DELETE RESTRICT""")
+
+        c.execute("""UPDATE known_changes
+                        SET approved_for = (SELECT policy_queue.id FROM policy_queue
+                                             WHERE policy_queue.queue_name =
+                                              (SELECT queue.queue_name FROM queue
+                                                WHERE queue.id = known_changes.approved_for))""")
+
+        c.execute("""ALTER TABLE known_changes
+                       ADD CONSTRAINT known_changes_approved_for_fkey
+                       FOREIGN KEY (approved_for)
+                       REFERENCES policy_queue (id)
+                       ON DELETE RESTRICT""")
+
+        c.execute("""ALTER TABLE suite_queue_copy RENAME TO suite_build_queue_copy""")
+
+        c.execute("""ALTER TABLE suite_build_queue_copy DROP CONSTRAINT suite_queue_copy_queue_fkey""")
+
+        c.execute("""ALTER TABLE suite_build_queue_copy RENAME COLUMN queue TO build_queue_id""")
+
+        c.execute("""UPDATE suite_build_queue_copy
+                        SET build_queue_id = (SELECT build_queue.id FROM build_queue
+                                               WHERE build_queue.queue_name =
+                                                (SELECT queue.queue_name FROM queue
+                                                  WHERE queue.id = suite_build_queue_copy.build_queue_id))""")
+
+        c.execute("""ALTER TABLE suite_build_queue_copy
+                       ADD CONSTRAINT suite_build_queue_copy_build_queue_id_fkey
+                       FOREIGN KEY (build_queue_id)
+                       REFERENCES build_queue (id)
+                       ON DELETE RESTRICT""")
+
+        c.execute("""DROP TABLE changes_pending_files""")
+
+        c.execute("""CREATE TABLE changes_pending_files (
+                       id             SERIAL PRIMARY KEY,
+                       filename       TEXT NOT NULL UNIQUE,
+                       size           BIGINT NOT NULL,
+                       md5sum         TEXT NOT NULL,
+                       sha1sum        TEXT NOT NULL,
+                       sha256sum      TEXT NOT NULL )""")
+
+        c.execute("""CREATE TABLE changes_pending_source (
+                       id                SERIAL PRIMARY KEY,
+                       change_id         INT4 NOT NULL REFERENCES known_changes (id),
+                       source            TEXT NOT NULL,
+                       version           DEBVERSION NOT NULL,
+                       maintainer_id     INT4 NOT NULL REFERENCES maintainer (id),
+                       changedby_id      INT4 NOT NULL REFERENCES maintainer (id),
+                       sig_fpr           INT4 NOT NULL REFERENCES fingerprint (id),
+                       dm_upload_allowed BOOL NOT NULL DEFAULT FALSE )""")
+
+        c.execute("""CREATE TABLE changes_pending_source_files (
+                       pending_source_id INT4 REFERENCES changes_pending_source (id) NOT NULL,
+                       pending_file_id   INT4 REFERENCES changes_pending_files (id) NOT NULL,
+
+                       PRIMARY KEY (pending_source_id, pending_file_id) )""")
+
+        c.execute("""CREATE TABLE changes_pending_binaries (
+                       id                 SERIAL PRIMARY KEY,
+                       change_id          INT4 NOT NULL REFERENCES known_changes (id),
+                       package            TEXT NOT NULL,
+                       version            DEBVERSION NOT NULL,
+                       architecture_id    INT4 REFERENCES architecture (id) NOT NULL,
+                       source_id          INT4 REFERENCES source (id),
+                       pending_source_id  INT4 REFERENCES changes_pending_source (id),
+                       pending_file_id    INT4 REFERENCES changes_pending_files (id),
+
+                       UNIQUE (package, version, architecture_id),
+                       CHECK (source_id IS NOT NULL or pending_source_id IS NOT NULL ) )""")
+
+        print "Getting rid of old queue table"
+        c.execute("""DROP TABLE queue""")
+
+        print "Sorting out permissions"
+
+        for t in ['build_queue', 'policy_queue', 'build_queue_files',
+                  'changes_pending_binaries', 'changes_pending_source_files',
+                  'changes_pending_source', 'changes_pending_files',
+                  'changes_pool_files', 'suite_build_queue_copy']:
+            c.execute("GRANT SELECT ON %s TO public" % t)
+            c.execute("GRANT ALL ON %s TO ftpmaster" % t)
+
+        for s in ['queue_files_id_seq', 'build_queue_id_seq',
+                  'changes_pending_source_id_seq',
+                  'changes_pending_binaries_id_seq',
+                  'changes_pending_files_id_seq',
+                  'changes_pending_source_id_seq',
+                  'known_changes_id_seq',
+                  'policy_queue_id_seq']:
+            c.execute("GRANT USAGE ON %s TO ftpmaster" % s)
+
+        print "Committing"
+        c.execute("UPDATE config SET value = '22' WHERE name = 'db_revision'")
+        self.db.commit()
+
+    except psycopg2.InternalError, msg:
+        self.db.rollback()
+        raise DBUpdateError, "Unable to apply update 22, rollback issued. Error message : %s" % (str(msg))
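Note on the data migration above: the UPDATE statements in update22.py all follow the same shape, rewriting a foreign key that used to point at the old queue table so that it points at the row with the same queue_name in build_queue or policy_queue. The sketch below expresses that pattern as a small helper for illustration only; the name remap_queue_fk does not exist in dak, and identifiers are interpolated directly because they are trusted literals, exactly as in the update script itself.

    # Hypothetical helper illustrating the remapping pattern used by update22:
    # point table.column at the row in new_table whose queue_name matches the
    # queue_name of the old "queue" row it used to reference.
    def remap_queue_fk(cursor, table, column, new_table):
        cursor.execute("""UPDATE %(table)s
                             SET %(column)s = (SELECT %(new)s.id FROM %(new)s
                                                WHERE %(new)s.queue_name =
                                                 (SELECT queue.queue_name FROM queue
                                                   WHERE queue.id = %(table)s.%(column)s))"""
                       % {'table': table, 'column': column, 'new': new_table})

    # The statements above are then equivalent to:
    #   remap_queue_fk(c, 'build_queue_files', 'build_queue_id', 'build_queue')
    #   remap_queue_fk(c, 'suite', 'policy_queue_id', 'policy_queue')
    #   remap_queue_fk(c, 'known_changes', 'in_queue', 'policy_queue')
    #   remap_queue_fk(c, 'known_changes', 'approved_for', 'policy_queue')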
diff --git a/dak/update_db.py b/dak/update_db.py
index 4e7704e4..a296bdd2 100755
--- a/dak/update_db.py
+++ b/dak/update_db.py
@@ -44,7 +44,7 @@ from daklib.dak_exceptions import DBUpdateError
 ################################################################################
 
 Cnf = None
-required_database_schema = 21
+required_database_schema = 22
 
 ################################################################################
 
diff --git a/daklib/dbconn.py b/daklib/dbconn.py
index 95e30c7c..0f4d4ede 100755
--- a/daklib/dbconn.py
+++ b/daklib/dbconn.py
@@ -50,8 +50,6 @@ from sqlalchemy import types as sqltypes
 from sqlalchemy.exc import *
 from sqlalchemy.orm.exc import NoResultFound
 
-# Only import Config until Queue stuff is changed to store its config
-# in the database
 from config import Config
 from singleton import Singleton
 from textutils import fix_maintainer
@@ -432,6 +430,132 @@ __all__.append('BinaryACLMap')
 
 ################################################################################
 
+class BuildQueue(object):
+    def __init__(self, *args, **kwargs):
+        pass
+
+    def __repr__(self):
+        return '<BuildQueue %s>' % self.queue_name
+
+    def add_file_from_pool(self, poolfile):
+        """Copies a file from the pool into this queue.  Assumes that the PoolFile
+        object is attached to the same SQLAlchemy session as the BuildQueue object is.
+
+        The caller is responsible for committing after calling this function."""
+        poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]
+
+        # Check if we have a file of this name or this ID already
+        for f in self.queuefiles:
+            if f.fileid is not None and f.fileid == poolfile.file_id or \
+               f.poolfile.filename == poolfile_basename:
+                   # In this case, update the BuildQueueFile entry so we
+                   # don't remove it too early
+                   f.lastused = datetime.now()
+                   DBConn().session().object_session(poolfile).add(f)
+                   return f
+
+        # Prepare BuildQueueFile object
+        qf = BuildQueueFile()
+        qf.build_queue_id = self.queue_id
+        qf.lastused = datetime.now()
+        qf.filename = poolfile_basename
+
+        targetpath = poolfile.fullpath
+        queuepath = os.path.join(self.path, poolfile_basename)
+
+        try:
+            if self.copy_files:
+                # We need to copy instead of symlink
+                import utils
+                utils.copy(targetpath, queuepath)
+                # NULL in the fileid field implies a copy
+                qf.fileid = None
+            else:
+                os.symlink(targetpath, queuepath)
+                qf.fileid = poolfile.file_id
+        except OSError:
+            return None
+
+        # Get the same session as the PoolFile is using and add the qf to it
+        DBConn().session().object_session(poolfile).add(qf)
+
+        return qf
+
+
+__all__.append('BuildQueue')
+
+@session_wrapper
+def get_queue(queuename, session=None):
+    """
+    Returns BuildQueue object for given C{queue name}, or None if no such
+    queue exists.
+
+    @type queuename: string
+    @param queuename: The name of the queue
+
+    @type session: Session
+    @param session: Optional SQLA session object (a temporary one will be
+    generated if not supplied)
+
+    @rtype: BuildQueue
+    @return: BuildQueue object for the given queue
+    """
+
+    q = session.query(BuildQueue).filter_by(queue_name=queuename)
+
+    try:
+        return q.one()
+    except NoResultFound:
+        return None
+
+__all__.append('get_queue')
+
+################################################################################
+
+class BuildQueueFile(object):
+    def __init__(self, *args, **kwargs):
+        pass
+
+    def __repr__(self):
+        return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)
+
+__all__.append('BuildQueueFile')
+
+################################################################################
+
+class ChangePendingBinary(object):
+    def __init__(self, *args, **kwargs):
+        pass
+
+    def __repr__(self):
+        return '<ChangePendingBinary %s>' % self.change_pending_binary_id
+
+__all__.append('ChangePendingBinary')
+
+################################################################################
+
+class ChangePendingFile(object):
+    def __init__(self, *args, **kwargs):
+        pass
+
+    def __repr__(self):
+        return '<ChangePendingFile %s>' % self.change_pending_file_id
+
+__all__.append('ChangePendingFile')
+
+################################################################################
+
+class ChangePendingSource(object):
+    def __init__(self, *args, **kwargs):
+        pass
+
+    def __repr__(self):
+        return '<ChangePendingSource %s>' % self.change_pending_source_id
+
+__all__.append('ChangePendingSource')
+
+################################################################################
+
 class Component(object):
     def __init__(self, *args, **kwargs):
         pass
 
@@ -1160,17 +1284,6 @@ __all__.append('get_knownchange')
 
 ################################################################################
 
-class KnownChangePendingFile(object):
-    def __init__(self, *args, **kwargs):
-        pass
-
-    def __repr__(self):
-        return '<KnownChangePendingFile %s>' % self.known_change_pending_file_id
-
-__all__.append('KnownChangePendingFile')
-
-################################################################################
-
 class Location(object):
     def __init__(self, *args, **kwargs):
         pass
@@ -1537,6 +1650,17 @@ __all__.append('insert_pending_content_paths')
 
 ################################################################################
 
+class PolicyQueue(object):
+    def __init__(self, *args, **kwargs):
+        pass
+
+    def __repr__(self):
+        return '<PolicyQueue %s>' % self.queue_name
+
+__all__.append('PolicyQueue')
+
+################################################################################
+
 class Priority(object):
     def __init__(self, *args, **kwargs):
         pass
@@ -1607,99 +1731,6 @@ __all__.append('get_priorities')
 
 ################################################################################
 
-class Queue(object):
-    def __init__(self, *args, **kwargs):
-        pass
-
-    def __repr__(self):
-        return '<Queue %s>' % self.queue_name
-
-    def add_file_from_pool(self, poolfile):
-        """Copies a file into the pool.  Assumes that the PoolFile object is
-        attached to the same SQLAlchemy session as the Queue object is.
-
-        The caller is responsible for committing after calling this function."""
-        poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]
-
-        # Check if we have a file of this name or this ID already
-        for f in self.queuefiles:
-            if f.fileid is not None and f.fileid == poolfile.file_id or \
-               f.poolfile.filename == poolfile_basename:
-                   # In this case, update the QueueFile entry so we
-                   # don't remove it too early
-                   f.lastused = datetime.now()
-                   DBConn().session().object_session(pf).add(f)
-                   return f
-
-        # Prepare QueueFile object
-        qf = QueueFile()
-        qf.queue_id = self.queue_id
-        qf.lastused = datetime.now()
-        qf.filename = dest
-
-        targetpath = qf.fullpath
-        queuepath = os.path.join(self.path, poolfile_basename)
-
-        try:
-            if self.copy_pool_files:
-                # We need to copy instead of symlink
-                import utils
-                utils.copy(targetfile, queuepath)
-                # NULL in the fileid field implies a copy
-                qf.fileid = None
-            else:
-                os.symlink(targetfile, queuepath)
-                qf.fileid = poolfile.file_id
-        except OSError:
-            return None
-
-        # Get the same session as the PoolFile is using and add the qf to it
-        DBConn().session().object_session(poolfile).add(qf)
-
-        return qf
-
-
-__all__.append('Queue')
-
-@session_wrapper
-def get_queue(queuename, session=None):
-    """
-    Returns Queue object for given C{queue name}, creating it if it does not
-    exist.
-
-    @type queuename: string
-    @param queuename: The name of the queue
-
-    @type session: Session
-    @param session: Optional SQLA session object (a temporary one will be
-    generated if not supplied)
-
-    @rtype: Queue
-    @return: Queue object for the given queue
-    """
-
-    q = session.query(Queue).filter_by(queue_name=queuename)
-
-    try:
-        return q.one()
-    except NoResultFound:
-        return None
-
-__all__.append('get_queue')
-
-################################################################################
-
-class QueueFile(object):
-    def __init__(self, *args, **kwargs):
-        pass
-
-    def __repr__(self):
-        return '<QueueFile %s (%s)>' % (self.filename, self.queue_id)
-
-__all__.append('QueueFile')
-
-################################################################################
-
 class Section(object):
     def __init__(self, *args, **kwargs):
         pass
 
@@ -2459,12 +2490,17 @@ class DBConn(Singleton):
         self.tbl_binaries = Table('binaries', self.db_meta, autoload=True)
         self.tbl_binary_acl = Table('binary_acl', self.db_meta, autoload=True)
         self.tbl_binary_acl_map = Table('binary_acl_map', self.db_meta, autoload=True)
+        self.tbl_build_queue = Table('build_queue', self.db_meta, autoload=True)
+        self.tbl_build_queue_files = Table('build_queue_files', self.db_meta, autoload=True)
         self.tbl_component = Table('component', self.db_meta, autoload=True)
         self.tbl_config = Table('config', self.db_meta, autoload=True)
         self.tbl_content_associations = Table('content_associations', self.db_meta, autoload=True)
         self.tbl_content_file_names = Table('content_file_names', self.db_meta, autoload=True)
         self.tbl_content_file_paths = Table('content_file_paths', self.db_meta, autoload=True)
+        self.tbl_changes_pending_binary = Table('changes_pending_binaries', self.db_meta, autoload=True)
         self.tbl_changes_pending_files = Table('changes_pending_files', self.db_meta, autoload=True)
+        self.tbl_changes_pending_source = Table('changes_pending_source', self.db_meta, autoload=True)
+        self.tbl_changes_pending_source_files = Table('changes_pending_source_files', self.db_meta, autoload=True)
         self.tbl_changes_pool_files = Table('changes_pool_files', self.db_meta, autoload=True)
         self.tbl_dsc_files = Table('dsc_files', self.db_meta, autoload=True)
         self.tbl_files = Table('files', self.db_meta, autoload=True)
@@ -2478,9 +2514,8 @@ class DBConn(Singleton):
         self.tbl_override = Table('override', self.db_meta, autoload=True)
         self.tbl_override_type = Table('override_type', self.db_meta, autoload=True)
         self.tbl_pending_content_associations = Table('pending_content_associations', self.db_meta, autoload=True)
+        self.tbl_policy_queue = Table('policy_queue', self.db_meta, autoload=True)
         self.tbl_priority = Table('priority', self.db_meta, autoload=True)
-        self.tbl_queue = Table('queue', self.db_meta, autoload=True)
-        self.tbl_queue_files = Table('queue_files', self.db_meta, autoload=True)
         self.tbl_section = Table('section', self.db_meta, autoload=True)
         self.tbl_source = Table('source', self.db_meta, autoload=True)
         self.tbl_source_acl = Table('source_acl', self.db_meta, autoload=True)
@@ -2490,7 +2525,7 @@ class DBConn(Singleton):
         self.tbl_suite = Table('suite', self.db_meta, autoload=True)
         self.tbl_suite_architectures = Table('suite_architectures', self.db_meta, autoload=True)
         self.tbl_suite_src_formats = Table('suite_src_formats', self.db_meta, autoload=True)
-        self.tbl_suite_queue_copy = Table('suite_queue_copy', self.db_meta, autoload=True)
+        self.tbl_suite_build_queue_copy = Table('suite_build_queue_copy', self.db_meta, autoload=True)
         self.tbl_uid = Table('uid', self.db_meta, autoload=True)
         self.tbl_upload_blocks = Table('upload_blocks', self.db_meta, autoload=True)
 
@@ -2509,6 +2544,12 @@
                                  binary_id = self.tbl_bin_associations.c.bin,
                                  binary = relation(DBBinary)))
 
+        mapper(BuildQueue, self.tbl_build_queue,
+               properties = dict(queue_id = self.tbl_build_queue.c.id))
+
+        mapper(BuildQueueFile, self.tbl_build_queue_files,
+               properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
+                                 poolfile = relation(PoolFile, backref='buildqueueinstances')))
 
         mapper(DBBinary, self.tbl_binaries,
                properties = dict(binary_id = self.tbl_binaries.c.id,
@@ -2575,10 +2616,25 @@
                                                  poolfiles = relation(PoolFile,
                                                                       secondary=self.tbl_changes_pool_files,
                                                                       backref="changeslinks"),
-                                                 files = relation(KnownChangePendingFile, backref="changesfile")))
+                                                 files = relation(ChangePendingFile, backref="changesfile")))
+
+        mapper(ChangePendingBinary, self.tbl_changes_pending_binary,
+               properties = dict(change_pending_binary_id = self.tbl_changes_pending_binary.c.id))
 
-        mapper(KnownChangePendingFile, self.tbl_changes_pending_files,
-               properties = dict(known_change_pending_file_id = self.tbl_changes_pending_files.id))
+        mapper(ChangePendingFile, self.tbl_changes_pending_files,
+               properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id))
+
+        mapper(ChangePendingSource, self.tbl_changes_pending_source,
+               properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
+                                 change = relation(KnownChange),
+                                 maintainer = relation(Maintainer,
+                                                       primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
+                                 changedby = relation(Maintainer,
+                                                      primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
+                                 fingerprint = relation(Fingerprint),
+                                 source_files = relation(ChangePendingFile,
+                                                         secondary=self.tbl_changes_pending_source_files,
+                                                         backref="pending_sources")))
 
         mapper(KeyringACLMap, self.tbl_keyring_acl_map,
                properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
@@ -2615,16 +2671,12 @@
                properties = dict(overridetype = self.tbl_override_type.c.type,
                                  overridetype_id = self.tbl_override_type.c.id))
 
+        mapper(PolicyQueue, self.tbl_policy_queue,
+               properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))
+
         mapper(Priority, self.tbl_priority,
                properties = dict(priority_id = self.tbl_priority.c.id))
 
-        mapper(Queue, self.tbl_queue,
-               properties = dict(queue_id = self.tbl_queue.c.id))
-
-        mapper(QueueFile, self.tbl_queue_files,
-               properties = dict(queue = relation(Queue, backref='queuefiles'),
-                                 poolfile = relation(PoolFile, backref='queueinstances')))
-
         mapper(Section, self.tbl_section,
                properties = dict(section_id = self.tbl_section.c.id))
 
@@ -2672,8 +2724,8 @@
         mapper(Suite, self.tbl_suite,
                properties = dict(suite_id = self.tbl_suite.c.id,
-                                 policy_queue = relation(Queue),
-                                 copy_queues = relation(Queue, secondary=self.tbl_suite_queue_copy)))
+                                 policy_queue = relation(PolicyQueue),
+                                 copy_queues = relation(BuildQueue, secondary=self.tbl_suite_build_queue_copy)))
 
         mapper(SuiteArchitecture, self.tbl_suite_architectures,
                properties = dict(suite_id = self.tbl_suite_architectures.c.suite,
-- 
2.39.2
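For reference, a minimal usage sketch of the renamed ORM layer follows. It assumes a configured dak database and the dbconn.py mappings added above; the queue name 'buildd' and the pool-file lookup are illustrative placeholders rather than anything mandated by the patch.

    # Minimal sketch (not part of the patch): fetch a build queue and link a
    # pool file into it using the classes introduced above.
    from daklib.dbconn import DBConn, PoolFile, get_queue

    session = DBConn().session()

    # 'buildd' is one of the queues migrated into build_queue by update22
    queue = get_queue('buildd', session=session)

    # Any PoolFile attached to this session will do for illustration
    poolfile = session.query(PoolFile).first()

    if queue is not None and poolfile is not None:
        qf = queue.add_file_from_pool(poolfile)   # BuildQueueFile, or None on error
        if qf is not None:
            session.commit()                      # caller commits, per the docstring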