--- /dev/null
+#!/usr/bin/env python
+# coding=utf8
+
+"""
+Add table for build queue files from policy queues.
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2011 Ansgar Burchardt <ansgar@debian.org>
+@license: GNU General Public License version 2 or later
+"""
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+################################################################################
+
+import psycopg2
+from daklib.dak_exceptions import DBUpdateError
+
+################################################################################
+def do_update(self):
+    """
+    Add table for build queue files from policy queues.
+
+    Creates the build_queue_policy_files link table (which build queue
+    holds a copy of which policy-queue file) and bumps db_revision to 53.
+    Rolls the transaction back and raises DBUpdateError on failure.
+    """
+    # NOTE: Python 2 syntax (print statement, "except E, msg") is the
+    # convention for these dak update scripts.
+    print __doc__
+    try:
+        c = self.db.cursor()
+
+        # Link table between build queues and files that originate from a
+        # policy queue (changes_pending_files) rather than from the pool.
+        # "filename" is stored redundantly so the on-disk name survives
+        # even if the referenced row goes away via ON DELETE CASCADE.
+        c.execute("""
+            CREATE TABLE build_queue_policy_files (
+                build_queue_id INTEGER NOT NULL REFERENCES build_queue(id) ON DELETE CASCADE,
+                file_id INTEGER NOT NULL REFERENCES changes_pending_files(id) ON DELETE CASCADE,
+                filename TEXT NOT NULL,
+                created TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
+                lastused TIMESTAMP WITHOUT TIME ZONE,
+                PRIMARY KEY (build_queue_id, file_id)
+            )""")
+
+        c.execute("UPDATE config SET value = '53' WHERE name = 'db_revision'")
+        self.db.commit()
+
+    except psycopg2.ProgrammingError, msg:
+        self.db.rollback()
+        raise DBUpdateError, 'Unable to apply sick update 53, rollback issued. Error message : %s' % (str(msg))
--- /dev/null
+#!/usr/bin/env python
+# coding=utf8
+
+"""
+Add send_to_build_queues to policy_queue table
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2011 Ansgar Burchardt <ansgar@debian.org>
+@license: GNU General Public License version 2 or later
+"""
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+################################################################################
+
+import psycopg2
+from daklib.dak_exceptions import DBUpdateError
+
+################################################################################
+def do_update(self):
+    """
+    Add send_to_build_queues to policy_queue table
+
+    Adds the boolean column (default false), enables it for the embargo
+    queues, and bumps db_revision to 54.  Rolls the transaction back and
+    raises DBUpdateError on failure.
+    """
+    print __doc__
+    try:
+        c = self.db.cursor()
+
+        # New flag: should packages accepted from this policy queue also be
+        # exported into the suites' build queues?  Defaults to off.
+        c.execute("""
+            ALTER TABLE policy_queue ADD COLUMN send_to_build_queues BOOLEAN NOT NULL DEFAULT 'f'
+            """)
+        # Only the (dis)embargo queues used this behaviour before.
+        c.execute("""
+            UPDATE policy_queue SET send_to_build_queues='t' WHERE queue_name IN ('embargo', 'disembargo')
+            """)
+
+        c.execute("UPDATE config SET value = '54' WHERE name = 'db_revision'")
+        self.db.commit()
+
+    except psycopg2.ProgrammingError, msg:
+        self.db.rollback()
+        raise DBUpdateError, 'Unable to apply sick update 54, rollback issued. Error message : %s' % (str(msg))
################################################################################
Cnf = None
-required_database_schema = 52
+required_database_schema = 54
################################################################################
if (not self.changes.has_key(key)) or (not self.changes[key]):
self.changes[key]='missing'
+ def __get_file_from_pool(self, filename, entry, session):
+ cnf = Config()
+
+ poolname = poolify(entry["source"], entry["component"])
+ l = get_location(cnf["Dir::Pool"], entry["component"], session=session)
+
+ found, poolfile = check_poolfile(os.path.join(poolname, filename),
+ entry['size'],
+ entry["md5sum"],
+ l.location_id,
+ session=session)
+
+ if found is None:
+ Logger.log(["E: Found multiple files for pool (%s) for %s" % (chg_fn, entry["component"])])
+ return None
+ elif found is False and poolfile is not None:
+ Logger.log(["E: md5sum/size mismatch for %s in pool" % (chg_fn)])
+ return None
+ else:
+ if poolfile is None:
+ Logger.log(["E: Could not find %s in pool" % (chg_fn)])
+ return None
+ else:
+ return poolfile
+
@session_wrapper
def add_known_changes(self, dirpath, in_queue=None, session=None):
"""add "missing" in fields which we will require for the known_changes table"""
except IOError:
# Can't find the file, try to look it up in the pool
- poolname = poolify(entry["source"], entry["component"])
- l = get_location(cnf["Dir::Pool"], entry["component"], session=session)
-
- found, poolfile = check_poolfile(os.path.join(poolname, chg_fn),
- entry['size'],
- entry["md5sum"],
- l.location_id,
- session=session)
-
- if found is None:
- Logger.log(["E: Found multiple files for pool (%s) for %s" % (chg_fn, entry["component"])])
- elif found is False and poolfile is not None:
- Logger.log(["E: md5sum/size mismatch for %s in pool" % (chg_fn)])
- else:
- if poolfile is None:
- Logger.log(["E: Could not find %s in pool" % (chg_fn)])
- else:
- chg.poolfiles.append(poolfile)
+ poolfile = self.__get_file_from_pool(chg_fn, entry, session)
+ if poolfile:
+ chg.poolfiles.append(poolfile)
chg.files = files
+ # Add files referenced in .dsc, but not included in .changes
+ for name, entry in self.dsc_files.items():
+ if self.files.has_key(name):
+ continue
+
+ entry['source'] = self.changes['source']
+ poolfile = self.__get_file_from_pool(name, entry, session)
+ if poolfile:
+ chg.poolfiles.append(poolfile)
+
session.commit()
chg = session.query(DBChange).filter_by(changesname = self.changes_file).one();
try:
# Grab files we want to include
newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
+ newer += session.query(BuildQueuePolicyFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueuePolicyFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
# Write file list with newer files
(fl_fd, fl_name) = mkstemp()
for n in newer:
# Grab files older than our execution time
older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
+ older += session.query(BuildQueuePolicyFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueuePolicyFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
for o in older:
killdb = False
if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release') or f.startswith('advisory'):
continue
- try:
- r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
- except NoResultFound:
+ if not self.contains_filename(f):
fp = os.path.join(self.path, f)
if dryrun:
Logger.log(["I: Would remove unused link %s" % fp])
except OSError:
Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])
+    def contains_filename(self, filename):
+        """
+        Check whether this build queue is supposed to contain C{filename}.
+
+        Both regular pool-backed entries (BuildQueueFile) and entries
+        copied in from policy queues (BuildQueuePolicyFile) count.
+
+        @type filename: string
+        @param filename: filename relative to the queue directory
+
+        @rtype: Boolean
+        @return: True if filename is supposed to be in the queue; False otherwise
+        """
+        # NOTE(review): object_session() is a static-style lookup, so the
+        # freshly created session from DBConn().session() is discarded and
+        # the queue object's own session is used -- presumably intentional,
+        # matching add_file_from_policy_queue below; confirm.
+        session = DBConn().session().object_session(self)
+        if session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id, filename = filename).count() > 0:
+            return True
+        elif session.query(BuildQueuePolicyFile).filter_by(build_queue = self, filename = filename).count() > 0:
+            return True
+        return False
+
def add_file_from_pool(self, poolfile):
"""Copies a file into the pool. Assumes that the PoolFile object is
attached to the same SQLAlchemy session as the Queue object is.
return qf
+    def add_changes_from_policy_queue(self, policyqueue, changes):
+        """
+        Copies a changes from a policy queue together with its poolfiles.
+
+        The caller is responsible for committing the session afterwards.
+
+        @type policyqueue: PolicyQueue
+        @param policyqueue: policy queue to copy the changes from
+
+        @type changes: DBChange
+        @param changes: changes to copy to this build queue
+        """
+        # Files that still live in the policy queue directory ...
+        for policyqueuefile in changes.files:
+            self.add_file_from_policy_queue(policyqueue, policyqueuefile)
+        # ... plus files already in the pool (e.g. referenced by the .dsc).
+        # NOTE(review): per-file failures (both helpers can return None)
+        # are not propagated to the caller.
+        for poolfile in changes.poolfiles:
+            self.add_file_from_pool(poolfile)
+
+    def add_file_from_policy_queue(self, policyqueue, policyqueuefile):
+        """
+        Copies a file from a policy queue.
+        Assumes that the policyqueuefile is attached to the same SQLAlchemy
+        session as the Queue object is. The caller is responsible for
+        committing after calling this function.
+
+        @type policyqueue: PolicyQueue
+        @param policyqueue: policy queue to copy the file from
+
+        @type policyqueuefile: ChangePendingFile
+        @param policyqueuefile: file to be added to the build queue
+
+        @return: the BuildQueuePolicyFile row, or None if copying the
+                 file from disk failed
+        """
+        session = DBConn().session().object_session(policyqueuefile)
+
+        # Is the file already there?  If so, just refresh its timestamp so
+        # the cleanup job does not expire it.
+        try:
+            f = session.query(BuildQueuePolicyFile).filter_by(build_queue=self, file=policyqueuefile).one()
+            f.lastused = datetime.now()
+            return f
+        except NoResultFound:
+            pass # continue below
+
+        # We have to add the file.
+        f = BuildQueuePolicyFile()
+        f.build_queue = self
+        f.file = policyqueuefile
+        f.filename = policyqueuefile.filename
+
+        source = os.path.join(policyqueue.path, policyqueuefile.filename)
+        target = f.fullpath
+        try:
+            # Always copy files from policy queues as they might move around.
+            # NOTE(review): the function-local import presumably avoids a
+            # circular module dependency -- confirm before hoisting it.
+            import utils
+            utils.copy(source, target)
+        except OSError:
+            # Copy failed; the DB row is never added and the caller gets
+            # None (no error is logged here).
+            return None
+
+        session.add(f)
+        return f
__all__.append('BuildQueue')
################################################################################
class BuildQueueFile(object):
+ """
+ BuildQueueFile represents a file in a build queue coming from a pool.
+ """
+
def __init__(self, *args, **kwargs):
pass
################################################################################
+class BuildQueuePolicyFile(object):
+    """
+    BuildQueuePolicyFile represents a file in a build queue that comes from a
+    policy queue (and not a pool).
+    """
+
+    def __init__(self, *args, **kwargs):
+        # Attributes are populated by the SQLAlchemy mapper
+        # (build_queue, file, filename, lastused, ...).
+        pass
+
+    # NOTE(review): kept commented out -- "filename" is a real mapped
+    # column (stored redundantly in build_queue_policy_files), so deriving
+    # it from self.file would shadow the column attribute.
+    #@property
+    #def filename(self):
+    #    return self.file.filename
+
+    @property
+    def fullpath(self):
+        # Absolute path of the copy inside the build queue directory.
+        return os.path.join(self.build_queue.path, self.filename)
+
+__all__.append('BuildQueuePolicyFile')
+
+################################################################################
+
class ChangePendingBinary(object):
def __init__(self, *args, **kwargs):
pass
'binary_acl_map',
'build_queue',
'build_queue_files',
+ 'build_queue_policy_files',
'changelogs_text',
'changes',
'component',
properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
poolfile = relation(PoolFile, backref='buildqueueinstances')))
+ mapper(BuildQueuePolicyFile, self.tbl_build_queue_policy_files,
+ properties = dict(
+ build_queue = relation(BuildQueue, backref='policy_queue_files'),
+ file = relation(ChangePendingFile, lazy='joined')))
+
mapper(DBBinary, self.tbl_binaries,
properties = dict(binary_id = self.tbl_binaries.c.id,
package = self.tbl_binaries.c.package,
u.move_to_queue(queue)
chg.in_queue_id = queue.policy_queue_id
session.add(chg)
+
+ # send to build queues
+ if queue.send_to_build_queues:
+ for suite_name in u.pkg.changes["distribution"].keys():
+ suite = get_suite(suite_name, session)
+ for q in suite.copy_queues:
+ q.add_changes_from_policy_queue(queue, chg)
+
session.commit()
# Check for override disparities
package_to_queue(u, summary, short_summary,
polq, chg, session,
announce=None)
- for suite_name in u.pkg.changes["distribution"].keys():
- suite = get_suite(suite_name, session)
- for q in suite.copy_queues:
- for f in u.pkg.files.keys():
- copyfile(os.path.join(polq.path, f), os.path.join(q.path, f))
#
#################################################################################
#
package_to_queue(u, summary, short_summary,
polq, chg, session,
announce=None)
- for suite_name in u.pkg.changes["distribution"].keys():
- suite = get_suite(suite_name, session)
- for q in suite.copy_queues:
- for f in u.pkg.files.keys():
- copyfile(os.path.join(polq.path, f), os.path.join(q.path, f))
################################################################################