From: Mike O'Connor Date: Sat, 31 Oct 2009 10:29:34 +0000 (+0000) Subject: merge from ftp-master X-Git-Url: https://git.decadent.org.uk/gitweb/?p=dak.git;a=commitdiff_plain;h=3b50b545815298b77b8eb68930acb6fde01ea4d4 merge from ftp-master --- 3b50b545815298b77b8eb68930acb6fde01ea4d4 diff --cc dak/contents.py index 4211e98e,58c3aa6b..53d74227 --- a/dak/contents.py +++ b/dak/contents.py @@@ -39,10 -39,9 +39,11 @@@ import o import logging import gzip import threading + import traceback import Queue import apt_pkg +import datetime +import traceback from daklib import utils from daklib.binary import Binary from daklib.config import Config diff --cc dak/dakdb/update17.py index b5bbb3cc,d75bdb5b..beca9425 mode 100644,100755..100755 --- a/dak/dakdb/update17.py +++ b/dak/dakdb/update17.py @@@ -52,9 -52,7 +52,10 @@@ def do_update(self) c.execute("""CREATE INDEX ind_bin_contents_binary ON bin_contents(binary_id);""" ) + c.execute("GRANT ALL ON bin_contents TO ftpmaster;") + c.execute("GRANT SELECT ON bin_contents TO public;") + c.execute("UPDATE config SET value = '17' WHERE name = 'db_revision'") + self.db.commit() except psycopg2.ProgrammingError, msg: diff --cc dak/dakdb/update19.py index f530375c,49a4dbc7..49a4dbc7 mode 100644,100755..100644 --- a/dak/dakdb/update19.py +++ b/dak/dakdb/update19.py diff --cc dak/dakdb/update23.py index 00000000,00000000..9d97172b new file mode 100644 --- /dev/null +++ b/dak/dakdb/update23.py @@@ -1,0 -1,0 +1,239 @@@ ++#!/usr/bin/env python ++# coding=utf8 ++ ++""" ++Adding a trainee field to the process-new notes ++ ++@contact: Debian FTP Master ++@copyright: 2009 Mike O'Connor ++@license: GNU General Public License version 2 or later ++""" ++ ++# This program is free software; you can redistribute it and/or modify ++# it under the terms of the GNU General Public License as published by ++# the Free Software Foundation; either version 2 of the License, or ++# (at your option) any later version. 
++ ++# This program is distributed in the hope that it will be useful, ++# but WITHOUT ANY WARRANTY; without even the implied warranty of ++# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ++# GNU General Public License for more details. ++ ++# You should have received a copy of the GNU General Public License ++# along with this program; if not, write to the Free Software ++# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ++ ++################################################################################ ++ ++ ++################################################################################ ++ ++import psycopg2 ++import time ++from daklib.dak_exceptions import DBUpdateError ++ ++################################################################################ ++ ++def suites(): ++ """ ++ return a list of suites to operate on ++ """ ++ if Config().has_key( "%s::%s" %(options_prefix,"Suite")): ++ suites = utils.split_args(Config()[ "%s::%s" %(options_prefix,"Suite")]) ++ else: ++ suites = [ 'unstable', 'testing' ] ++# suites = Config().SubTree("Suite").List() ++ ++ return suites ++ ++def arches(cursor, suite): ++ """ ++ return a list of archs to operate on ++ """ ++ arch_list = [] ++ cursor.execute("""SELECT s.architecture, a.arch_string ++ FROM suite_architectures s ++ JOIN architecture a ON (s.architecture=a.id) ++ WHERE suite = :suite""", {'suite' : suite }) ++ ++ while True: ++ r = cursor.fetchone() ++ if not r: ++ break ++ ++ if r[1] != "source" and r[1] != "all": ++ arch_list.append((r[0], r[1])) ++ ++ return arch_list ++ ++def do_update(self): ++ """ ++ Adding contents table as first step to maybe, finally getting rid ++ of apt-ftparchive ++ """ ++ ++ print __doc__ ++ ++ try: ++ c = self.db.cursor() ++ ++ c.execute("""CREATE TABLE pending_bin_contents ( ++ id serial NOT NULL, ++ package text NOT NULL, ++ version debversion NOT NULL, ++ arch int NOT NULL, ++ filename text NOT NULL, ++ type int NOT NULL, ++ PRIMARY 
KEY(id))""" ); ++ ++ c.execute("""CREATE TABLE deb_contents ( ++ filename text, ++ section text, ++ package text, ++ binary_id integer, ++ arch integer, ++ suite integer, ++ component text)""" ) ++ ++ c.execute("""CREATE TABLE udeb_contents ( ++ filename text, ++ section text, ++ package text, ++ binary_id integer, ++ suite integer, ++ arch integer, ++ component text )""" ) ++ ++ c.execute("""ALTER TABLE ONLY deb_contents ++ ADD CONSTRAINT deb_contents_arch_fkey ++ FOREIGN KEY (arch) REFERENCES architecture(id) ++ ON DELETE CASCADE;""") ++ ++ c.execute("""ALTER TABLE ONLY udeb_contents ++ ADD CONSTRAINT udeb_contents_arch_fkey ++ FOREIGN KEY (arch) REFERENCES architecture(id) ++ ON DELETE CASCADE;""") ++ ++ c.execute("""ALTER TABLE ONLY deb_contents ++ ADD CONSTRAINT deb_contents_pkey ++ PRIMARY KEY (filename,package,arch,suite);""") ++ ++ c.execute("""ALTER TABLE ONLY udeb_contents ++ ADD CONSTRAINT udeb_contents_pkey ++ PRIMARY KEY (filename,package,arch,suite);""") ++ ++ c.execute("""ALTER TABLE ONLY deb_contents ++ ADD CONSTRAINT deb_contents_suite_fkey ++ FOREIGN KEY (suite) REFERENCES suite(id) ++ ON DELETE CASCADE;""") ++ ++ c.execute("""ALTER TABLE ONLY udeb_contents ++ ADD CONSTRAINT udeb_contents_suite_fkey ++ FOREIGN KEY (suite) REFERENCES suite(id) ++ ON DELETE CASCADE;""") ++ ++ c.execute("""ALTER TABLE ONLY deb_contents ++ ADD CONSTRAINT deb_contents_binary_fkey ++ FOREIGN KEY (binary_id) REFERENCES binaries(id) ++ ON DELETE CASCADE;""") ++ ++ c.execute("""ALTER TABLE ONLY udeb_contents ++ ADD CONSTRAINT udeb_contents_binary_fkey ++ FOREIGN KEY (binary_id) REFERENCES binaries(id) ++ ON DELETE CASCADE;""") ++ ++ c.execute("""CREATE INDEX ind_deb_contents_binary ON deb_contents(binary_id);""" ) ++ ++ ++ suites = self.suites() ++ ++ for suite in [i.lower() for i in suites]: ++ suite_id = DBConn().get_suite_id(suite) ++ arch_list = arches(c, suite_id) ++ arch_list = arches(c, suite_id) ++ ++ for (arch_id,arch_str) in arch_list: ++ c.execute( "CREATE INDEX 
ind_deb_contents_%s_%s ON deb_contents (arch,suite) WHERE (arch=2 OR arch=%d) AND suite=%d"%(arch_str,suite,arch_id,suite_id) )
++
++            for section, sname in [("debian-installer","main"),
++                                   ("non-free/debian-installer", "nonfree")]:
++                c.execute( "CREATE INDEX ind_udeb_contents_%s_%s ON udeb_contents (section,suite) WHERE section='%s' AND suite=%d"%(sname,suite,section,suite_id) )
++
++
++        c.execute( """CREATE OR REPLACE FUNCTION update_contents_for_bin_a() RETURNS trigger AS $$
++    event = TD["event"]
++    if event == "DELETE" or event == "UPDATE":
++
++        plpy.execute(plpy.prepare("DELETE FROM deb_contents WHERE binary_id=$1 and suite=$2",
++                                  ["int","int"]),
++                     [TD["old"]["bin"], TD["old"]["suite"]])
++
++    if event == "INSERT" or event == "UPDATE":
++
++       content_data = plpy.execute(plpy.prepare(
++            """SELECT s.section, b.package, b.architecture, c.name, ot.type
++            FROM override o
++            JOIN override_type ot on o.type=ot.id
++            JOIN binaries b on b.package=o.package
++            JOIN files f on b.file=f.id
++            JOIN location l on l.id=f.location
++            JOIN section s on s.id=o.section
++            JOIN component c on c.id=l.component
++            WHERE b.id=$1
++            AND o.suite=$2
++            """,
++            ["int", "int"]),
++            [TD["new"]["bin"], TD["new"]["suite"]])[0]
++
++       component_str = "";
++       if not content_data["name"] == "main":
++           component_str=content_data["name"]+"/"
++
++       filenames = plpy.execute(plpy.prepare(
++           "SELECT bc.file FROM bin_contents bc where bc.binary_id=$1",
++           ["int"]),
++           [TD["new"]["bin"]])
++
++       for filename in filenames:
++           plpy.execute(plpy.prepare(
++               """INSERT INTO deb_contents
++                   (filename,section,package,binary_id,arch,suite,component)
++                   VALUES($1,$2,$3,$4,$5,$6,$7)""",
++               ["text","text","text","int","int","int","text"]),
++               [filename["filename"],
++                content_data["section"],
++                content_data["package"],
++                TD["new"]["bin"],
++                content_data["architecture"],
++                TD["new"]["suite"],
++                component_str])
++$$ LANGUAGE plpythonu VOLATILE SECURITY DEFINER;
++""")
++
++
++        c.execute( """CREATE OR REPLACE 
FUNCTION update_contents_for_override() RETURNS trigger AS $$ ++ event = TD["event"] ++ if event == "UPDATE": ++ ++ otype = plpy.execute(plpy.prepare("SELECT type from override_type where id=$1",["int"]),TD["new"]["type"] )[0]; ++ if otype["type"].endswith("deb"): ++ table_name = "%s_contents" % otype["type"] ++ plpy.execute(plpy.prepare("UPDATE %s set sections=$1" % table_name ++ ["text"]), ++ [TD["new"]["section"]]) ++ ++$$ LANGUAGE plpythonu VOLATILE SECURITY DEFINER; ++""") ++ c.execute( """CREATE TRIGGER bin_associations_contents_trigger ++ AFTER INSERT OR UPDATE OR DELETE ON bin_associations ++ FOR EACH ROW EXECUTE PROCEDURE update_contents_for_bin_a();""") ++ c.execute("""CREATE TRIGGER override_contents_trigger ++ AFTER UPDATE ON override ++ FOR EACH ROW EXECUTE PROCEDURE update_contents_for_override();""") ++ ++ self.db.commit() ++ ++ except psycopg2.ProgrammingError, msg: ++ self.db.rollback() ++ raise DBUpdateError, "Unable to apply process-new update 14, rollback issued. Error message : %s" % (str(msg)) ++ diff --cc daklib/binary.py index 8a0cf092,c6ee96f8..a70aadb9 --- a/daklib/binary.py +++ b/daklib/binary.py @@@ -257,10 -256,11 +259,12 @@@ class Binary(object) os.chdir(cwd) + return result + __all__.append('Binary') -def copy_temporary_contents(package, version, archname, deb, reject, session=None): + +def copy_temporary_contents(binary, bin_association, reject, session=None): """ copy the previously stored contents from the temp table to the permanant one diff --cc daklib/dbconn.py index 18f427d4,9e5afec7..921f1daa --- a/daklib/dbconn.py +++ b/daklib/dbconn.py @@@ -2094,16 -2252,18 +2274,20 @@@ class DBConn(Singleton) def __setuptables(self): self.tbl_architecture = Table('architecture', self.db_meta, autoload=True) self.tbl_archive = Table('archive', self.db_meta, autoload=True) + self.tbl_bin_contents = Table('bin_contents', self.db_meta, autoload=True) self.tbl_bin_associations = Table('bin_associations', self.db_meta, autoload=True) 
self.tbl_binaries = Table('binaries', self.db_meta, autoload=True) + self.tbl_binary_acl = Table('binary_acl', self.db_meta, autoload=True) + self.tbl_binary_acl_map = Table('binary_acl_map', self.db_meta, autoload=True) self.tbl_component = Table('component', self.db_meta, autoload=True) self.tbl_config = Table('config', self.db_meta, autoload=True) self.tbl_content_associations = Table('content_associations', self.db_meta, autoload=True) self.tbl_content_file_names = Table('content_file_names', self.db_meta, autoload=True) self.tbl_content_file_paths = Table('content_file_paths', self.db_meta, autoload=True) + self.tbl_changes_pending_files = Table('changes_pending_files', self.db_meta, autoload=True) + self.tbl_changes_pool_files = Table('changes_pool_files', self.db_meta, autoload=True) self.tbl_dsc_files = Table('dsc_files', self.db_meta, autoload=True) + self.tbl_deb_contents = Table('deb_contents', self.db_meta, autoload=True) self.tbl_files = Table('files', self.db_meta, autoload=True) self.tbl_fingerprint = Table('fingerprint', self.db_meta, autoload=True) self.tbl_keyrings = Table('keyrings', self.db_meta, autoload=True) @@@ -2112,20 -2274,22 +2298,23 @@@ self.tbl_new_comments = Table('new_comments', self.db_meta, autoload=True) self.tbl_override = Table('override', self.db_meta, autoload=True) self.tbl_override_type = Table('override_type', self.db_meta, autoload=True) - self.tbl_pending_content_associations = Table('pending_content_associations', self.db_meta, autoload=True) + self.tbl_pending_bin_contents = Table('pending_bin_contents', self.db_meta, autoload=True) self.tbl_priority = Table('priority', self.db_meta, autoload=True) self.tbl_queue = Table('queue', self.db_meta, autoload=True) - self.tbl_queue_build = Table('queue_build', self.db_meta, autoload=True) + self.tbl_queue_files = Table('queue_files', self.db_meta, autoload=True) self.tbl_section = Table('section', self.db_meta, autoload=True) self.tbl_source = Table('source', self.db_meta, 
autoload=True) + self.tbl_source_acl = Table('source_acl', self.db_meta, autoload=True) self.tbl_src_associations = Table('src_associations', self.db_meta, autoload=True) self.tbl_src_format = Table('src_format', self.db_meta, autoload=True) self.tbl_src_uploaders = Table('src_uploaders', self.db_meta, autoload=True) self.tbl_suite = Table('suite', self.db_meta, autoload=True) self.tbl_suite_architectures = Table('suite_architectures', self.db_meta, autoload=True) self.tbl_suite_src_formats = Table('suite_src_formats', self.db_meta, autoload=True) + self.tbl_suite_queue_copy = Table('suite_queue_copy', self.db_meta, autoload=True) + self.tbl_udeb_contents = Table('udeb_contents', self.db_meta, autoload=True) self.tbl_uid = Table('uid', self.db_meta, autoload=True) + self.tbl_upload_blocks = Table('upload_blocks', self.db_meta, autoload=True) def __setupmappers(self): mapper(Architecture, self.tbl_architecture, @@@ -2253,14 -2418,12 +2467,13 @@@ mapper(Queue, self.tbl_queue, properties = dict(queue_id = self.tbl_queue.c.id)) - mapper(QueueBuild, self.tbl_queue_build, - properties = dict(suite_id = self.tbl_queue_build.c.suite, - queue_id = self.tbl_queue_build.c.queue, - queue = relation(Queue, backref='queuebuild'))) + mapper(QueueFile, self.tbl_queue_files, + properties = dict(queue = relation(Queue, backref='queuefiles'), + poolfile = relation(PoolFile, backref='queueinstances'))) mapper(Section, self.tbl_section, - properties = dict(section_id = self.tbl_section.c.id)) + properties = dict(section_id = self.tbl_section.c.id, + section=self.tbl_section.c.section)) mapper(DBSource, self.tbl_source, properties = dict(source_id = self.tbl_source.c.id,