Merge remote branch 'origin/master' into contents
[dak.git] / daklib / dbconn.py
index fd8ab7caa9f467c19c70026184d49c5f7dfad892..d4a1d99e5eb7cd9aa8d384988143965a9e090711 100644
@@ -37,7 +37,9 @@ import os
 import re
 import psycopg2
 import traceback
-from datetime import datetime
+from datetime import datetime, timedelta
+from errno import ENOENT
+from tempfile import mkstemp, mkdtemp
 
 from inspect import getargspec
 
@@ -50,6 +52,8 @@ from sqlalchemy import types as sqltypes
 from sqlalchemy.exc import *
 from sqlalchemy.orm.exc import NoResultFound
 
+# Only import Config until Queue stuff is changed to store its config
+# in the database
 from config import Config
 from textutils import fix_maintainer
 
@@ -429,12 +433,188 @@ __all__.append('BinaryACLMap')
 
 ################################################################################
 
+MINIMAL_APT_CONF="""
+Dir
+{
+   ArchiveDir "%(archivepath)s";
+   OverrideDir "/srv/ftp.debian.org/scripts/override/";
+   CacheDir "/srv/ftp.debian.org/database/";
+};
+
+Default
+{
+   Packages::Compress ". bzip2 gzip";
+   Sources::Compress ". bzip2 gzip";
+   DeLinkLimit 0;
+   FileMode 0664;
+};
+
+bindirectory "incoming"
+{
+   Packages "Packages";
+   Contents " ";
+
+   BinOverride "override.sid.all3";
+   BinCacheDB "packages-accepted.db";
+
+   FileList "%(filelist)s";
+
+   PathPrefix "";
+   Packages::Extensions ".deb .udeb";
+};
+
+bindirectory "incoming/"
+{
+   Sources "Sources";
+   BinOverride "override.sid.all3";
+   SrcOverride "override.sid.all3.src";
+   FileList "%(filelist)s";
+};
+"""
+
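+# Illustrative rendering only (hypothetical paths): write_metadata() fills
+# the two substitutions with the queue's on-disk root and a freshly written
+# temporary file list, e.g.
+#   MINIMAL_APT_CONF % {'archivepath': '/srv/buildd',
+#                       'filelist': '/tmp/tmpWw3Yzq'}
+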
 class BuildQueue(object):
     def __init__(self, *args, **kwargs):
         pass
 
     def __repr__(self):
-        return '<Queue %s>' % self.queue_name
+        return '<BuildQueue %s>' % self.queue_name
+
+    def write_metadata(self, starttime, force=False):
+        # Do we write out metafiles?
+        if not (force or self.generate_metadata):
+            return
+
+        session = DBConn().session().object_session(self)
+
+        fl_fd = fl_name = ac_fd = ac_name = None
+        tempdir = None
+        arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
+        startdir = os.getcwd()
+
+        try:
+            # Grab files we want to include
+            newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
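+            # i.e. keep anything whose lastused is still within
+            # stay_of_execution seconds of starttime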
+            # Write file list with newer files
+            (fl_fd, fl_name) = mkstemp()
+            for n in newer:
+                os.write(fl_fd, '%s\n' % n.fullpath)
+            os.close(fl_fd)
+            fl_fd = None
+
+            # Write minimal apt.conf
+            # TODO: Remove hardcoding from template
+            (ac_fd, ac_name) = mkstemp()
+            os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
+                                                'filelist': fl_name})
+            os.close(ac_fd)
+            ac_fd = None
+
+            # Run apt-ftparchive generate
+            os.chdir(os.path.dirname(ac_name))
+            os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))
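+            # e.g. with ac_name == '/tmp/tmpAbC123' (illustrative) this runs
+            # 'apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate tmpAbC123'
+            # from inside /tmp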
+
+            # Run apt-ftparchive release
+            # TODO: Eww - fix this
+            bname = os.path.basename(self.path)
+            os.chdir(self.path)
+            os.chdir('..')
+
+            # We have to remove the Release file otherwise it'll be included in the
+            # new one
+            try:
+                os.unlink(os.path.join(bname, 'Release'))
+            except OSError:
+                pass
+
+            os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))
+
+            # Sign if necessary
+            if self.signingkey:
+                cnf = Config()
+                keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
+                if cnf.has_key("Dinstall::SigningPubKeyring"):
+                    keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]
+
+                os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))
+
+            # Move the files if we got this far
+            os.rename('Release', os.path.join(bname, 'Release'))
+            if self.signingkey:
+                os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))
+
+        # Clean up any left behind files
+        finally:
+            os.chdir(startdir)
+            if fl_fd is not None:
+                try:
+                    os.close(fl_fd)
+                except OSError:
+                    pass
+
+            if fl_name:
+                try:
+                    os.unlink(fl_name)
+                except OSError:
+                    pass
+
+            if ac_fd is not None:
+                try:
+                    os.close(ac_fd)
+                except OSError:
+                    pass
+
+            if ac_name:
+                try:
+                    os.unlink(ac_name)
+                except OSError:
+                    pass
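+
+    # Illustrative call pattern (assumed, not part of this change): pass a
+    # single timestamp per run so every queue compares
+    # lastused + stay_of_execution against the same instant, e.g.
+    #   now = datetime.now()
+    #   for q in session.query(BuildQueue).all():
+    #       q.write_metadata(now)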
+
+    def clean_and_update(self, starttime, Logger, dryrun=False):
+        """WARNING: This routine commits for you"""
+        session = DBConn().session().object_session(self)
+
+        if self.generate_metadata and not dryrun:
+            self.write_metadata(starttime)
+
+        # Grab files older than our execution time
+        older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
+
+        for o in older:
+            killdb = False
+            try:
+                if dryrun:
+                    Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
+                else:
+                    Logger.log(["I: Removing %s from the queue" % o.fullpath])
+                    os.unlink(o.fullpath)
+                    killdb = True
+            except OSError, e:
+                # If it wasn't there, don't worry
+                if e.errno == ENOENT:
+                    killdb = True
+                else:
+                    # TODO: Replace with proper logging call
+                    Logger.log(["E: Could not remove %s" % o.fullpath])
+
+            if killdb:
+                session.delete(o)
+
+        session.commit()
+
+        for f in os.listdir(self.path):
+            if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release'):
+                continue
+
+            try:
+                r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
+            except NoResultFound:
+                fp = os.path.join(self.path, f)
+                if dryrun:
+                    Logger.log(["I: Would remove unused link %s" % fp])
+                else:
+                    Logger.log(["I: Removing unused link %s" % fp])
+                    try:
+                        os.unlink(fp)
+                    except OSError:
+                        Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])
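+
+    # Sketch of the expected driver loop (Options["No-Action"] is assumed
+    # here as dak's usual dry-run flag):
+    #   starttime = datetime.now()
+    #   for q in session.query(BuildQueue).all():
+    #       q.clean_and_update(starttime, Logger, dryrun=Options["No-Action"])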
 
     def add_file_from_pool(self, poolfile):
         """Copies a file into the pool.  Assumes that the PoolFile object is
@@ -450,7 +630,7 @@ class BuildQueue(object):
                    # In this case, update the BuildQueueFile entry so we
                    # don't remove it too early
                    f.lastused = datetime.now()
-                   DBConn().session().object_session(pf).add(f)
+                   DBConn().session().object_session(poolfile).add(f)
                    return f
 
         # Prepare BuildQueueFile object
@@ -516,7 +696,12 @@ class BuildQueueFile(object):
         pass
 
     def __repr__(self):
-        return '<BuildQueueFile %s (%s)>' % (self.filename, self.queue_id)
+        return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)
+
+    @property
+    def fullpath(self):
+        return os.path.join(self.buildqueue.path, self.filename)
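+    # e.g. a queue at '/srv/buildd' holding 'foo_1.0_amd64.deb' gives
+    # '/srv/buildd/foo_1.0_amd64.deb' (illustrative values)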
+
 
 __all__.append('BuildQueueFile')
 
@@ -778,11 +963,16 @@ def insert_content_paths(binary_id, fullpaths, session=None):
     try:
         # Insert paths
         pathcache = {}
-        for fullpath in fullpaths:
-            if fullpath.startswith( './' ):
-                fullpath = fullpath[2:]
 
-            session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )", { 'filename': fullpath, 'id': binary_id}  )
+        def generate_path_dicts():
+            for fullpath in fullpaths:
+                if fullpath.startswith( './' ):
+                    fullpath = fullpath[2:]
+
+                yield {'filename': fullpath, 'id': binary_id}
+
+        session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
+                         generate_path_dicts() )
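+        # All rows are bound through this single session.execute() call; for
+        # a binary_id of 42 (illustrative) each yielded dict looks like
+        #   {'filename': 'usr/bin/foo', 'id': 42}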
 
         session.commit()
         if privatetrans:
@@ -1561,16 +1751,38 @@ __all__.append('get_override_type')
 
 ################################################################################
 
-class PendingContentAssociation(object):
+class DebContents(object):
     def __init__(self, *args, **kwargs):
         pass
 
     def __repr__(self):
-        return '<PendingContentAssociation %s>' % self.pca_id
+        return '<DebContents %s: %s>' % (self.package, self.filename)
+
+__all__.append('DebContents')
 
-__all__.append('PendingContentAssociation')
 
-def insert_pending_content_paths(package, fullpaths, session=None):
+class UdebContents(object):
+    def __init__(self, *args, **kwargs):
+        pass
+
+    def __repr__(self):
+        return '<UdebContents %s: %s>' % (self.package, self.filename)
+
+__all__.append('UdebContents')
+
+class PendingBinContents(object):
+    def __init__(self, *args, **kwargs):
+        pass
+
+    def __repr__(self):
+        return '<PendingBinContents %s>' % self.contents_id
+
+__all__.append('PendingBinContents')
+
+def insert_pending_content_paths(package,
+                                 is_udeb,
+                                 fullpaths,
+                                 session=None):
     """
     Make sure given paths are temporarily associated with given
     package
@@ -1599,32 +1811,27 @@ def insert_pending_content_paths(package, fullpaths, session=None):
         arch_id = arch.arch_id
 
         # Remove any already existing recorded files for this package
-        q = session.query(PendingContentAssociation)
+        q = session.query(PendingBinContents)
         q = q.filter_by(package=package['Package'])
         q = q.filter_by(version=package['Version'])
-        q = q.filter_by(architecture=arch_id)
+        q = q.filter_by(arch=arch_id)
         q.delete()
 
-        # Insert paths
-        pathcache = {}
         for fullpath in fullpaths:
-            (path, filename) = os.path.split(fullpath)
-
-            if path.startswith( "./" ):
-                path = path[2:]
-
-            filepath_id = get_or_set_contents_path_id(path, session)
-            filename_id = get_or_set_contents_file_id(filename, session)
 
-            pathcache[fullpath] = (filepath_id, filename_id)
+            if fullpath.startswith( "./" ):
+                fullpath = fullpath[2:]
 
-        for fullpath, dat in pathcache.items():
-            pca = PendingContentAssociation()
+            pca = PendingBinContents()
             pca.package = package['Package']
             pca.version = package['Version']
-            pca.filepath_id = dat[0]
-            pca.filename_id = dat[1]
+            pca.filename = fullpath
+            pca.arch = arch_id
+
+            if is_udeb:
+                pca.otype = 8 # gross
+            else:
+                pca.otype = 7 # also gross
             session.add(pca)
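+
+        # Illustrative call (hypothetical package dict and path):
+        #   insert_pending_content_paths({'Package': 'foo', 'Version': '1.0-1',
+        #                                 'Architecture': 'i386'},
+        #                                False, ['./usr/bin/foo'])
+        # strips the leading './' and records one PendingBinContents row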
 
         # Only commit if we set up the session ourself
@@ -2062,6 +2269,11 @@ def add_dsc_to_db(u, filename, session=None):
                 poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
                 pfs.append(poolfile)
                 files_id = poolfile.file_id
+        else:
+            poolfile = get_poolfile_by_id(files_id, session)
+            if poolfile is None:
+                utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
+            pfs.append(poolfile)
 
         df.poolfile_id = files_id
         session.add(df)
@@ -2516,51 +2728,57 @@ class DBConn(object):
             self.__createconn()
 
     def __setuptables(self):
-        self.tbl_architecture = Table('architecture', self.db_meta, autoload=True)
-        self.tbl_archive = Table('archive', self.db_meta, autoload=True)
-        self.tbl_bin_associations = Table('bin_associations', self.db_meta, autoload=True)
-        self.tbl_binaries = Table('binaries', self.db_meta, autoload=True)
-        self.tbl_binary_acl = Table('binary_acl', self.db_meta, autoload=True)
-        self.tbl_binary_acl_map = Table('binary_acl_map', self.db_meta, autoload=True)
-        self.tbl_build_queue = Table('build_queue', self.db_meta, autoload=True)
-        self.tbl_build_queue_files = Table('build_queue_files', self.db_meta, autoload=True)
-        self.tbl_component = Table('component', self.db_meta, autoload=True)
-        self.tbl_config = Table('config', self.db_meta, autoload=True)
-        self.tbl_content_associations = Table('content_associations', self.db_meta, autoload=True)
-        self.tbl_content_file_names = Table('content_file_names', self.db_meta, autoload=True)
-        self.tbl_content_file_paths = Table('content_file_paths', self.db_meta, autoload=True)
-        self.tbl_changes_pending_binary = Table('changes_pending_binaries', self.db_meta, autoload=True)
-        self.tbl_changes_pending_files = Table('changes_pending_files', self.db_meta, autoload=True)
-        self.tbl_changes_pending_files_map = Table('changes_pending_files_map', self.db_meta, autoload=True)
-        self.tbl_changes_pending_source = Table('changes_pending_source', self.db_meta, autoload=True)
-        self.tbl_changes_pending_source_files = Table('changes_pending_source_files', self.db_meta, autoload=True)
-        self.tbl_changes_pool_files = Table('changes_pool_files', self.db_meta, autoload=True)
-        self.tbl_dsc_files = Table('dsc_files', self.db_meta, autoload=True)
-        self.tbl_files = Table('files', self.db_meta, autoload=True)
-        self.tbl_fingerprint = Table('fingerprint', self.db_meta, autoload=True)
-        self.tbl_keyrings = Table('keyrings', self.db_meta, autoload=True)
-        self.tbl_changes = Table('changes', self.db_meta, autoload=True)
-        self.tbl_keyring_acl_map = Table('keyring_acl_map', self.db_meta, autoload=True)
-        self.tbl_location = Table('location', self.db_meta, autoload=True)
-        self.tbl_maintainer = Table('maintainer', self.db_meta, autoload=True)
-        self.tbl_new_comments = Table('new_comments', self.db_meta, autoload=True)
-        self.tbl_override = Table('override', self.db_meta, autoload=True)
-        self.tbl_override_type = Table('override_type', self.db_meta, autoload=True)
-        self.tbl_pending_content_associations = Table('pending_content_associations', self.db_meta, autoload=True)
-        self.tbl_policy_queue = Table('policy_queue', self.db_meta, autoload=True)
-        self.tbl_priority = Table('priority', self.db_meta, autoload=True)
-        self.tbl_section = Table('section', self.db_meta, autoload=True)
-        self.tbl_source = Table('source', self.db_meta, autoload=True)
-        self.tbl_source_acl = Table('source_acl', self.db_meta, autoload=True)
-        self.tbl_src_associations = Table('src_associations', self.db_meta, autoload=True)
-        self.tbl_src_format = Table('src_format', self.db_meta, autoload=True)
-        self.tbl_src_uploaders = Table('src_uploaders', self.db_meta, autoload=True)
-        self.tbl_suite = Table('suite', self.db_meta, autoload=True)
-        self.tbl_suite_architectures = Table('suite_architectures', self.db_meta, autoload=True)
-        self.tbl_suite_src_formats = Table('suite_src_formats', self.db_meta, autoload=True)
-        self.tbl_suite_build_queue_copy = Table('suite_build_queue_copy', self.db_meta, autoload=True)
-        self.tbl_uid = Table('uid', self.db_meta, autoload=True)
-        self.tbl_upload_blocks = Table('upload_blocks', self.db_meta, autoload=True)
+        tables = (
+            'architecture',
+            'archive',
+            'bin_associations',
+            'binaries',
+            'binary_acl',
+            'binary_acl_map',
+            'bin_contents',
+            'build_queue',
+            'build_queue_files',
+            'component',
+            'config',
+            'changes_pending_binaries',
+            'changes_pending_files',
+            'changes_pending_files_map',
+            'changes_pending_source',
+            'changes_pending_source_files',
+            'changes_pool_files',
+            'deb_contents',
+            'dsc_files',
+            'files',
+            'fingerprint',
+            'keyrings',
+            'changes',
+            'keyring_acl_map',
+            'location',
+            'maintainer',
+            'new_comments',
+            'override',
+            'override_type',
+            'pending_bin_contents',
+            'policy_queue',
+            'priority',
+            'section',
+            'source',
+            'source_acl',
+            'src_associations',
+            'src_format',
+            'src_uploaders',
+            'suite',
+            'suite_architectures',
+            'suite_src_formats',
+            'suite_build_queue_copy',
+            'udeb_contents',
+            'uid',
+            'upload_blocks',
+        )
+
+        for table_name in tables:
+            table = Table(table_name, self.db_meta, autoload=True)
+            setattr(self, 'tbl_%s' % table_name, table)
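+
+        # Net effect (illustrative): DBConn().tbl_suite, DBConn().tbl_deb_contents
+        # and friends are reflected Table objects, exactly as the former
+        # one-assignment-per-table block produced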
 
     def __setupmappers(self):
         mapper(Architecture, self.tbl_architecture,
@@ -2577,12 +2795,29 @@ class DBConn(object):
                                  binary_id = self.tbl_bin_associations.c.bin,
                                  binary = relation(DBBinary)))
 
         mapper(BuildQueue, self.tbl_build_queue,
                properties = dict(queue_id = self.tbl_build_queue.c.id))
 
         mapper(BuildQueueFile, self.tbl_build_queue_files,
                properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
                                  poolfile = relation(PoolFile, backref='buildqueueinstances')))
+
+        mapper(PendingBinContents, self.tbl_pending_bin_contents,
+               properties = dict(contents_id = self.tbl_pending_bin_contents.c.id,
+                                 filename = self.tbl_pending_bin_contents.c.filename,
+                                 package = self.tbl_pending_bin_contents.c.package,
+                                 version = self.tbl_pending_bin_contents.c.version,
+                                 arch = self.tbl_pending_bin_contents.c.arch,
+                                 otype = self.tbl_pending_bin_contents.c.type))
+
+        mapper(DebContents, self.tbl_deb_contents,
+               properties = dict(binary_id=self.tbl_deb_contents.c.binary_id,
+                                 package=self.tbl_deb_contents.c.package,
+                                 component=self.tbl_deb_contents.c.component,
+                                 arch=self.tbl_deb_contents.c.arch,
+                                 section=self.tbl_deb_contents.c.section,
+                                 filename=self.tbl_deb_contents.c.filename))
+
+        mapper(UdebContents, self.tbl_udeb_contents,
+               properties = dict(binary_id=self.tbl_udeb_contents.c.binary_id,
+                                 package=self.tbl_udeb_contents.c.package,
+                                 component=self.tbl_udeb_contents.c.component,
+                                 arch=self.tbl_udeb_contents.c.arch,
+                                 section=self.tbl_udeb_contents.c.section,
+                                 filename=self.tbl_udeb_contents.c.filename))
 
         mapper(DBBinary, self.tbl_binaries,
                properties = dict(binary_id = self.tbl_binaries.c.id,
@@ -2657,8 +2892,8 @@ class DBConn(object):
                                                      primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
                                  approved_for_id = self.tbl_changes.c.approved_for))
 
-        mapper(ChangePendingBinary, self.tbl_changes_pending_binary,
-               properties = dict(change_pending_binary_id = self.tbl_changes_pending_binary.c.id))
+        mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
+               properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))
 
         mapper(ChangePendingFile, self.tbl_changes_pending_files,
                properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id))
@@ -2674,6 +2909,11 @@ class DBConn(object):
                                  source_files = relation(ChangePendingFile,
                                                          secondary=self.tbl_changes_pending_source_files,
                                                          backref="pending_sources")))
+
         mapper(KeyringACLMap, self.tbl_keyring_acl_map,
                properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
                                  keyring = relation(Keyring, backref="keyring_acl_map"),
@@ -2696,6 +2936,7 @@ class DBConn(object):
         mapper(Override, self.tbl_override,
                properties = dict(suite_id = self.tbl_override.c.suite,
                                  suite = relation(Suite),
+                                 package = self.tbl_override.c.package,
                                  component_id = self.tbl_override.c.component,
                                  component = relation(Component),
                                  priority_id = self.tbl_override.c.priority,
@@ -2716,7 +2957,8 @@ class DBConn(object):
                properties = dict(priority_id = self.tbl_priority.c.id))
 
         mapper(Section, self.tbl_section,
-               properties = dict(section_id = self.tbl_section.c.id))
+               properties = dict(section_id = self.tbl_section.c.id,
+                                 section=self.tbl_section.c.section))
 
         mapper(DBSource, self.tbl_source,
                properties = dict(source_id = self.tbl_source.c.id,