]> git.decadent.org.uk Git - dak.git/blobdiff - dak/contents.py
merge from master
[dak.git] / dak / contents.py
index 073eb5015cfddab316f62d54cd11021760d42575..c435afc574879963c71b83218347fd744f2a3f7f 100755 (executable)
@@ -37,20 +37,16 @@ Create all the contents files
 import sys
 import os
 import logging
-import math
 import gzip
 import threading
-import traceback
 import Queue
 import apt_pkg
-import datetime #just for debugging, can be removed
 from daklib import utils
 from daklib.binary import Binary
 from daklib.config import Config
-from daklib.dbconn import DBConn
-################################################################################
+from daklib.dbconn import *
 
-log=None
+################################################################################
 
 def usage (exit_code=0):
     print """Usage: dak contents [options] command [arguments]
@@ -87,264 +83,115 @@ OPTIONS
 options_prefix = "Contents"
 options_prefix = "%s::Options" % options_prefix
 
-#log = logging.getLogger()
+log = logging.getLogger()
 
 ################################################################################
 
-# get all the arches delivered for a given suite
-# this should probably exist somehere common
-arches_q = """PREPARE arches_q(int) as
-              SELECT s.architecture, a.arch_string
-              FROM suite_architectures s
-              JOIN architecture a ON (s.architecture=a.id)
-                  WHERE suite = $1"""
-
-# find me the .deb for a given binary id
-debs_q = """PREPARE debs_q(int, int) as
-              SELECT b.id, f.filename FROM bin_assoc_by_arch baa
-              JOIN binaries b ON baa.bin=b.id
-              JOIN files f ON b.file=f.id
-              WHERE suite = $1
-                  AND arch = $2"""
-
-# find me all of the contents for a given .deb
-contents_q = """PREPARE contents_q(int,int) as
-                SELECT file, section, package
-                FROM deb_contents
-                WHERE suite = $1
-                AND (arch = $2 or arch=2)"""
-#                ORDER BY file"""
-                
-# find me all of the contents for a given .udeb
-udeb_contents_q = """PREPARE udeb_contents_q(int,int,text, int) as
-                SELECT file, section, package, arch
-                FROM udeb_contents
-                WHERE suite = $1
-                AND otype = $2
-                AND section = $3
-                and arch = $4
-                ORDER BY file"""
-
-
-# clear out all of the temporarily stored content associations
-# this should be run only after p-a has run.  after a p-a
-# run we should have either accepted or rejected every package
-# so there should no longer be anything in the queue
-remove_pending_contents_cruft_q = """DELETE FROM pending_content_associations"""
-
 class EndOfContents(object):
-    pass
-
-class OneAtATime(object):
     """
+    A sentry object for the end of the filename stream
     """
-    def __init__(self):
-        self.next_in_line = None
-        self.next_lock = threading.Condition()
-
-    def enqueue(self, next):
-        self.next_lock.acquire()
-        while self.next_in_line:
-            self.next_lock.wait()
-            
-        assert( not self.next_in_line )
-        self.next_in_line = next
-        self.next_lock.notify()
-        self.next_lock.release()
-
-    def dequeue(self):
-        self.next_lock.acquire()
-        while not self.next_in_line:
-            self.next_lock.wait()
-        result = self.next_in_line
-        self.next_in_line = None
-        self.next_lock.notify()
-        self.next_lock.release()
-        return result
-        
-
-class ContentsWorkThread(threading.Thread):
+    pass
+
+class GzippedContentWriter(object):
     """
+    An object which will write contents out to a Contents-$arch.gz
+    file on a separate thread
     """
-    def __init__(self, upstream, downstream):
-        threading.Thread.__init__(self)
-        self.upstream = upstream
-        self.downstream = downstream
 
-    def run(self):
-        while True:
-            try:
-                contents_file = self.upstream.dequeue()
-                if isinstance(contents_file,EndOfContents):
-                    if self.downstream:
-                        self.downstream.enqueue(contents_file)
-                    break
-
-                s = datetime.datetime.now()
-                print("%s start: %s" % (self,contents_file) )
-                self._run(contents_file)
-                print("%s finished: %s in %d seconds" % (self, contents_file, (datetime.datetime.now()-s).seconds ))
-                if self.downstream:
-                    self.downstream.enqueue(contents_file)
-            except:
-                traceback.print_exc()
-
-class QueryThread(ContentsWorkThread):
-    def __init__(self, upstream, downstream):
-        ContentsWorkThread.__init__(self, upstream, downstream)
-
-    def __str__(self):
-        return "QueryThread"
-    __repr__ = __str__
-
-    def _run(self, contents_file):
-        contents_file.query()
-
-class IngestThread(ContentsWorkThread):
-    def __init__(self, upstream, downstream):
-        ContentsWorkThread.__init__(self, upstream, downstream)
-
-    def __str__(self):
-        return "IngestThread"
-    __repr__ = __str__
-
-    def _run(self, contents_file):
-        contents_file.ingest()
-
-class SortThread(ContentsWorkThread):
-    def __init__(self, upstream, downstream):
-        ContentsWorkThread.__init__(self, upstream, downstream)
-
-    def __str__(self):
-        return "SortThread"
-    __repr__ = __str__
-
-    def _run(self, contents_file):
-        contents_file.sorted_keys = sorted(contents_file.filenames.keys())
-
-class OutputThread(ContentsWorkThread):
-    def __init__(self, upstream, downstream):
-        ContentsWorkThread.__init__(self, upstream, downstream)
-
-    def __str__(self):
-        return "OutputThread"
-    __repr__ = __str__
-
-    def _run(self, contents_file):
-        contents_file.open_file()
-        for fname in contents_file.sorted_keys:
-            contents_file.filehandle.write("%s\t%s\n" % (fname,contents_file.filenames[fname]))
-        contents_file.sorted_keys = None
-        contents_file.filenames.clear()
-    
-class GzipThread(ContentsWorkThread):
-    def __init__(self, upstream, downstream):
-        ContentsWorkThread.__init__(self, upstream, downstream)
-
-    def __str__(self):
-        return "GzipThread"
-    __repr__ = __str__
-
-    def _run(self, contents_file):
-        os.system("gzip -f %s" % contents_file.filename)
-    
-class ContentFile(object):
-    def __init__(self,
-                 filename,
-                 suite_str,
-                 suite_id,
-                 arch_str,
-                 arch_id):
-
-        self.filename = filename
-        self.filenames = {}
-        self.sorted_keys = None
-        self.suite_str = suite_str
-        self.suite_id = suite_id
-        self.arch_str = arch_str
-        self.arch_id = arch_id
-        self.cursor = None
-        self.filehandle = None
-
-    def __str__(self):
-        return self.filename
-    __repr__ = __str__
-
-
-    def cleanup(self):
-        self.filenames = None
-        self.sortedkeys = None
-        self.filehandle.close()
-        self.cursor.close()
-
-    def query(self):
-        self.cursor = DBConn().cursor();
-
-        self.cursor.execute("""SELECT file, section || '/' || package
-        FROM deb_contents
-        WHERE ( arch=2 or arch = %d) AND suite = %d
-        """ % (self.arch_id, self.suite_id))
-
-    def ingest(self):
-        while True:
-            r = self.cursor.fetchone()
-            if not r:
-                break
-            filename, package = r
-            if self.filenames.has_key(filename):
-                self.filenames[filename] += ",%s" % (package)
-            else:
-                self.filenames[filename] = "%s" % (package)
-        self.cursor.close()
+    header = None # a class object holding the header section of contents file
 
-    def open_file(self):
+    def __init__(self, filename):
+        """
+        @type filename: string
+        @param filename: the name of the file to write to
+        """
+        self.queue = Queue.Queue()
+        self.current_file = None
+        self.first_package = True
+        self.output = self.open_file(filename)
+        self.thread = threading.Thread(target=self.write_thread,
+                                       name='Contents writer')
+        self.thread.start()
+
+    def open_file(self, filename):
         """
         opens a gzip stream to the contents file
         """
-#        filepath = Config()["Contents::Root"] + self.filename
-        self.filename = "/home/stew/contents/" + self.filename
-        filedir = os.path.dirname(self.filename)
+        filepath = Config()["Contents::Root"] + filename
+        filedir = os.path.dirname(filepath)
         if not os.path.isdir(filedir):
             os.makedirs(filedir)
-#        self.filehandle = gzip.open(self.filename, "w")
-        self.filehandle = open(self.filename, "w")
-        self._write_header()
+        return gzip.open(filepath, "w")
 
-    def _write_header(self):
-        self._get_header();
-        self.filehandle.write(ContentFile.header)
+    def write(self, filename, section, package):
+        """
+        enqueue content to be written to the file on a separate thread
+        """
+        self.queue.put((filename,section,package))
 
-    header=None
+    def write_thread(self):
+        """
+        the target of a Thread which will do the actual writing
+        """
+        while True:
+            next = self.queue.get()
+            if isinstance(next, EndOfContents):
+                self.output.write('\n')
+                self.output.close()
+                break
+
+            (filename,section,package)=next
+            if next != self.current_file:
+                # this is the first file, so write the header first
+                if not self.current_file:
+                    self.output.write(self._getHeader())
+
+                self.output.write('\n%s\t' % filename)
+                self.first_package = True
+
+            self.current_file=filename
+
+            if not self.first_package:
+                self.output.write(',')
+            else:
+                self.first_package=False
+            self.output.write('%s/%s' % (section,package))
+
+    def finish(self):
+        """
+        enqueue the sentry object so that writers will know to terminate
+        """
+        self.queue.put(EndOfContents())
 
     @classmethod
-    def _get_header(self):
+    def _getHeader(self):
         """
         Internal method to return the header for Contents.gz files
 
         This is boilerplate which explains the contents of the file and how
         it can be used.
         """
-        if not ContentFile.header:
+        if not GzippedContentWriter.header:
             if Config().has_key("Contents::Header"):
                 try:
                     h = open(os.path.join( Config()["Dir::Templates"],
                                            Config()["Contents::Header"] ), "r")
-                    ContentFile.header = h.read()
+                    GzippedContentWriter.header = h.read()
                     h.close()
                 except:
                     log.error( "error opening header file: %d\n%s" % (Config()["Contents::Header"],
                                                                       traceback.format_exc() ))
-                    ContentFile.header = None
+                    GzippedContentWriter.header = None
             else:
-                ContentFile.header = None
+                GzippedContentWriter.header = None
+
+        return GzippedContentWriter.header
 
-        return ContentFile.header
 
 class Contents(object):
     """
     Class capable of generating Contents-$arch.gz files
-
-    Usage GenerateContents().generateContents( ["main","contrib","non-free"] )
     """
 
     def __init__(self):
@@ -353,20 +200,34 @@ class Contents(object):
     def reject(self, message):
         log.error("E: %s" % message)
 
-    # goal column for section column
-    _goal_column = 54
-
     def cruft(self):
         """
         remove files/paths from the DB which are no longer referenced
         by binaries and clean the temporary table
         """
-        cursor = DBConn().cursor();
-        cursor.execute( "BEGIN WORK" )
-        cursor.execute( remove_pending_contents_cruft_q )
-        cursor.execute( remove_filename_cruft_q )
-        cursor.execute( remove_filepath_cruft_q )
-        cursor.execute( "COMMIT" )
+        s = DBConn().session()
+
+        # clear out all of the temporarily stored content associations
+        # this should be run only after p-a has run.  after a p-a
+        # run we should have either accepted or rejected every package
+        # so there should no longer be anything in the queue
+        s.query(PendingContentAssociation).delete()
+
+        # delete any filenames we are storing which have no binary associated
+        # with them
+        cafq = s.query(ContentAssociation.filename_id).distinct()
+        cfq = s.query(ContentFilename)
+        cfq = cfq.filter(~ContentFilename.cafilename_id.in_(cafq))
+        cfq.delete()
+
+        # delete any paths we are storing which have no binary associated with
+        # them
+        capq = s.query(ContentAssociation.filepath_id).distinct()
+        cpq = s.query(ContentFilepath)
+        cpq = cpq.filter(~ContentFilepath.cafilepath_id.in_(capq))
+        cpq.delete()
+
+        s.commit()
 
 
     def bootstrap(self):
@@ -375,177 +236,83 @@ class Contents(object):
         """
         pooldir = Config()[ 'Dir::Pool' ]
 
-        cursor = DBConn().cursor();
-        DBConn().prepare("debs_q",debs_q)
-        DBConn().prepare("arches_q",arches_q)
-
-        suites = self._suites()
-        for suite in [i.lower() for i in suites]:
-            suite_id = DBConn().get_suite_id(suite)
-
-            arch_list = self._arches(cursor, suite_id)
-            arch_all_id = DBConn().get_architecture_id("all")
-            for arch_id in arch_list:
-                cursor.execute( "EXECUTE debs_q(%d, %d)" % ( suite_id, arch_id[0] ) )
-
-                count = 0
-                while True:
-                    deb = cursor.fetchone()
-                    if not deb:
-                        break
-                    count += 1
-                    cursor1 = DBConn().cursor();
-                    cursor1.execute( "SELECT 1 FROM deb_contents WHERE binary_id = %d LIMIT 1" % (deb[0] ) )
-                    old = cursor1.fetchone()
-                    if old:
-                        log.log( "already imported: %s" % (deb[1]) )
+        s = DBConn().session()
+
+        for suite in s.query(Suite).all():
+            for arch in get_suite_architectures(suite.suite_name, skipsrc=True, skipall=True, session=s):
+                q = s.query(BinAssociation).join(Suite)
+                q = q.join(Suite).filter_by(suite_name=suite.suite_name)
+                q = q.join(DBBinary).join(Architecture).filter_by(arch.arch_string)
+                for ba in q:
+                    filename = ba.binary.poolfile.filename
+                    # Check for existing contents
+                    existingq = s.query(ContentAssociations).filter_by(binary_pkg=ba.binary_id).limit(1)
+                    if existingq.count() > 0:
+                        log.debug( "already imported: %s" % (filename))
                     else:
-#                        log.debug( "scanning: %s" % (deb[1]) )
-                        log.log( "scanning: %s" % (deb[1]) )
-                        debfile = os.path.join( pooldir, deb[1] )
-                        if os.path.exists( debfile ):
-                            Binary(debfile, self.reject).scan_package(deb[0], True)
+                        # We don't have existing contents so import them
+                        log.debug( "scanning: %s" % (filename) )
+                        debfile = os.path.join(pooldir, filename)
+                        if os.path.exists(debfile):
+                            Binary(debfile, self.reject).scan_package(ba.binary_id, True)
                         else:
-                            log.error("missing .deb: %s" % deb[1])
+                            log.error("missing .deb: %s" % filename)
 
 
     def generate(self):
-        """
-        Generate contents files for both deb and udeb
-        """
-        DBConn().prepare("arches_q", arches_q)
-        self.deb_generate()
-#        self.udeb_generate()
-
-    def deb_generate(self):
         """
         Generate Contents-$arch.gz files for every available arch in each given suite.
         """
-        cursor = DBConn().cursor()
-        debtype_id = DBConn().get_override_type_id("deb")
-        suites = self._suites()
-
-        inputtoquery = OneAtATime()
-        querytoingest = OneAtATime()
-        ingesttosort = OneAtATime()
-        sorttooutput = OneAtATime()
-        outputtogzip = OneAtATime()
-
-        qt = QueryThread(inputtoquery,querytoingest)
-        it = IngestThread(querytoingest,ingesttosort)
-# these actually make things worse
-#        it2 = IngestThread(querytoingest,ingesttosort)
-#        it3 = IngestThread(querytoingest,ingesttosort)
-#        it4 = IngestThread(querytoingest,ingesttosort)
-        st = SortThread(ingesttosort,sorttooutput)
-        ot = OutputThread(sorttooutput,outputtogzip)
-        gt = GzipThread(outputtogzip, None)
-
-        qt.start()
-        it.start()
-#        it2.start()
-#        it3.start()
-#        it2.start()
-        st.start()
-        ot.start()
-        gt.start()
-        
-        # Get our suites, and the architectures
-        for suite in [i.lower() for i in suites]:
-            suite_id = DBConn().get_suite_id(suite)
-            arch_list = self._arches(cursor, suite_id)
-
-            for (arch_id,arch_str) in arch_list:
-                print( "suite: %s, arch: %s time: %s" %(suite_id, arch_id, datetime.datetime.now().isoformat()) )
-
-#                filename = "dists/%s/Contents-%s.gz" % (suite, arch_str)
-                filename = "dists/%s/Contents-%s" % (suite, arch_str)
-                cf = ContentFile(filename, suite, suite_id, arch_str, arch_id)
-                inputtoquery.enqueue( cf )
-
-        inputtoquery.enqueue( EndOfContents() )
-        gt.join()
-
-    def udeb_generate(self):
-        """
-        Generate Contents-$arch.gz files for every available arch in each given suite.
-        """
-        cursor = DBConn().cursor()
-
-        DBConn().prepare("udeb_contents_q", udeb_contents_q)
-        udebtype_id=DBConn().get_override_type_id("udeb")
-        suites = self._suites()
+        session = DBConn().session()
 
-#        for section, fn_pattern in [("debian-installer","dists/%s/Contents-udeb-%s.gz"),
-#                                    ("non-free/debian-installer", "dists/%s/Contents-udeb-nf-%s.gz")]:
+        arch_all_id = get_architecture("all", session).arch_id
 
-        for section, fn_pattern in [("debian-installer","dists/%s/Contents-udeb-%s"),
-                                    ("non-free/debian-installer", "dists/%s/Contents-udeb-nf-%s")]:
+        # The MORE fun part. Ok, udebs need their own contents files, udeb, and udeb-nf (not-free)
+        # This is HORRIBLY debian specific :-/
+        for dtype, section, fn_pattern in \
+              [('deb',  None,                        "dists/%s/Contents-%s.gz"),
+               ('udeb', "debian-installer",          "dists/%s/Contents-udeb-%s.gz"),
+               ('udeb', "non-free/debian-installer", "dists/%s/Contents-udeb-nf-%s.gz")]:
 
-            section_id = DBConn().get_section_id(section) # all udebs should be here)
-            if section_id != -1:
+            overridetype = get_override_type(dtype, session)
 
-                # Get our suites, and the architectures
-                for suite in [i.lower() for i in suites]:
-                    suite_id = DBConn().get_suite_id(suite)
-                    arch_list = self._arches(cursor, suite_id)
+            # For udebs, we only look in certain sections (see the for loop above)
+            if section is not None:
+                section = get_section(section, session)
 
-                    for arch_id in arch_list:
+            # Get our suites
+            for suite in which_suites():
+                # Which architectures do we need to work on
+                arch_list = get_suite_architectures(suite.suite_name, skipsrc=True, skipall=True, session=session)
 
-                        writer = GzippedContentWriter(fn_pattern % (suite, arch_id[1]))
-                        try:
-
-                            cursor.execute("EXECUTE udeb_contents_q(%d,%d,%d)" % (suite_id, udebtype_id, section_id, arch_id))
+                # Set up our file writer dictionary
+                file_writers = {}
+                try:
+                    # One file writer per arch
+                    for arch in arch_list:
+                        file_writers[arch.arch_id] = GzippedContentWriter(fn_pattern % (suite, arch.arch_string))
 
-                            while True:
-                                r = cursor.fetchone()
-                                if not r:
-                                    break
+                    for r in get_suite_contents(suite, overridetype, section, session=session).fetchall():
+                        filename, section, package, arch_id = r
 
-                                filename, section, package, arch = r
+                        if arch_id == arch_all_id:
+                            # It's arch all, so all contents files get it
+                            for writer in file_writers.values():
                                 writer.write(filename, section, package)
-                        finally:
-                            writer.close()
-
-
-
-################################################################################
-
-    def _suites(self):
-        """
-        return a list of suites to operate on
-        """
-        if Config().has_key( "%s::%s" %(options_prefix,"Suite")):
-            suites = utils.split_args(Config()[ "%s::%s" %(options_prefix,"Suite")])
-        else:
-            suites = [ 'unstable', 'testing' ]
-#            suites = Config().SubTree("Suite").List()
-
-        return suites
-
-    def _arches(self, cursor, suite):
-        """
-        return a list of archs to operate on
-        """
-        arch_list = []
-        cursor.execute("EXECUTE arches_q(%d)" % (suite))
-        while True:
-            r = cursor.fetchone()
-            if not r:
-                break
-
-            if r[1] != "source" and r[1] != "all":
-                arch_list.append((r[0], r[1]))
+                        else:
+                            if file_writers.has_key(arch_id):
+                                file_writers[arch_id].write(filename, section, package)
 
-        return arch_list
+                finally:
+                    # close all the files
+                    for writer in file_writers.values():
+                        writer.finish()
 
 ################################################################################
 
-
 def main():
     cnf = Config()
-#    log = logging.Logger(cnf, "contents")
-                         
+
     arguments = [('h',"help", "%s::%s" % (options_prefix,"Help")),
                  ('s',"suite", "%s::%s" % (options_prefix,"Suite"),"HasArg"),
                  ('q',"quiet", "%s::%s" % (options_prefix,"Quiet")),
@@ -565,19 +332,31 @@ def main():
     if cnf.has_key("%s::%s" % (options_prefix,"Help")):
         usage()
 
-    level=logging.INFO
-    if cnf.has_key("%s::%s" % (options_prefix,"Quiet")):
-        level=logging.ERROR
+    level=logging.INFO
+    if cnf.has_key("%s::%s" % (options_prefix,"Quiet")):
+        level=logging.ERROR
 
-    elif cnf.has_key("%s::%s" % (options_prefix,"Verbose")):
-        level=logging.DEBUG
+    elif cnf.has_key("%s::%s" % (options_prefix,"Verbose")):
+        level=logging.DEBUG
 
 
-    logging.basicConfig( level=level,
-                         format='%(asctime)s %(levelname)s %(message)s',
-                         stream = sys.stderr )
+    logging.basicConfig( level=level,
+                         format='%(asctime)s %(levelname)s %(message)s',
+                         stream = sys.stderr )
 
     commands[args[0]](Contents())
 
+def which_suites(session):
+    """
+    return a list of suites to operate on
+    """
+    if Config().has_key( "%s::%s" %(options_prefix,"Suite")):
+        suites = utils.split_args(Config()[ "%s::%s" %(options_prefix,"Suite")])
+    else:
+        suites = Config().SubTree("Suite").List()
+
+    return [get_suite(s.lower(), session) for s in suites]
+
+
 if __name__ == '__main__':
     main()