git.decadent.org.uk Git - dak.git/commitdiff
merge from master with sqla
author Mike O'Connor <stew@dhcp-101.dfw1.kickstart.lan>
Wed, 28 Oct 2009 16:16:58 +0000 (16:16 +0000)
committer Mike O'Connor <stew@dhcp-101.dfw1.kickstart.lan>
Wed, 28 Oct 2009 16:16:58 +0000 (16:16 +0000)
dak/contents.py
dak/dakdb/update17.py [new file with mode: 0644]
dak/update_db.py
daklib/dbconn.py
daklib/utils.py

index 1b3f3e1f0e5dac8100cbf92d952aa19e75bf57f9..073eb5015cfddab316f62d54cd11021760d42575 100755 (executable)
@@ -40,14 +40,18 @@ import logging
 import math
 import gzip
 import threading
+import traceback
 import Queue
 import apt_pkg
+import datetime #just for debugging, can be removed
 from daklib import utils
 from daklib.binary import Binary
 from daklib.config import Config
 from daklib.dbconn import DBConn
 ################################################################################
 
+log=None
+
 def usage (exit_code=0):
     print """Usage: dak contents [options] command [arguments]
 
@@ -83,7 +87,7 @@ OPTIONS
 options_prefix = "Contents"
 options_prefix = "%s::Options" % options_prefix
 
-log = logging.getLogger()
+#log = logging.getLogger()
 
 ################################################################################
 
@@ -103,54 +107,23 @@ debs_q = """PREPARE debs_q(int, int) as
               WHERE suite = $1
                   AND arch = $2"""
 
-# ask if we already have contents associated with this binary
-olddeb_q = """PREPARE olddeb_q(int) as
-              SELECT 1 FROM content_associations
-              WHERE binary_pkg = $1
-              LIMIT 1"""
-
 # find me all of the contents for a given .deb
 contents_q = """PREPARE contents_q(int,int) as
-                SELECT (p.path||'/'||n.file) AS fn,
-                        s.section,
-                        b.package,
-                        b.architecture
-               FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
-               JOIN content_file_names n ON (c.filename=n.id)
-               JOIN binaries b ON (b.id=c.binary_pkg)
-               JOIN override o ON (o.package=b.package)
-               JOIN section s ON (s.id=o.section)
-               WHERE o.suite = $1 AND o.type = $2
-               AND b.type='deb'
-               ORDER BY fn"""
-
+                SELECT file, section, package
+                FROM deb_contents
+                WHERE suite = $1
+                AND (arch = $2 or arch=2)"""
+#                ORDER BY file"""
+                
 # find me all of the contents for a given .udeb
-udeb_contents_q = """PREPARE udeb_contents_q(int,int,int) as
-              SELECT (p.path||'/'||n.file) AS fn,
-                        s.section,
-                        b.package,
-                        b.architecture
-               FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
-               JOIN content_file_names n ON (c.filename=n.id)
-               JOIN binaries b ON (b.id=c.binary_pkg)
-               JOIN override o ON (o.package=b.package)
-               JOIN section s ON (s.id=o.section)
-               WHERE o.suite = $1 AND o.type = $2
-               AND s.id = $3
-               AND b.type='udeb'
-               ORDER BY fn"""
-
-#               FROM content_file_paths p join content_associations c ON (c.filepath=p.id)
-#               JOIN content_file_names n ON (c.filename=n.id)
-#               JOIN binaries b ON (b.id=c.binary_pkg)
-#               JOIN override o ON (o.package=b.package)
-#               JOIN section s ON (s.id=o.section)
-#               WHERE o.suite = $1 AND o.type = $2
-#               AND s.id = $3
-#               AND b.id in (SELECT ba.bin from bin_associations ba join binaries b on b.id=ba.bin where (b.architecture=$3 or b.architecture=$4)and ba.suite=$1 and b.type='udeb')
-#               GROUP BY fn
-#               ORDER BY fn;"""
-
+udeb_contents_q = """PREPARE udeb_contents_q(int,int,text, int) as
+                SELECT file, section, package, arch
+                FROM udeb_contents
+                WHERE suite = $1
+                AND otype = $2
+                AND section = $3
+                and arch = $4
+                ORDER BY file"""
 
 
 # clear out all of the temporarily stored content associations
@@ -159,121 +132,213 @@ udeb_contents_q = """PREPARE udeb_contents_q(int,int,int) as
 # so there should no longer be anything in the queue
 remove_pending_contents_cruft_q = """DELETE FROM pending_content_associations"""
 
-# delete any filenames we are storing which have no binary associated with them
-remove_filename_cruft_q = """DELETE FROM content_file_names
-                             WHERE id IN (SELECT cfn.id FROM content_file_names cfn
-                                          LEFT JOIN content_associations ca
-                                            ON ca.filename=cfn.id
-                                          WHERE ca.id IS NULL)"""
-
-# delete any paths we are storing which have no binary associated with them
-remove_filepath_cruft_q = """DELETE FROM content_file_paths
-                             WHERE id IN (SELECT cfn.id FROM content_file_paths cfn
-                                          LEFT JOIN content_associations ca
-                                             ON ca.filepath=cfn.id
-                                          WHERE ca.id IS NULL)"""
-
 class EndOfContents(object):
-    """
-    A sentry object for the end of the filename stream
-    """
     pass
 
-class GzippedContentWriter(object):
+class OneAtATime(object):
     """
-    An object which will write contents out to a Contents-$arch.gz
-    file on a separate thread
     """
+    def __init__(self):
+        self.next_in_line = None
+        self.next_lock = threading.Condition()
+
+    def enqueue(self, next):
+        self.next_lock.acquire()
+        while self.next_in_line:
+            self.next_lock.wait()
+            
+        assert( not self.next_in_line )
+        self.next_in_line = next
+        self.next_lock.notify()
+        self.next_lock.release()
+
+    def dequeue(self):
+        self.next_lock.acquire()
+        while not self.next_in_line:
+            self.next_lock.wait()
+        result = self.next_in_line
+        self.next_in_line = None
+        self.next_lock.notify()
+        self.next_lock.release()
+        return result
+        
+
+class ContentsWorkThread(threading.Thread):
+    """
+    """
+    def __init__(self, upstream, downstream):
+        threading.Thread.__init__(self)
+        self.upstream = upstream
+        self.downstream = downstream
 
 
+    def run(self):
+        while True:
+            try:
+                contents_file = self.upstream.dequeue()
+                if isinstance(contents_file,EndOfContents):
+                    if self.downstream:
+                        self.downstream.enqueue(contents_file)
+                    break
+
+                s = datetime.datetime.now()
+                print("%s start: %s" % (self,contents_file) )
+                self._run(contents_file)
+                print("%s finished: %s in %d seconds" % (self, contents_file, (datetime.datetime.now()-s).seconds ))
+                if self.downstream:
+                    self.downstream.enqueue(contents_file)
+            except:
+                traceback.print_exc()
+
+class QueryThread(ContentsWorkThread):
+    def __init__(self, upstream, downstream):
+        ContentsWorkThread.__init__(self, upstream, downstream)
+
+    def __str__(self):
+        return "QueryThread"
+    __repr__ = __str__
+
+    def _run(self, contents_file):
+        contents_file.query()
+
+class IngestThread(ContentsWorkThread):
+    def __init__(self, upstream, downstream):
+        ContentsWorkThread.__init__(self, upstream, downstream)
+
+    def __str__(self):
+        return "IngestThread"
+    __repr__ = __str__
+
+    def _run(self, contents_file):
+        contents_file.ingest()
+
+class SortThread(ContentsWorkThread):
+    def __init__(self, upstream, downstream):
+        ContentsWorkThread.__init__(self, upstream, downstream)
+
+    def __str__(self):
+        return "SortThread"
+    __repr__ = __str__
+
+    def _run(self, contents_file):
+        contents_file.sorted_keys = sorted(contents_file.filenames.keys())
+
+class OutputThread(ContentsWorkThread):
+    def __init__(self, upstream, downstream):
+        ContentsWorkThread.__init__(self, upstream, downstream)
+
+    def __str__(self):
+        return "OutputThread"
+    __repr__ = __str__
+
+    def _run(self, contents_file):
+        contents_file.open_file()
+        for fname in contents_file.sorted_keys:
+            contents_file.filehandle.write("%s\t%s\n" % (fname,contents_file.filenames[fname]))
+        contents_file.sorted_keys = None
+        contents_file.filenames.clear()
+    
+class GzipThread(ContentsWorkThread):
+    def __init__(self, upstream, downstream):
+        ContentsWorkThread.__init__(self, upstream, downstream)
+
+    def __str__(self):
+        return "GzipThread"
+    __repr__ = __str__
+
+    def _run(self, contents_file):
+        os.system("gzip -f %s" % contents_file.filename)
+    
+class ContentFile(object):
+    def __init__(self,
+                 filename,
+                 suite_str,
+                 suite_id,
+                 arch_str,
+                 arch_id):
+
+        self.filename = filename
+        self.filenames = {}
+        self.sorted_keys = None
+        self.suite_str = suite_str
+        self.suite_id = suite_id
+        self.arch_str = arch_str
+        self.arch_id = arch_id
+        self.cursor = None
+        self.filehandle = None
+
+    def __str__(self):
+        return self.filename
+    __repr__ = __str__
+
+
+    def cleanup(self):
+        self.filenames = None
+        self.sorted_keys = None
+        self.filehandle.close()
+        self.cursor.close()
+
+    def query(self):
+        self.cursor = DBConn().cursor();
+
+        self.cursor.execute("""SELECT file, section || '/' || package
+        FROM deb_contents
+        WHERE ( arch=2 or arch = %d) AND suite = %d
+        """ % (self.arch_id, self.suite_id))
+
+    def ingest(self):
+        while True:
+            r = self.cursor.fetchone()
+            if not r:
+                break
+            filename, package = r
+            if self.filenames.has_key(filename):
+                self.filenames[filename] += ",%s" % (package)
+            else:
+                self.filenames[filename] = "%s" % (package)
+        self.cursor.close()
 
-    def __init__(self, filename):
-        """
-        @type filename: string
-        @param filename: the name of the file to write to
-        """
-        self.queue = Queue.Queue()
-        self.current_file = None
-        self.first_package = True
-        self.output = self.open_file(filename)
-        self.thread = threading.Thread(target=self.write_thread,
-                                       name='Contents writer')
-        self.thread.start()
-
-    def open_file(self, filename):
+    def open_file(self):
         """
         opens a gzip stream to the contents file
         """
-        filepath = Config()["Contents::Root"] + filename
-        filedir = os.path.dirname(filepath)
+#        filepath = Config()["Contents::Root"] + self.filename
+        self.filename = "/home/stew/contents/" + self.filename
+        filedir = os.path.dirname(self.filename)
         if not os.path.isdir(filedir):
             os.makedirs(filedir)
-        return gzip.open(filepath, "w")
-
-    def write(self, filename, section, package):
-        """
-        enqueue content to be written to the file on a separate thread
-        """
-        self.queue.put((filename,section,package))
-
-    def write_thread(self):
-        """
-        the target of a Thread which will do the actual writing
-        """
-        while True:
-            next = self.queue.get()
-            if isinstance(next, EndOfContents):
-                self.output.write('\n')
-                self.output.close()
-                break
-
-            (filename,section,package)=next
-            if next != self.current_file:
-                # this is the first file, so write the header first
-                if not self.current_file:
-                    self.output.write(self._getHeader())
-
-                self.output.write('\n%s\t' % filename)
-                self.first_package = True
+#        self.filehandle = gzip.open(self.filename, "w")
+        self.filehandle = open(self.filename, "w")
+        self._write_header()
 
-            self.current_file=filename
+    def _write_header(self):
+        self._get_header();
+        self.filehandle.write(ContentFile.header)
 
-            if not self.first_package:
-                self.output.write(',')
-            else:
-                self.first_package=False
-            self.output.write('%s/%s' % (section,package))
-
-    def finish(self):
-        """
-        enqueue the sentry object so that writers will know to terminate
-        """
-        self.queue.put(EndOfContents())
+    header=None
 
     @classmethod
-    def _getHeader(self):
+    def _get_header(self):
         """
         Internal method to return the header for Contents.gz files
 
         This is boilerplate which explains the contents of the file and how
         it can be used.
         """
-        if not GzippedContentWriter.header:
+        if not ContentFile.header:
             if Config().has_key("Contents::Header"):
                 try:
                     h = open(os.path.join( Config()["Dir::Templates"],
                                            Config()["Contents::Header"] ), "r")
-                    GzippedContentWriter.header = h.read()
+                    ContentFile.header = h.read()
                     h.close()
                 except:
                     log.error( "error opening header file: %d\n%s" % (Config()["Contents::Header"],
                                                                       traceback.format_exc() ))
-                    GzippedContentWriter.header = None
+                    ContentFile.header = None
             else:
-                GzippedContentWriter.header = None
-
-        return GzippedContentWriter.header
+                ContentFile.header = None
 
+        return ContentFile.header
 
 class Contents(object):
     """
@@ -312,7 +377,6 @@ class Contents(object):
 
         cursor = DBConn().cursor();
         DBConn().prepare("debs_q",debs_q)
-        DBConn().prepare("olddeb_q",olddeb_q)
         DBConn().prepare("arches_q",arches_q)
 
         suites = self._suites()
@@ -331,76 +395,92 @@ class Contents(object):
                         break
                     count += 1
                     cursor1 = DBConn().cursor();
-                    cursor1.execute( "EXECUTE olddeb_q(%d)" % (deb[0] ) )
+                    cursor1.execute( "SELECT 1 FROM deb_contents WHERE binary_id = %d LIMIT 1" % (deb[0] ) )
                     old = cursor1.fetchone()
                     if old:
-                        log.debug( "already imported: %s" % (deb[1]) )
+                        log.log( "already imported: %s" % (deb[1]) )
                     else:
-                        log.debug( "scanning: %s" % (deb[1]) )
+#                        log.debug( "scanning: %s" % (deb[1]) )
+                        log.log( "scanning: %s" % (deb[1]) )
                         debfile = os.path.join( pooldir, deb[1] )
                         if os.path.exists( debfile ):
-                            Binary(debfile, self.reject).scan_package(deb[0],True)
+                            Binary(debfile, self.reject).scan_package(deb[0], True)
                         else:
                             log.error("missing .deb: %s" % deb[1])
 
+
     def generate(self):
         """
-        Generate Contents-$arch.gz files for every available arch in each given suite.
+        Generate contents files for both deb and udeb
         """
-        cursor = DBConn().cursor()
-
         DBConn().prepare("arches_q", arches_q)
-        DBConn().prepare("contents_q", contents_q)
-        DBConn().prepare("udeb_contents_q", udeb_contents_q)
-
-        debtype_id=DBConn().get_override_type_id("deb")
-        udebtype_id=DBConn().get_override_type_id("udeb")
+        self.deb_generate()
+#        self.udeb_generate()
 
-        arch_all_id = DBConn().get_architecture_id("all")
+    def deb_generate(self):
+        """
+        Generate Contents-$arch.gz files for every available arch in each given suite.
+        """
+        cursor = DBConn().cursor()
+        debtype_id = DBConn().get_override_type_id("deb")
         suites = self._suites()
 
-
+        inputtoquery = OneAtATime()
+        querytoingest = OneAtATime()
+        ingesttosort = OneAtATime()
+        sorttooutput = OneAtATime()
+        outputtogzip = OneAtATime()
+
+        qt = QueryThread(inputtoquery,querytoingest)
+        it = IngestThread(querytoingest,ingesttosort)
+# these actually make things worse
+#        it2 = IngestThread(querytoingest,ingesttosort)
+#        it3 = IngestThread(querytoingest,ingesttosort)
+#        it4 = IngestThread(querytoingest,ingesttosort)
+        st = SortThread(ingesttosort,sorttooutput)
+        ot = OutputThread(sorttooutput,outputtogzip)
+        gt = GzipThread(outputtogzip, None)
+
+        qt.start()
+        it.start()
+#        it2.start()
+#        it3.start()
+#        it2.start()
+        st.start()
+        ot.start()
+        gt.start()
+        
         # Get our suites, and the architectures
         for suite in [i.lower() for i in suites]:
             suite_id = DBConn().get_suite_id(suite)
             arch_list = self._arches(cursor, suite_id)
 
-            file_writers = {}
-
-            try:
-                for arch_id in arch_list:
-                    file_writers[arch_id[0]] = GzippedContentWriter("dists/%s/Contents-%s.gz" % (suite, arch_id[1]))
-
-                cursor.execute("EXECUTE contents_q(%d,%d);" % (suite_id, debtype_id))
+            for (arch_id,arch_str) in arch_list:
+                print( "suite: %s, arch: %s time: %s" %(suite_id, arch_id, datetime.datetime.now().isoformat()) )
 
-                while True:
-                    r = cursor.fetchone()
-                    if not r:
-                        break
-
-                    filename, section, package, arch = r
-
-                    if not file_writers.has_key( arch ):
-                        continue
+#                filename = "dists/%s/Contents-%s.gz" % (suite, arch_str)
+                filename = "dists/%s/Contents-%s" % (suite, arch_str)
+                cf = ContentFile(filename, suite, suite_id, arch_str, arch_id)
+                inputtoquery.enqueue( cf )
 
-                    if arch == arch_all_id:
-                        ## its arch all, so all contents files get it
-                        for writer in file_writers.values():
-                            writer.write(filename, section, package)
+        inputtoquery.enqueue( EndOfContents() )
+        gt.join()
 
-                    else:
-                        file_writers[arch].write(filename, section, package)
+    def udeb_generate(self):
+        """
+        Generate Contents-$arch.gz files for every available arch in each given suite.
+        """
+        cursor = DBConn().cursor()
 
 
-                # close all the files
-                for writer in file_writers.values():
-                    writer.finish()
+        DBConn().prepare("udeb_contents_q", udeb_contents_q)
+        udebtype_id=DBConn().get_override_type_id("udeb")
+        suites = self._suites()
 
 
+#                                    ("non-free/debian-installer", "dists/%s/Contents-udeb-nf-%s.gz")]:
 
 
-            # This is HORRIBLY debian specific :-/
-        for section, fn_pattern in [("debian-installer","dists/%s/Contents-udeb-%s.gz"),
-                                    ("non-free/debian-installer", "dists/%s/Contents-udeb-nf-%s.gz")]:
+        for section, fn_pattern in [("debian-installer","dists/%s/Contents-udeb-%s"),
+                                    ("non-free/debian-installer", "dists/%s/Contents-udeb-nf-%s")]:
 
             section_id = DBConn().get_section_id(section) # all udebs should be here)
             if section_id != -1:
@@ -410,35 +490,22 @@ class Contents(object):
                     suite_id = DBConn().get_suite_id(suite)
                     arch_list = self._arches(cursor, suite_id)
 
-                    file_writers = {}
+                    for arch_id in arch_list:
 
 
-                        for arch_id in arch_list:
-                            file_writers[arch_id[0]] = GzippedContentWriter(fn_pattern % (suite, arch_id[1]))
+                        writer = GzippedContentWriter(fn_pattern % (suite, arch_id[1]))
+                        try:
 
 
-                        cursor.execute("EXECUTE udeb_contents_q(%d,%d,%d)" % (suite_id, udebtype_id, section_id))
+                            cursor.execute("EXECUTE udeb_contents_q(%d,%d,'%s',%d)" % (suite_id, udebtype_id, section, arch_id[0]))
 
-                        while True:
-                            r = cursor.fetchone()
-                            if not r:
-                                break
+                            while True:
+                                r = cursor.fetchone()
+                                if not r:
+                                    break
 
 
-
-                            if not file_writers.has_key( arch ):
-                                continue
-
-                            if arch == arch_all_id:
-                                ## its arch all, so all contents files get it
-                                for writer in file_writers.values():
-                                    writer.write(filename, section, package)
-
-                            else:
-                                file_writers[arch].write(filename, section, package)
-                    finally:
-                        # close all the files
-                        for writer in file_writers.values():
-                            writer.finish()
+                                filename, section, package, arch = r
+                                writer.write(filename, section, package)
+                        finally:
+                            writer.close()
 
 
 
@@ -451,7 +518,8 @@ class Contents(object):
         if Config().has_key( "%s::%s" %(options_prefix,"Suite")):
             suites = utils.split_args(Config()[ "%s::%s" %(options_prefix,"Suite")])
         else:
-            suites = Config().SubTree("Suite").List()
+            suites = [ 'unstable', 'testing' ]
+#            suites = Config().SubTree("Suite").List()
 
         return suites
 
@@ -476,7 +544,8 @@ class Contents(object):
 
 def main():
     cnf = Config()
-
+#    log = logging.Logger(cnf, "contents")
+                         
     arguments = [('h',"help", "%s::%s" % (options_prefix,"Help")),
                  ('s',"suite", "%s::%s" % (options_prefix,"Suite"),"HasArg"),
                  ('q',"quiet", "%s::%s" % (options_prefix,"Quiet")),
@@ -496,17 +565,17 @@ def main():
     if cnf.has_key("%s::%s" % (options_prefix,"Help")):
         usage()
 
-    level=logging.INFO
-    if cnf.has_key("%s::%s" % (options_prefix,"Quiet")):
-        level=logging.ERROR
+    level=logging.INFO
+    if cnf.has_key("%s::%s" % (options_prefix,"Quiet")):
+        level=logging.ERROR
 
-    elif cnf.has_key("%s::%s" % (options_prefix,"Verbose")):
-        level=logging.DEBUG
+    elif cnf.has_key("%s::%s" % (options_prefix,"Verbose")):
+        level=logging.DEBUG
 
 
-    logging.basicConfig( level=level,
-                         format='%(asctime)s %(levelname)s %(message)s',
-                         stream = sys.stderr )
+    logging.basicConfig( level=level,
+                         format='%(asctime)s %(levelname)s %(message)s',
+                         stream = sys.stderr )
 
     commands[args[0]](Contents())
 
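The rewritten contents.py above replaces the per-file GzippedContentWriter threads with a five-stage pipeline (QueryThread, IngestThread, SortThread, OutputThread, GzipThread) whose stages hand each ContentFile to the next through OneAtATime objects: a blocking, single-slot queue where enqueue() waits until the previous item has been taken and dequeue() waits until an item arrives, with an EndOfContents sentinel passed through every stage to shut the pipeline down. The sketch below is illustrative only; OneAtATime and EndOfContents mirror the classes in the diff, while worker(), the stage names and the sample payloads are hypothetical stand-ins for the ContentsWorkThread subclasses.

import threading

class EndOfContents(object):
    """Sentinel telling a stage that no more work will arrive."""
    pass

class OneAtATime(object):
    """Blocking single-slot queue: enqueue() waits for the slot to be free,
    dequeue() waits for it to be filled, so each stage holds one item at a time."""
    def __init__(self):
        self.next_in_line = None
        self.next_lock = threading.Condition()

    def enqueue(self, item):
        with self.next_lock:
            while self.next_in_line is not None:
                self.next_lock.wait()
            self.next_in_line = item
            self.next_lock.notify()

    def dequeue(self):
        with self.next_lock:
            while self.next_in_line is None:
                self.next_lock.wait()
            item, self.next_in_line = self.next_in_line, None
            self.next_lock.notify()
            return item

def worker(name, upstream, downstream):
    # Each stage pulls one item, "processes" it, and hands it on, forwarding
    # the EndOfContents sentinel so every downstream stage also terminates.
    while True:
        item = upstream.dequeue()
        if isinstance(item, EndOfContents):
            if downstream:
                downstream.enqueue(item)
            break
        print("%s handling %s" % (name, item))
        if downstream:
            downstream.enqueue(item)

if __name__ == "__main__":
    a, b = OneAtATime(), OneAtATime()
    stages = [threading.Thread(target=worker, args=("stage-1", a, b)),
              threading.Thread(target=worker, args=("stage-2", b, None))]
    for t in stages:
        t.start()
    for payload in ["Contents-i386", "Contents-amd64", EndOfContents()]:
        a.enqueue(payload)
    for t in stages:
        t.join()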
diff --git a/dak/dakdb/update17.py b/dak/dakdb/update17.py
new file mode 100644 (file)
index 0000000..a3e134a
--- /dev/null
@@ -0,0 +1,310 @@
+#!/usr/bin/env python
+# coding=utf8
+
+"""
+Adding a trainee field to the process-new notes
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2009  Mike O'Connor <stew@debian.org>
+@license: GNU General Public License version 2 or later
+"""
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+
+################################################################################
+
+
+################################################################################
+
+import psycopg2
+import time
+from daklib.dak_exceptions import DBUpdateError
+
+################################################################################
+
+def suites():
+    """
+    return a list of suites to operate on
+    """
+    if Config().has_key( "%s::%s" %(options_prefix,"Suite")):
+        suites = utils.split_args(Config()[ "%s::%s" %(options_prefix,"Suite")])
+    else:
+        suites = [ 'unstable', 'testing' ]
+#            suites = Config().SubTree("Suite").List()
+
+    return suites
+
+def arches(cursor, suite):
+    """
+    return a list of archs to operate on
+    """
+    arch_list = []
+    cursor.execute("EXECUTE arches_q(%d)" % (suite))
+    while True:
+        r = cursor.fetchone()
+        if not r:
+            break
+
+        if r[1] != "source" and r[1] != "all":
+            arch_list.append((r[0], r[1]))
+
+    return arch_list
+
+def do_update(self):
+    """
+    Adding contents table as first step to maybe, finally getting rid
+    of apt-ftparchive
+    """
+
+    print __doc__
+
+    try:
+        c = self.db.cursor()
+        c.execute("""CREATE TABLE deb_contents (
+        file text,
+        section text,
+        package text,
+        binary_id integer,
+        arch integer,
+        suite integer,
+        component integer)""" )
+        
+        c.execute("""CREATE TABLE udeb_contents (
+        file text,
+        section text,
+        package text,
+        binary_id integer,
+        suite integer,
+        component integer )""" )
+        
+        c.execute("""ALTER TABLE ONLY deb_contents
+        ADD CONSTRAINT deb_contents_arch_fkey
+        FOREIGN KEY (arch) REFERENCES architecture(id)
+        ON DELETE CASCADE;""")
+
+        c.execute("""ALTER TABLE ONLY udeb_contents
+        ADD CONSTRAINT udeb_contents_arch_fkey
+        FOREIGN KEY (arch) REFERENCES architecture(id)
+        ON DELETE CASCADE;""")
+
+        c.execute("""ALTER TABLE ONLY deb_contents
+        ADD CONSTRAINT deb_contents_suite_fkey
+        FOREIGN KEY (suite) REFERENCES suite(id)
+        ON DELETE CASCADE;""")
+
+        c.execute("""ALTER TABLE ONLY udeb_contents
+        ADD CONSTRAINT udeb_contents_suite_fkey
+        FOREIGN KEY (suite) REFERENCES suite(id)
+        ON DELETE CASCADE;""")
+
+        c.execute("""ALTER TABLE ONLY deb_contents
+        ADD CONSTRAINT deb_contents_binary_fkey
+        FOREIGN KEY (binary_id) REFERENCES binaries(id)
+        ON DELETE CASCADE;""")
+
+        c.execute("""ALTER TABLE ONLY udeb_contents
+        ADD CONSTRAINT udeb_contents_binary_fkey
+        FOREIGN KEY (binary_id) REFERENCES binaries(id)
+        ON DELETE CASCADE;""")
+
+        c.execute("""CREATE INDEX ind_deb_contents_binary ON deb_contents(binary_id);""" )
+
+        arches_q = """PREPARE arches_q(int) as
+        SELECT s.architecture, a.arch_string
+        FROM suite_architectures s
+        JOIN architecture a ON (s.architecture=a.id)
+        WHERE suite = $1"""
+        
+        suites = self.suites()
+
+        for suite in [i.lower() for i in suites]:
+            suite_id = DBConn().get_suite_id(suite)
+            arch_list = arches(c, suite_id)
+            arch_list = arches(c, suite_id)
+
+            for (arch_id,arch_str) in arch_list:
+                c.execute( "CREATE INDEX ind_deb_contents_%s_%s ON deb_contents (arch,suite) WHERE (arch=2 OR arch=%d) AND suite=%d"%(arch_str,suite,arch_id,suite_id) )
+
+            for section, sname in [("debian-installer","main"),
+                                  ("non-free/debian-installer", "nonfree")]:
+                c.execute( "CREATE INDEX ind_udeb_contents_%s_%s ON udeb_contents (section,suite) WHERE section='%s' AND suite=%d"%(sname,suite,section,suite_id) )
+                
+
+    Column     | Type    | Modifiers
+    -----------+---------+-----------
+    package    | text    | not null
+    suite      | integer | not null
+    component  | integer | not null
+    priority   | integer |
+    section    | integer | not null
+    type       | integer | not null
+    maintainer | text    |
+
+        c.execute("""CREATE TABLE deb_contents (
+        file text,
+        section text,
+        package text,
+        binary_id integer,
+        arch integer,
+        suite integer,
+        component integer)""" )
+        
+
+CREATE FUNCTION update_contents_for_override() RETURNS trigger AS $update_contents_for_override$
+BEGIN
+    UPDATE deb_contents  SET section=NEW.section, component=NEW.component
+    WHERE deb_contents.package=OLD.package
+                            
+
+DELETE FROM 
+NEW.last_date := current_timestamp;
+NEW.last_user := current_user;
+RETURN NEW;
+END;
+$update_contents_for_override$ LANGUAGE plpgsql;
+
+
+        self.db.commit()
+
+    except psycopg2.ProgrammingError, msg:
+        self.db.rollback()
+        raise DBUpdateError, "Unable to apply process-new update 14, rollback issued. Error message : %s" % (str(msg))
+"""
+         INSERT INTO deb_contents SELECT (p.path||'/'||n.file) AS file,
+                  s.section AS section,
+                  b.package AS package,
+                  b.id AS binary_id,
+                  b.architecture AS arch,
+                  o.suite AS suited,
+                  o.component AS componentd,
+                  o.type AS otype_id
+          FROM content_associations c 
+          JOIN content_file_paths p ON (c.filepath=p.id)
+          JOIN content_file_names n ON (c.filename=n.id)
+          JOIN binaries b ON (b.id=c.binary_pkg)
+          JOIN architecture a ON (b.architecture = a.id)
+          JOIN override o ON (o.package=b.package)
+          JOIN bin_associations ba on ba.suite=o.suite and ba.bin=b.id
+          JOIN section s ON (s.id=o.section)
+          where b.type='deb';
+
+         INSERT INTO udeb_contents SELECT (p.path||'/'||n.file) AS file,
+                  s.section AS section,
+                  b.package AS package,
+                  b.id AS binary_id,
+                  b.architecture AS arch,
+                  o.suite AS suited,
+                  o.component AS componentd,
+                  o.type AS otype_id
+          FROM content_associations c 
+          JOIN content_file_paths p ON (c.filepath=p.id)
+          JOIN content_file_names n ON (c.filename=n.id)
+          JOIN binaries b ON (b.id=c.binary_pkg)
+          JOIN architecture a ON (b.architecture = a.id)
+          JOIN override o ON (o.package=b.package)
+          JOIN section s ON (s.id=o.section)
+          where b.type='udeb'
+"""
+
+"""
+CREATE INDEX ind_archid ON contents(arch);
+CREATE INDEX ind_archid_amd64 ON contents(arch) WHERE arch=16;
+CREATE INDEX ind_suite ON contents(suite);
+CREATE INDEX ind_suite_unstable ON contents(suite) WHERE suite=5;
+CREATE INDEX ind_overridetype ON contents(otype);
+CREATE INDEX ind_overridetype_deb ON contents(otype) WHERE otype=7;
+CREATE INDEX ind_packagetype ON contents(packagetype);
+CREATE INDEX ind_packagetype_deb ON contents(packagetype) WHERE packagetype='deb';
+CREATE INDEX ind_package ON contents(package);
+
+ CREATE INDEX ind_suite_otype ON contents(suite, otype) WHERE suite=5 AND otype=7;
+ CREATE INDEX ind_suite_otype_arch ON contents(suite, otype, arch) WHERE suite=5 AND otype=7 AND arch=16;
+ CREATE INDEX ind_suite_otype_package ON contents(suite, otype, packagetype) WHERE suite=5 AND otype=7 AND packagetype='deb';
+ CREATE INDEX ind_suite_otype_package_notdeb ON contents(suite, otype, packagetype) WHERE suite=5 AND otype=7 AND packagetype!='deb';
+                                                                                                                                                                                          """
+
+CREATE INDEX ind_deb_contents_binary ON deb_contents(binary_id);
+
+CREATE INDEX ind_deb_contents_arch_alpha_unstable ON deb_contents(arch) where (arch=2 or arch=3) AND suite=5 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_hurd_i386_unstable ON deb_contents(arch) where (arch=2 or arch=4) AND suite=5 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_hppa_unstable ON deb_contents(arch) where (arch=2 or arch=5) AND suite=5 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_arm_unstable ON deb_contents(arch) where (arch=2 or arch=6) AND suite=5 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_i386_unstable ON deb_contents(arch) where (arch=2 or arch=7) AND suite=5 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_m68k_unstable ON deb_contents(arch) where (arch=2 or arch=8) AND suite=5 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_mips_unstable ON deb_contents(arch) where (arch=2 or arch=9) AND suite=5 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_mipsel_unstable ON deb_contents(arch) where (arch=2 or arch=10) AND suite=5 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_powerpc_unstable ON deb_contents(arch) where (arch=2 or arch=11) AND suite=5 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_sh_unstable ON deb_contents(arch) where (arch=2 or arch=12) AND suite=5 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_sparc_unstable ON deb_contents(arch) where (arch=2 or arch=13) AND suite=5 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_s390_unstable ON deb_contents(arch) where (arch=2 or arch=14) AND suite=5 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_ia64_unstable ON deb_contents(arch) where (arch=2 or arch=15) AND suite=5 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_amd64_unstable ON deb_contents(arch) where (arch=2 or arch=16) AND suite=5 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_armel_unstable ON deb_contents(arch) where (arch=2 or arch=17) AND suite=5 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_kfreebsd_i386_unstable ON deb_contents(arch) where (arch=2 or arch=25) AND suite=5 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_kfreebsd_amd64_unstable ON deb_contents(arch) where (arch=2 or arch=26) AND suite=5 AND otype=7;
+
+CREATE INDEX ind_deb_contents_arch_alpha_stable ON deb_contents(arch) where (arch=2 or arch=3) AND suite=2 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_hurd_i386_stable ON deb_contents(arch) where (arch=2 or arch=4) AND suite=2 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_hppa_stable ON deb_contents(arch) where (arch=2 or arch=5) AND suite=2 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_arm_stable ON deb_contents(arch) where (arch=2 or arch=6) AND suite=2 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_i386_stable ON deb_contents(arch) where (arch=2 or arch=7) AND suite=2 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_m68k_stable ON deb_contents(arch) where (arch=2 or arch=8) AND suite=2 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_mips_stable ON deb_contents(arch) where (arch=2 or arch=9) AND suite=2 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_mipsel_stable ON deb_contents(arch) where (arch=2 or arch=10) AND suite=2 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_powerpc_stable ON deb_contents(arch) where (arch=2 or arch=11) AND suite=2 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_sh_stable ON deb_contents(arch) where (arch=2 or arch=12) AND suite=2 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_sparc_stable ON deb_contents(arch) where (arch=2 or arch=13) AND suite=2 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_s390_stable ON deb_contents(arch) where (arch=2 or arch=14) AND suite=2 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_ia64_stable ON deb_contents(arch) where (arch=2 or arch=15) AND suite=2 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_amd64_stable ON deb_contents(arch) where (arch=2 or arch=16) AND suite=2 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_armel_stable ON deb_contents(arch) where (arch=2 or arch=17) AND suite=2 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_kfreebsd_i386_stable ON deb_contents(arch) where (arch=2 or arch=25) AND suite=2 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_kfreebsd_amd64_stable ON deb_contents(arch) where (arch=2 or arch=26) AND suite=2 AND otype=7;
+
+CREATE INDEX ind_deb_contents_arch_alpha_testing ON deb_contents(arch) where (arch=2 or arch=3) AND suite=4 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_hurd_i386_testing ON deb_contents(arch) where (arch=2 or arch=4) AND suite=4 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_hppa_testing ON deb_contents(arch) where (arch=2 or arch=5) AND suite=4 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_arm_testing ON deb_contents(arch) where (arch=2 or arch=6) AND suite=4 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_i386_testing ON deb_contents(arch) where (arch=2 or arch=7) AND suite=4 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_m68k_testing ON deb_contents(arch) where (arch=2 or arch=8) AND suite=4 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_mips_testing ON deb_contents(arch) where (arch=2 or arch=9) AND suite=4 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_mipsel_testing ON deb_contents(arch) where (arch=2 or arch=10) AND suite=4 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_powerpc_testing ON deb_contents(arch) where (arch=2 or arch=11) AND suite=4 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_sh_testing ON deb_contents(arch) where (arch=2 or arch=12) AND suite=4 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_sparc_testing ON deb_contents(arch) where (arch=2 or arch=13) AND suite=4 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_s390_testing ON deb_contents(arch) where (arch=2 or arch=14) AND suite=4 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_ia64_testing ON deb_contents(arch) where (arch=2 or arch=15) AND suite=4 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_amd64_testing ON deb_contents(arch) where (arch=2 or arch=16) AND suite=4 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_armel_testing ON deb_contents(arch) where (arch=2 or arch=17) AND suite=4 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_kfreebsd_i386_testing ON deb_contents(arch) where (arch=2 or arch=25) AND suite=4 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_kfreebsd_amd64_testing ON deb_contents(arch) where (arch=2 or arch=26) AND suite=4 AND otype=7;
+
+CREATE INDEX ind_deb_contents_arch_alpha_oldstable ON deb_contents(arch) where (arch=2 or arch=3) AND suite=14 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_hurd_i386_oldstable ON deb_contents(arch) where (arch=2 or arch=4) AND suite=14 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_hppa_oldstable ON deb_contents(arch) where (arch=2 or arch=5) AND suite=14 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_arm_oldstable ON deb_contents(arch) where (arch=2 or arch=6) AND suite=14 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_i386_oldstable ON deb_contents(arch) where (arch=2 or arch=7) AND suite=14 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_m68k_oldstable ON deb_contents(arch) where (arch=2 or arch=8) AND suite=14 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_mips_oldstable ON deb_contents(arch) where (arch=2 or arch=9) AND suite=14 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_mipsel_oldstable ON deb_contents(arch) where (arch=2 or arch=10) AND suite=14 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_powerpc_oldstable ON deb_contents(arch) where (arch=2 or arch=11) AND suite=14 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_sh_oldstable ON deb_contents(arch) where (arch=2 or arch=12) AND suite=14 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_sparc_oldstable ON deb_contents(arch) where (arch=2 or arch=13) AND suite=14 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_s390_oldstable ON deb_contents(arch) where (arch=2 or arch=14) AND suite=14 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_ia64_oldstable ON deb_contents(arch) where (arch=2 or arch=15) AND suite=14 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_amd64_oldstable ON deb_contents(arch) where (arch=2 or arch=16) AND suite=14 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_armel_oldstable ON deb_contents(arch) where (arch=2 or arch=17) AND suite=14 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_kfreebsd_i386_oldstable ON deb_contents(arch) where (arch=2 or arch=25) AND suite=14 AND otype=7;
+CREATE INDEX ind_deb_contents_arch_kfreebsd_amd64_oldstable ON deb_contents(arch) where (arch=2 or arch=26) AND suite=14 AND otype=7;
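The new update17.py above is visibly work in progress: it creates the deb_contents and udeb_contents tables plus their foreign keys inside a try/except that rolls back on psycopg2.ProgrammingError, while the backfill INSERT ... SELECT statements and the long list of per-arch partial indexes are left behind as unexecuted reference SQL. The following is a condensed, hypothetical sketch of the same commit-or-rollback migration pattern; the apply_update17 name, the "dbname=projectb" DSN, and the choice to create only one table and one index are assumptions, and the index drops the otype qualifier because the deb_contents table created here has no such column.

import psycopg2

def apply_update17(dsn="dbname=projectb"):
    # Run the schema change in one transaction: commit on success, roll back
    # on error so the update can be re-applied cleanly (as update17.py does).
    db = psycopg2.connect(dsn)
    try:
        c = db.cursor()
        c.execute("""CREATE TABLE deb_contents (
                         file text,
                         section text,
                         package text,
                         binary_id integer,
                         arch integer,
                         suite integer,
                         component integer)""")
        # Simplified version of one partial index from the listing above;
        # the index names suggest arch 16 = amd64 and suite 5 = unstable, and
        # the contents.py query treats arch 2 as the arch-independent ("all") id.
        c.execute("""CREATE INDEX ind_deb_contents_arch_amd64_unstable
                         ON deb_contents (arch)
                         WHERE (arch = 2 OR arch = 16) AND suite = 5""")
        db.commit()
    except psycopg2.ProgrammingError:
        db.rollback()
        raise
    finally:
        db.close()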
index 5fe6918ab3bae19e9875f2aed1c4716540547e61..e197edd66bc48188eb3bf38cabe622e60d7a732e 100755 (executable)
@@ -45,7 +45,7 @@ from daklib.dak_exceptions import DBUpdateError
 
 Cnf = None
 projectB = None
-required_database_schema = 13
+required_database_schema = 14
 
 ################################################################################
 
@@ -110,9 +110,10 @@ Updates dak's database schema to the lastest version. You should disable crontab
 
         try:
             # Build a connect string
-            connect_str = "dbname=%s"% (Cnf["DB::Name"])
-            if Cnf["DB::Host"] != '': connect_str += " host=%s" % (Cnf["DB::Host"])
-            if Cnf["DB::Port"] != '-1': connect_str += " port=%d" % (int(Cnf["DB::Port"]))
+#            connect_str = "dbname=%s"% (Cnf["DB::Name"])
+            connect_str = "dbname=%s"% "projectbstew"
+#            if Cnf["DB::Host"] != '': connect_str += " host=%s" % (Cnf["DB::Host"])
+#            if Cnf["DB::Port"] != '-1': connect_str += " port=%d" % (int(Cnf["DB::Port"]))
 
             self.db = psycopg2.connect(connect_str)
 
index 58dd7fc55520c09bda083d6ca51ec9536f28f971..a1ac7038ce8cb920fea6ac6a9e80fe20855ad2f1 100755 (executable)
@@ -73,7 +73,8 @@ class DBConn(Singleton):
     ## Connection functions
     def __createconn(self):
         cnf = Config()
-        connstr = "dbname=%s" % cnf["DB::Name"]
+        connstr = "dbname=%s" % "projectbstew" #cnf["DB::Name"]
+        print( "connstr: %s "% connstr)
         if cnf["DB::Host"]:
            connstr += " host=%s" % cnf["DB::Host"]
         if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
@@ -524,7 +525,8 @@ class DBConn(Singleton):
                 file_id = self.get_or_set_contents_file_id(file)
                 path_id = self.get_or_set_contents_path_id(path)
 
-                c.execute("""INSERT INTO content_associations
+                c.execute("""INSERT INTO deb_contents
+                
                                (binary_pkg, filepath, filename)
                            VALUES ( '%d', '%d', '%d')""" % (bin_id, path_id, file_id) )
 
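The update_db.py and dbconn.py hunks above temporarily hardcode the database name to "projectbstew" (and print the connection string), presumably for local testing, keeping the original config-driven form only as comments. The helper below is an illustrative sketch of that original pattern; build_connect_str and the plain dict standing in for dak's Cnf/Config object are not part of the codebase.

def build_connect_str(cnf):
    # Assemble a libpq/psycopg2 connection string from DB::Name, DB::Host
    # and DB::Port, mirroring the lines the commit comments out.
    connect_str = "dbname=%s" % cnf["DB::Name"]
    if cnf["DB::Host"] != '':
        connect_str += " host=%s" % cnf["DB::Host"]
    if cnf["DB::Port"] != '-1':
        connect_str += " port=%d" % int(cnf["DB::Port"])
    return connect_str

# Example: build_connect_str({"DB::Name": "projectb", "DB::Host": "", "DB::Port": "-1"})
# returns "dbname=projectb"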
index 073b8133ef3174aae090a536b2df337b54dc4e80..04570c956bd26d1f0987a03aa17db9d2ed3b21f6 100755 (executable)
@@ -48,7 +48,8 @@ from regexes import re_html_escaping, html_escaping, re_single_line_field, \
 
 ################################################################################
 
-default_config = "/etc/dak/dak.conf"     #: default dak config, defines host properties
+#default_config = "/etc/dak/dak.conf"     #: default dak config, defines host properties
+default_config = "/home/stew/etc/dak/dak.conf"     #: default dak config, defines host properties
 default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used
 
 alias_cache = None        #: Cache for email alias checks
@@ -1558,7 +1559,7 @@ apt_pkg.init()
 Cnf = apt_pkg.newConfiguration()
 apt_pkg.ReadConfigFileISC(Cnf,default_config)
 
-if which_conf_file() != default_config:
-    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
+#if which_conf_file() != default_config:
+#    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
 
 ###############################################################################