Merge remote-tracking branch 'dktrkranz/fixes'

diff --git a/dak/queue_report.py b/dak/queue_report.py
index f1d805650b1397c4b06e0f23591f2c7f0fadd020..a4b997af90e17e26cf242057c3a658d33437e281 100755
--- a/dak/queue_report.py
+++ b/dak/queue_report.py
 from copy import copy
 import glob, os, stat, sys, time
 import apt_pkg
+try:
+    import rrdtool
+except ImportError:
+    pass
 
 from daklib import utils
-from daklib.queue import Upload
-from daklib.dbconn import DBConn, has_new_comment, DBChange, get_uid_from_fingerprint
+from daklib.dbconn import DBConn, DBSource, has_new_comment, PolicyQueue, \
+                          get_uid_from_fingerprint
+from daklib.policy import PolicyQueueUploadHandler
 from daklib.textutils import fix_maintainer
+from daklib.utils import get_logins_from_ldap
 from daklib.dak_exceptions import *
 
 Cnf = None
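
Aside: the guarded import above makes rrdtool an optional dependency; code further
down in this change calls rrdtool and falls back on NameError when the module was
never bound. A minimal sketch of that pattern in isolation (the file name here is
only an example):

    # optional-dependency sketch, mirroring the import guard above
    try:
        import rrdtool
    except ImportError:
        pass

    def record_sample(value):
        try:
            rrdtool.update('example.rrd', 'N:%d' % value)
        except NameError:
            # rrdtool is not installed; skip graph updates silently
            pass
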
@@ -52,7 +58,7 @@ row_number = 0
 
 def usage(exit_code=0):
     print """Usage: dak queue-report
-Prints a report of packages in queue directories (usually new and byhand).
+Prints a report of packages in queues (usually new and byhand).
 
   -h, --help                show this help and exit.
  -8, --822                 writes 822 formatted output to the location set in dak.conf
@@ -60,6 +66,7 @@ Prints a report of packages in queue directories (usually new and byhand).
   -s, --sort=key            sort output according to key, see below.
   -a, --age=key             if using sort by age, how should time be treated?
                             If not given a default of hours will be used.
+  -r, --rrd=key             Directory where rrd files to be updated are stored
  -d, --directories=key     A comma-separated list of queues to be scanned
 
      Sorting Keys: ao=age,   oldest first.   an=age,   newest first.
@@ -110,7 +117,15 @@ def time_pp(x):
 def sg_compare (a, b):
     a = a[1]
     b = b[1]
-    """Sort by have note, time of oldest upload."""
+    """Sort by have pending action, have note, time of oldest upload."""
+    # Sort by have pending action
+    a_note_state = a["processed"]
+    b_note_state = b["processed"]
+    if a_note_state < b_note_state:
+        return -1
+    elif a_note_state > b_note_state:
+        return 1
+
     # Sort by have note
     a_note_state = a["note_state"]
     b_note_state = b["note_state"]
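
Aside: with the extra comparison above, source groups without a pending action
(an empty "processed" string) sort ahead of those already marked PENDING. A toy
driver, assuming sg_compare from this module is in scope and using stand-in
dictionaries for the real per_source values:

    per_source = {
        'foo': {'processed': '',               'note_state': 0, 'oldest': 200},
        'bar': {'processed': 'PENDING ACCEPT', 'note_state': 0, 'oldest': 100},
    }
    items = per_source.items()
    items.sort(sg_compare)       # Python 2 cmp-style sort
    # 'foo' now precedes 'bar': unprocessed sources come first
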
@@ -168,29 +183,48 @@ def header():
   <head>
     <meta http-equiv="content-type" content="text/xhtml+xml; charset=utf-8" />
     <link type="text/css" rel="stylesheet" href="style.css" />
-    <link rel="shortcut icon" href="http://www.debian.org/favicon.ico" />
+    <link rel="shortcut icon" href="https://www.debian.org/favicon.ico" />
     <title>
       Debian NEW and BYHAND Packages
     </title>
+    <script type="text/javascript">
+    //<![CDATA[
+    function togglePkg() {
+        var children = document.getElementsByTagName("*");
+        for (var i = 0; i < children.length; i++) {
+            if(!children[i].hasAttribute("class"))
+                continue;
+            c = children[i].getAttribute("class").split(" ");
+            for(var j = 0; j < c.length; j++) {
+                if(c[j] == "sourceNEW") {
+                    if (children[i].style.display == '')
+                        children[i].style.display = 'none';
+                    else children[i].style.display = '';
+                }
+            }
+        }
+    }
+    //]]>
+    </script>
   </head>
   <body id="NEW">
     <div id="logo">
-      <a href="http://www.debian.org/">
-        <img src="http://www.debian.org/logos/openlogo-nd-50.png"
+      <a href="https://www.debian.org/">
+        <img src="https://www.debian.org/logos/openlogo-nd-50.png"
         alt="debian logo" /></a>
-      <a href="http://www.debian.org/">
-        <img src="http://www.debian.org/Pics/debian.png"
+      <a href="https://www.debian.org/">
+        <img src="https://www.debian.org/Pics/debian.png"
         alt="Debian Project" /></a>
     </div>
     <div id="titleblock">
 
-      <img src="http://www.debian.org/Pics/red-upperleft.png"
+      <img src="https://www.debian.org/Pics/red-upperleft.png"
       id="red-upperleft" alt="corner image"/>
-      <img src="http://www.debian.org/Pics/red-lowerleft.png"
+      <img src="https://www.debian.org/Pics/red-lowerleft.png"
       id="red-lowerleft" alt="corner image"/>
-      <img src="http://www.debian.org/Pics/red-upperright.png"
+      <img src="https://www.debian.org/Pics/red-upperright.png"
       id="red-upperright" alt="corner image"/>
-      <img src="http://www.debian.org/Pics/red-lowerright.png"
+      <img src="https://www.debian.org/Pics/red-lowerright.png"
       id="red-lowerright" alt="corner image"/>
       <span class="title">
         Debian NEW and BYHAND Packages
@@ -200,29 +234,24 @@ def header():
 
 def footer():
     print "<p class=\"timestamp\">Timestamp: %s (UTC)</p>" % (time.strftime("%d.%m.%Y / %H:%M:%S", time.gmtime()))
+    print "<p class=\"timestamp\">There are <a href=\"/stat.html\">graphs about the queues</a> available.</p>"
 
     print """
     <div class="footer">
     <p>Hint: Age is the youngest upload of the package, if there is more than
     one version.<br />
-    You may want to look at <a href="http://ftp-master.debian.org/REJECT-FAQ.html">the REJECT-FAQ</a>
+    You may want to look at <a href="https://ftp-master.debian.org/REJECT-FAQ.html">the REJECT-FAQ</a>
       for possible reasons why one of the above packages may get rejected.</p>
-      <p>
-      <a href="http://validator.w3.org/check?uri=referer"><img src="http://www.w3.org/Icons/valid-xhtml10"
-        alt="Valid XHTML 1.0 Strict" height="31" width="88" /></a>
-      <a href="http://jigsaw.w3.org/css-validator/">
-        <img style="border:0;width:88px;height:31px" src="http://jigsaw.w3.org/css-validator/images/vcss"
-        alt="Valid CSS!" />
-      </a>
-      </p>
     </div> </body> </html>
     """
 
 def table_header(type, source_count, total_count):
-    print "<h1>Summary for: %s</h1>" % (type)
+    print "<h1 class='sourceNEW'>Summary for: %s</h1>" % (type)
+    print "<h1 class='sourceNEW' style='display: none'>Summary for: binary-%s only</h1>" % (type)
     print """
+    <p class="togglepkg" onclick="togglePkg()">Click to toggle all/binary-NEW packages</p>
     <table class="NEW">
-      <caption>
+      <caption class="sourceNEW">
     """
     print "Package count in <strong>%s</strong>: <em>%s</em>&nbsp;|&nbsp; Total Package count: <em>%s</em>" % (type, source_count, total_count)
     print """
@@ -250,19 +279,31 @@ def table_row(source, version, arch, last_mod, maint, distribution, closes, fing
     global row_number
 
     trclass = "sid"
+    session = DBConn().session()
     for dist in distribution:
         if dist == "experimental":
             trclass = "exp"
 
+    query = '''SELECT source
+               FROM source_suite
+               WHERE source = :source
+               AND suite_name IN ('unstable', 'experimental')'''
+    if not session.execute(query, {'source': source}).rowcount:
+        trclass += " sourceNEW"
+    session.commit()
+
     if row_number % 2 != 0:
         print "<tr class=\"%s even\">" % (trclass)
     else:
         print "<tr class=\"%s odd\">" % (trclass)
 
-    print "<td class=\"package\">%s</td>" % (source)
+    if "sourceNEW" in trclass:
+        print "<td class=\"package\">%s</td>" % (source)
+    else:
+        print "<td class=\"package\"><a href=\"https://tracker.debian.org/pkg/%(source)s\">%(source)s</a></td>" % {'source': source}
     print "<td class=\"version\">"
     for vers in version.split():
-        print "<a href=\"/new/%s_%s.html\">%s</a><br/>" % (source, utils.html_escape(vers), utils.html_escape(vers))
+        print "<a href=\"new/%s_%s.html\">%s</a><br/>" % (source, utils.html_escape(vers), utils.html_escape(vers))
     print "</td>"
     print "<td class=\"arch\">%s</td>" % (arch)
     print "<td class=\"distribution\">"
@@ -273,65 +314,95 @@ def table_row(source, version, arch, last_mod, maint, distribution, closes, fing
     (name, mail) = maint.split(":", 1)
 
     print "<td class=\"upload-data\">"
-    print "<span class=\"maintainer\">Maintainer: <a href=\"http://qa.debian.org/developer.php?login=%s\">%s</a></span><br/>" % (utils.html_escape(mail), utils.html_escape(name))
+    print "<span class=\"maintainer\">Maintainer: <a href=\"https://qa.debian.org/developer.php?login=%s\">%s</a></span><br/>" % (utils.html_escape(mail), utils.html_escape(name))
     (name, mail) = changedby.split(":", 1)
-    print "<span class=\"changed-by\">Changed-By: <a href=\"http://qa.debian.org/developer.php?login=%s\">%s</a></span><br/>" % (utils.html_escape(mail), utils.html_escape(name))
+    print "<span class=\"changed-by\">Changed-By: <a href=\"https://qa.debian.org/developer.php?login=%s\">%s</a></span><br/>" % (utils.html_escape(mail), utils.html_escape(name))
 
     if sponsor:
-        try:
-            (login, domain) = sponsor.split("@", 1)
-            print "<span class=\"sponsor\">Sponsor: <a href=\"http://qa.debian.org/developer.php?login=%s\">%s</a></span>@debian.org<br/>" % (utils.html_escape(login), utils.html_escape(login))
-        except Exception, e:
-            pass
+        print "<span class=\"sponsor\">Sponsor: <a href=\"https://qa.debian.org/developer.php?login=%s\">%s</a>@debian.org</span><br/>" % (utils.html_escape(sponsor), utils.html_escape(sponsor))
 
     print "<span class=\"signature\">Fingerprint: %s</span>" % (fingerprint)
     print "</td>"
 
     print "<td class=\"closes\">"
     for close in closes:
-        print "<a href=\"http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=%s\">#%s</a><br/>" % (utils.html_escape(close), utils.html_escape(close))
+        print "<a href=\"https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=%s\">#%s</a><br/>" % (utils.html_escape(close), utils.html_escape(close))
     print "</td></tr>"
     row_number+=1
 
 ############################################################
 
-def process_changes_files(changes_files, type, log):
-    session = DBConn().session()
-    msg = ""
-    cache = {}
-    # Read in all the .changes files
-    for filename in changes_files:
+def update_graph_database(rrd_dir, type, n_source, n_binary):
+    if not rrd_dir:
+        return
+
+    rrd_file = os.path.join(rrd_dir, type.lower()+'.rrd')
+    update = [rrd_file, "N:%s:%s" % (n_source, n_binary)]
+
+    try:
+        rrdtool.update(*update)
+    except rrdtool.error:
+        create = [rrd_file]+"""
+--step
+300
+--start
+0
+DS:ds0:GAUGE:7200:0:1000
+DS:ds1:GAUGE:7200:0:1000
+RRA:AVERAGE:0.5:1:599
+RRA:AVERAGE:0.5:6:700
+RRA:AVERAGE:0.5:24:775
+RRA:AVERAGE:0.5:288:795
+RRA:MAX:0.5:1:600
+RRA:MAX:0.5:6:700
+RRA:MAX:0.5:24:775
+RRA:MAX:0.5:288:795
+""".strip().split("\n")
         try:
-            u = Upload()
-            u.load_changes(filename)
-            cache[filename] = copy(u.pkg.changes)
-            cache[filename]["filename"] = filename
-        except Exception, e:
-            print "WARNING: Exception %s" % e
-            continue
+            rc = rrdtool.create(*create)
+            ru = rrdtool.update(*update)
+        except rrdtool.error as e:
+            print('warning: queue_report: rrdtool error, skipping %s.rrd: %s' % (type, e))
+    except NameError:
+        pass
+
+############################################################
+
+def process_queue(queue, log, rrd_dir):
+    msg = ""
+    type = queue.queue_name
+    session = DBConn().session()
+
     # Divide the .changes into per-source groups
     per_source = {}
-    for filename in cache.keys():
-        source = cache[filename]["source"]
-        if not per_source.has_key(source):
+    total_pending = 0
+    for upload in queue.uploads:
+        source = upload.changes.source
+        if source not in per_source:
             per_source[source] = {}
             per_source[source]["list"] = []
-        per_source[source]["list"].append(cache[filename])
+            per_source[source]["processed"] = ""
+            handler = PolicyQueueUploadHandler(upload, session)
+            if handler.get_action():
+                per_source[source]["processed"] = "PENDING %s" % handler.get_action()
+                total_pending += 1
+        per_source[source]["list"].append(upload)
+        per_source[source]["list"].sort(lambda x, y: cmp(x.changes.created, y.changes.created), reverse=True)
     # Determine oldest time and have note status for each source group
     for source in per_source.keys():
         source_list = per_source[source]["list"]
         first = source_list[0]
-        oldest = os.stat(first["filename"])[stat.ST_MTIME]
+        oldest = time.mktime(first.changes.created.timetuple())
         have_note = 0
         for d in per_source[source]["list"]:
-            mtime = os.stat(d["filename"])[stat.ST_MTIME]
+            mtime = time.mktime(d.changes.created.timetuple())
             if Cnf.has_key("Queue-Report::Options::New"):
                 if mtime > oldest:
                     oldest = mtime
             else:
                 if mtime < oldest:
                     oldest = mtime
-            have_note += has_new_comment(d["source"], d["version"])
+            have_note += has_new_comment(d.policy_queue, d.changes.source, d.changes.version)
         per_source[source]["oldest"] = oldest
         if not have_note:
             per_source[source]["note_state"] = 0; # none
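
Aside: a rough, self-contained illustration of the grouping logic above, using
simple stand-ins for the real upload/changes ORM objects (the real code also
flips from oldest to newest upload when --new is set):

    import time
    from datetime import datetime
    from collections import namedtuple

    # Stand-ins for the policy-queue upload objects used in the diff.
    Changes = namedtuple('Changes', 'source version created')
    Upload = namedtuple('Upload', 'changes')

    uploads = [
        Upload(Changes('foo', '1.0-1', datetime(2012, 1, 10))),
        Upload(Changes('foo', '1.0-2', datetime(2012, 1, 20))),
        Upload(Changes('bar', '2.0-1', datetime(2012, 1, 5))),
    ]

    per_source = {}
    for upload in uploads:
        group = per_source.setdefault(upload.changes.source, {'list': []})
        group['list'].append(upload)

    for source, group in per_source.items():
        # oldest upload per source group, as in the loop above
        group['oldest'] = min(time.mktime(u.changes.created.timetuple())
                              for u in group['list'])
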
@@ -342,10 +413,16 @@ def process_changes_files(changes_files, type, log):
     per_source_items = per_source.items()
     per_source_items.sort(sg_compare)
 
+    update_graph_database(rrd_dir, type, len(per_source_items), len(queue.uploads))
+
     entries = []
     max_source_len = 0
     max_version_len = 0
     max_arch_len = 0
+    try:
+        logins = get_logins_from_ldap()
+    except:
+        logins = dict()
     for i in per_source_items:
         maintainer = {}
         maint=""
@@ -355,29 +432,25 @@ def process_changes_files(changes_files, type, log):
         changeby = {}
         changedby=""
         sponsor=""
-        filename=i[1]["list"][0]["filename"]
+        filename=i[1]["list"][0].changes.changesname
         last_modified = time.time()-i[1]["oldest"]
-        source = i[1]["list"][0]["source"]
+        source = i[1]["list"][0].changes.source
         if len(source) > max_source_len:
             max_source_len = len(source)
-        binary_list = i[1]["list"][0]["binary"].keys()
-        binary = ', '.join(binary_list)
-        arches = {}
-        versions = {}
+        binary_list = i[1]["list"][0].binaries
+        binary = ', '.join([ b.package for b in binary_list ])
+        arches = set()
+        versions = set()
         for j in i[1]["list"]:
-            changesbase = os.path.basename(j["filename"])
-            try:
-                dbc = session.query(DBChange).filter_by(changesname=changesbase).one()
-            except Exception, e:
-                print "Can't find changes file in NEW for %s (%s)" % (changesbase, e)
-                dbc = None
+            dbc = j.changes
+            changesbase = dbc.changesname
 
             if Cnf.has_key("Queue-Report::Options::New") or Cnf.has_key("Queue-Report::Options::822"):
                 try:
                     (maintainer["maintainer822"], maintainer["maintainer2047"],
                     maintainer["maintainername"], maintainer["maintaineremail"]) = \
-                    fix_maintainer (j["maintainer"])
-                except ParseMaintError, msg:
+                    fix_maintainer (dbc.maintainer)
+                except ParseMaintError as msg:
                     print "Problems while parsing maintainer address\n"
                     maintainer["maintainername"] = "Unknown"
                     maintainer["maintaineremail"] = "Unknown"
@@ -386,31 +459,35 @@ def process_changes_files(changes_files, type, log):
                 try:
                     (changeby["changedby822"], changeby["changedby2047"],
                      changeby["changedbyname"], changeby["changedbyemail"]) = \
-                     fix_maintainer (j["changed-by"])
-                except ParseMaintError, msg:
+                     fix_maintainer (dbc.changedby)
+                except ParseMaintError as msg:
                     (changeby["changedby822"], changeby["changedby2047"],
                      changeby["changedbyname"], changeby["changedbyemail"]) = \
                      ("", "", "", "")
                 changedby="%s:%s" % (changeby["changedbyname"], changeby["changedbyemail"])
 
-                distribution=j["distribution"].keys()
-                closes=j["closes"].keys()
-                if dbc:
-                    fingerprint = dbc.fingerprint
-                    sponsor_name = get_uid_from_fingerprint(fingerprint).name
-                    sponsor_email = get_uid_from_fingerprint(fingerprint).uid + "@debian.org"
-                    if sponsor_name != maintainer["maintainername"] and sponsor_name != changeby["changedbyname"] and \
-                    sponsor_email != maintainer["maintaineremail"] and sponsor_name != changeby["changedbyemail"]:
-                        sponsor = sponsor_email
-
-            for arch in j["architecture"].keys():
-                arches[arch] = ""
-            version = j["version"]
-            versions[version] = ""
-        arches_list = arches.keys()
+                distribution=dbc.distribution.split()
+                closes=dbc.closes
+
+                fingerprint = dbc.fingerprint
+                sponsor_name = get_uid_from_fingerprint(fingerprint).name
+                sponsor_login = get_uid_from_fingerprint(fingerprint).uid
+                if '@' in sponsor_login:
+                    if fingerprint in logins:
+                        sponsor_login = logins[fingerprint]
+                if (sponsor_name != maintainer["maintainername"] and
+                  sponsor_name != changeby["changedbyname"] and
+                  sponsor_login + '@debian.org' != maintainer["maintaineremail"] and
+                  sponsor_name != changeby["changedbyemail"]):
+                    sponsor = sponsor_login
+
+            for arch in dbc.architecture.split():
+                arches.add(arch)
+            versions.add(dbc.version)
+        arches_list = list(arches)
         arches_list.sort(utils.arch_compare_sw)
         arch_list = " ".join(arches_list)
-        version_list = " ".join(versions.keys())
+        version_list = " ".join(sorted(versions, reverse=True))
         if len(version_list) > max_version_len:
             max_version_len = len(version_list)
         if len(arch_list) > max_arch_len:
@@ -419,7 +496,7 @@ def process_changes_files(changes_files, type, log):
             note = " | [N]"
         else:
             note = ""
-        entries.append([source, binary, version_list, arch_list, note, last_modified, maint, distribution, closes, fingerprint, sponsor, changedby, filename])
+        entries.append([source, binary, version_list, arch_list, per_source[source]["processed"], note, last_modified, maint, distribution, closes, fingerprint, sponsor, changedby, filename])
 
     # direction entry consists of "Which field, which direction, time-consider" where
     # time-consider says how we should treat last_modified. That's all.
@@ -430,16 +507,16 @@ def process_changes_files(changes_files, type, log):
         age =  Cnf["Queue-Report::Options::Age"]
     if Cnf.has_key("Queue-Report::Options::New"):
     # If we produce html we always have oldest first.
-        direction.append([4,-1,"ao"])
+        direction.append([6,-1,"ao"])
     else:
         if Cnf.has_key("Queue-Report::Options::Sort"):
             for i in Cnf["Queue-Report::Options::Sort"].split(","):
                 if i == "ao":
                     # Age, oldest first.
-                    direction.append([4,-1,age])
+                    direction.append([6,-1,age])
                 elif i == "an":
                     # Age, newest first.
-                    direction.append([4,1,age])
+                    direction.append([6,1,age])
                 elif i == "na":
                     # Name, Ascending.
                     direction.append([0,1,0])
@@ -448,10 +525,10 @@ def process_changes_files(changes_files, type, log):
                     direction.append([0,-1,0])
                 elif i == "nl":
                     # Notes last.
-                    direction.append([3,1,0])
+                    direction.append([5,1,0])
                 elif i == "nf":
                     # Notes first.
-                    direction.append([3,-1,0])
+                    direction.append([5,-1,0])
     entries.sort(lambda x, y: sortfunc(x, y))
     # Yes, in theory you can add several sort options at the commandline. But my mind is too small
     # at the moment to come up with a real good sorting function that considers all the sidesteps you
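
Aside: sortfunc itself is not part of this diff, so the following is only a
guess at the general shape of a comparator that consumes the [field-index,
sign, age-unit] direction entries built above (age-unit handling omitted):

    def sortfunc_sketch(x, y, direction):
        # Compare two entry lists field by field, honouring each entry's sign.
        for field_index, sign, _age_unit in direction:
            result = cmp(x[field_index], y[field_index])
            if result:
                return result * sign
        return 0

    # e.g. oldest first: field 6 is last_modified, -1 reverses the order
    entries.sort(lambda x, y: sortfunc_sketch(x, y, [[6, -1, 'ao']]))
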
@@ -461,7 +538,7 @@ def process_changes_files(changes_files, type, log):
     if Cnf.has_key("Queue-Report::Options::822"):
         # print stuff out in 822 format
         for entry in entries:
-            (source, binary, version_list, arch_list, note, last_modified, maint, distribution, closes, fingerprint, sponsor, changedby, changes_file) = entry
+            (source, binary, version_list, arch_list, processed, note, last_modified, maint, distribution, closes, fingerprint, sponsor, changedby, changes_file) = entry
 
             # We'll always have Source, Version, Arch, Maintainer, and Dist
             # For the rest, check to see if we have them, then print them out
@@ -480,7 +557,7 @@ def process_changes_files(changes_files, type, log):
                (name, mail) = changedby.split(":", 1)
                log.write("Changed-By: " + name + " <"+mail+">" + "\n")
             if sponsor:
-               log.write("Sponsored-By: " + sponsor + "\n")
+               log.write("Sponsored-By: %s@debian.org\n" % sponsor)
             log.write("Distribution:")
             for dist in distribution:
                log.write(" " + dist)
@@ -494,39 +571,43 @@ def process_changes_files(changes_files, type, log):
             log.write("Changes-File: " + os.path.basename(changes_file) + "\n")
             log.write("\n")
 
+    total_count = len(queue.uploads)
+    source_count = len(per_source_items)
+
     if Cnf.has_key("Queue-Report::Options::New"):
-        direction.append([4,1,"ao"])
+        direction.append([6,1,"ao"])
         entries.sort(lambda x, y: sortfunc(x, y))
     # Output for a html file. First table header. then table_footer.
     # Any line between them is then a <tr> printed from subroutine table_row.
         if len(entries) > 0:
-            total_count = len(changes_files)
-            source_count = len(per_source_items)
             table_header(type.upper(), source_count, total_count)
             for entry in entries:
-                (source, binary, version_list, arch_list, note, last_modified, maint, distribution, closes, fingerprint, sponsor, changedby, undef) = entry
+                (source, binary, version_list, arch_list, processed, note, last_modified, maint, distribution, closes, fingerprint, sponsor, changedby, undef) = entry
                 table_row(source, version_list, arch_list, time_pp(last_modified), maint, distribution, closes, fingerprint, sponsor, changedby)
             table_footer(type.upper())
     elif not Cnf.has_key("Queue-Report::Options::822"):
     # The "normal" output without any formatting.
-        format="%%-%ds | %%-%ds | %%-%ds%%s | %%s old\n" % (max_source_len, max_version_len, max_arch_len)
-
         msg = ""
         for entry in entries:
-            (source, binary, version_list, arch_list, note, last_modified, undef, undef, undef, undef, undef, undef, undef) = entry
-            msg += format % (source, version_list, arch_list, note, time_pp(last_modified))
+            (source, binary, version_list, arch_list, processed, note, last_modified, undef, undef, undef, undef, undef, undef, undef) = entry
+            if processed:
+                format="%%-%ds | %%-%ds | %%-%ds | %%s\n" % (max_source_len, max_version_len, max_arch_len)
+                msg += format % (source, version_list, arch_list, processed)
+            else:
+                format="%%-%ds | %%-%ds | %%-%ds%%s | %%s old\n" % (max_source_len, max_version_len, max_arch_len)
+                msg += format % (source, version_list, arch_list, note, time_pp(last_modified))
 
         if msg:
-            total_count = len(changes_files)
-            source_count = len(per_source_items)
             print type.upper()
             print "-"*len(type)
             print
             print msg
-            print "%s %s source package%s / %s %s package%s in total." % (source_count, type, plural(source_count), total_count, type, plural(total_count))
+            print ("%s %s source package%s / %s %s package%s in total / %s %s package%s to be processed." %
+                   (source_count, type, plural(source_count),
+                    total_count, type, plural(total_count),
+                    total_pending, type, plural(total_pending)))
             print
 
-
 ################################################################################
 
 def main():
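
Aside: the doubled percent signs used for the plain-text report above build the
column layout in two passes; the first interpolation bakes the measured column
widths into the format string, the second fills in the values. For example:

    max_source_len, max_version_len, max_arch_len = 8, 12, 10
    fmt = "%%-%ds | %%-%ds | %%-%ds | %%s\n" % (max_source_len,
                                                max_version_len,
                                                max_arch_len)
    # fmt is now "%-8s | %-12s | %-10s | %s\n"
    line = fmt % ("dak", "1.0-1", "source all", "PENDING ACCEPT")
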
@@ -538,41 +619,54 @@ def main():
                  ('8','822',"Queue-Report::Options::822"),
                  ('s',"sort","Queue-Report::Options::Sort", "HasArg"),
                  ('a',"age","Queue-Report::Options::Age", "HasArg"),
+                 ('r',"rrd","Queue-Report::Options::Rrd", "HasArg"),
                  ('d',"directories","Queue-Report::Options::Directories", "HasArg")]
     for i in [ "help" ]:
         if not Cnf.has_key("Queue-Report::Options::%s" % (i)):
             Cnf["Queue-Report::Options::%s" % (i)] = ""
 
-    apt_pkg.ParseCommandLine(Cnf, Arguments, sys.argv)
+    apt_pkg.parse_commandline(Cnf, Arguments, sys.argv)
 
-    Options = Cnf.SubTree("Queue-Report::Options")
+    Options = Cnf.subtree("Queue-Report::Options")
     if Options["Help"]:
         usage()
 
     if Cnf.has_key("Queue-Report::Options::New"):
         header()
 
-    # Initialize db so we can get the NEW comments
-    dbconn = DBConn()
-
-    directories = [ ]
+    queue_names = []
 
     if Cnf.has_key("Queue-Report::Options::Directories"):
         for i in Cnf["Queue-Report::Options::Directories"].split(","):
-            directories.append(i)
+            queue_names.append(i)
     elif Cnf.has_key("Queue-Report::Directories"):
-        directories = Cnf.ValueList("Queue-Report::Directories")
+        queue_names = Cnf.value_list("Queue-Report::Directories")
+    else:
+        queue_names = [ "byhand", "new" ]
+
+    if Cnf.has_key("Queue-Report::Options::Rrd"):
+        rrd_dir = Cnf["Queue-Report::Options::Rrd"]
+    elif Cnf.has_key("Dir::Rrd"):
+        rrd_dir = Cnf["Dir::Rrd"]
     else:
-        directories = [ "byhand", "new" ]
+        rrd_dir = None
 
     f = None
     if Cnf.has_key("Queue-Report::Options::822"):
         # Open the report file
-        f = open(Cnf["Queue-Report::ReportLocations::822Location"], "w")
+        f = sys.stdout
+        filename822 = Cnf.get("Queue-Report::ReportLocations::822Location")
+        if filename822:
+            f = open(filename822, "w")
 
-    for directory in directories:
-        changes_files = glob.glob("%s/*.changes" % (Cnf["Dir::Queue::%s" % (directory)]))
-        process_changes_files(changes_files, directory, f)
+    session = DBConn().session()
+
+    for queue_name in queue_names:
+        queue = session.query(PolicyQueue).filter_by(queue_name=queue_name).first()
+        if queue is not None:
+            process_queue(queue, f, rrd_dir)
+        else:
+            utils.warn("Cannot find queue %s" % queue_name)
 
     if Cnf.has_key("Queue-Report::Options::822"):
         f.close()
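
Aside: with the refactoring above, the per-queue driver boils down to looking a
PolicyQueue row up by name and handing it to process_queue. A sketch of that call
path in isolation (it presumes the same initialisation main() performs, i.e. a
configured dak database and a populated Cnf; the queue name is only an example;
the log argument matters only when 822 output is configured):

    session = DBConn().session()
    queue = session.query(PolicyQueue).filter_by(queue_name='new').first()
    if queue is not None:
        process_queue(queue, None, rrd_dir=None)   # no rrd updates
    else:
        utils.warn("Cannot find queue new")
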