Restore correct visualization of binary-NEW uploads only
dak/queue_report.py
index be4787219e9fde8a5963fa3fb665637c90a0fd86..a61f99de768ebe37bc43d9f94ea6a76319c80250 100755
 from copy import copy
 import glob, os, stat, sys, time
 import apt_pkg
+try:
+    import rrdtool
+except ImportError:
+    pass
 
 from daklib import utils
-from daklib.queue import Upload
-from daklib.dbconn import DBConn, has_new_comment, DBChange, get_uid_from_fingerprint
+from daklib.dbconn import DBConn, DBSource, has_new_comment, PolicyQueue, \
+                          get_uid_from_fingerprint
 from daklib.textutils import fix_maintainer
 from daklib.dak_exceptions import *
 
@@ -52,7 +56,7 @@ row_number = 0
 
 def usage(exit_code=0):
     print """Usage: dak queue-report
-Prints a report of packages in queue directories (usually new and byhand).
+Prints a report of packages in queues (usually new and byhand).
 
   -h, --help                show this help and exit.
   -8, --822                 writes 822 formated output to the location set in dak.conf
@@ -60,6 +64,7 @@ Prints a report of packages in queue directories (usually new and byhand).
   -s, --sort=key            sort output according to key, see below.
   -a, --age=key             if using sort by age, how should time be treated?
                             If not given a default of hours will be used.
+  -r, --rrd=key             Directory where rrd files to be updated are stored
   -d, --directories=key     A comma seperated list of queues to be scanned
 
      Sorting Keys: ao=age,   oldest first.   an=age,   newest first.
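(For orientation only: an example invocation combining the new --rrd option with the existing flags might look like the line below; the rrd directory is a placeholder, not a path from dak's configuration.)

    dak queue-report --directories=new,byhand --sort=ao --rrd=/path/to/rrd
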
@@ -172,6 +177,25 @@ def header():
     <title>
       Debian NEW and BYHAND Packages
     </title>
+    <script type="text/javascript">
+    //<![CDATA[
+    function togglePkg() {
+        var children = document.getElementsByTagName("*");
+        for (var i = 0; i < children.length; i++) {
+            if(!children[i].hasAttribute("class"))
+                continue;
+            c = children[i].getAttribute("class").split(" ");
+            for(var j = 0; j < c.length; j++) {
+                if(c[j] == "sourceNEW") {
+                    if (children[i].style.display == '')
+                        children[i].style.display = 'none';
+                    else children[i].style.display = '';
+                }
+            }
+        }
+    }
+    //]]>
+    </script>
   </head>
   <body id="NEW">
     <div id="logo">
@@ -200,6 +224,7 @@ def header():
 
 def footer():
     print "<p class=\"timestamp\">Timestamp: %s (UTC)</p>" % (time.strftime("%d.%m.%Y / %H:%M:%S", time.gmtime()))
+    print "<p class=\"timestamp\">There are <a href=\"/stat.html\">graphs about the queues</a> available.</p>"
 
     print """
     <div class="footer">
@@ -219,10 +244,12 @@ def footer():
     """
 
 def table_header(type, source_count, total_count):
-    print "<h1>Summary for: %s</h1>" % (type)
+    print "<h1 class='sourceNEW'>Summary for: %s</h1>" % (type)
+    print "<h1 class='sourceNEW' style='display: none'>Summary for: binary-%s only</h1>" % (type)
     print """
+    <p class="togglepkg" onclick="togglePkg()">Click to toggle all/binary-NEW packages</p>
     <table class="NEW">
-      <caption>
+      <caption class="sourceNEW">
     """
     print "Package count in <strong>%s</strong>: <em>%s</em>&nbsp;|&nbsp; Total Package count: <em>%s</em>" % (type, source_count, total_count)
     print """
@@ -250,19 +277,31 @@ def table_row(source, version, arch, last_mod, maint, distribution, closes, fing
     global row_number
 
     trclass = "sid"
+    session = DBConn().session()
     for dist in distribution:
         if dist == "experimental":
             trclass = "exp"
 
+    query = '''SELECT source
+               FROM source_suite
+               WHERE source = :source
+               AND suite_name IN ('unstable', 'experimental')'''
+    if not session.execute(query, {'source': source}).rowcount:
+        trclass += " sourceNEW"
+    session.commit()
+
     if row_number % 2 != 0:
         print "<tr class=\"%s even\">" % (trclass)
     else:
         print "<tr class=\"%s odd\">" % (trclass)
 
-    print "<td class=\"package\">%s</td>" % (source)
+    if "sourceNEW" in trclass:
+        print "<td class=\"package\">%s</td>" % (source)
+    else:
+        print "<td class=\"package\"><a href=\"http://packages.qa.debian.org/%(source)s\">%(source)s</a></td>" % {'source': source}
     print "<td class=\"version\">"
     for vers in version.split():
-        print "<a href=\"/new/%s_%s.html\">%s</a><br/>" % (source, utils.html_escape(vers), utils.html_escape(vers))
+        print "<a href=\"new/%s_%s.html\">%s</a><br/>" % (source, utils.html_escape(vers), utils.html_escape(vers))
     print "</td>"
     print "<td class=\"arch\">%s</td>" % (arch)
     print "<td class=\"distribution\">"
@@ -280,8 +319,8 @@ def table_row(source, version, arch, last_mod, maint, distribution, closes, fing
     if sponsor:
         try:
             (login, domain) = sponsor.split("@", 1)
-            print "<span class=\"sponsor\">Sponsor: <a href=\"http://qa.debian.org/developer.php?login=%s\">%s</a></span>@debian.org<br/>" % (utils.html_escape(login), utils.html_escape(login))
-        except Exception, e:
+            print "<span class=\"sponsor\">Sponsor: <a href=\"http://qa.debian.org/developer.php?login=%s\">%s</a>@debian.org</span><br/>" % (utils.html_escape(login), utils.html_escape(login))
+        except Exception as e:
             pass
 
     print "<span class=\"signature\">Fingerprint: %s</span>" % (fingerprint)
@@ -295,43 +334,69 @@ def table_row(source, version, arch, last_mod, maint, distribution, closes, fing
 
 ############################################################
 
-def process_changes_files(changes_files, type, log):
-    session = DBConn().session()
-    msg = ""
-    cache = {}
-    # Read in all the .changes files
-    for filename in changes_files:
+def update_graph_database(rrd_dir, type, n_source, n_binary):
+    if not rrd_dir:
+        return
+
+    rrd_file = os.path.join(rrd_dir, type.lower()+'.rrd')
+    update = [rrd_file, "N:%s:%s" % (n_source, n_binary)]
+
+    try:
+        rrdtool.update(*update)
+    except rrdtool.error:
+        create = [rrd_file]+"""
+--step
+300
+--start
+0
+DS:ds0:GAUGE:7200:0:1000
+DS:ds1:GAUGE:7200:0:1000
+RRA:AVERAGE:0.5:1:599
+RRA:AVERAGE:0.5:6:700
+RRA:AVERAGE:0.5:24:775
+RRA:AVERAGE:0.5:288:795
+RRA:MAX:0.5:1:600
+RRA:MAX:0.5:6:700
+RRA:MAX:0.5:24:775
+RRA:MAX:0.5:288:795
+""".strip().split("\n")
         try:
-            u = Upload()
-            u.load_changes(filename)
-            cache[filename] = copy(u.pkg.changes)
-            cache[filename]["filename"] = filename
-        except Exception, e:
-            print "WARNING: Exception %s" % e
-            continue
+            rc = rrdtool.create(*create)
+            ru = rrdtool.update(*update)
+        except rrdtool.error as e:
+            print('warning: queue_report: rrdtool error, skipping %s.rrd: %s' % (type, e))
+    except NameError:
+        pass
+
+############################################################
+
+def process_queue(queue, log, rrd_dir):
+    msg = ""
+    type = queue.queue_name
+
     # Divide the .changes into per-source groups
     per_source = {}
-    for filename in cache.keys():
-        source = cache[filename]["source"]
-        if not per_source.has_key(source):
+    for upload in queue.uploads:
+        source = upload.changes.source
+        if source not in per_source:
             per_source[source] = {}
             per_source[source]["list"] = []
-        per_source[source]["list"].append(cache[filename])
+        per_source[source]["list"].append(upload)
     # Determine oldest time and have note status for each source group
     for source in per_source.keys():
         source_list = per_source[source]["list"]
         first = source_list[0]
-        oldest = os.stat(first["filename"])[stat.ST_MTIME]
+        oldest = time.mktime(first.changes.created.timetuple())
         have_note = 0
         for d in per_source[source]["list"]:
-            mtime = os.stat(d["filename"])[stat.ST_MTIME]
+            mtime = time.mktime(d.changes.created.timetuple())
             if Cnf.has_key("Queue-Report::Options::New"):
                 if mtime > oldest:
                     oldest = mtime
             else:
                 if mtime < oldest:
                     oldest = mtime
-            have_note += has_new_comment(d["source"], d["version"])
+            have_note += has_new_comment(d.changes.source, d.changes.version)
         per_source[source]["oldest"] = oldest
         if not have_note:
             per_source[source]["note_state"] = 0; # none
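(For orientation only: the RRD file written by update_graph_database() above stores the per-queue counts as ds0 (source package groups) and ds1 (total uploads) at a 300-second step. A minimal sketch of how such a file could be plotted with the same python-rrdtool binding is shown below; the output filename, time range and colours are assumptions for illustration, not part of dak.)

    import rrdtool

    # Illustrative sketch: plot the last week of queue sizes from new.rrd,
    # as created by update_graph_database(). Filename and colours are
    # assumptions, not taken from dak.
    rrdtool.graph('new.png',
                  '--start', '-1w',
                  '--title', 'NEW queue',
                  'DEF:src=new.rrd:ds0:AVERAGE',
                  'DEF:bin=new.rrd:ds1:AVERAGE',
                  'LINE1:src#0000FF:source packages',
                  'LINE1:bin#FF0000:uploads')
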
@@ -342,6 +407,8 @@ def process_changes_files(changes_files, type, log):
     per_source_items = per_source.items()
     per_source_items.sort(sg_compare)
 
+    update_graph_database(rrd_dir, type, len(per_source_items), len(queue.uploads))
+
     entries = []
     max_source_len = 0
     max_version_len = 0
@@ -355,29 +422,25 @@ def process_changes_files(changes_files, type, log):
         changeby = {}
         changedby=""
         sponsor=""
-        filename=i[1]["list"][0]["filename"]
+        filename=i[1]["list"][0].changes.changesname
         last_modified = time.time()-i[1]["oldest"]
-        source = i[1]["list"][0]["source"]
+        source = i[1]["list"][0].changes.source
         if len(source) > max_source_len:
             max_source_len = len(source)
-        binary_list = i[1]["list"][0]["binary"].keys()
-        binary = ', '.join(binary_list)
-        arches = {}
-        versions = {}
+        binary_list = i[1]["list"][0].binaries
+        binary = ', '.join([ b.package for b in binary_list ])
+        arches = set()
+        versions = set()
         for j in i[1]["list"]:
-            changesbase = os.path.basename(j["filename"])
-            try:
-                dbc = session.query(DBChange).filter_by(changesname=changesbase).one()
-            except Exception, e:
-                print "Can't find changes file in NEW for %s (%s)" % (changesbase, e)
-                dbc = None
+            dbc = j.changes
+            changesbase = dbc.changesname
 
             if Cnf.has_key("Queue-Report::Options::New") or Cnf.has_key("Queue-Report::Options::822"):
                 try:
                     (maintainer["maintainer822"], maintainer["maintainer2047"],
                     maintainer["maintainername"], maintainer["maintaineremail"]) = \
-                    fix_maintainer (j["maintainer"])
-                except ParseMaintError, msg:
+                    fix_maintainer (dbc.maintainer)
+                except ParseMaintError as msg:
                     print "Problems while parsing maintainer address\n"
                     maintainer["maintainername"] = "Unknown"
                     maintainer["maintaineremail"] = "Unknown"
@@ -386,31 +449,30 @@ def process_changes_files(changes_files, type, log):
                 try:
                     (changeby["changedby822"], changeby["changedby2047"],
                      changeby["changedbyname"], changeby["changedbyemail"]) = \
-                     fix_maintainer (j["changed-by"])
-                except ParseMaintError, msg:
+                     fix_maintainer (dbc.changedby)
+                except ParseMaintError as msg:
                     (changeby["changedby822"], changeby["changedby2047"],
                      changeby["changedbyname"], changeby["changedbyemail"]) = \
                      ("", "", "", "")
                 changedby="%s:%s" % (changeby["changedbyname"], changeby["changedbyemail"])
 
-                distribution=j["distribution"].keys()
-                closes=j["closes"].keys()
-                if dbc:
-                    fingerprint = dbc.fingerprint
-                    sponsor_name = get_uid_from_fingerprint(fingerprint).name
-                    sponsor_email = get_uid_from_fingerprint(fingerprint).uid + "@debian.org"
-                    if sponsor_name != maintainer["maintainername"] and sponsor_name != changeby["changedbyname"] and \
-                    sponsor_email != maintainer["maintaineremail"] and sponsor_name != changeby["changedbyemail"]:
-                        sponsor = sponsor_email
-
-            for arch in j["architecture"].keys():
-                arches[arch] = ""
-            version = j["version"]
-            versions[version] = ""
-        arches_list = arches.keys()
+                distribution=dbc.distribution.split()
+                closes=dbc.closes
+
+                fingerprint = dbc.fingerprint
+                sponsor_name = get_uid_from_fingerprint(fingerprint).name
+                sponsor_email = get_uid_from_fingerprint(fingerprint).uid + "@debian.org"
+                if sponsor_name != maintainer["maintainername"] and sponsor_name != changeby["changedbyname"] and \
+                        sponsor_email != maintainer["maintaineremail"] and sponsor_name != changeby["changedbyemail"]:
+                    sponsor = sponsor_email
+
+            for arch in dbc.architecture.split():
+                arches.add(arch)
+            versions.add(dbc.version)
+        arches_list = list(arches)
         arches_list.sort(utils.arch_compare_sw)
         arch_list = " ".join(arches_list)
-        version_list = " ".join(versions.keys())
+        version_list = " ".join(versions)
         if len(version_list) > max_version_len:
             max_version_len = len(version_list)
         if len(arch_list) > max_arch_len:
@@ -480,7 +542,7 @@ def process_changes_files(changes_files, type, log):
                (name, mail) = changedby.split(":", 1)
                log.write("Changed-By: " + name + " <"+mail+">" + "\n")
             if sponsor:
-               log.write("Sponsored-By: " + sponsor + "\n")
+               log.write("Sponsored-By: " + "@".join(sponsor.split("@")[:2]) + "\n")
             log.write("Distribution:")
             for dist in distribution:
                log.write(" " + dist)
@@ -494,14 +556,15 @@ def process_changes_files(changes_files, type, log):
             log.write("Changes-File: " + os.path.basename(changes_file) + "\n")
             log.write("\n")
 
+    total_count = len(queue.uploads)
+    source_count = len(per_source_items)
+
     if Cnf.has_key("Queue-Report::Options::New"):
         direction.append([5,1,"ao"])
         entries.sort(lambda x, y: sortfunc(x, y))
     # Output for a html file. First table header. then table_footer.
     # Any line between them is then a <tr> printed from subroutine table_row.
         if len(entries) > 0:
-            total_count = len(changes_files)
-            source_count = len(per_source_items)
             table_header(type.upper(), source_count, total_count)
             for entry in entries:
                 (source, binary, version_list, arch_list, note, last_modified, maint, distribution, closes, fingerprint, sponsor, changedby, undef) = entry
@@ -517,8 +580,6 @@ def process_changes_files(changes_files, type, log):
             msg += format % (source, version_list, arch_list, note, time_pp(last_modified))
 
         if msg:
-            total_count = len(changes_files)
-            source_count = len(per_source_items)
             print type.upper()
             print "-"*len(type)
             print
@@ -526,7 +587,6 @@ def process_changes_files(changes_files, type, log):
             print "%s %s source package%s / %s %s package%s in total." % (source_count, type, plural(source_count), total_count, type, plural(total_count))
             print
 
-
 ################################################################################
 
 def main():
@@ -538,41 +598,51 @@ def main():
                  ('8','822',"Queue-Report::Options::822"),
                  ('s',"sort","Queue-Report::Options::Sort", "HasArg"),
                  ('a',"age","Queue-Report::Options::Age", "HasArg"),
+                 ('r',"rrd","Queue-Report::Options::Rrd", "HasArg"),
                  ('d',"directories","Queue-Report::Options::Directories", "HasArg")]
     for i in [ "help" ]:
         if not Cnf.has_key("Queue-Report::Options::%s" % (i)):
             Cnf["Queue-Report::Options::%s" % (i)] = ""
 
-    apt_pkg.ParseCommandLine(Cnf, Arguments, sys.argv)
+    apt_pkg.parse_commandline(Cnf, Arguments, sys.argv)
 
-    Options = Cnf.SubTree("Queue-Report::Options")
+    Options = Cnf.subtree("Queue-Report::Options")
     if Options["Help"]:
         usage()
 
     if Cnf.has_key("Queue-Report::Options::New"):
         header()
 
-    # Initialize db so we can get the NEW comments
-    dbconn = DBConn()
-
-    directories = [ ]
+    queue_names = []
 
     if Cnf.has_key("Queue-Report::Options::Directories"):
         for i in Cnf["Queue-Report::Options::Directories"].split(","):
-            directories.append(i)
+            queue_names.append(i)
     elif Cnf.has_key("Queue-Report::Directories"):
-        directories = Cnf.ValueList("Queue-Report::Directories")
+        queue_names = Cnf.value_list("Queue-Report::Directories")
     else:
-        directories = [ "byhand", "new" ]
+        queue_names = [ "byhand", "new" ]
+
+    if Cnf.has_key("Queue-Report::Options::Rrd"):
+        rrd_dir = Cnf["Queue-Report::Options::Rrd"]
+    elif Cnf.has_key("Dir::Rrd"):
+        rrd_dir = Cnf["Dir::Rrd"]
+    else:
+        rrd_dir = None
 
     f = None
     if Cnf.has_key("Queue-Report::Options::822"):
         # Open the report file
         f = open(Cnf["Queue-Report::ReportLocations::822Location"], "w")
 
-    for directory in directories:
-        changes_files = glob.glob("%s/*.changes" % (Cnf["Dir::Queue::%s" % (directory)]))
-        process_changes_files(changes_files, directory, f)
+    session = DBConn().session()
+
+    for queue_name in queue_names:
+        queue = session.query(PolicyQueue).filter_by(queue_name=queue_name).first()
+        if queue is not None:
+            process_queue(queue, f, rrd_dir)
+        else:
+            utils.warn("Cannot find queue %s" % queue_name)
 
     if Cnf.has_key("Queue-Report::Options::822"):
         f.close()