Restore correct visualization of binary-NEW uploads only
diff --git a/dak/queue_report.py b/dak/queue_report.py
index cc59a9f8e2e695784acc6e03b4606d0633a86c58..a61f99de768ebe37bc43d9f94ea6a76319c80250 100755
--- a/dak/queue_report.py
+++ b/dak/queue_report.py
 
 ################################################################################
 
-import copy, glob, os, stat, sys, time
+from copy import copy
+import glob, os, stat, sys, time
 import apt_pkg
-import cgi
-from daklib import queue
+try:
+    import rrdtool
+except ImportError:
+    pass
+
 from daklib import utils
+from daklib.dbconn import DBConn, DBSource, has_new_comment, PolicyQueue, \
+                          get_uid_from_fingerprint
+from daklib.textutils import fix_maintainer
 from daklib.dak_exceptions import *
 
 Cnf = None
-Upload = None
 direction = []
 row_number = 0
 
@@ -50,7 +56,7 @@ row_number = 0
 
 def usage(exit_code=0):
     print """Usage: dak queue-report
-Prints a report of packages in queue directories (usually new and byhand).
+Prints a report of packages in queues (usually new and byhand).
 
   -h, --help                show this help and exit.
   -8, --822                 writes 822 formated output to the location set in dak.conf
@@ -58,6 +64,7 @@ Prints a report of packages in queue directories (usually new and byhand).
   -s, --sort=key            sort output according to key, see below.
   -a, --age=key             if using sort by age, how should time be treated?
                             If not given a default of hours will be used.
+  -r, --rrd=key             Directory where rrd files to be updated are stored
   -d, --directories=key     A comma seperated list of queues to be scanned
 
      Sorting Keys: ao=age,   oldest first.   an=age,   newest first.
@@ -170,6 +177,25 @@ def header():
     <title>
       Debian NEW and BYHAND Packages
     </title>
+    <script type="text/javascript">
+    //<![CDATA[
+    function togglePkg() {
+        var children = document.getElementsByTagName("*");
+        for (var i = 0; i < children.length; i++) {
+            if(!children[i].hasAttribute("class"))
+                continue;
+            c = children[i].getAttribute("class").split(" ");
+            for(var j = 0; j < c.length; j++) {
+                if(c[j] == "sourceNEW") {
+                    if (children[i].style.display == '')
+                        children[i].style.display = 'none';
+                    else children[i].style.display = '';
+                }
+            }
+        }
+    }
+    //]]>
+    </script>
   </head>
   <body id="NEW">
     <div id="logo">
@@ -198,6 +224,7 @@ def header():
 
 def footer():
     print "<p class=\"timestamp\">Timestamp: %s (UTC)</p>" % (time.strftime("%d.%m.%Y / %H:%M:%S", time.gmtime()))
+    print "<p class=\"timestamp\">There are <a href=\"/stat.html\">graphs about the queues</a> available.</p>"
 
     print """
     <div class="footer">
@@ -217,10 +244,12 @@ def footer():
     """
 
 def table_header(type, source_count, total_count):
-    print "<h1>Summary for: %s</h1>" % (type)
+    print "<h1 class='sourceNEW'>Summary for: %s</h1>" % (type)
+    print "<h1 class='sourceNEW' style='display: none'>Summary for: binary-%s only</h1>" % (type)
     print """
+    <p class="togglepkg" onclick="togglePkg()">Click to toggle all/binary-NEW packages</p>
     <table class="NEW">
-      <caption>
+      <caption class="sourceNEW">
     """
     print "Package count in <strong>%s</strong>: <em>%s</em>&nbsp;|&nbsp; Total Package count: <em>%s</em>" % (type, source_count, total_count)
     print """
@@ -248,19 +277,31 @@ def table_row(source, version, arch, last_mod, maint, distribution, closes, fing
     global row_number
 
     trclass = "sid"
+    session = DBConn().session()
     for dist in distribution:
         if dist == "experimental":
             trclass = "exp"
 
+    query = '''SELECT source
+               FROM source_suite
+               WHERE source = :source
+               AND suite_name IN ('unstable', 'experimental')'''
+    if not session.execute(query, {'source': source}).rowcount:
+        trclass += " sourceNEW"
+    session.commit()
+
     if row_number % 2 != 0:
         print "<tr class=\"%s even\">" % (trclass)
     else:
         print "<tr class=\"%s odd\">" % (trclass)
 
-    print "<td class=\"package\">%s</td>" % (source)
+    if "sourceNEW" in trclass:
+        print "<td class=\"package\">%s</td>" % (source)
+    else:
+        print "<td class=\"package\"><a href=\"http://packages.qa.debian.org/%(source)s\">%(source)s</a></td>" % {'source': source}
     print "<td class=\"version\">"
     for vers in version.split():
-        print "<a href=\"/new/%s_%s.html\">%s</a><br/>" % (source, utils.html_escape(vers), utils.html_escape(vers))
+        print "<a href=\"new/%s_%s.html\">%s</a><br/>" % (source, utils.html_escape(vers), utils.html_escape(vers))
     print "</td>"
     print "<td class=\"arch\">%s</td>" % (arch)
     print "<td class=\"distribution\">"
@@ -275,11 +316,12 @@ def table_row(source, version, arch, last_mod, maint, distribution, closes, fing
     (name, mail) = changedby.split(":", 1)
     print "<span class=\"changed-by\">Changed-By: <a href=\"http://qa.debian.org/developer.php?login=%s\">%s</a></span><br/>" % (utils.html_escape(mail), utils.html_escape(name))
 
-    try:
-        (login, domain) = sponsor.split("@", 1)
-        print "<span class=\"sponsor\">Sponsor: <a href=\"http://qa.debian.org/developer.php?login=%s\">%s</a></span>@debian.org<br/>" % (utils.html_escape(login), utils.html_escape(login))
-    except:
-        pass
+    if sponsor:
+        try:
+            (login, domain) = sponsor.split("@", 1)
+            print "<span class=\"sponsor\">Sponsor: <a href=\"http://qa.debian.org/developer.php?login=%s\">%s</a>@debian.org</span><br/>" % (utils.html_escape(login), utils.html_escape(login))
+        except Exception as e:
+            pass
 
     print "<span class=\"signature\">Fingerprint: %s</span>" % (fingerprint)
     print "</td>"
@@ -292,42 +334,69 @@ def table_row(source, version, arch, last_mod, maint, distribution, closes, fing
 
 ############################################################
 
-def process_changes_files(changes_files, type, log):
-    msg = ""
-    cache = {}
-    # Read in all the .changes files
-    for filename in changes_files:
+def update_graph_database(rrd_dir, type, n_source, n_binary):
+    if not rrd_dir:
+        return
+
+    rrd_file = os.path.join(rrd_dir, type.lower()+'.rrd')
+    update = [rrd_file, "N:%s:%s" % (n_source, n_binary)]
+
+    try:
+        rrdtool.update(*update)
+    except rrdtool.error:
+        create = [rrd_file]+"""
+--step
+300
+--start
+0
+DS:ds0:GAUGE:7200:0:1000
+DS:ds1:GAUGE:7200:0:1000
+RRA:AVERAGE:0.5:1:599
+RRA:AVERAGE:0.5:6:700
+RRA:AVERAGE:0.5:24:775
+RRA:AVERAGE:0.5:288:795
+RRA:MAX:0.5:1:600
+RRA:MAX:0.5:6:700
+RRA:MAX:0.5:24:775
+RRA:MAX:0.5:288:795
+""".strip().split("\n")
         try:
-            Upload.pkg.changes_file = filename
-            Upload.init_vars()
-            Upload.update_vars()
-            cache[filename] = copy.copy(Upload.pkg.changes)
-            cache[filename]["filename"] = filename
-        except:
-            break
+            rc = rrdtool.create(*create)
+            ru = rrdtool.update(*update)
+        except rrdtool.error as e:
+            print('warning: queue_report: rrdtool error, skipping %s.rrd: %s' % (type, e))
+    except NameError:
+        pass
+
+############################################################
+
+def process_queue(queue, log, rrd_dir):
+    msg = ""
+    type = queue.queue_name
+
     # Divide the .changes into per-source groups
     per_source = {}
-    for filename in cache.keys():
-        source = cache[filename]["source"]
-        if not per_source.has_key(source):
+    for upload in queue.uploads:
+        source = upload.changes.source
+        if source not in per_source:
             per_source[source] = {}
             per_source[source]["list"] = []
-        per_source[source]["list"].append(cache[filename])
+        per_source[source]["list"].append(upload)
     # Determine oldest time and have note status for each source group
     for source in per_source.keys():
         source_list = per_source[source]["list"]
         first = source_list[0]
-        oldest = os.stat(first["filename"])[stat.ST_MTIME]
+        oldest = time.mktime(first.changes.created.timetuple())
         have_note = 0
         for d in per_source[source]["list"]:
-            mtime = os.stat(d["filename"])[stat.ST_MTIME]
+            mtime = time.mktime(d.changes.created.timetuple())
             if Cnf.has_key("Queue-Report::Options::New"):
                 if mtime > oldest:
                     oldest = mtime
             else:
                 if mtime < oldest:
                     oldest = mtime
-            have_note += (d.has_key("process-new note"))
+            have_note += has_new_comment(d.changes.source, d.changes.version)
         per_source[source]["oldest"] = oldest
         if not have_note:
             per_source[source]["note_state"] = 0; # none
@@ -338,6 +407,8 @@ def process_changes_files(changes_files, type, log):
     per_source_items = per_source.items()
     per_source_items.sort(sg_compare)
 
+    update_graph_database(rrd_dir, type, len(per_source_items), len(queue.uploads))
+
     entries = []
     max_source_len = 0
     max_version_len = 0
@@ -351,20 +422,25 @@ def process_changes_files(changes_files, type, log):
         changeby = {}
         changedby=""
         sponsor=""
-        filename=i[1]["list"][0]["filename"]
+        filename=i[1]["list"][0].changes.changesname
         last_modified = time.time()-i[1]["oldest"]
-        source = i[1]["list"][0]["source"]
+        source = i[1]["list"][0].changes.source
         if len(source) > max_source_len:
             max_source_len = len(source)
-        arches = {}
-        versions = {}
+        binary_list = i[1]["list"][0].binaries
+        binary = ', '.join([ b.package for b in binary_list ])
+        arches = set()
+        versions = set()
         for j in i[1]["list"]:
+            dbc = j.changes
+            changesbase = dbc.changesname
+
             if Cnf.has_key("Queue-Report::Options::New") or Cnf.has_key("Queue-Report::Options::822"):
                 try:
                     (maintainer["maintainer822"], maintainer["maintainer2047"],
                     maintainer["maintainername"], maintainer["maintaineremail"]) = \
-                    utils.fix_maintainer (j["maintainer"])
-                except ParseMaintError, msg:
+                    fix_maintainer (dbc.maintainer)
+                except ParseMaintError as msg:
                     print "Problems while parsing maintainer address\n"
                     maintainer["maintainername"] = "Unknown"
                     maintainer["maintaineremail"] = "Unknown"
@@ -373,26 +449,30 @@ def process_changes_files(changes_files, type, log):
                 try:
                     (changeby["changedby822"], changeby["changedby2047"],
                      changeby["changedbyname"], changeby["changedbyemail"]) = \
-                     utils.fix_maintainer (j["changed-by"])
-                except ParseMaintError, msg:
+                     fix_maintainer (dbc.changedby)
+                except ParseMaintError as msg:
                     (changeby["changedby822"], changeby["changedby2047"],
                      changeby["changedbyname"], changeby["changedbyemail"]) = \
                      ("", "", "", "")
                 changedby="%s:%s" % (changeby["changedbyname"], changeby["changedbyemail"])
 
-                distribution=j["distribution"].keys()
-                closes=j["closes"].keys()
-                fingerprint=j["fingerprint"]
-                if j.has_key("sponsoremail"):
-                    sponsor=j["sponsoremail"]
-            for arch in j["architecture"].keys():
-                arches[arch] = ""
-            version = j["version"]
-            versions[version] = ""
-        arches_list = arches.keys()
+                distribution=dbc.distribution.split()
+                closes=dbc.closes
+
+                fingerprint = dbc.fingerprint
+                sponsor_name = get_uid_from_fingerprint(fingerprint).name
+                sponsor_email = get_uid_from_fingerprint(fingerprint).uid + "@debian.org"
+                if sponsor_name != maintainer["maintainername"] and sponsor_name != changeby["changedbyname"] and \
+                        sponsor_email != maintainer["maintaineremail"] and sponsor_name != changeby["changedbyemail"]:
+                    sponsor = sponsor_email
+
+            for arch in dbc.architecture.split():
+                arches.add(arch)
+            versions.add(dbc.version)
+        arches_list = list(arches)
         arches_list.sort(utils.arch_compare_sw)
         arch_list = " ".join(arches_list)
-        version_list = " ".join(versions.keys())
+        version_list = " ".join(versions)
         if len(version_list) > max_version_len:
             max_version_len = len(version_list)
         if len(arch_list) > max_arch_len:
@@ -401,7 +481,7 @@ def process_changes_files(changes_files, type, log):
             note = " | [N]"
         else:
             note = ""
-        entries.append([source, version_list, arch_list, note, last_modified, maint, distribution, closes, fingerprint, sponsor, changedby, filename])
+        entries.append([source, binary, version_list, arch_list, note, last_modified, maint, distribution, closes, fingerprint, sponsor, changedby, filename])
 
     # direction entry consists of "Which field, which direction, time-consider" where
     # time-consider says how we should treat last_modified. Thats all.
@@ -412,16 +492,16 @@ def process_changes_files(changes_files, type, log):
         age =  Cnf["Queue-Report::Options::Age"]
     if Cnf.has_key("Queue-Report::Options::New"):
     # If we produce html we always have oldest first.
-        direction.append([4,-1,"ao"])
+        direction.append([5,-1,"ao"])
     else:
         if Cnf.has_key("Queue-Report::Options::Sort"):
             for i in Cnf["Queue-Report::Options::Sort"].split(","):
                 if i == "ao":
                     # Age, oldest first.
-                    direction.append([4,-1,age])
+                    direction.append([5,-1,age])
                 elif i == "an":
                     # Age, newest first.
-                    direction.append([4,1,age])
+                    direction.append([5,1,age])
                 elif i == "na":
                     # Name, Ascending.
                     direction.append([0,1,0])
@@ -430,10 +510,10 @@ def process_changes_files(changes_files, type, log):
                     direction.append([0,-1,0])
                 elif i == "nl":
                     # Notes last.
-                    direction.append([3,1,0])
+                    direction.append([4,1,0])
                 elif i == "nf":
                     # Notes first.
-                    direction.append([3,-1,0])
+                    direction.append([4,-1,0])
     entries.sort(lambda x, y: sortfunc(x, y))
     # Yes, in theory you can add several sort options at the commandline with. But my mind is to small
     # at the moment to come up with a real good sorting function that considers all the sidesteps you
@@ -443,11 +523,12 @@ def process_changes_files(changes_files, type, log):
     if Cnf.has_key("Queue-Report::Options::822"):
         # print stuff out in 822 format
         for entry in entries:
-            (source, version_list, arch_list, note, last_modified, maint, distribution, closes, fingerprint, sponsor, changedby, changes_file) = entry
+            (source, binary, version_list, arch_list, note, last_modified, maint, distribution, closes, fingerprint, sponsor, changedby, changes_file) = entry
 
             # We'll always have Source, Version, Arch, Mantainer, and Dist
             # For the rest, check to see if we have them, then print them out
             log.write("Source: " + source + "\n")
+            log.write("Binary: " + binary + "\n")
             log.write("Version: " + version_list + "\n")
             log.write("Architectures: ")
             log.write( (", ".join(arch_list.split(" "))) + "\n")
@@ -461,7 +542,7 @@ def process_changes_files(changes_files, type, log):
                (name, mail) = changedby.split(":", 1)
                log.write("Changed-By: " + name + " <"+mail+">" + "\n")
             if sponsor:
-               log.write("Sponsored-By: " + sponsor + "\n")
+               log.write("Sponsored-By: " + "@".join(sponsor.split("@")[:2]) + "\n")
             log.write("Distribution:")
             for dist in distribution:
                log.write(" " + dist)
@@ -475,17 +556,18 @@ def process_changes_files(changes_files, type, log):
             log.write("Changes-File: " + os.path.basename(changes_file) + "\n")
             log.write("\n")
 
+    total_count = len(queue.uploads)
+    source_count = len(per_source_items)
+
     if Cnf.has_key("Queue-Report::Options::New"):
-        direction.append([4,1,"ao"])
+        direction.append([5,1,"ao"])
         entries.sort(lambda x, y: sortfunc(x, y))
     # Output for a html file. First table header. then table_footer.
     # Any line between them is then a <tr> printed from subroutine table_row.
         if len(entries) > 0:
-            total_count = len(changes_files)
-            source_count = len(per_source_items)
             table_header(type.upper(), source_count, total_count)
             for entry in entries:
-                (source, version_list, arch_list, note, last_modified, maint, distribution, closes, fingerprint, sponsor, changedby, undef) = entry
+                (source, binary, version_list, arch_list, note, last_modified, maint, distribution, closes, fingerprint, sponsor, changedby, undef) = entry
                 table_row(source, version_list, arch_list, time_pp(last_modified), maint, distribution, closes, fingerprint, sponsor, changedby)
             table_footer(type.upper())
     elif not Cnf.has_key("Queue-Report::Options::822"):
@@ -494,12 +576,10 @@ def process_changes_files(changes_files, type, log):
 
         msg = ""
         for entry in entries:
-            (source, version_list, arch_list, note, last_modified, undef, undef, undef, undef, undef, undef, undef) = entry
+            (source, binary, version_list, arch_list, note, last_modified, undef, undef, undef, undef, undef, undef, undef) = entry
             msg += format % (source, version_list, arch_list, note, time_pp(last_modified))
 
         if msg:
-            total_count = len(changes_files)
-            source_count = len(per_source_items)
             print type.upper()
             print "-"*len(type)
             print
@@ -507,11 +587,10 @@ def process_changes_files(changes_files, type, log):
             print "%s %s source package%s / %s %s package%s in total." % (source_count, type, plural(source_count), total_count, type, plural(total_count))
             print
 
-
 ################################################################################
 
 def main():
-    global Cnf, Upload
+    global Cnf
 
     Cnf = utils.get_conf()
     Arguments = [('h',"help","Queue-Report::Options::Help"),
@@ -519,40 +598,51 @@ def main():
                  ('8','822',"Queue-Report::Options::822"),
                  ('s',"sort","Queue-Report::Options::Sort", "HasArg"),
                  ('a',"age","Queue-Report::Options::Age", "HasArg"),
+                 ('r',"rrd","Queue-Report::Options::Rrd", "HasArg"),
                  ('d',"directories","Queue-Report::Options::Directories", "HasArg")]
     for i in [ "help" ]:
         if not Cnf.has_key("Queue-Report::Options::%s" % (i)):
             Cnf["Queue-Report::Options::%s" % (i)] = ""
 
-    apt_pkg.ParseCommandLine(Cnf, Arguments, sys.argv)
+    apt_pkg.parse_commandline(Cnf, Arguments, sys.argv)
 
-    Options = Cnf.SubTree("Queue-Report::Options")
+    Options = Cnf.subtree("Queue-Report::Options")
     if Options["Help"]:
         usage()
 
-    Upload = queue.Upload(Cnf)
-
     if Cnf.has_key("Queue-Report::Options::New"):
         header()
 
-    directories = [ ]
+    queue_names = []
 
     if Cnf.has_key("Queue-Report::Options::Directories"):
         for i in Cnf["Queue-Report::Options::Directories"].split(","):
-            directories.append(i)
+            queue_names.append(i)
     elif Cnf.has_key("Queue-Report::Directories"):
-        directories = Cnf.ValueList("Queue-Report::Directories")
+        queue_names = Cnf.value_list("Queue-Report::Directories")
+    else:
+        queue_names = [ "byhand", "new" ]
+
+    if Cnf.has_key("Queue-Report::Options::Rrd"):
+        rrd_dir = Cnf["Queue-Report::Options::Rrd"]
+    elif Cnf.has_key("Dir::Rrd"):
+        rrd_dir = Cnf["Dir::Rrd"]
     else:
-        directories = [ "byhand", "new" ]
+        rrd_dir = None
 
     f = None
     if Cnf.has_key("Queue-Report::Options::822"):
         # Open the report file
         f = open(Cnf["Queue-Report::ReportLocations::822Location"], "w")
 
-    for directory in directories:
-        changes_files = glob.glob("%s/*.changes" % (Cnf["Dir::Queue::%s" % (directory)]))
-        process_changes_files(changes_files, directory, f)
+    session = DBConn().session()
+
+    for queue_name in queue_names:
+        queue = session.query(PolicyQueue).filter_by(queue_name=queue_name).first()
+        if queue is not None:
+            process_queue(queue, f, rrd_dir)
+        else:
+            utils.warn("Cannot find queue %s" % queue_name)
 
     if Cnf.has_key("Queue-Report::Options::822"):
         f.close()