from copy import copy
import glob, os, stat, sys, time
import apt_pkg
+try:
+ import rrdtool
+except ImportError:
+ pass
from daklib import utils
-from daklib.queue import Upload
-from daklib.dbconn import DBConn, has_new_comment, DBChange
+from daklib.dbconn import DBConn, DBSource, has_new_comment, PolicyQueue, \
+ get_uid_from_fingerprint
from daklib.textutils import fix_maintainer
from daklib.dak_exceptions import *
def usage(exit_code=0):
print """Usage: dak queue-report
-Prints a report of packages in queue directories (usually new and byhand).
+Prints a report of packages in queues (usually new and byhand).
-h, --help show this help and exit.
-8, --822 writes 822 formated output to the location set in dak.conf
-s, --sort=key sort output according to key, see below.
-a, --age=key if using sort by age, how should time be treated?
If not given a default of hours will be used.
+ -r, --rrd=key Directory where rrd files to be updated are stored
-d, --directories=key A comma seperated list of queues to be scanned
Sorting Keys: ao=age, oldest first. an=age, newest first.
<title>
Debian NEW and BYHAND Packages
</title>
+ <script type="text/javascript">
+ //<![CDATA[
+ function togglePkg() {
+ var children = document.getElementsByTagName("*");
+ for (var i = 0; i < children.length; i++) {
+ if(!children[i].hasAttribute("class"))
+ continue;
+ c = children[i].getAttribute("class").split(" ");
+ for(var j = 0; j < c.length; j++) {
+ if(c[j] == "binNEW") {
+ if (children[i].style.display == '')
+ children[i].style.display = 'none';
+ else children[i].style.display = '';
+ }
+ }
+ }
+ }
+ //]]>
+ </script>
</head>
<body id="NEW">
<div id="logo">
def footer():
print "<p class=\"timestamp\">Timestamp: %s (UTC)</p>" % (time.strftime("%d.%m.%Y / %H:%M:%S", time.gmtime()))
+ print "<p class=\"timestamp\">There are <a href=\"/stat.html\">graphs about the queues</a> available.</p>"
print """
<div class="footer">
"""
def table_header(type, source_count, total_count):
- print "<h1>Summary for: %s</h1>" % (type)
+ print "<h1 class='binNEW'>Summary for: %s</h1>" % (type)
+ print "<h1 class='binNEW' style='display: none'>Summary for: binary-%s only</h1>" % (type)
print """
+ <p class="togglepkg" onclick="togglePkg()">Click to toggle all/binary-NEW packages</p>
<table class="NEW">
- <caption>
+ <caption class="binNEW">
"""
print "Package count in <strong>%s</strong>: <em>%s</em> | Total Package count: <em>%s</em>" % (type, source_count, total_count)
print """
global row_number
trclass = "sid"
+ session = DBConn().session()
for dist in distribution:
if dist == "experimental":
trclass = "exp"
+ if not len(session.query(DBSource).filter_by(source = source).all()):
+ trclass += " binNEW"
+ session.commit()
+
if row_number % 2 != 0:
print "<tr class=\"%s even\">" % (trclass)
else:
print "<tr class=\"%s odd\">" % (trclass)
- print "<td class=\"package\">%s</td>" % (source)
+ if "binNEW" in trclass:
+ print "<td class=\"package\">%s</td>" % (source)
+ else:
+ print "<td class=\"package\"><a href=\"http://packages.qa.debian.org/%(source)s\">%(source)s</a></td>" % {'source': source}
print "<td class=\"version\">"
for vers in version.split():
- print "<a href=\"/new/%s_%s.html\">%s</a><br/>" % (source, utils.html_escape(vers), utils.html_escape(vers))
+ print "<a href=\"new/%s_%s.html\">%s</a><br/>" % (source, utils.html_escape(vers), utils.html_escape(vers))
print "</td>"
print "<td class=\"arch\">%s</td>" % (arch)
print "<td class=\"distribution\">"
if sponsor:
try:
(login, domain) = sponsor.split("@", 1)
- print "<span class=\"sponsor\">Sponsor: <a href=\"http://qa.debian.org/developer.php?login=%s\">%s</a></span>@debian.org<br/>" % (utils.html_escape(login), utils.html_escape(login))
- except Exception, e:
+ print "<span class=\"sponsor\">Sponsor: <a href=\"http://qa.debian.org/developer.php?login=%s\">%s</a>@debian.org</span><br/>" % (utils.html_escape(login), utils.html_escape(login))
+ except Exception as e:
pass
print "<span class=\"signature\">Fingerprint: %s</span>" % (fingerprint)
############################################################
-def process_changes_files(changes_files, type, log):
- session = DBConn().session()
- msg = ""
- cache = {}
- # Read in all the .changes files
- for filename in changes_files:
+def update_graph_database(rrd_dir, type, n_source, n_binary):
+ # Record the current queue counts (number of source groups and total
+ # number of uploads) in the per-queue RRD time-series file so that
+ # queue-size graphs can be rendered from it.  Does nothing when no
+ # rrd directory is configured; a missing rrdtool module (import is
+ # guarded at the top of the file) is swallowed by the NameError
+ # handler at the bottom.
+ if not rrd_dir:
+ return
+
+ rrd_file = os.path.join(rrd_dir, type.lower()+'.rrd')
+ # "N" timestamp means "now"; ds0 carries the source count, ds1 the
+ # binary count (see the DS definitions in the create spec below).
+ update = [rrd_file, "N:%s:%s" % (n_source, n_binary)]
+
+ try:
+ rrdtool.update(*update)
+ except rrdtool.error:
+ # The update presumably failed because the rrd file does not exist
+ # yet -- TODO confirm: rrdtool.error is also raised for other
+ # failures.  Build a create spec (5-minute step, two GAUGE data
+ # sources, AVERAGE and MAX archives at several resolutions) and
+ # retry the update once.
+ create = [rrd_file]+"""
+--step
+300
+--start
+0
+DS:ds0:GAUGE:7200:0:1000
+DS:ds1:GAUGE:7200:0:1000
+RRA:AVERAGE:0.5:1:599
+RRA:AVERAGE:0.5:6:700
+RRA:AVERAGE:0.5:24:775
+RRA:AVERAGE:0.5:288:795
+RRA:MAX:0.5:1:600
+RRA:MAX:0.5:6:700
+RRA:MAX:0.5:24:775
+RRA:MAX:0.5:288:795
+""".strip().split("\n")
try:
-            u = Upload()
-            u.load_changes(filename)
-            cache[filename] = copy(u.pkg.changes)
-            cache[filename]["filename"] = filename
-        except Exception, e:
-            print "WARNING: Exception %s" % e
-            continue
+ rc = rrdtool.create(*create)
+ ru = rrdtool.update(*update)
+ except rrdtool.error as e:
+ # Creation or the retried update failed as well: warn and carry
+ # on -- a missing graph is not fatal for the report.
+ print('warning: queue_report: rrdtool error, skipping %s.rrd: %s' % (type, e))
+ except NameError:
+ # rrdtool module was never imported (optional dependency) -- the
+ # graphs are simply not updated.
+ pass
+
+############################################################
+
+def process_queue(queue, log, rrd_dir):
+ msg = ""
+ type = queue.queue_name
+
# Divide the .changes into per-source groups
per_source = {}
- for filename in cache.keys():
- source = cache[filename]["source"]
- if not per_source.has_key(source):
+ for upload in queue.uploads:
+ source = upload.changes.source
+ if source not in per_source:
per_source[source] = {}
per_source[source]["list"] = []
- per_source[source]["list"].append(cache[filename])
+ per_source[source]["list"].append(upload)
# Determine oldest time and have note status for each source group
for source in per_source.keys():
source_list = per_source[source]["list"]
first = source_list[0]
- oldest = os.stat(first["filename"])[stat.ST_MTIME]
+ oldest = time.mktime(first.changes.created.timetuple())
have_note = 0
for d in per_source[source]["list"]:
- mtime = os.stat(d["filename"])[stat.ST_MTIME]
+ mtime = time.mktime(d.changes.created.timetuple())
if Cnf.has_key("Queue-Report::Options::New"):
if mtime > oldest:
oldest = mtime
else:
if mtime < oldest:
oldest = mtime
- have_note += has_new_comment(d["source"], d["version"])
+ have_note += has_new_comment(d.changes.source, d.changes.version)
per_source[source]["oldest"] = oldest
if not have_note:
per_source[source]["note_state"] = 0; # none
per_source_items = per_source.items()
per_source_items.sort(sg_compare)
+ update_graph_database(rrd_dir, type, len(per_source_items), len(queue.uploads))
+
entries = []
max_source_len = 0
max_version_len = 0
changeby = {}
changedby=""
sponsor=""
- filename=i[1]["list"][0]["filename"]
+ filename=i[1]["list"][0].changes.changesname
last_modified = time.time()-i[1]["oldest"]
- source = i[1]["list"][0]["source"]
+ source = i[1]["list"][0].changes.source
if len(source) > max_source_len:
max_source_len = len(source)
- arches = {}
- versions = {}
+ binary_list = i[1]["list"][0].binaries
+ binary = ', '.join([ b.package for b in binary_list ])
+ arches = set()
+ versions = set()
for j in i[1]["list"]:
- changesbase = os.path.basename(j["filename"])
- try:
- dbc = session.query(DBChange).filter_by(changesname=changesbase).one()
- except Exception, e:
- print "Can't find changes file in NEW for %s (%s)" % (changesbase, e)
- dbc = None
+ dbc = j.changes
+ changesbase = dbc.changesname
if Cnf.has_key("Queue-Report::Options::New") or Cnf.has_key("Queue-Report::Options::822"):
try:
(maintainer["maintainer822"], maintainer["maintainer2047"],
maintainer["maintainername"], maintainer["maintaineremail"]) = \
- fix_maintainer (j["maintainer"])
- except ParseMaintError, msg:
+ fix_maintainer (dbc.maintainer)
+ except ParseMaintError as msg:
print "Problems while parsing maintainer address\n"
maintainer["maintainername"] = "Unknown"
maintainer["maintaineremail"] = "Unknown"
try:
(changeby["changedby822"], changeby["changedby2047"],
changeby["changedbyname"], changeby["changedbyemail"]) = \
- fix_maintainer (j["changed-by"])
- except ParseMaintError, msg:
+ fix_maintainer (dbc.changedby)
+ except ParseMaintError as msg:
(changeby["changedby822"], changeby["changedby2047"],
changeby["changedbyname"], changeby["changedbyemail"]) = \
("", "", "", "")
changedby="%s:%s" % (changeby["changedbyname"], changeby["changedbyemail"])
- distribution=j["distribution"].keys()
- closes=j["closes"].keys()
- if dbc:
- fingerprint = dbc.fingerprint
-
- # TODO: This won't work now as it never gets set
- # Fix so that we compare the changed-by/maintainer and the signing key
- # Should probably be done somewhere more central
- #if j.has_key("sponsoremail"):
- # sponsor=j["sponsoremail"]
-
- for arch in j["architecture"].keys():
- arches[arch] = ""
- version = j["version"]
- versions[version] = ""
- arches_list = arches.keys()
+ distribution=dbc.distribution.split()
+ closes=dbc.closes
+
+ fingerprint = dbc.fingerprint
+ sponsor_name = get_uid_from_fingerprint(fingerprint).name
+ sponsor_email = get_uid_from_fingerprint(fingerprint).uid + "@debian.org"
+ if sponsor_name != maintainer["maintainername"] and sponsor_name != changeby["changedbyname"] and \
+ sponsor_email != maintainer["maintaineremail"] and sponsor_name != changeby["changedbyemail"]:
+ sponsor = sponsor_email
+
+ for arch in dbc.architecture.split():
+ arches.add(arch)
+ versions.add(dbc.version)
+ arches_list = list(arches)
arches_list.sort(utils.arch_compare_sw)
arch_list = " ".join(arches_list)
- version_list = " ".join(versions.keys())
+ version_list = " ".join(versions)
if len(version_list) > max_version_len:
max_version_len = len(version_list)
if len(arch_list) > max_arch_len:
note = " | [N]"
else:
note = ""
- entries.append([source, version_list, arch_list, note, last_modified, maint, distribution, closes, fingerprint, sponsor, changedby, filename])
+ entries.append([source, binary, version_list, arch_list, note, last_modified, maint, distribution, closes, fingerprint, sponsor, changedby, filename])
# direction entry consists of "Which field, which direction, time-consider" where
# time-consider says how we should treat last_modified. Thats all.
age = Cnf["Queue-Report::Options::Age"]
if Cnf.has_key("Queue-Report::Options::New"):
# If we produce html we always have oldest first.
- direction.append([4,-1,"ao"])
+ direction.append([5,-1,"ao"])
else:
if Cnf.has_key("Queue-Report::Options::Sort"):
for i in Cnf["Queue-Report::Options::Sort"].split(","):
if i == "ao":
# Age, oldest first.
- direction.append([4,-1,age])
+ direction.append([5,-1,age])
elif i == "an":
# Age, newest first.
- direction.append([4,1,age])
+ direction.append([5,1,age])
elif i == "na":
# Name, Ascending.
direction.append([0,1,0])
direction.append([0,-1,0])
elif i == "nl":
# Notes last.
- direction.append([3,1,0])
+ direction.append([4,1,0])
elif i == "nf":
# Notes first.
- direction.append([3,-1,0])
+ direction.append([4,-1,0])
entries.sort(lambda x, y: sortfunc(x, y))
# Yes, in theory you can add several sort options at the commandline with. But my mind is to small
# at the moment to come up with a real good sorting function that considers all the sidesteps you
if Cnf.has_key("Queue-Report::Options::822"):
# print stuff out in 822 format
for entry in entries:
- (source, version_list, arch_list, note, last_modified, maint, distribution, closes, fingerprint, sponsor, changedby, changes_file) = entry
+ (source, binary, version_list, arch_list, note, last_modified, maint, distribution, closes, fingerprint, sponsor, changedby, changes_file) = entry
# We'll always have Source, Version, Arch, Mantainer, and Dist
# For the rest, check to see if we have them, then print them out
log.write("Source: " + source + "\n")
+ log.write("Binary: " + binary + "\n")
log.write("Version: " + version_list + "\n")
log.write("Architectures: ")
log.write( (", ".join(arch_list.split(" "))) + "\n")
(name, mail) = changedby.split(":", 1)
log.write("Changed-By: " + name + " <"+mail+">" + "\n")
if sponsor:
- log.write("Sponsored-By: " + sponsor + "\n")
+ log.write("Sponsored-By: " + "@".join(sponsor.split("@")[:2]) + "\n")
log.write("Distribution:")
for dist in distribution:
log.write(" " + dist)
log.write("\n")
if Cnf.has_key("Queue-Report::Options::New"):
- direction.append([4,1,"ao"])
+ direction.append([5,1,"ao"])
entries.sort(lambda x, y: sortfunc(x, y))
# Output for a html file. First table header. then table_footer.
# Any line between them is then a <tr> printed from subroutine table_row.
source_count = len(per_source_items)
table_header(type.upper(), source_count, total_count)
for entry in entries:
- (source, version_list, arch_list, note, last_modified, maint, distribution, closes, fingerprint, sponsor, changedby, undef) = entry
+ (source, binary, version_list, arch_list, note, last_modified, maint, distribution, closes, fingerprint, sponsor, changedby, undef) = entry
table_row(source, version_list, arch_list, time_pp(last_modified), maint, distribution, closes, fingerprint, sponsor, changedby)
table_footer(type.upper())
elif not Cnf.has_key("Queue-Report::Options::822"):
msg = ""
for entry in entries:
- (source, version_list, arch_list, note, last_modified, undef, undef, undef, undef, undef, undef, undef) = entry
+ (source, binary, version_list, arch_list, note, last_modified, undef, undef, undef, undef, undef, undef, undef) = entry
msg += format % (source, version_list, arch_list, note, time_pp(last_modified))
if msg:
- total_count = len(changes_files)
+ total_count = len(queue.uploads)
source_count = len(per_source_items)
print type.upper()
print "-"*len(type)
print "%s %s source package%s / %s %s package%s in total." % (source_count, type, plural(source_count), total_count, type, plural(total_count))
print
-
################################################################################
def main():
('8','822',"Queue-Report::Options::822"),
('s',"sort","Queue-Report::Options::Sort", "HasArg"),
('a',"age","Queue-Report::Options::Age", "HasArg"),
+ ('r',"rrd","Queue-Report::Options::Rrd", "HasArg"),
('d',"directories","Queue-Report::Options::Directories", "HasArg")]
for i in [ "help" ]:
if not Cnf.has_key("Queue-Report::Options::%s" % (i)):
Cnf["Queue-Report::Options::%s" % (i)] = ""
- apt_pkg.ParseCommandLine(Cnf, Arguments, sys.argv)
+ apt_pkg.parse_commandline(Cnf, Arguments, sys.argv)
- Options = Cnf.SubTree("Queue-Report::Options")
+ Options = Cnf.subtree("Queue-Report::Options")
if Options["Help"]:
usage()
if Cnf.has_key("Queue-Report::Options::New"):
header()
- # Initialize db so we can get the NEW comments
- dbconn = DBConn()
-
- directories = [ ]
+ queue_names = []
if Cnf.has_key("Queue-Report::Options::Directories"):
for i in Cnf["Queue-Report::Options::Directories"].split(","):
- directories.append(i)
+ queue_names.append(i)
elif Cnf.has_key("Queue-Report::Directories"):
- directories = Cnf.ValueList("Queue-Report::Directories")
+ queue_names = Cnf.value_list("Queue-Report::Directories")
else:
- directories = [ "byhand", "new" ]
+ queue_names = [ "byhand", "new" ]
+
+ if Cnf.has_key("Queue-Report::Options::Rrd"):
+ rrd_dir = Cnf["Queue-Report::Options::Rrd"]
+ elif Cnf.has_key("Dir::Rrd"):
+ rrd_dir = Cnf["Dir::Rrd"]
+ else:
+ rrd_dir = None
f = None
if Cnf.has_key("Queue-Report::Options::822"):
# Open the report file
f = open(Cnf["Queue-Report::ReportLocations::822Location"], "w")
- for directory in directories:
- changes_files = glob.glob("%s/*.changes" % (Cnf["Dir::Queue::%s" % (directory)]))
- process_changes_files(changes_files, directory, f)
+ session = DBConn().session()
+
+ for queue_name in queue_names:
+ queue = session.query(PolicyQueue).filter_by(queue_name=queue_name).first()
+ if queue is not None:
+ process_queue(queue, f, rrd_dir)
+ else:
+ utils.warn("Cannot find queue %s" % queue_name)
if Cnf.has_key("Queue-Report::Options::822"):
f.close()