X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=dak%2Fqueue_report.py;h=a4b997af90e17e26cf242057c3a658d33437e281;hb=5a61a05250b1bf5d54661103e8999eb7c7fc9207;hp=ff3e082bbf73bb0eef76017b55697d410b8bbc97;hpb=ea17738cea735d71766bfc2bd082f59b3adf2dbb;p=dak.git

diff --git a/dak/queue_report.py b/dak/queue_report.py
index ff3e082b..a4b997af 100755
--- a/dak/queue_report.py
+++ b/dak/queue_report.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
-# Produces a report on NEW and BYHAND packages
+""" Produces a report on NEW and BYHAND packages """
 # Copyright (C) 2001, 2002, 2003, 2005, 2006 James Troup
 # This program is free software; you can redistribute it and/or modify
@@ -34,13 +34,23 @@
 ################################################################################
-import copy, glob, os, stat, sys, time
+from copy import copy
+import glob, os, stat, sys, time
 import apt_pkg
-import daklib.queue
-import daklib.utils
+try:
+    import rrdtool
+except ImportError:
+    pass
+
+from daklib import utils
+from daklib.dbconn import DBConn, DBSource, has_new_comment, PolicyQueue, \
+    get_uid_from_fingerprint
+from daklib.policy import PolicyQueueUploadHandler
+from daklib.textutils import fix_maintainer
+from daklib.utils import get_logins_from_ldap
+from daklib.dak_exceptions import *
 Cnf = None
-Upload = None
 direction = []
 row_number = 0
@@ -48,20 +58,23 @@ row_number = 0
 def usage(exit_code=0):
     print """Usage: dak queue-report
-Prints a report of packages in queue directories (usually new and byhand).
+Prints a report of packages in queues (usually new and byhand).
   -h, --help                show this help and exit.
+  -8, --822                 writes 822 formatted output to the location set in dak.conf
   -n, --new                 produce html-output
   -s, --sort=key            sort output according to key, see below.
   -a, --age=key             if using sort by age, how should time be treated?
                             If not given, a default of hours will be used.
+  -r, --rrd=key             Directory where rrd files to be updated are stored
+  -d, --directories=key     A comma separated list of queues to be scanned
 Sorting Keys: ao=age, oldest first.   an=age, newest first.
na=name, ascending nd=name, descending nf=notes, first nl=notes, last Age Keys: m=minutes, h=hours, d=days, w=weeks, o=months, y=years - + """ sys.exit(exit_code) @@ -104,7 +117,15 @@ def time_pp(x): def sg_compare (a, b): a = a[1] b = b[1] - """Sort by have note, time of oldest upload.""" + """Sort by have pending action, have note, time of oldest upload.""" + # Sort by have pending action + a_note_state = a["processed"] + b_note_state = b["processed"] + if a_note_state < b_note_state: + return -1 + elif a_note_state > b_note_state: + return 1 + # Sort by have note a_note_state = a["note_state"] b_note_state = b["note_state"] @@ -119,178 +140,269 @@ def sg_compare (a, b): ############################################################ def sortfunc(a,b): - for sorting in direction: - (sortkey, way, time) = sorting - ret = 0 - if time == "m": - x=int(a[sortkey]/60) - y=int(b[sortkey]/60) - elif time == "h": - x=int(a[sortkey]/3600) - y=int(b[sortkey]/3600) - elif time == "d": - x=int(a[sortkey]/86400) - y=int(b[sortkey]/86400) - elif time == "w": - x=int(a[sortkey]/604800) - y=int(b[sortkey]/604800) - elif time == "o": - x=int(a[sortkey]/2419200) - y=int(b[sortkey]/2419200) - elif time == "y": - x=int(a[sortkey]/29030400) - y=int(b[sortkey]/29030400) - else: - x=a[sortkey] - y=b[sortkey] - if x < y: - ret = -1 - elif x > y: - ret = 1 - if ret != 0: - if way < 0: - ret = ret*-1 - return ret - return 0 + for sorting in direction: + (sortkey, way, time) = sorting + ret = 0 + if time == "m": + x=int(a[sortkey]/60) + y=int(b[sortkey]/60) + elif time == "h": + x=int(a[sortkey]/3600) + y=int(b[sortkey]/3600) + elif time == "d": + x=int(a[sortkey]/86400) + y=int(b[sortkey]/86400) + elif time == "w": + x=int(a[sortkey]/604800) + y=int(b[sortkey]/604800) + elif time == "o": + x=int(a[sortkey]/2419200) + y=int(b[sortkey]/2419200) + elif time == "y": + x=int(a[sortkey]/29030400) + y=int(b[sortkey]/29030400) + else: + x=a[sortkey] + y=b[sortkey] + if x < y: + ret = -1 + elif x > y: + ret = 1 + if ret != 0: + if way < 0: + ret = ret*-1 + return ret + return 0 ############################################################ def header(): - print """ - - Debian NEW and BYHAND Packages - - - - -
- - - - Debian Project -
-
- - - - - - - - - - -
- Debian NEW and BYHAND Packages -
- -
- """ + print """ + + + + + + + Debian NEW and BYHAND Packages + + + + + +
+ + corner image + corner image + corner image + corner image + + Debian NEW and BYHAND Packages + +
+ """ def footer(): - print "

Timestamp: %s (UTC)

" % (time.strftime("%d.%m.%Y / %H:%M:%S", time.gmtime())) - print "

Hint: Age is the youngest upload of the package, if there is more than one version.

" - print "

You may want to look at the REJECT-FAQ for possible reasons why one of the above packages may get rejected.

" - print """ - Valid HTML 4.01! - - Valid CSS! + print "

Timestamp: %s (UTC)

" % (time.strftime("%d.%m.%Y / %H:%M:%S", time.gmtime())) + print "

There are graphs about the queues available.

" + + print """ + + """ + +def table_header(type, source_count, total_count): + print "

Summary for: %s

" % (type) + print "

Summary for: binary-%s only

" % (type) + print """ +

Click to toggle all/binary-NEW packages

+ + + + + + + + + + + + + + """ - print "" - -def table_header(type): - print "

Summary for: %s

" % (type) - print """
+ """ + print "Package count in %s: %s |  Total Package count: %s" % (type, source_count, total_count) + print """ +
PackageVersionArchDistributionAgeUpload infoCloses
- - - - - - - - - - """ - -def table_footer(type, source_count, total_count): - print "
PackageVersionArchDistributionAgeMaintainerCloses

\n" - print "

Package count in %s: %s\n" % (type, source_count) - print "
Total Package count: %s

\n" % (total_count) - - -def table_row(source, version, arch, last_mod, maint, distribution, closes): + +def table_footer(type): + print "" + + +def table_row(source, version, arch, last_mod, maint, distribution, closes, fingerprint, sponsor, changedby): global row_number + trclass = "sid" + session = DBConn().session() + for dist in distribution: + if dist == "experimental": + trclass = "exp" + + query = '''SELECT source + FROM source_suite + WHERE source = :source + AND suite_name IN ('unstable', 'experimental')''' + if not session.execute(query, {'source': source}).rowcount: + trclass += " sourceNEW" + session.commit() + if row_number % 2 != 0: - print "" + print "" % (trclass) else: - print "" + print "" % (trclass) - tdclass = "sid" - for dist in distribution: - if dist == "experimental": - tdclass = "exp" - print "%s" % (tdclass, source) - print "" % (tdclass) + if "sourceNEW" in trclass: + print "%s" % (source) + else: + print "%(source)s" % {'source': source} + print "" for vers in version.split(): - print "%s
" % (source, vers, vers) - print "%s" % (tdclass, arch, tdclass) + print "%s
" % (source, utils.html_escape(vers), utils.html_escape(vers)) + print "" + print "%s" % (arch) + print "" for dist in distribution: - print "%s
" % (dist) - print "%s" % (tdclass, last_mod) - (name, mail) = maint.split(":") + print "%s
" % (dist) + print "" + print "%s" % (last_mod) + (name, mail) = maint.split(":", 1) + + print "" + print "Maintainer: %s
" % (utils.html_escape(mail), utils.html_escape(name)) + (name, mail) = changedby.split(":", 1) + print "Changed-By: %s
" % (utils.html_escape(mail), utils.html_escape(name)) + + if sponsor: + print "Sponsor: %s@debian.org
" % (utils.html_escape(sponsor), utils.html_escape(sponsor)) + + print "Fingerprint: %s" % (fingerprint) + print "" - print "%s" % (tdclass, mail, name) - print "" % (tdclass) + print "" for close in closes: - print "#%s
" % (close, close) + print "#%s
" % (utils.html_escape(close), utils.html_escape(close)) print "" row_number+=1 - + ############################################################ -def process_changes_files(changes_files, type): - msg = "" - cache = {} - # Read in all the .changes files - for filename in changes_files: +def update_graph_database(rrd_dir, type, n_source, n_binary): + if not rrd_dir: + return + + rrd_file = os.path.join(rrd_dir, type.lower()+'.rrd') + update = [rrd_file, "N:%s:%s" % (n_source, n_binary)] + + try: + rrdtool.update(*update) + except rrdtool.error: + create = [rrd_file]+""" +--step +300 +--start +0 +DS:ds0:GAUGE:7200:0:1000 +DS:ds1:GAUGE:7200:0:1000 +RRA:AVERAGE:0.5:1:599 +RRA:AVERAGE:0.5:6:700 +RRA:AVERAGE:0.5:24:775 +RRA:AVERAGE:0.5:288:795 +RRA:MAX:0.5:1:600 +RRA:MAX:0.5:6:700 +RRA:MAX:0.5:24:775 +RRA:MAX:0.5:288:795 +""".strip().split("\n") try: - Upload.pkg.changes_file = filename - Upload.init_vars() - Upload.update_vars() - cache[filename] = copy.copy(Upload.pkg.changes) - cache[filename]["filename"] = filename - except: - break + rc = rrdtool.create(*create) + ru = rrdtool.update(*update) + except rrdtool.error as e: + print('warning: queue_report: rrdtool error, skipping %s.rrd: %s' % (type, e)) + except NameError: + pass + +############################################################ + +def process_queue(queue, log, rrd_dir): + msg = "" + type = queue.queue_name + session = DBConn().session() + # Divide the .changes into per-source groups per_source = {} - for filename in cache.keys(): - source = cache[filename]["source"] - if not per_source.has_key(source): + total_pending = 0 + for upload in queue.uploads: + source = upload.changes.source + if source not in per_source: per_source[source] = {} per_source[source]["list"] = [] - per_source[source]["list"].append(cache[filename]) + per_source[source]["processed"] = "" + handler = PolicyQueueUploadHandler(upload, session) + if handler.get_action(): + per_source[source]["processed"] = "PENDING %s" % handler.get_action() + total_pending += 1 + per_source[source]["list"].append(upload) + per_source[source]["list"].sort(lambda x, y: cmp(x.changes.created, y.changes.created), reverse=True) # Determine oldest time and have note status for each source group for source in per_source.keys(): source_list = per_source[source]["list"] first = source_list[0] - oldest = os.stat(first["filename"])[stat.ST_MTIME] + oldest = time.mktime(first.changes.created.timetuple()) have_note = 0 for d in per_source[source]["list"]: - mtime = os.stat(d["filename"])[stat.ST_MTIME] + mtime = time.mktime(d.changes.created.timetuple()) if Cnf.has_key("Queue-Report::Options::New"): if mtime > oldest: oldest = mtime else: if mtime < oldest: oldest = mtime - have_note += (d.has_key("process-new note")) + have_note += has_new_comment(d.policy_queue, d.changes.source, d.changes.version) per_source[source]["oldest"] = oldest if not have_note: per_source[source]["note_state"] = 0; # none @@ -301,43 +413,81 @@ def process_changes_files(changes_files, type): per_source_items = per_source.items() per_source_items.sort(sg_compare) + update_graph_database(rrd_dir, type, len(per_source_items), len(queue.uploads)) + entries = [] max_source_len = 0 max_version_len = 0 max_arch_len = 0 - maintainer = {} - maint="" - distribution="" - closes="" - source_exists="" + try: + logins = get_logins_from_ldap() + except: + logins = dict() for i in per_source_items: + maintainer = {} + maint="" + distribution="" + closes="" + fingerprint="" + changeby = {} + changedby="" + sponsor="" + 
filename=i[1]["list"][0].changes.changesname last_modified = time.time()-i[1]["oldest"] - source = i[1]["list"][0]["source"] + source = i[1]["list"][0].changes.source if len(source) > max_source_len: max_source_len = len(source) - arches = {} - versions = {} + binary_list = i[1]["list"][0].binaries + binary = ', '.join([ b.package for b in binary_list ]) + arches = set() + versions = set() for j in i[1]["list"]: - if Cnf.has_key("Queue-Report::Options::New"): + dbc = j.changes + changesbase = dbc.changesname + + if Cnf.has_key("Queue-Report::Options::New") or Cnf.has_key("Queue-Report::Options::822"): try: (maintainer["maintainer822"], maintainer["maintainer2047"], maintainer["maintainername"], maintainer["maintaineremail"]) = \ - daklib.utils.fix_maintainer (j["maintainer"]) - except daklib.utils.ParseMaintError, msg: + fix_maintainer (dbc.maintainer) + except ParseMaintError as msg: print "Problems while parsing maintainer address\n" maintainer["maintainername"] = "Unknown" maintainer["maintaineremail"] = "Unknown" maint="%s:%s" % (maintainer["maintainername"], maintainer["maintaineremail"]) - distribution=j["distribution"].keys() - closes=j["closes"].keys() - for arch in j["architecture"].keys(): - arches[arch] = "" - version = j["version"] - versions[version] = "" - arches_list = arches.keys() - arches_list.sort(daklib.utils.arch_compare_sw) + # ...likewise for the Changed-By: field if it exists. + try: + (changeby["changedby822"], changeby["changedby2047"], + changeby["changedbyname"], changeby["changedbyemail"]) = \ + fix_maintainer (dbc.changedby) + except ParseMaintError as msg: + (changeby["changedby822"], changeby["changedby2047"], + changeby["changedbyname"], changeby["changedbyemail"]) = \ + ("", "", "", "") + changedby="%s:%s" % (changeby["changedbyname"], changeby["changedbyemail"]) + + distribution=dbc.distribution.split() + closes=dbc.closes + + fingerprint = dbc.fingerprint + sponsor_name = get_uid_from_fingerprint(fingerprint).name + sponsor_login = get_uid_from_fingerprint(fingerprint).uid + if '@' in sponsor_login: + if fingerprint in logins: + sponsor_login = logins[fingerprint] + if (sponsor_name != maintainer["maintainername"] and + sponsor_name != changeby["changedbyname"] and + sponsor_login + '@debian.org' != maintainer["maintaineremail"] and + sponsor_name != changeby["changedbyemail"]): + sponsor = sponsor_login + + for arch in dbc.architecture.split(): + arches.add(arch) + versions.add(dbc.version) + arches_list = list(arches) + arches_list.sort(utils.arch_compare_sw) arch_list = " ".join(arches_list) - version_list = " ".join(versions.keys()) + version_list = " ".join(sorted(versions, reverse=True)) if len(version_list) > max_version_len: max_version_len = len(version_list) if len(arch_list) > max_arch_len: @@ -346,7 +496,7 @@ def process_changes_files(changes_files, type): note = " | [N]" else: note = "" - entries.append([source, version_list, arch_list, note, last_modified, maint, distribution, closes]) + entries.append([source, binary, version_list, arch_list, per_source[source]["processed"], note, last_modified, maint, distribution, closes, fingerprint, sponsor, changedby, filename]) # direction entry consists of "Which field, which direction, time-consider" where # time-consider says how we should treat last_modified. Thats all. @@ -357,99 +507,169 @@ def process_changes_files(changes_files, type): age = Cnf["Queue-Report::Options::Age"] if Cnf.has_key("Queue-Report::Options::New"): # If we produce html we always have oldest first. 
-        direction.append([4,-1,"ao"])
+        direction.append([6,-1,"ao"])
     else:
-       if Cnf.has_key("Queue-Report::Options::Sort"):
-           for i in Cnf["Queue-Report::Options::Sort"].split(","):
-               if i == "ao":
-                   # Age, oldest first.
-                   direction.append([4,-1,age])
-               elif i == "an":
-                   # Age, newest first.
-                   direction.append([4,1,age])
-               elif i == "na":
-                   # Name, Ascending.
-                   direction.append([0,1,0])
-               elif i == "nd":
-                   # Name, Descending.
-                   direction.append([0,-1,0])
-               elif i == "nl":
-                   # Notes last.
-                   direction.append([3,1,0])
-               elif i == "nf":
-                   # Notes first.
-                   direction.append([3,-1,0])
+        if Cnf.has_key("Queue-Report::Options::Sort"):
+            for i in Cnf["Queue-Report::Options::Sort"].split(","):
+                if i == "ao":
+                    # Age, oldest first.
+                    direction.append([6,-1,age])
+                elif i == "an":
+                    # Age, newest first.
+                    direction.append([6,1,age])
+                elif i == "na":
+                    # Name, Ascending.
+                    direction.append([0,1,0])
+                elif i == "nd":
+                    # Name, Descending.
+                    direction.append([0,-1,0])
+                elif i == "nl":
+                    # Notes last.
+                    direction.append([5,1,0])
+                elif i == "nf":
+                    # Notes first.
+                    direction.append([5,-1,0])
     entries.sort(lambda x, y: sortfunc(x, y))
     # Yes, in theory you can add several sort options on the command line. But my mind is too small
     # at the moment to come up with a really good sorting function that considers all the sidesteps you
     # have with it. (If you combine options it will simply take the last one at the moment).
     # Will be enhanced in the future.
+    if Cnf.has_key("Queue-Report::Options::822"):
+        # print stuff out in 822 format
+        for entry in entries:
+            (source, binary, version_list, arch_list, processed, note, last_modified, maint, distribution, closes, fingerprint, sponsor, changedby, changes_file) = entry
+
+            # We'll always have Source, Version, Arch, Maintainer, and Dist
+            # For the rest, check to see if we have them, then print them out
+            log.write("Source: " + source + "\n")
+            log.write("Binary: " + binary + "\n")
+            log.write("Version: " + version_list + "\n")
+            log.write("Architectures: ")
+            log.write( (", ".join(arch_list.split(" "))) + "\n")
+            log.write("Age: " + time_pp(last_modified) + "\n")
+            log.write("Last-Modified: " + str(int(time.time()) - int(last_modified)) + "\n")
+            log.write("Queue: " + type + "\n")
+
+            (name, mail) = maint.split(":", 1)
+            log.write("Maintainer: " + name + " <"+mail+">" + "\n")
+            if changedby:
+               (name, mail) = changedby.split(":", 1)
+               log.write("Changed-By: " + name + " <"+mail+">" + "\n")
+            if sponsor:
+               log.write("Sponsored-By: %s@debian.org\n" % sponsor)
+            log.write("Distribution:")
+            for dist in distribution:
+                log.write(" " + dist)
+            log.write("\n")
+            log.write("Fingerprint: " + fingerprint + "\n")
+            if closes:
+                bug_string = ""
+                for bugs in closes:
+                    bug_string += "#"+bugs+", "
+                log.write("Closes: " + bug_string[:-2] + "\n")
+            log.write("Changes-File: " + os.path.basename(changes_file) + "\n")
+            log.write("\n")
+
+    total_count = len(queue.uploads)
+    source_count = len(per_source_items)
+
     if Cnf.has_key("Queue-Report::Options::New"):
-        direction.append([4,1,"ao"])
+        direction.append([6,1,"ao"])
         entries.sort(lambda x, y: sortfunc(x, y))
     # Output for an html file. First table_header, then table_footer.
    # Any line between them is then printed by the subroutine table_row.
if len(entries) > 0: - table_header(type.upper()) + table_header(type.upper(), source_count, total_count) for entry in entries: - (source, version_list, arch_list, note, last_modified, maint, distribution, closes) = entry - table_row(source, version_list, arch_list, time_pp(last_modified), maint, distribution, closes) - total_count = len(changes_files) - source_count = len(per_source_items) - table_footer(type.upper(), source_count, total_count) - else: + (source, binary, version_list, arch_list, processed, note, last_modified, maint, distribution, closes, fingerprint, sponsor, changedby, undef) = entry + table_row(source, version_list, arch_list, time_pp(last_modified), maint, distribution, closes, fingerprint, sponsor, changedby) + table_footer(type.upper()) + elif not Cnf.has_key("Queue-Report::Options::822"): # The "normal" output without any formatting. - format="%%-%ds | %%-%ds | %%-%ds%%s | %%s old\n" % (max_source_len, max_version_len, max_arch_len) - msg = "" for entry in entries: - (source, version_list, arch_list, note, last_modified, undef, undef, undef) = entry - msg += format % (source, version_list, arch_list, note, time_pp(last_modified)) + (source, binary, version_list, arch_list, processed, note, last_modified, undef, undef, undef, undef, undef, undef, undef) = entry + if processed: + format="%%-%ds | %%-%ds | %%-%ds | %%s\n" % (max_source_len, max_version_len, max_arch_len) + msg += format % (source, version_list, arch_list, processed) + else: + format="%%-%ds | %%-%ds | %%-%ds%%s | %%s old\n" % (max_source_len, max_version_len, max_arch_len) + msg += format % (source, version_list, arch_list, note, time_pp(last_modified)) if msg: - total_count = len(changes_files) - source_count = len(per_source_items) print type.upper() print "-"*len(type) print print msg - print "%s %s source package%s / %s %s package%s in total." % (source_count, type, plural(source_count), total_count, type, plural(total_count)) + print ("%s %s source package%s / %s %s package%s in total / %s %s package%s to be processed." 
% + (source_count, type, plural(source_count), + total_count, type, plural(total_count), + total_pending, type, plural(total_pending))) print - ################################################################################ def main(): - global Cnf, Upload + global Cnf - Cnf = daklib.utils.get_conf() + Cnf = utils.get_conf() Arguments = [('h',"help","Queue-Report::Options::Help"), ('n',"new","Queue-Report::Options::New"), + ('8','822',"Queue-Report::Options::822"), ('s',"sort","Queue-Report::Options::Sort", "HasArg"), - ('a',"age","Queue-Report::Options::Age", "HasArg")] + ('a',"age","Queue-Report::Options::Age", "HasArg"), + ('r',"rrd","Queue-Report::Options::Rrd", "HasArg"), + ('d',"directories","Queue-Report::Options::Directories", "HasArg")] for i in [ "help" ]: - if not Cnf.has_key("Queue-Report::Options::%s" % (i)): - Cnf["Queue-Report::Options::%s" % (i)] = "" + if not Cnf.has_key("Queue-Report::Options::%s" % (i)): + Cnf["Queue-Report::Options::%s" % (i)] = "" - apt_pkg.ParseCommandLine(Cnf, Arguments, sys.argv) + apt_pkg.parse_commandline(Cnf, Arguments, sys.argv) - Options = Cnf.SubTree("Queue-Report::Options") + Options = Cnf.subtree("Queue-Report::Options") if Options["Help"]: - usage() - - Upload = daklib.queue.Upload(Cnf) + usage() if Cnf.has_key("Queue-Report::Options::New"): header() - directories = Cnf.ValueList("Queue-Report::Directories") - if not directories: - directories = [ "byhand", "new" ] + queue_names = [] + + if Cnf.has_key("Queue-Report::Options::Directories"): + for i in Cnf["Queue-Report::Options::Directories"].split(","): + queue_names.append(i) + elif Cnf.has_key("Queue-Report::Directories"): + queue_names = Cnf.value_list("Queue-Report::Directories") + else: + queue_names = [ "byhand", "new" ] + + if Cnf.has_key("Queue-Report::Options::Rrd"): + rrd_dir = Cnf["Queue-Report::Options::Rrd"] + elif Cnf.has_key("Dir::Rrd"): + rrd_dir = Cnf["Dir::Rrd"] + else: + rrd_dir = None + + f = None + if Cnf.has_key("Queue-Report::Options::822"): + # Open the report file + f = sys.stdout + filename822 = Cnf.get("Queue-Report::ReportLocations::822Location") + if filename822: + f = open(filename822, "w") + + session = DBConn().session() + + for queue_name in queue_names: + queue = session.query(PolicyQueue).filter_by(queue_name=queue_name).first() + if queue is not None: + process_queue(queue, f, rrd_dir) + else: + utils.warn("Cannot find queue %s" % queue_name) - for directory in directories: - changes_files = glob.glob("%s/*.changes" % (Cnf["Dir::Queue::%s" % (directory)])) - process_changes_files(changes_files, directory) + if Cnf.has_key("Queue-Report::Options::822"): + f.close() if Cnf.has_key("Queue-Report::Options::New"): footer()
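The update_graph_database() helper introduced by this patch records queue sizes with a create-on-first-use pattern: it attempts rrdtool.update() first and only creates the round-robin database (two GAUGE data sources, ds0 for source uploads and ds1 for binary uploads, plus AVERAGE and MAX archives) when the update fails, typically because the .rrd file does not exist yet. The following is a minimal standalone sketch of that pattern, assuming the same python-rrdtool bindings the patch imports; the function name, the /srv/dak/rrd path and the example counts are illustrative only and are not part of dak.

# Sketch only: mirrors the create-on-demand behaviour of update_graph_database()
# in the patch above. Paths and counts below are made-up example values.
import os
import rrdtool  # python-rrdtool bindings, as optionally imported by queue_report.py

def record_queue_size(rrd_dir, queue_name, n_source, n_binary):
    rrd_file = os.path.join(rrd_dir, queue_name.lower() + '.rrd')
    update = [rrd_file, "N:%s:%s" % (n_source, n_binary)]  # "N" means "now"
    try:
        rrdtool.update(*update)
    except rrdtool.error:
        # First run: the .rrd does not exist yet, so create it with the same
        # data sources and archives as the patch defines, then retry the update.
        create = [rrd_file,
                  "--step", "300", "--start", "0",
                  "DS:ds0:GAUGE:7200:0:1000",
                  "DS:ds1:GAUGE:7200:0:1000",
                  "RRA:AVERAGE:0.5:1:599",
                  "RRA:AVERAGE:0.5:6:700",
                  "RRA:AVERAGE:0.5:24:775",
                  "RRA:AVERAGE:0.5:288:795",
                  "RRA:MAX:0.5:1:600",
                  "RRA:MAX:0.5:6:700",
                  "RRA:MAX:0.5:24:775",
                  "RRA:MAX:0.5:288:795"]
        rrdtool.create(*create)
        rrdtool.update(*update)

record_queue_size("/srv/dak/rrd", "new", 12, 34)  # example values only

Using GAUGE data sources means each sample stores the current queue size as-is rather than a rate, which matches the per-run source and binary counts the report computes for each queue.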