X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=dak%2Fshow_new.py;h=1397a324f6eae35d2fd8a5b5d105fda3a1a6bdb9;hb=f601bd71e61c6c265b5fa1f296a5258385e6f154;hp=513129f9c5d34f91623878ce8e305410bb38a89d;hpb=8d2c584e078a628f7c510b0b375bfb23fc027d5f;p=dak.git

diff --git a/dak/show_new.py b/dak/show_new.py
index 513129f9..1397a324 100755
--- a/dak/show_new.py
+++ b/dak/show_new.py
@@ -30,26 +30,29 @@ import os, sys, time
 import apt_pkg
 import examine_package
 
+from daklib import policy
 from daklib.dbconn import *
-from daklib.queue import determine_new, check_valid, Upload, get_policy_queue
 from daklib import utils
 from daklib.regexes import re_source_ext
 from daklib.config import Config
 from daklib import daklog
-from daklib.changesutils import *
-from daklib.dakmultiprocessing import DakProcessPool
+from daklib.dakmultiprocessing import DakProcessPool, PROC_STATUS_SUCCESS, PROC_STATUS_SIGNALRAISED
+from multiprocessing import Manager, TimeoutError
 
 # Globals
 Cnf = None
 Options = None
-sources = set()
+manager = Manager()
+sources = manager.list()
+htmlfiles_to_process = manager.list()
+timeout_str = "Timed out while processing"
 
 ################################################################################
 ################################################################################
 ################################################################################
 
-def html_header(name, filestoexamine):
+def html_header(name, missing):
     if name.endswith('.changes'):
         name = ' '.join(name.split('_')[:2])
     result = """
@@ -118,8 +121,7 @@ def html_header(name, filestoexamine):
         source lintian
 
     """
-    for fn in filter(lambda x: x.endswith('.deb') or x.endswith('.udeb'),filestoexamine):
-        packagename = fn.split('_')[0]
+    for binarytype, packagename in filter(lambda m: m[0] in ('deb', 'udeb'), missing):
         result += """
         %(pkg)s
         control file
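Reviewer note: the `missing` parameter replaces the old filename list; it carries (type, package) pairs produced by `PolicyQueueUploadHandler.missing_overrides()` in the do_pkg() hunk below. A minimal sketch of the old and new filters side by side, using made-up file and package names:

    # Hypothetical data only -- illustrates the shape of the two inputs.
    filestoexamine = ['hello_2.10-1_amd64.deb', 'hello-udeb_2.10-1_amd64.udeb',
                      'hello_2.10-1.dsc']
    missing = [('dsc', 'hello'), ('deb', 'hello'), ('udeb', 'hello-udeb')]

    # Old html_header(): derive binary package names from the filenames.
    old_names = [fn.split('_')[0] for fn in filestoexamine
                 if fn.endswith('.deb') or fn.endswith('.udeb')]

    # New html_header(): keep only binary override entries; the package
    # name is already part of the tuple.
    new_names = [package for binarytype, package in missing
                 if binarytype in ('deb', 'udeb')]

    assert old_names == new_names == ['hello', 'hello-udeb']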
@@ -150,78 +152,59 @@ def html_footer():
 
 ################################################################################
 
-def do_pkg(changes_file):
+def do_pkg(upload_id):
+    cnf = Config()
+    session = DBConn().session()
 
-    u = Upload()
-    u.pkg.changes_file = changes_file
-    # We can afoord not to check the signature before loading the changes file
-    # as we've validated it already (otherwise it couldn't be in new)
-    # and we can more quickly skip over already processed files this way
-    u.load_changes(changes_file)
-
-    origchanges = os.path.abspath(u.pkg.changes_file)
-
-    # Still be cautious in case paring the changes file went badly
-    if u.pkg.changes.has_key('source') and u.pkg.changes.has_key('version'):
-        htmlname = u.pkg.changes["source"] + "_" + u.pkg.changes["version"] + ".html"
-        htmlfile = os.path.join(cnf["Show-New::HTMLPath"], htmlname)
-    else:
-        # Changes file was bad
-        print "Changes file %s missing source or version field" % changes_file
-        session.close()
-        return
+    upload = session.query(PolicyQueueUpload).filter_by(id=upload_id).one()
+
+    queue = upload.policy_queue
+    changes = upload.changes
+
+    origchanges = os.path.join(queue.path, changes.changesname)
+    print origchanges
+
+    htmlname = "{0}_{1}.html".format(changes.source, changes.version)
+    htmlfile = os.path.join(cnf['Show-New::HTMLPath'], htmlname)
 
     # Have we already processed this?
     if os.path.exists(htmlfile) and \
-       os.stat(htmlfile).st_mtime > os.stat(origchanges).st_mtime:
-            sources.add(htmlname)
-            session.close()
-            return (PROC_STATUS_SUCCESS, '%s already up-to-date' % htmlfile)
-
-    # Now we'll load the fingerprint
-    (u.pkg.changes["fingerprint"], rejects) = utils.check_signature(changes_file, session=session)
-    new_queue = get_policy_queue('new', session );
-    u.pkg.directory = new_queue.path
-    u.update_subst()
-    files = u.pkg.files
-    changes = u.pkg.changes
-    sources.add(htmlname)
-
-    for deb_filename, f in files.items():
-        if deb_filename.endswith(".udeb") or deb_filename.endswith(".deb"):
-            u.binary_file_checks(deb_filename, session)
-            u.check_binary_against_db(deb_filename, session)
-        else:
-            u.source_file_checks(deb_filename, session)
-            u.check_source_against_db(deb_filename, session)
-    u.pkg.changes["suite"] = u.pkg.changes["distribution"]
-
-    new, byhand = determine_new(u.pkg.changes_file, u.pkg.changes, files, 0, dsc=u.pkg.dsc, session=session)
-
-    outfile = open(os.path.join(cnf["Show-New::HTMLPath"],htmlname),"w")
-
-    filestoexamine = []
-    for pkg in new.keys():
-        for fn in new[pkg]["files"]:
-            filestoexamine.append(fn)
-
-    print >> outfile, html_header(changes["source"], filestoexamine)
-
-    check_valid(new, session)
-    distribution = changes["distribution"].keys()[0]
-    print >> outfile, examine_package.display_changes(distribution, changes_file)
-
-    for fn in filter(lambda fn: fn.endswith(".dsc"), filestoexamine):
-        print >> outfile, examine_package.check_dsc(distribution, fn, session)
-    for fn in filter(lambda fn: fn.endswith(".deb") or fn.endswith(".udeb"), filestoexamine):
-        print >> outfile, examine_package.check_deb(distribution, fn, session)
-
-    print >> outfile, html_footer()
-
-    outfile.close()
-    session.close()
+       os.stat(htmlfile).st_mtime > time.mktime(changes.created.timetuple()):
+        with open(htmlfile, "r") as fd:
+            if fd.read() != timeout_str:
+                sources.append(htmlname)
+                return (PROC_STATUS_SUCCESS,
+                        '%s already up-to-date' % htmlfile)
+
+    # Go, process it... Now!
+    htmlfiles_to_process.append(htmlfile)
+    sources.append(htmlname)
+
+    group = cnf.get('Dinstall::UnprivGroup') or None
+
+    with open(htmlfile, 'w') as outfile:
+        with policy.UploadCopy(upload, group=group) as upload_copy:
+            handler = policy.PolicyQueueUploadHandler(upload, session)
+            missing = [ (o['type'], o['package']) for o in handler.missing_overrides() ]
+            distribution = changes.distribution
+
+            print >>outfile, html_header(changes.source, missing)
+            print >>outfile, examine_package.display_changes(distribution, origchanges)
+
+            if upload.source is not None and ('dsc', upload.source.source) in missing:
+                fn = os.path.join(upload_copy.directory, upload.source.poolfile.basename)
+                print >>outfile, examine_package.check_dsc(distribution, fn, session)
+            for binary in upload.binaries:
+                if (binary.binarytype, binary.package) not in missing:
+                    continue
+                fn = os.path.join(upload_copy.directory, binary.poolfile.basename)
+                print >>outfile, examine_package.check_deb(distribution, fn, session)
+
+            print >>outfile, html_footer()
 
-    return (PROC_STATUS_SUCCESS, '%s already updated' % htmlfile)
+    session.close()
+    htmlfiles_to_process.remove(htmlfile)
+    return (PROC_STATUS_SUCCESS, '{0} already updated'.format(htmlfile))
 
 ################################################################################
 
@@ -240,47 +223,53 @@ def init(session):
     cnf = Config()
 
     Arguments = [('h',"help","Show-New::Options::Help"),
-                 ("p","html-path","Show-New::HTMLPath","HasArg")]
+                 ("p","html-path","Show-New::HTMLPath","HasArg"),
+                 ('q','queue','Show-New::Options::Queue','HasArg')]
 
     for i in ["help"]:
         if not cnf.has_key("Show-New::Options::%s" % (i)):
             cnf["Show-New::Options::%s" % (i)] = ""
 
-    changes_files = apt_pkg.ParseCommandLine(cnf.Cnf,Arguments,sys.argv)
-    if len(changes_files) == 0:
-        new_queue = get_policy_queue('new', session );
-        changes_files = utils.get_changes_files(new_queue.path)
-
-    Options = cnf.SubTree("Show-New::Options")
+    changesnames = apt_pkg.parse_commandline(cnf.Cnf,Arguments,sys.argv)
+    Options = cnf.subtree("Show-New::Options")
 
     if Options["help"]:
         usage()
 
-    return changes_files
+    queue_names = Options.find('Queue', 'new').split(',')
+    uploads = session.query(PolicyQueueUpload) \
+        .join(PolicyQueueUpload.policy_queue).filter(PolicyQueue.queue_name.in_(queue_names)) \
+        .join(PolicyQueueUpload.changes).order_by(DBChange.source)
+
+    if len(changesnames) > 0:
+        uploads = uploads.filter(DBChange.changesname.in_(changesnames))
+
+    return uploads
 
 ################################################################################
 ################################################################################
 
 def main():
+    examine_package.use_html = True
+    pool = DakProcessPool(processes=5)
+
     session = DBConn().session()
-    changes_files = init(session)
-
-    examine_package.use_html=1
-
-    pool = DakProcessPool()
-    for changes_file in changes_files:
-        changes_file = utils.validate_changes_file_arg(changes_file, 0)
-        if not changes_file:
-            continue
-        print "\n" + changes_file
-        pool.apply_async(do_pkg, (changes_file,))
-        do_pkg(changes_file)
+    upload_ids = [ u.id for u in init(session) ]
+    session.close()
+
+    for upload_id in upload_ids:
+        pool.apply_async(do_pkg, [upload_id])
     pool.close()
+
+    #p.wait(timeout=600)
     pool.join()
+    for htmlfile in htmlfiles_to_process:
+        with open(htmlfile, "w") as fd:
+            fd.write(timeout_str)
 
     files = set(os.listdir(cnf["Show-New::HTMLPath"]))
-    to_delete = filter(lambda x: x.endswith(".html"), files.difference(sources))
+    to_delete = filter(lambda x: x.endswith(".html"),
+                       files.difference(set(sources)))
     for f in to_delete:
        os.remove(os.path.join(cnf["Show-New::HTMLPath"],f))
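Reviewer note on the new init(): upload selection moves from scanning the queue directory to a database query filtered by one or more queue names (comma-separated --queue argument, defaulting to 'new') and, optionally, by .changes names given on the command line. The sketch below mirrors the query shape against toy SQLAlchemy models; the table, column, and class names here are illustrative stand-ins for daklib's PolicyQueue/PolicyQueueUpload/DBChange, and the import path assumes SQLAlchemy 1.4 or newer.

    from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
    from sqlalchemy.orm import declarative_base, relationship, sessionmaker

    Base = declarative_base()

    class Queue(Base):
        __tablename__ = 'queue'
        id = Column(Integer, primary_key=True)
        queue_name = Column(String)

    class Change(Base):
        __tablename__ = 'change'
        id = Column(Integer, primary_key=True)
        changesname = Column(String)
        source = Column(String)

    class Upload(Base):
        __tablename__ = 'upload'
        id = Column(Integer, primary_key=True)
        queue_id = Column(Integer, ForeignKey('queue.id'))
        change_id = Column(Integer, ForeignKey('change.id'))
        policy_queue = relationship(Queue)
        changes = relationship(Change)

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()

    queue_names = 'new,backports-new'.split(',')   # from --queue, default 'new'
    changesnames = []                              # .changes names from the command line, if any

    # Same query shape as the new init(): join to the queue, filter by name,
    # join to the changes record, order by source package.
    uploads = session.query(Upload) \
        .join(Upload.policy_queue).filter(Queue.queue_name.in_(queue_names)) \
        .join(Upload.changes).order_by(Change.source)
    if len(changesnames) > 0:
        uploads = uploads.filter(Change.changesname.in_(changesnames))

    print([u.id for u in uploads])   # empty here; the real code iterates the uploads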
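Reviewer note on the do_pkg()/main() interplay: worker state is shared through Manager() lists, and any HTML file still listed in htmlfiles_to_process after the pool has been joined is overwritten with the timeout sentinel, so the next run will not treat it as up to date. A self-contained sketch of that pattern, with the stdlib multiprocessing.Pool standing in for DakProcessPool; render() and the temporary output directory are placeholders, and the proxies are passed as arguments here whereas show_new.py keeps them as module-level globals.

    import os
    import tempfile
    from multiprocessing import Manager, Pool

    TIMEOUT_STR = "Timed out while processing"

    def render(htmlfile, sources, htmlfiles_to_process):
        # Placeholder for do_pkg(): mark the file as in progress, produce it,
        # then drop the in-progress marker only on success.
        htmlfiles_to_process.append(htmlfile)
        sources.append(os.path.basename(htmlfile))
        with open(htmlfile, "w") as fd:
            fd.write("<html>placeholder report</html>\n")
        htmlfiles_to_process.remove(htmlfile)

    if __name__ == "__main__":
        manager = Manager()
        sources = manager.list()                # pages known to be current
        htmlfiles_to_process = manager.list()   # pages whose worker has not finished

        outdir = tempfile.mkdtemp()
        targets = [os.path.join(outdir, "pkg%d_1.0.html" % i) for i in range(3)]

        pool = Pool(processes=5)
        for target in targets:
            pool.apply_async(render, [target, sources, htmlfiles_to_process])
        pool.close()
        pool.join()

        # Anything still marked as in progress was interrupted: overwrite it
        # with the sentinel so a later run regenerates it.
        for htmlfile in htmlfiles_to_process:
            with open(htmlfile, "w") as fd:
                fd.write(TIMEOUT_STR)

        print(sorted(sources))   # the .html files that should be kept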