################################################################################
def recheck(upload, session):
- files = upload.pkg.files
-
- cnf = Config()
- for f in files.keys():
- # The .orig.tar.gz can disappear out from under us is it's a
- # duplicate of one in the archive.
- if not files.has_key(f):
- continue
- # Check that the source still exists
- if files[f]["type"] == "deb":
- source_version = files[f]["source version"]
- source_package = files[f]["source package"]
- if not upload.pkg.changes["architecture"].has_key("source") \
- and not upload.source_exists(source_package, source_version, upload.pkg.changes["distribution"].keys()):
- source_epochless_version = re_no_epoch.sub('', source_version)
- dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
- found = 0
- for q in ["Accepted", "Embargoed", "Unembargoed", "Newstage"]:
- if cnf.has_key("Dir::Queue::%s" % (q)):
- if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
- found = 1
- if not found:
- upload.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
-
- # Version and file overwrite checks
- if files[f]["type"] == "deb":
- upload.check_binary_against_db(f, session)
- elif files[f]["type"] == "dsc":
- upload.check_source_against_db(f, session)
- upload.check_dsc_against_db(f, session)
-
+ upload.recheck()
if len(upload.rejects) > 0:
answer = "XXX"
if Options["No-Action"] or Options["Automatic"] or Options["Trainee"]:
try:
check_daily_lock()
done = add_overrides (new, upload, session)
- Logger.log([utils.getusername(), "NEW ACCEPT: %s" % (upload.pkg.changes_file)])
+ Logger.log(["NEW ACCEPT: %s" % (upload.pkg.changes_file)])
except CantGetLockError:
print "Hello? Operator! Give me the number for 911!"
print "Dinstall in the locked area, cant process packages, come back later"
reject_message=Options["Manual-Reject"],
note=get_new_comments(changes.get("source", ""), session=session))
if not aborted:
- Logger.log([utils.getusername(), "NEW REJECT: %s" % (upload.pkg.changes_file)])
+ Logger.log(["NEW REJECT: %s" % (upload.pkg.changes_file)])
os.unlink(upload.pkg.changes_file[:-8]+".dak")
done = 1
elif answer == 'N':
elif answer == 'P' and not Options["Trainee"]:
prod_maintainer(get_new_comments(changes.get("source", ""), session=session),
upload)
- Logger.log([utils.getusername(), "NEW PROD: %s" % (upload.pkg.changes_file)])
+ Logger.log(["NEW PROD: %s" % (upload.pkg.changes_file)])
elif answer == 'R' and not Options["Trainee"]:
confirm = utils.our_raw_input("Really clear note (y/N)? ").lower()
if confirm == "y":
print """Usage: dak process-new [OPTION]... [CHANGES]...
-a, --automatic automatic run
-h, --help show this help and exit.
- -C, --comments-dir=DIR use DIR as comments-dir, for [o-]p-u-new
-m, --manual-reject=MSG manual reject with `msg'
-n, --no-action don't do anything
-t, --trainee FTP Trainee mode
done = 1
for f in byhand:
del files[f]
- Logger.log([utils.getusername(), "BYHAND ACCEPT: %s" % (upload.pkg.changes_file)])
+ Logger.log(["BYHAND ACCEPT: %s" % (upload.pkg.changes_file)])
except CantGetLockError:
print "Hello? Operator! Give me the number for 911!"
print "Dinstall in the locked area, cant process packages, come back later"
elif answer == 'M':
- Logger.log([utils.getusername(), "BYHAND REJECT: %s" % (upload.pkg.changes_file)])
+ Logger.log(["BYHAND REJECT: %s" % (upload.pkg.changes_file)])
upload.do_reject(manual=1, reject_message=Options["Manual-Reject"])
os.unlink(upload.pkg.changes_file[:-8]+".dak")
done = 1
finally:
os.unlink(path)
-# def move_to_dir (upload, dest, perms=0660, changesperms=0664):
-# utils.move (upload.pkg.changes_file, dest, perms=changesperms)
-# file_keys = upload.pkg.files.keys()
-# for f in file_keys:
-# utils.move (f, dest, perms=perms)
-
-# def is_source_in_queue_dir(qdir):
-# entries = [ x for x in os.listdir(qdir) if x.startswith(Upload.pkg.changes["source"])
-# and x.endswith(".changes") ]
-# for entry in entries:
-# # read the .dak
-# u = queue.Upload(Cnf)
-# u.pkg.changes_file = os.path.join(qdir, entry)
-# u.update_vars()
-# if not u.pkg.changes["architecture"].has_key("source"):
-# # another binary upload, ignore
-# continue
-# if Upload.pkg.changes["version"] != u.pkg.changes["version"]:
-# # another version, ignore
-# continue
-# # found it!
-# return True
-# return False
-
-# def move_to_holding(suite, queue_dir):
-# print "Moving to %s holding area." % (suite.upper(),)
-# if Options["No-Action"]:
-# return
-# Logger.log(["Moving to %s" % (suite,), Upload.pkg.changes_file])
-# Upload.dump_vars(queue_dir)
-# move_to_dir(queue_dir, perms=0664)
-# os.unlink(Upload.pkg.changes_file[:-8]+".dak")
-
def _accept(upload):
if Options["No-Action"]:
return
upload.accept(summary, short_summary, targetdir=Config()["Dir::Queue::Newstage"])
os.unlink(upload.pkg.changes_file[:-8]+".dak")
-# def do_accept_stableupdate(upload,suite, q):
-# cnf = Config()
-# queue_dir = cnf["Dir::Queue::%s" % (q,)]
-# if not upload.pkg.changes["architecture"].has_key("source"):
-# # It is not a sourceful upload. So its source may be either in p-u
-# # holding, in new, in accepted or already installed.
-# if is_source_in_queue_dir(queue_dir):
-# # It's in p-u holding, so move it there.
-# print "Binary-only upload, source in %s." % (q,)
-# move_to_holding(suite, queue_dir)
-# elif Upload.source_exists(Upload.pkg.changes["source"],
-# Upload.pkg.changes["version"]):
-# # dak tells us that there is source available. At time of
-# # writing this means that it is installed, so put it into
-# # accepted.
-# print "Binary-only upload, source installed."
-# Logger.log([utils.getusername(), "PUNEW ACCEPT: %s" % (Upload.pkg.changes_file)])
-# _accept()
-# elif is_source_in_queue_dir(Cnf["Dir::Queue::Accepted"]):
-# # The source is in accepted, the binary cleared NEW: accept it.
-# print "Binary-only upload, source in accepted."
-# Logger.log([utils.getusername(), "PUNEW ACCEPT: %s" % (Upload.pkg.changes_file)])
-# _accept()
-# elif is_source_in_queue_dir(Cnf["Dir::Queue::New"]):
-# # It's in NEW. We expect the source to land in p-u holding
-# # pretty soon.
-# print "Binary-only upload, source in new."
-# move_to_holding(suite, queue_dir)
-# elif is_source_in_queue_dir(Cnf["Dir::Queue::Newstage"]):
-# # It's in newstage. Accept into the holding area
-# print "Binary-only upload, source in newstage."
-# Logger.log([utils.getusername(), "PUNEW ACCEPT: %s" % (Upload.pkg.changes_file)])
-# _accept()
-# else:
-# # No case applicable. Bail out. Return will cause the upload
-# # to be skipped.
-# print "ERROR"
-# print "Stable update failed. Source not found."
-# return
-# else:
-# # We are handling a sourceful upload. Move to accepted if currently
-# # in p-u holding and to p-u holding otherwise.
-# if is_source_in_queue_dir(queue_dir):
-# print "Sourceful upload in %s, accepting." % (q,)
-# _accept()
-# else:
-# move_to_holding(suite, queue_dir)
-
def do_accept(upload):
print "ACCEPT"
cnf = Config()
if not Options["No-Action"]:
(summary, short_summary) = upload.build_summaries()
-# if cnf.FindB("Dinstall::SecurityQueueHandling"):
-# upload.dump_vars(cnf["Dir::Queue::Embargoed"])
-# move_to_dir(cnf["Dir::Queue::Embargoed"])
-# upload.queue_build("embargoed", cnf["Dir::Queue::Embargoed"])
-# # Check for override disparities
-# upload.Subst["__SUMMARY__"] = summary
-# else:
- # Stable updates need to be copied to proposed-updates holding
- # area instead of accepted. Sourceful uploads need to go
- # to it directly, binaries only if the source has not yet been
- # accepted into p-u.
- for suite, q in [("proposed-updates", "ProposedUpdates"),
- ("oldstable-proposed-updates", "OldProposedUpdates")]:
- if not upload.pkg.changes["distribution"].has_key(suite):
- continue
- utils.fubar("stable accept not supported yet")
-# return do_accept_stableupdate(suite, q)
- # Just a normal upload, accept it...
- _accept(upload)
-
-def check_status(files):
- new = byhand = 0
- for f in files.keys():
- if files[f]["type"] == "byhand":
- byhand = 1
- elif files[f].has_key("new"):
- new = 1
- return (new, byhand)
+
+ if cnf.FindB("Dinstall::SecurityQueueHandling"):
+ upload.dump_vars(cnf["Dir::Queue::Embargoed"])
+ upload.move_to_dir(cnf["Dir::Queue::Embargoed"])
+ upload.queue_build("embargoed", cnf["Dir::Queue::Embargoed"])
+ # Check for override disparities
+ upload.Subst["__SUMMARY__"] = summary
+ else:
+ # Just a normal upload, accept it...
+ _accept(upload)
def do_pkg(changes_file, session):
u = Upload()
if accept_count > 1:
sets = "sets"
sys.stderr.write("Accepted %d package %s, %s.\n" % (accept_count, sets, utils.size_type(int(accept_bytes))))
- Logger.log([utils.getusername(), "total",accept_count,accept_bytes])
+ Logger.log(["total",accept_count,accept_bytes])
if not Options["No-Action"] and not Options["Trainee"]:
Logger.close()
################################################################################
-# def do_comments(dir, opref, npref, line, fn):
-# for comm in [ x for x in os.listdir(dir) if x.startswith(opref) ]:
-# lines = open("%s/%s" % (dir, comm)).readlines()
-# if len(lines) == 0 or lines[0] != line + "\n": continue
-# changes_files = [ x for x in os.listdir(".") if x.startswith(comm[7:]+"_")
-# and x.endswith(".changes") ]
-# changes_files = sort_changes(changes_files)
-# for f in changes_files:
-# f = utils.validate_changes_file_arg(f, 0)
-# if not f: continue
-# print "\n" + f
-# fn(f, "".join(lines[1:]))
-
-# if opref != npref and not Options["No-Action"]:
-# newcomm = npref + comm[len(opref):]
-# os.rename("%s/%s" % (dir, comm), "%s/%s" % (dir, newcomm))
-
-# ################################################################################
-
-# def comment_accept(changes_file, comments):
-# Upload.pkg.changes_file = changes_file
-# Upload.init_vars()
-# Upload.update_vars()
-# Upload.update_subst()
-# files = Upload.pkg.files
-
-# if not recheck():
-# return # dak wants to REJECT, crap
-
-# (new, byhand) = check_status(files)
-# if not new and not byhand:
-# do_accept()
-
-# ################################################################################
-
-# def comment_reject(changes_file, comments):
-# Upload.pkg.changes_file = changes_file
-# Upload.init_vars()
-# Upload.update_vars()
-# Upload.update_subst()
-
-# if not recheck():
-# pass # dak has its own reasons to reject as well, which is fine
-
-# reject(comments)
-# print "REJECT\n" + reject_message,
-# if not Options["No-Action"]:
-# Upload.do_reject(0, reject_message)
-# os.unlink(Upload.pkg.changes_file[:-8]+".dak")
-
-################################################################################
-
def main():
global Options, Logger, Sections, Priorities
Arguments = [('a',"automatic","Process-New::Options::Automatic"),
('h',"help","Process-New::Options::Help"),
- ('C',"comments-dir","Process-New::Options::Comments-Dir", "HasArg"),
('m',"manual-reject","Process-New::Options::Manual-Reject", "HasArg"),
('t',"trainee","Process-New::Options::Trainee"),
('n',"no-action","Process-New::Options::No-Action")]
- for i in ["automatic", "help", "manual-reject", "no-action", "version", "comments-dir", "trainee"]:
+ for i in ["automatic", "help", "manual-reject", "no-action", "version", "trainee"]:
if not cnf.has_key("Process-New::Options::%s" % (i)):
cnf["Process-New::Options::%s" % (i)] = ""
changes_files = apt_pkg.ParseCommandLine(cnf.Cnf,Arguments,sys.argv)
- if len(changes_files) == 0 and not cnf.get("Process-New::Options::Comments-Dir",""):
+ if len(changes_files) == 0:
changes_files = utils.get_changes_files(cnf["Dir::Queue::New"])
Options = cnf.SubTree("Process-New::Options")
# Kill me now? **FIXME**
cnf["Dinstall::Options::No-Mail"] = ""
-# commentsdir = cnf.get("Process-New::Options::Comments-Dir","")
-# if commentsdir:
-# if changes_files != []:
-# sys.stderr.write("Can't specify any changes files if working with comments-dir")
-# sys.exit(1)
-# do_comments(commentsdir, "ACCEPT.", "ACCEPTED.", "OK", comment_accept)
-# do_comments(commentsdir, "REJECT.", "REJECTED.", "NOTOK", comment_reject)
-# else:
- if True:
- for changes_file in changes_files:
- changes_file = utils.validate_changes_file_arg(changes_file, 0)
- if not changes_file:
- continue
- print "\n" + changes_file
-
- do_pkg (changes_file, session)
+ for changes_file in changes_files:
+ changes_file = utils.validate_changes_file_arg(changes_file, 0)
+ if not changes_file:
+ continue
+ print "\n" + changes_file
+
+ do_pkg (changes_file, session)
end()
###############################################################################
- import cPickle
import errno
import os
import pg
import commands
import shutil
import textwrap
+ import tempfile
from types import *
import yaml
from holding import Holding
from dbconn import *
from summarystats import SummaryStats
- from utils import parse_changes
+ from utils import parse_changes, check_dsc_files
from textutils import fix_maintainer
from binary import Binary
# Determine the type
if f.has_key("dbtype"):
file_type = f["dbtype"]
- elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
+ elif re_source_ext.match(f["type"]):
file_type = "dsc"
else:
utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
###############################################################################
+def check_status(files):
+ new = byhand = 0
+ for f in files.keys():
+ if files[f]["type"] == "byhand":
+ byhand = 1
+ elif files[f].has_key("new"):
+ new = 1
+ return (new, byhand)
+
+###############################################################################
+
# Used by Upload.check_timestamps
class TarTime(object):
def __init__(self, future_cutoff, past_cutoff):
self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
# Ensure the source version matches the version in the .changes file
- if entry["type"] == "orig.tar.gz":
+ if re_is_orig_source.match(f):
changes_version = self.pkg.changes["chopversion2"]
else:
changes_version = self.pkg.changes["chopversion"]
self.rejects.append("source only uploads are not supported.")
###########################################################################
- def check_dsc(self, action=True):
+ def check_dsc(self, action=True, session=None):
"""Returns bool indicating whether or not the source changes are valid"""
# Ensure there is source to check
if not self.pkg.changes["architecture"].has_key("source"):
if not re_valid_version.match(self.pkg.dsc["version"]):
self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
- # Bumping the version number of the .dsc breaks extraction by stable's
- # dpkg-source. So let's not do that...
- if self.pkg.dsc["format"] != "1.0":
- self.rejects.append("%s: incompatible 'Format' version produced by a broken version of dpkg-dev 1.9.1{3,4}." % (dsc_filename))
+ # Only a limited list of source formats are allowed in each suite
+ for dist in self.pkg.changes["distribution"].keys():
+ allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
+ if self.pkg.dsc["format"] not in allowed:
+ self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
# Validate the Maintainer field
try:
for field_name in [ "build-depends", "build-depends-indep" ]:
field = self.pkg.dsc.get(field_name)
if field:
- # Check for broken dpkg-dev lossage...
- if field.startswith("ARRAY"):
- self.rejects.append("%s: invalid %s field produced by a broken version of dpkg-dev (1.10.11)" % \
- (dsc_filename, field_name.title()))
-
# Have apt try to parse them...
try:
apt_pkg.ParseSrcDepends(field)
if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
- # Ensure there is a .tar.gz in the .dsc file
- has_tar = False
- for f in self.pkg.dsc_files.keys():
- m = re_issource.match(f)
- if not m:
- self.rejects.append("%s: %s in Files field not recognised as source." % (dsc_filename, f))
- continue
- ftype = m.group(3)
- if ftype == "orig.tar.gz" or ftype == "tar.gz":
- has_tar = True
-
- if not has_tar:
- self.rejects.append("%s: no .tar.gz or .orig.tar.gz in 'Files' field." % (dsc_filename))
+ # Ensure the Files field contain only what's expected
+ self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
# Ensure source is newer than existing source in target suites
session = DBConn().session()
if not os.path.exists(src):
return
ftype = m.group(3)
- if ftype == "orig.tar.gz" and self.pkg.orig_tar_gz:
+ if re_is_orig_source.match(f) and pkg.orig_files.has_key(f) and \
+ pkg.orig_files[f].has_key("path"):
continue
dest = os.path.join(os.getcwd(), f)
os.symlink(src, dest)
- # If the orig.tar.gz is not a part of the upload, create a symlink to the
- # existing copy.
- if self.pkg.orig_tar_gz:
- dest = os.path.join(os.getcwd(), os.path.basename(self.pkg.orig_tar_gz))
- os.symlink(self.pkg.orig_tar_gz, dest)
+ # If the orig files are not a part of the upload, create symlinks to the
+ # existing copies.
+ for orig_file in self.pkg.orig_files.keys():
+ if not self.pkg.orig_files[orig_file].has_key("path"):
+ continue
+ dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
+ os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
# Extract the source
cmd = "dpkg-source -sn -x %s" % (dsc_filename)
(result, output) = commands.getstatusoutput(cmd)
if (result != 0):
self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
- self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "), "")
+ self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
return
if not cnf.Find("Dir::Queue::BTSVersionTrack"):
# We should probably scrap or rethink the whole reprocess thing
# Bail out if:
# a) there's no source
- # or b) reprocess is 2 - we will do this check next time when orig.tar.gz is in 'files'
- # or c) the orig.tar.gz is MIA
+ # or b) reprocess is 2 - we will do this check next time when orig
+ # tarball is in 'files'
+ # or c) the orig files are MIA
if not self.pkg.changes["architecture"].has_key("source") or self.reprocess == 2 \
- or self.pkg.orig_tar_gz == -1:
+ or len(self.pkg.orig_files) == 0:
return
tmpdir = utils.temp_dirname()
self.ensure_hashes()
+ ###########################################################################
+ def check_lintian(self):
+ # Only check some distributions
+ valid_dist = False
+ for dist in ('unstable', 'experimental'):
+ if dist in self.pkg.changes['distribution']:
+ valid_dist = True
+ break
+
+ if not valid_dist:
+ return
+
+ cnf = Config()
+ tagfile = cnf.get("Dinstall::LintianTags")
+ if tagfile is None:
+ # We don't have a tagfile, so just don't do anything.
+ return
+ # Parse the yaml file
+ sourcefile = file(tagfile, 'r')
+ sourcecontent = sourcefile.read()
+ sourcefile.close()
+ try:
+ lintiantags = yaml.load(sourcecontent)['lintian']
+ except yaml.YAMLError, msg:
+ utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
+ return
+
+ # Now setup the input file for lintian. lintian wants "one tag per line" only,
+ # so put it together like it. We put all types of tags in one file and then sort
+        # through lintian's output later to see if it's a fatal tag we detected, or not.
+ # So we only run lintian once on all tags, even if we might reject on some, but not
+ # reject on others.
+ # Additionally build up a set of tags
+ tags = set()
+ (fd, temp_filename) = utils.temp_filename()
+ temptagfile = os.fdopen(fd, 'w')
+ for tagtype in lintiantags:
+ for tag in lintiantags[tagtype]:
+ temptagfile.write("%s\n" % tag)
+ tags.add(tag)
+ temptagfile.close()
+
+ # So now we should look at running lintian at the .changes file, capturing output
+ # to then parse it.
+ command = "lintian --show-overrides --tags-from-file %s %s" % (temp_filename, self.pkg.changes_file)
+ (result, output) = commands.getstatusoutput(command)
+ # We are done with lintian, remove our tempfile
+ os.unlink(temp_filename)
+ if (result == 2):
+ utils.warn("lintian failed for %s [return code: %s]." % (self.pkg.changes_file, result))
+ utils.warn(utils.prefix_multi_line_string(output, " [possible output:] "))
+
+ if len(output) == 0:
+ return
+
+ # We have output of lintian, this package isn't clean. Lets parse it and see if we
+ # are having a victim for a reject.
+ # W: tzdata: binary-without-manpage usr/sbin/tzconfig
+ for line in output.split('\n'):
+ m = re_parse_lintian.match(line)
+ if m is None:
+ continue
+
+ etype = m.group(1)
+ epackage = m.group(2)
+ etag = m.group(3)
+ etext = m.group(4)
+
+            # So let's check if we know the tag at all.
+ if etag not in tags:
+ continue
+
+ if etype == 'O':
+                # We know it and it is overridden. Check that override is allowed.
+ if etag in lintiantags['warning']:
+                    # The tag is overridden, and it is allowed to be overridden.
+ # Don't add a reject message.
+ pass
+ elif etag in lintiantags['error']:
+                    # The tag is overridden - but is not allowed to be
+ self.rejects.append("%s: Overriden tag %s found, but this tag may not be overwritten." % (epackage, etag))
+ else:
+                # Tag is known, it is not overridden, direct reject.
+ self.rejects.append("%s: Found lintian output: '%s %s', automatically rejected package." % (epackage, etag, etext))
+ # Now tell if they *might* override it.
+ if etag in lintiantags['warning']:
+ self.rejects.append("%s: If you have a good reason, you may override this lintian tag." % (epackage))
+
###########################################################################
def check_urgency(self):
cnf = Config()
# <Ganneff> yes
# This routine returns None on success or an error on failure
- res = get_queue('accepted').autobuild_upload(self.pkg, cnf["Dir::Queue::Accepted"])
+ res = get_or_set_queue('accepted').autobuild_upload(self.pkg, cnf["Dir::Queue::Accepted"])
if res:
utils.fubar(res)
"""
@warning: NB: this function can remove entries from the 'files' index [if
- the .orig.tar.gz is a duplicate of the one in the archive]; if
+ the orig tarball is a duplicate of the one in the archive]; if
you're iterating over 'files' and call this function as part of
the loop, be sure to add a check to the top of the loop to
ensure you haven't just tried to dereference the deleted entry.
"""
Cnf = Config()
- self.pkg.orig_tar_gz = None
+ self.pkg.orig_files = {} # XXX: do we need to clear it?
+ orig_files = self.pkg.orig_files
# Try and find all files mentioned in the .dsc. This has
# to work harder to cope with the multiple possible
if len(ql) > 0:
# Ignore exact matches for .orig.tar.gz
match = 0
- if dsc_name.endswith(".orig.tar.gz"):
+ if re_is_orig_source.match(dsc_name):
for i in ql:
if self.pkg.files.has_key(dsc_name) and \
int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
# This would fix the stupidity of changing something we often iterate over
# whilst we're doing it
del self.pkg.files[dsc_name]
- self.pkg.orig_tar_gz = os.path.join(i.location.path, i.filename)
+ if not orig_files.has_key(dsc_name):
+ orig_files[dsc_name] = {}
+ orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
match = 1
if not match:
self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
- elif dsc_name.endswith(".orig.tar.gz"):
+ elif re_is_orig_source.match(dsc_name):
# Check in the pool
ql = get_poolfile_like_name(dsc_name, session)
# need this for updating dsc_files in install()
dsc_entry["files id"] = x.file_id
# See install() in process-accepted...
- self.pkg.orig_tar_id = x.file_id
- self.pkg.orig_tar_gz = old_file
- self.pkg.orig_tar_location = x.location.location_id
+ if not orig_files.has_key(dsc_name):
+ orig_files[dsc_name] = {}
+ orig_files[dsc_name]["id"] = x.file_id
+ orig_files[dsc_name]["path"] = old_file
+ orig_files[dsc_name]["location"] = x.location.location_id
else:
# TODO: Record the queues and info in the DB so we don't hardcode all this crap
# Not there? Check the queue directories...
in_otherdir_fh.close()
actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
found = in_otherdir
- self.pkg.orig_tar_gz = in_otherdir
+ if not orig_files.has_key(dsc_name):
+ orig_files[dsc_name] = {}
+ orig_files[dsc_name]["path"] = in_otherdir
if not found:
self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_name))
- self.pkg.orig_tar_gz = -1
continue
else:
self.rejects.append("%s refers to %s, but I can't find it in the queue." % (file, dsc_name))
if actual_size != int(dsc_entry["size"]):
self.rejects.append("size for %s doesn't match %s." % (found, file))
+ ################################################################################
+ # This is used by process-new and process-holding to recheck a changes file
+ # at the time we're running. It mainly wraps various other internal functions
+ # and is similar to accepted_checks - these should probably be tidied up
+ # and combined
+ def recheck(self, session):
+ cnf = Config()
+ for f in self.pkg.files.keys():
+            # The .orig.tar.gz can disappear out from under us if it's a
+ # duplicate of one in the archive.
+ if not self.pkg.files.has_key(f):
+ continue
+
+ entry = self.pkg.files[f]
+
+ # Check that the source still exists
+ if entry["type"] == "deb":
+ source_version = entry["source version"]
+ source_package = entry["source package"]
+ if not self.pkg.changes["architecture"].has_key("source") \
+ and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
+ source_epochless_version = re_no_epoch.sub('', source_version)
+ dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
+ found = False
+ for q in ["Accepted", "Embargoed", "Unembargoed", "Newstage"]:
+ if cnf.has_key("Dir::Queue::%s" % (q)):
+ if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
+ found = True
+ if not found:
+ self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
+
+ # Version and file overwrite checks
+ if entry["type"] == "deb":
+ self.check_binary_against_db(f, session)
+ elif entry["type"] == "dsc":
+ self.check_source_against_db(f, session)
+ self.check_dsc_against_db(f, session)
+
################################################################################
def accepted_checks(self, overwrite_checks, session):
# Recheck anything that relies on the database; since that's not