from urgencylog import UrgencyLog
from dbconn import *
from summarystats import SummaryStats
-from utils import parse_changes, check_dsc_files
+from utils import parse_changes, check_dsc_files, build_package_set
from textutils import fix_maintainer
-from binary import Binary
from lintian import parse_lintian_output, generate_reject_messages
+from contents import UnpackedSource
+
+# suppress some deprecation warnings in squeeze related to apt_pkg
+# module
+import warnings
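+# (the message argument of filterwarnings() is a regular expression, hence the
+# escaped dots and parentheses below)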
+warnings.filterwarnings('ignore', \
+ "apt_pkg.ParseSection\(\) is deprecated. Please see apt_pkg\.TagSection\(\) for the replacement\.", \
+ DeprecationWarning)
+warnings.filterwarnings('ignore', \
+ "Attribute 'Find' of the 'apt_pkg\.TagSection' object is deprecated, use 'find' instead\.", \
+ DeprecationWarning)
###############################################################################
# Determine what parts in a .changes are NEW
-def determine_new(changes, files, warn=1, session = None):
+def determine_new(filename, changes, files, warn=1, session = None, dsc = None, new = None):
"""
Determine what parts in a C{changes} file are NEW.
+ @type filename: str
+ @param filename: changes filename
+
@type changes: Upload.Pkg.changes dict
@param changes: Changes dictionary
@type warn: bool
@param warn: Warn if overrides are added for (old)stable
+ @type dsc: Upload.Pkg.dsc dict
+ @param dsc: (optional); Dsc dictionary
+
+ @type new: dict
+    @param new: new packages as returned by a previous call to this function; override information may have changed since then
+
-    @rtype: dict
-    @return: dictionary of NEW components.
+    @rtype: tuple
+    @return: (new, byhand) where new is a dictionary of NEW components and
+        byhand is a dictionary of BYHAND files awaiting manual processing
"""
- new = {}
+ # TODO: This should all use the database instead of parsing the changes
+ # file again
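+    # "new" carries the result of an earlier call (if any); BYHAND files are
+    # collected separately so the caller can tell which of them still need
+    # manual processing.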
+    if new is None:
+        new = {}
+    byhand = {}
+
+ dbchg = get_dbchange(filename, session)
+ if dbchg is None:
+ print "Warning: cannot find changes file in database; won't check byhand"
+
+ # Try to get the Package-Set field from an included .dsc file (if possible).
+ if dsc:
+ for package, entry in build_package_set(dsc, session).items():
+ if not new.has_key(package):
+ new[package] = entry
# Build up a list of potentially new things
for name, f in files.items():
- # Skip byhand elements
-# if f["type"] == "byhand":
-# continue
+ # Keep a record of byhand elements
+ if f["section"] == "byhand":
+ byhand[name] = 1
+ continue
+
pkg = f["package"]
priority = f["priority"]
section = f["section"]
# Fix up the list of target suites
cnf = Config()
for suite in changes["suite"].keys():
- override = cnf.Find("Suite::%s::OverrideSuite" % (suite))
- if override:
- (olderr, newerr) = (get_suite(suite, session) == None,
- get_suite(override, session) == None)
- if olderr or newerr:
- (oinv, newinv) = ("", "")
- if olderr: oinv = "invalid "
- if newerr: ninv = "invalid "
- print "warning: overriding %ssuite %s to %ssuite %s" % (
- oinv, suite, ninv, override)
- del changes["suite"][suite]
- changes["suite"][override] = 1
+ oldsuite = get_suite(suite, session)
+ if not oldsuite:
+ print "WARNING: Invalid suite %s found" % suite
+ continue
+
+ if oldsuite.overridesuite:
+ newsuite = get_suite(oldsuite.overridesuite, session)
+
+ if newsuite:
+ print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
+ oldsuite.overridesuite, suite)
+ del changes["suite"][suite]
+ changes["suite"][oldsuite.overridesuite] = 1
+ else:
+ print "WARNING: Told to use overridesuite %s for %s but it doesn't exist. Bugger" % (
+ oldsuite.overridesuite, suite)
+ # Check for unprocessed byhand files
+ if dbchg is not None:
+ for b in byhand.keys():
+ # Find the file entry in the database
+ found = False
+ for f in dbchg.files:
+ if f.filename == b:
+ found = True
+ # If it's processed, we can ignore it
+ if f.processed:
+ del byhand[b]
+ break
+
+ if not found:
+ print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed"
+
+ # Check for new stuff
for suite in changes["suite"].keys():
for pkg in new.keys():
ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
if new[pkg].has_key("othercomponents"):
print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
- return new
+ return new, byhand
################################################################################
###############################################################################
+def prod_maintainer(notes, upload):
+ cnf = Config()
+
+ # Here we prepare an editor and get them ready to prod...
+ (fd, temp_filename) = utils.temp_filename()
+ temp_file = os.fdopen(fd, 'w')
+ for note in notes:
+ temp_file.write(note.comment)
+ temp_file.close()
+ editor = os.environ.get("EDITOR","vi")
+ answer = 'E'
+ while answer == 'E':
+ os.system("%s %s" % (editor, temp_filename))
+ temp_fh = utils.open_file(temp_filename)
+ prod_message = "".join(temp_fh.readlines())
+ temp_fh.close()
+ print "Prod message:"
+ print utils.prefix_multi_line_string(prod_message," ",include_blank_lines=1)
+ prompt = "[P]rod, Edit, Abandon, Quit ?"
+ answer = "XXX"
+ while prompt.find(answer) == -1:
+ answer = utils.our_raw_input(prompt)
+ m = re_default_answer.search(prompt)
+ if answer == "":
+ answer = m.group(1)
+ answer = answer[:1].upper()
+ os.unlink(temp_filename)
+ if answer == 'A':
+ return
+ elif answer == 'Q':
+ end()
+ sys.exit(0)
+    # Otherwise, do the prodding...
+ user_email_address = utils.whoami() + " <%s>" % (
+ cnf["Dinstall::MyAdminAddress"])
+
+ Subst = upload.Subst
+
+ Subst["__FROM_ADDRESS__"] = user_email_address
+ Subst["__PROD_MESSAGE__"] = prod_message
+ Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
+
+ prod_mail_message = utils.TemplateSubst(
+ Subst,cnf["Dir::Templates"]+"/process-new.prod")
+
+ # Send the prod mail
+ utils.send_mail(prod_mail_message)
+
+ print "Sent prodding message"
+
+################################################################################
+
+def edit_note(note, upload, session, trainee=False):
+    # Let the user edit the new note in a temporary file
+ (fd, temp_filename) = utils.temp_filename()
+ editor = os.environ.get("EDITOR","vi")
+ answer = 'E'
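+    # Re-open the editor for as long as the user answers (E)dit.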
+ while answer == 'E':
+ os.system("%s %s" % (editor, temp_filename))
+ temp_file = utils.open_file(temp_filename)
+ newnote = temp_file.read().rstrip()
+ temp_file.close()
+ print "New Note:"
+ print utils.prefix_multi_line_string(newnote," ")
+ prompt = "[D]one, Edit, Abandon, Quit ?"
+ answer = "XXX"
+ while prompt.find(answer) == -1:
+ answer = utils.our_raw_input(prompt)
+ m = re_default_answer.search(prompt)
+ if answer == "":
+ answer = m.group(1)
+ answer = answer[:1].upper()
+ os.unlink(temp_filename)
+ if answer == 'A':
+ return
+ elif answer == 'Q':
+ end()
+ sys.exit(0)
+
+ comment = NewComment()
+ comment.package = upload.pkg.changes["source"]
+ comment.version = upload.pkg.changes["version"]
+ comment.comment = newnote
+ comment.author = utils.whoami()
+ comment.trainee = trainee
+ session.add(comment)
+ session.commit()
+
+###############################################################################
+
+# suite names DMs can upload to
+dm_suites = ['unstable', 'experimental']
+
+def get_newest_source(source, session):
+ 'returns the newest DBSource object in dm_suites'
+ ## the most recent version of the package uploaded to unstable or
+ ## experimental includes the field "DM-Upload-Allowed: yes" in the source
+ ## section of its control file
+ q = session.query(DBSource).filter_by(source = source). \
+ filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites))). \
+ order_by(desc('source.version'))
+ return q.first()
+
+def get_suite_version_by_source(source, session):
+ 'returns a list of tuples (suite_name, version) for source package'
+ q = session.query(Suite.suite_name, DBSource.version). \
+ join(Suite.sources).filter_by(source = source)
+ return q.all()
+
+def get_source_by_package_and_suite(package, suite_name, session):
+ '''
+    returns a DBSource query filtered by DBBinary.package and the given
+    suite_name
+ '''
+ return session.query(DBSource). \
+ join(DBSource.binaries).filter_by(package = package). \
+ join(DBBinary.suites).filter_by(suite_name = suite_name)
+
+def get_suite_version_by_package(package, arch_string, session):
+ '''
+ returns a list of tuples (suite_name, version) for binary package and
+ arch_string
+ '''
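+    # Architecture 'all' binaries apply to every architecture, so include them
+    # alongside arch_string.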
+ return session.query(Suite.suite_name, DBBinary.version). \
+ join(Suite.binaries).filter_by(package = package). \
+ join(DBBinary.architecture). \
+ filter(Architecture.arch_string.in_([arch_string, 'all'])).all()
+
class Upload(object):
"""
Everything that has to do with an upload processed.
self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
- if "sponsoremail" in self.pkg.changes:
- self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
+ # Process policy doesn't set the fingerprint field and I don't want to make it
+ # do it for now as I don't want to have to deal with the case where we accepted
+ # the package into PU-NEW, but the fingerprint has gone away from the keyring in
+ # the meantime so the package will be remarked as rejectable. Urgh.
+ # TODO: Fix this properly
+ if self.pkg.changes.has_key('fingerprint'):
+ session = DBConn().session()
+ fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
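+            # If the upload was sponsored, also address the notification to the sponsor.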
+ if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
+ if self.pkg.changes.has_key("sponsoremail"):
+ self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
+ session.close()
if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
self.Subst["__REJECT_MESSAGE__"] = self.package_info()
self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
+ self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
###########################################################################
def load_changes(self, filename):
if not re_valid_pkg_name.match(prov):
self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
+ # If there is a Built-Using field, we need to check we can find the
+ # exact source version
+ built_using = control.Find("Built-Using")
+ if built_using:
+ try:
+ entry["built-using"] = []
+ for dep in apt_pkg.parse_depends(built_using):
+ bu_s, bu_v, bu_e = dep[0]
+ # Check that it's an exact match dependency and we have
+ # some form of version
+ if bu_e != "=" or len(bu_v) < 1:
+ self.rejects.append("%s: Built-Using contains non strict dependency (%s %s %s)" % (f, bu_s, bu_e, bu_v))
+ else:
+ # Find the source id for this version
+ bu_so = get_sources_from_name(bu_s, version=bu_v, session = session)
+ if len(bu_so) != 1:
+ self.rejects.append("%s: Built-Using (%s = %s): Cannot find source package" % (f, bu_s, bu_v))
+ else:
+ entry["built-using"].append( (bu_so[0].source, bu_so[0].version, ) )
+
+ except ValueError, e:
+ self.rejects.append("%s: Cannot parse Built-Using field: %s" % (f, str(e)))
+
+
# Check the section & priority match those given in the .changes (non-fatal)
if control.Find("Section") and entry["section"] != "" \
and entry["section"] != control.Find("Section"):
(source_version, f, self.pkg.changes["version"]))
else:
# Check in the SQL database
- if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
+ if not source_exists(source_package, source_version, suites = \
+ self.pkg.changes["distribution"].keys(), session = session):
# Check in one of the other directories
source_epochless_version = re_no_epoch.sub('', source_version)
dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
entry["new"] = 1
else:
dsc_file_exists = False
- for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates", "Lenny-Volatile-Proposed-Updates"]:
+ for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
if cnf.has_key("Dir::Queue::%s" % (myq)):
if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
dsc_file_exists = True
# Check the version and for file overwrites
self.check_binary_against_db(f, session)
- # Temporarily disable contents generation until we change the table storage layout
- #b = Binary(f)
- #b.scan_package()
- #if len(b.rejects) > 0:
- # for j in b.rejects:
- # self.rejects.append(j)
-
def source_file_checks(self, f, session):
entry = self.pkg.files[f]
# Check for packages that have moved from one component to another
entry['suite'] = suite
- res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
- if res.rowcount > 0:
- entry["othercomponents"] = res.fetchone()[0]
+ arch_list = [entry["architecture"], 'all']
+ component = get_component_by_package_suite(self.pkg.files[f]['package'], \
+ [suite], arch_list = arch_list, session = session)
+ if component is not None:
+ entry["othercomponents"] = component
def check_files(self, action=True):
file_keys = self.pkg.files.keys()
self.rejects.append("source only uploads are not supported.")
###########################################################################
- def check_dsc(self, action=True, session=None):
- """Returns bool indicating whether or not the source changes are valid"""
- # Ensure there is source to check
- if not self.pkg.changes["architecture"].has_key("source"):
- return True
- # Find the .dsc
+ def __dsc_filename(self):
+ """
+ Returns: (Status, Dsc_Filename)
+ where
+ Status: Boolean; True when there was no error, False otherwise
+ Dsc_Filename: String; name of the dsc file if Status is True, reason for the error otherwise
+ """
dsc_filename = None
- for f, entry in self.pkg.files.items():
- if entry["type"] == "dsc":
+
+ # find the dsc
+ for name, entry in self.pkg.files.items():
+ if entry.has_key("type") and entry["type"] == "dsc":
if dsc_filename:
- self.rejects.append("can not process a .changes file with multiple .dsc's.")
- return False
+ return False, "cannot process a .changes file with multiple .dsc's."
else:
- dsc_filename = f
+ dsc_filename = name
- # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
if not dsc_filename:
- self.rejects.append("source uploads must contain a dsc file")
- return False
+ return False, "source uploads must contain a dsc file"
+
+ return True, dsc_filename
+
+ def load_dsc(self, action=True, signing_rules=1):
+ """
+ Find and load the dsc from self.pkg.files into self.dsc
+
+ Returns: (Status, Reason)
+ where
+ Status: Boolean; True when there was no error, False otherwise
+ Reason: String; When Status is False this describes the error
+ """
+
+ # find the dsc
+ (status, dsc_filename) = self.__dsc_filename()
+ if not status:
+ # If status is false, dsc_filename has the reason
+ return False, dsc_filename
- # Parse the .dsc file
try:
- self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
+ self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=signing_rules, dsc_file=1))
except CantOpenError:
- # if not -n copy_to_holding() will have done this for us...
if not action:
- self.rejects.append("%s: can't read file." % (dsc_filename))
+ return False, "%s: can't read file." % (dsc_filename)
except ParseChangesError, line:
- self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
+ return False, "%s: parse error, can't grok: %s." % (dsc_filename, line)
except InvalidDscError, line:
- self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
+ return False, "%s: syntax error on line %s." % (dsc_filename, line)
except ChangesUnicodeError:
- self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))
+ return False, "%s: dsc file not proper utf-8." % (dsc_filename)
+
+ return True, None
+
+ ###########################################################################
+
+ def check_dsc(self, action=True, session=None):
+ """Returns bool indicating whether or not the source changes are valid"""
+ # Ensure there is source to check
+ if not self.pkg.changes["architecture"].has_key("source"):
+ return True
+
+ (status, reason) = self.load_dsc(action=action)
+ if not status:
+ self.rejects.append(reason)
+ return False
+ (status, dsc_filename) = self.__dsc_filename()
+ if not status:
+ # If status is false, dsc_filename has the reason
+ self.rejects.append(dsc_filename)
+ return False
# Build up the file list of files mentioned by the .dsc
try:
session = DBConn().session()
self.check_source_against_db(dsc_filename, session)
self.check_dsc_against_db(dsc_filename, session)
- session.close()
+
+ dbchg = get_dbchange(self.pkg.changes_file, session)
# Finally, check if we're missing any files
for f in self.later_check_files:
- self.rejects.append("Could not find file %s references in changes" % f)
+ print 'XXX: %s' % f
+ # Check if we've already processed this file if we have a dbchg object
+ ok = False
+ if dbchg:
+ for pf in dbchg.files:
+ if pf.filename == f and pf.processed:
+ self.notes.append('%s was already processed so we can go ahead' % f)
+ ok = True
+ del self.pkg.files[f]
+ if not ok:
+ self.rejects.append("Could not find file %s references in changes" % f)
+
+ session.close()
return True
os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
# Extract the source
- cmd = "dpkg-source -sn -x %s" % (dsc_filename)
- (result, output) = commands.getstatusoutput(cmd)
- if (result != 0):
- self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
- self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
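+        # UnpackedSource runs the extraction for us; get_changelog_file() and
+        # cleanup() below operate on the unpacked source tree.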
+ try:
+ unpacked = UnpackedSource(dsc_filename)
+        except Exception:
+ self.rejects.append("'dpkg-source -x' failed for %s." % dsc_filename)
return
if not cnf.Find("Dir::Queue::BTSVersionTrack"):
upstr_version = re_strip_revision.sub('', upstr_version)
# Ensure the changelog file exists
- changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
- if not os.path.exists(changelog_filename):
+ changelog_file = unpacked.get_changelog_file()
+ if changelog_file is None:
self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
return
# Parse the changelog
self.pkg.dsc["bts changelog"] = ""
- changelog_file = utils.open_file(changelog_filename)
for line in changelog_file.readlines():
m = re_changelog_versions.match(line)
if m:
self.pkg.dsc["bts changelog"] += line
changelog_file.close()
+ unpacked.cleanup()
# Check we found at least one revision in the changelog
if not self.pkg.dsc["bts changelog"]:
# If we do not have a tagfile, don't do anything
tagfile = cnf.get("Dinstall::LintianTags")
- if tagfile is None:
+ if not tagfile:
return
# Parse the yaml file
# Check any one-off upload blocks
self.check_upload_blocks(fpr, session)
- # Start with DM as a special case
+ # If the source_acl is None, source is never allowed
+ if fpr.source_acl is None:
+ if self.pkg.changes["architecture"].has_key("source"):
+ rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
+ rej += '\nPlease contact ftpmaster if you think this is incorrect'
+ self.rejects.append(rej)
+ return
+ # Do DM as a special case
# DM is a special case unfortunately, so we check it first
# (keys with no source access get more access than DMs in one
# way; DMs can only upload for their packages whether source
# or binary, whereas keys with no access might be able to
# upload some binaries)
- if fpr.source_acl.access_level == 'dm':
+ elif fpr.source_acl.access_level == 'dm':
self.check_dm_upload(fpr, session)
else:
- # Check source-based permissions for other types
- if self.pkg.changes["architecture"].has_key("source") and \
- fpr.source_acl.access_level is None:
- rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
- rej += '\nPlease contact ftpmaster if you think this is incorrect'
- self.rejects.append(rej)
- return
# If not a DM, we allow full upload rights
uid_email = "%s@debian.org" % (fpr.uid.uid)
self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
if len(tmparches.keys()) > 0:
if fpr.binary_reject:
- rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
- rej += "\narchitectures involved are: ", ",".join(tmparches.keys())
+ rej = "changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
+ if len(tmparches.keys()) == 1:
+ rej += "\n\narchitecture involved is: %s" % ",".join(tmparches.keys())
+ else:
+ rej += "\n\narchitectures involved are: %s" % ",".join(tmparches.keys())
self.rejects.append(rej)
else:
# TODO: This is where we'll implement reject vs throw away binaries later
if rej:
return
- ## the most recent version of the package uploaded to unstable or
- ## experimental includes the field "DM-Upload-Allowed: yes" in the source
- ## section of its control file
- q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
- q = q.join(SrcAssociation)
- q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
- q = q.order_by(desc('source.version')).limit(1)
-
- r = q.all()
+ r = get_newest_source(self.pkg.changes["source"], session)
- if len(r) != 1:
+ if r is None:
rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
self.rejects.append(rej)
return
- r = r[0]
if not r.dm_upload_allowed:
rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
self.rejects.append(rej)
## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
## non-developer maintainers cannot NMU or hijack packages)
- # srcuploaders includes the maintainer
+        # the uploaders list includes the maintainer
accept = False
- for sup in r.srcuploaders:
- (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
+ for uploader in r.uploaders:
+ (rfc822, rfc2047, name, email) = uploader.get_split_maintainer()
# Eww - I hope we never have two people with the same name in Debian
if email == fpr.uid.uid or name == fpr.uid.name:
accept = True
## none of the packages are being taken over from other source packages
for b in self.pkg.changes["binary"].keys():
for suite in self.pkg.changes["distribution"].keys():
- q = session.query(DBSource)
- q = q.join(DBBinary).filter_by(package=b)
- q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
-
- for s in q.all():
+ for s in get_source_by_package_and_suite(b, suite, session):
if s.source != self.pkg.changes["source"]:
self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
self.Subst["__SHORT_SUMMARY__"] = short_summary
for dist in self.pkg.changes["distribution"].keys():
- announce_list = cnf.Find("Suite::%s::Announce" % (dist))
+ suite = get_suite(dist)
+ if suite is None: continue
+ announce_list = suite.announce
if announce_list == "" or lists_done.has_key(announce_list):
continue
print "Installing."
self.logger.log(["installing changes", self.pkg.changes_file])
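+        # Remember the binaries we add so that their metadata can be imported
+        # once the files are in the pool (see below).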
+ binaries = []
poolfiles = []
# Add the .dsc file to the DB first
# Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
for newfile, entry in self.pkg.files.items():
if entry["type"] == "deb":
- poolfiles.append(add_deb_to_db(self, newfile, session))
+ b, pf = add_deb_to_db(self, newfile, session)
+ binaries.append(b)
+ poolfiles.append(pf)
# If this is a sourceful diff only upload that is moving
# cross-component we need to copy the .orig files into the new
stats.accept_bytes += float(entry["size"])
# Copy the .changes file across for suite which need it.
- copy_changes = {}
- for suite_name in self.pkg.changes["distribution"].keys():
- if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
- copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""
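+        # Gather the distinct copychanges destinations configured for the
+        # target suites.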
+ copy_changes = dict([(x.copychanges, '')
+ for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
+ if x.copychanges is not None])
for dest in copy_changes.keys():
utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
# Our SQL session will automatically start a new transaction after
# the last commit
+ # Now ensure that the metadata has been added
+ # This has to be done after we copy the files into the pool
+ # For source if we have it:
+ if self.pkg.changes["architecture"].has_key("source"):
+ import_metadata_into_db(source, session)
+
+ # Now for any of our binaries
+ for b in binaries:
+ import_metadata_into_db(b, session)
+
+ session.commit()
+
# Move the .changes into the 'done' directory
utils.move(self.pkg.changes_file,
os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
self.update_subst()
- self.Subst["__SUITE__"] = ""
self.Subst["__SUMMARY__"] = summary
mail_message = utils.TemplateSubst(self.Subst,
os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
file_type = binary_type
# Override suite name; used for example with proposed-updates
- if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
- suite = cnf["Suite::%s::OverrideSuite" % (suite)]
+ oldsuite = get_suite(suite, session)
+        if oldsuite is not None and oldsuite.overridesuite:
+ suite = oldsuite.overridesuite
result = get_override(package, suite, component, file_type, session)
################################################################################
def check_binary_against_db(self, filename, session):
# Ensure version is sane
- q = session.query(BinAssociation)
- q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
- q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
-
- self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
- filename, self.pkg.files[filename]["version"], sourceful=False)
+ self.cross_suite_version_check( \
+ get_suite_version_by_package(self.pkg.files[filename]["package"], \
+ self.pkg.files[filename]["architecture"], session),
+ filename, self.pkg.files[filename]["version"], sourceful=False)
# Check for any existing copies of the file
q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
version = self.pkg.dsc.get("version")
# Ensure version is sane
- q = session.query(SrcAssociation)
- q = q.join(DBSource).filter(DBSource.source==source)
-
- self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
- filename, version, sourceful=True)
+ self.cross_suite_version_check( \
+ get_suite_version_by_source(source, session), filename, version,
+ sourceful=True)
################################################################################
def check_dsc_against_db(self, filename, session):
source_version = entry["source version"]
source_package = entry["source package"]
if not self.pkg.changes["architecture"].has_key("source") \
- and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
+ and not source_exists(source_package, source_version, \
+ suites = self.pkg.changes["distribution"].keys(), session = session):
source_epochless_version = re_no_epoch.sub('', source_version)
dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
found = False
source_version = entry["source version"]
source_package = entry["source package"]
if not self.pkg.changes["architecture"].has_key("source") \
- and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys()):
+ and not source_exists(source_package, source_version, \
+ suites = self.pkg.changes["distribution"].keys(), \
+ session = session):
self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
# Version and file overwrite checks
if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
self.rejects.append("%s is NEW for %s." % (checkfile, suite))
- ################################################################################
- # This is not really a reject, but an unaccept, but since a) the code for
- # that is non-trivial (reopen bugs, unannounce etc.), b) this should be
- # extremely rare, for now we'll go with whining at our admin folks...
-
- def do_unaccept(self):
- cnf = Config()
-
- self.update_subst()
- self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
- self.Subst["__REJECT_MESSAGE__"] = self.package_info()
- self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
- self.Subst["__BCC__"] = "X-DAK: dak process-accepted"
- if cnf.has_key("Dinstall::Bcc"):
- self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])
-
- template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")
-
- reject_mail_message = utils.TemplateSubst(self.Subst, template)
-
- # Write the rejection email out as the <foo>.reason file
- reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
- reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
-
- # If we fail here someone is probably trying to exploit the race
- # so let's just raise an exception ...
- if os.path.exists(reject_filename):
- os.unlink(reject_filename)
-
- fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
- os.write(fd, reject_mail_message)
- os.close(fd)
-
- utils.send_mail(reject_mail_message)
-
- del self.Subst["__REJECTOR_ADDRESS__"]
- del self.Subst["__REJECT_MESSAGE__"]
- del self.Subst["__CC__"]
-
################################################################################
# If any file of an upload has a recent mtime then chances are good
# the file is still being uploaded.