import sys
import traceback
import apt_pkg
+from sqlalchemy.orm.exc import NoResultFound
from daklib.dbconn import *
from daklib import daklog
from daklib import utils
from daklib.dak_exceptions import CantOpenError, AlreadyLockedError, CantGetLockError
from daklib.config import Config
-from daklib.archive import ArchiveTransaction
+from daklib.archive import ArchiveTransaction, source_component_from_package_list
from daklib.urgencylog import UrgencyLog
+from daklib.packagelist import PackageList
import daklib.announce
+import daklib.utils
# Globals
Options = None
def do_comments(dir, srcqueue, opref, npref, line, fn, transaction):
session = transaction.session
+ actions = []
for comm in [ x for x in os.listdir(dir) if x.startswith(opref) ]:
lines = open(os.path.join(dir, comm)).readlines()
if len(lines) == 0 or lines[0] != line + "\n": continue
else:
changes_prefix = changes_prefix + '.changes'
+ # We need to escape "_" as we use it with the LIKE operator (via the
+ # SQLA startswith) later.
+ changes_prefix = changes_prefix.replace("_", r"\_")
+
uploads = session.query(PolicyQueueUpload).filter_by(policy_queue=srcqueue) \
.join(PolicyQueueUpload.changes).filter(DBChange.changesname.startswith(changes_prefix)) \
.order_by(PolicyQueueUpload.source_id)
- for u in uploads:
- print "Processing changes file: %s" % u.changes.changesname
- fn(u, srcqueue, "".join(lines[1:]), transaction)
+ reason = "".join(lines[1:])
+ actions.extend((u, reason) for u in uploads)
if opref != npref:
newcomm = npref + comm[len(opref):]
- transaction.fs.move(os.path.join(dir, comm), os.path.join(dir, newcomm))
+ newcomm = utils.find_next_free(os.path.join(dir, newcomm))
+ transaction.fs.move(os.path.join(dir, comm), newcomm)
+
+ actions.sort()
+
+ for u, reason in actions:
+ print("Processing changes file: {0}".format(u.changes.changesname))
+ fn(u, srcqueue, reason, transaction)
################################################################################
real_comment_reject(upload, srcqueue, comments, transaction, notify=False)
if not Options['No-Action']:
transaction.commit()
+ else:
+ transaction.rollback()
return wrapper
################################################################################
overridesuite = session.query(Suite).filter_by(suite_name=overridesuite.overridesuite).one()
def binary_component_func(db_binary):
- override = session.query(Override).filter_by(suite=overridesuite, package=db_binary.package) \
- .join(OverrideType).filter(OverrideType.overridetype == db_binary.binarytype) \
- .join(Component).one()
- return override.component
+ section = db_binary.proxy['Section']
+ component_name = 'main'
+ if section.find('/') != -1:
+ component_name = section.split('/', 1)[0]
+ return get_mapped_component(component_name, session=session)
+
+ def is_debug_binary(db_binary):
+ return daklib.utils.is_in_debug_section(db_binary.proxy)
+
+ def has_debug_binaries(upload):
+ return any((is_debug_binary(x) for x in upload.binaries))
def source_component_func(db_source):
- override = session.query(Override).filter_by(suite=overridesuite, package=db_source.source) \
+ package_list = PackageList(db_source.proxy)
+ component = source_component_from_package_list(package_list, upload.target_suite)
+ if component is not None:
+ return get_mapped_component(component.component_name, session=session)
+
+ # Fallback for packages without Package-List field
+ query = session.query(Override).filter_by(suite=overridesuite, package=db_source.source) \
.join(OverrideType).filter(OverrideType.overridetype == 'dsc') \
- .join(Component).one()
- return override.component
+ .join(Component)
+ return query.one().component
all_target_suites = [upload.target_suite]
all_target_suites.extend([q.suite for q in upload.target_suite.copy_queues])
for suite in all_target_suites:
+ debug_suite = suite.debug_suite
+
if upload.source is not None:
- transaction.copy_source(upload.source, suite, source_component_func(upload.source), allow_tainted=allow_tainted)
+ # If we have Source in this upload, let's include it in the
+ # upload suite.
+ transaction.copy_source(
+ upload.source,
+ suite,
+ source_component_func(upload.source),
+ allow_tainted=allow_tainted,
+ )
+
+ if debug_suite is not None and has_debug_binaries(upload):
+ # If we're handling a debug package, we also need to include the
+ # source in the debug suite as well.
+ transaction.copy_source(
+ upload.source,
+ debug_suite,
+ source_component_func(upload.source),
+ allow_tainted=allow_tainted,
+ )
+
for db_binary in upload.binaries:
- # build queues may miss the source package if this is a binary-only upload
+ # Now, let's work out where to copy this guy to -- if it's
+ # a debug binary, and the suite has a debug suite, let's go
+ # ahead and target the debug suite rather than the stock
+ # suite.
+ copy_to_suite = suite
+ if debug_suite is not None and is_debug_binary(db_binary):
+ copy_to_suite = debug_suite
+
+ # build queues may miss the source package if this is a
+ # binary-only upload.
if suite != upload.target_suite:
- transaction.copy_source(db_binary.source, suite, source_component_func(db_binary.source), allow_tainted=allow_tainted)
- transaction.copy_binary(db_binary, suite, binary_component_func(db_binary), allow_tainted=allow_tainted, extra_archives=[upload.target_suite.archive])
+ transaction.copy_source(
+ db_binary.source,
+ copy_to_suite,
+ source_component_func(db_binary.source),
+ allow_tainted=allow_tainted,
+ )
+
+ transaction.copy_binary(
+ db_binary,
+ copy_to_suite,
+ binary_component_func(db_binary),
+ allow_tainted=allow_tainted,
+ extra_archives=[upload.target_suite.archive],
+ )
# Copy .changes if needed
if upload.target_suite.copychanges:
dst = os.path.join(upload.target_suite.path, upload.changes.changesname)
fs.copy(src, dst, mode=upload.target_suite.archive.mode)
+ # Copy upload to Process-Policy::CopyDir
+ # Used on security.d.o to sync accepted packages to ftp-master, but this
+ # should eventually be replaced by something else.
+ copydir = cnf.get('Process-Policy::CopyDir') or None
+ if copydir is not None:
+ mode = upload.target_suite.archive.mode
+ if upload.source is not None:
+ for f in [ df.poolfile for df in upload.source.srcfiles ]:
+ dst = os.path.join(copydir, f.basename)
+ if not os.path.exists(dst):
+ fs.copy(f.fullpath, dst, mode=mode)
+
+ for db_binary in upload.binaries:
+ f = db_binary.poolfile
+ dst = os.path.join(copydir, f.basename)
+ if not os.path.exists(dst):
+ fs.copy(f.fullpath, dst, mode=mode)
+
+ src = os.path.join(upload.policy_queue.path, upload.changes.changesname)
+ dst = os.path.join(copydir, upload.changes.changesname)
+ if not os.path.exists(dst):
+ fs.copy(src, dst, mode=mode)
+
if upload.source is not None and not Options['No-Action']:
urgency = upload.changes.urgency
if urgency not in cnf.value_list('Urgency::Valid'):
# The comments stuff relies on being in the right directory
os.chdir(pq.path)
+ do_comments(commentsdir, pq, "REJECT.", "REJECTED.", "NOTOK", comment_reject, transaction)
do_comments(commentsdir, pq, "ACCEPT.", "ACCEPTED.", "OK", comment_accept, transaction)
do_comments(commentsdir, pq, "ACCEPTED.", "ACCEPTED.", "OK", comment_accept, transaction)
- do_comments(commentsdir, pq, "REJECT.", "REJECTED.", "NOTOK", comment_reject, transaction)
remove_unreferenced_binaries(pq, transaction)
remove_unreferenced_sources(pq, transaction)