session.add(qb)
- # If the .orig tarballs are in the pool, create a symlink to
- # them (if one doesn't already exist)
- for dsc_file in changes.dsc_files.keys():
- # Skip all files except orig tarballs
- from daklib.regexes import re_is_orig_source
- if not re_is_orig_source.match(dsc_file):
- continue
- # Skip orig files not identified in the pool
- if not (changes.orig_files.has_key(dsc_file) and
- changes.orig_files[dsc_file].has_key("id")):
- continue
- orig_file_id = changes.orig_files[dsc_file]["id"]
- dest = os.path.join(dest_dir, dsc_file)
-
- # If it doesn't exist, create a symlink
- if not os.path.exists(dest):
- q = session.execute("SELECT l.path, f.filename FROM location l, files f WHERE f.id = :id and f.location = l.id",
- {'id': orig_file_id})
- res = q.fetchone()
- if not res:
- return "[INTERNAL ERROR] Couldn't find id %s in files table." % (orig_file_id)
-
- src = os.path.join(res[0], res[1])
- os.symlink(src, dest)
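+ # Symlink any orig tarballs from the pool into the build queue directory,
+ # noting which ones were already present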
+ exists, symlinked = utils.ensure_orig_files(changes, dest_dir, session)
- # Add it to the list of packages for later processing by apt-ftparchive
- qb = QueueBuild()
- qb.suite_id = s.suite_id
- qb.queue_id = self.queue_id
- qb.filename = dest
+ # Add symlinked files to the list of packages for later processing
+ # by apt-ftparchive
+ for filename in symlinked:
+ qb = QueueBuild()
+ qb.suite_id = s.suite_id
+ qb.queue_id = self.queue_id
+ qb.filename = filename
+ qb.in_queue = True
+ session.add(qb)
+
+ # Update existing queue_build entries so their files are not removed prematurely
+ for filename in exists:
+ qb = get_queue_build(filename, s.suite_id, session)
+ if qb is not None:
qb.in_queue = True
+ qb.last_used = None
session.add(qb)
- # If it does, update things to ensure it's not removed prematurely
- else:
- qb = get_queue_build(dest, s.suite_id, session)
- if qb is None:
- qb.in_queue = True
- qb.last_used = None
- session.add(qb)
-
if privatetrans:
session.commit()
session.close()
from textutils import fix_maintainer
from regexes import re_html_escaping, html_escaping, re_single_line_field, \
re_multi_line_field, re_srchasver, re_taint_free, \
- re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource
+ re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource, \
+ re_is_orig_source
from srcformats import get_format_from_string
from collections import defaultdict
apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
###############################################################################
+
+def ensure_orig_files(changes, dest_dir, session):
+ """
+ Ensure that dest_dir contains all the orig tarballs for the specified
+ changes. If it does not, symlink them into place.
+
+ Returns a 2-tuple (already_exists, symlinked) containing a list of files
+ that were already there and a list of files that were symlinked into place.
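+
+ Illustrative usage (the queue directory path here is hypothetical):
+
+ exists, symlinked = ensure_orig_files(changes, "/srv/queue/buildd", session)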
+ """
+
+ exists, symlinked = [], []
+
+ for dsc_file in changes.dsc_files:
+
+ # Skip all files that are not orig tarballs
+ if not re_is_orig_source.match(dsc_file):
+ continue
+
+ # Skip orig files not identified in the pool
+ if not (dsc_file in changes.orig_files and
+ 'id' in changes.orig_files[dsc_file]):
+ continue
+
+ dest = os.path.join(dest_dir, dsc_file)
+
+ if os.path.exists(dest):
+ exists.append(dest)
+ continue
+
+ orig_file_id = changes.orig_files[dsc_file]['id']
+
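+ # Look up the pool location of the orig tarball so the symlink
+ # can point at the canonical copy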
+ c = session.execute(
+ 'SELECT l.path, f.filename FROM location l, files f WHERE f.id = :id and f.location = l.id',
+ {'id': orig_file_id}
+ )
+
+ res = c.fetchone()
+ if not res:
+ return "[INTERNAL ERROR] Couldn't find id %s in files table." % orig_file_id
+
+ src = os.path.join(res[0], res[1])
+ os.symlink(src, dest)
+ symlinked.append(dest)
+
+ return exists, symlinked