import apt_pkg, commands
from daklib import daklog
-from daklib import queue
+from daklib.queue import *
from daklib import utils
from daklib.dbconn import *
from daklib.binary import copy_temporary_contents
###############################################################################
-def action (u, stable_queue=None, log_urgency=True):
+def action (u, stable_queue=None, log_urgency=True, session=None):
(summary, short_summary) = u.build_summaries()
pi = u.package_info()
if stable_queue:
-        stable_install(u, summary, short_summary, stable_queue, log_urgency)
+        stable_install(u, session, summary, short_summary, stable_queue)
else:
- install(u, log_urgency)
+ install(u, session, log_urgency)
elif answer == 'Q':
sys.exit(0)
source.source = u.pkg.dsc["source"]
source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
- source.changedby_id = get_or_set_maintainer(u.pkg.dsc["changed-by"], session).maintainer_id
- source.fingerprint_id = get_or_set_fingerprint(u.pkg.dsc["fingerprint"], session).fingerprint_id
+ source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
+ source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
source.install_date = datetime.now().date()
dsc_component = entry["component"]
dscfile.poolfile_id = entry["files id"]
session.add(dscfile)
- for dsc_file, dentry in u.pkg.dsc_files.keys():
+ for dsc_file, dentry in u.pkg.dsc_files.items():
df = DSCFile()
df.source_id = source.source_id
- # If the .orig.tar.gz is already in the pool, it's
+ # If the .orig tarball is already in the pool, it's
# files id is stored in dsc_files by check_dsc().
files_id = dentry.get("files id", None)
+ # Find the entry in the files hash
+ # TODO: Bail out here properly
+ dfentry = None
+ for f, e in u.pkg.files.items():
+ if f == dsc_file:
+ dfentry = e
+ break
+
if files_id is None:
- filename = dentry["pool name"] + dsc_file
+ filename = dfentry["pool name"] + dsc_file
(found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
# FIXME: needs to check for -1/-2 and or handle exception
# If still not found, add it
if files_id is None:
+ # HACK: Force sha1sum etc into dentry
+ dentry["sha1sum"] = dfentry["sha1sum"]
+ dentry["sha256sum"] = dfentry["sha256sum"]
poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
files_id = poolfile.file_id
session.flush()
# Add the src_uploaders to the DB
- uploader_ids = [maintainer_id]
+ uploader_ids = [source.maintainer_id]
if u.pkg.dsc.has_key("uploaders"):
for up in u.pkg.dsc["uploaders"].split(","):
up = up.strip()
su = SrcUploader()
su.maintainer_id = up
- su.source_id = source_id
+ su.source_id = source.source_id
session.add(su)
session.flush()
# Find poolfile id
filename = entry["pool name"] + filename
+ fullpath = os.path.join(cnf["Dir::Pool"], filename)
if not entry.get("location id", None):
entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], utils.where_am_i(), session).location_id
bin.poolfile_id = entry["files id"]
# Find source id
- bin_sources = get_sources_from_name(entry["source package"], entry["source version"])
+ bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
if len(bin_sources) != 1:
raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
(bin.package, bin.version, bin.architecture.arch_string,
ba = BinAssociation()
ba.binary_id = bin.binary_id
ba.suite_id = get_suite(suite_name).suite_id
- session.add(sa)
+ session.add(ba)
session.flush()
- # Deal with contents
- contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, filename, reject=None)
- if not contents:
- print "REJECT\n" + "\n".join(contents.rejects)
- session.rollback()
- raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)
+ # Deal with contents - disabled for now
+ #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
+ #if not contents:
+ # print "REJECT\nCould not determine contents of package %s" % bin.package
+ # session.rollback()
+ # raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)
-def install(u, log_urgency=True):
+def install(u, session, log_urgency=True):
cnf = Config()
summarystats = SummaryStats()
print "Installing."
- Logger.log(["installing changes",pkg.changes_file])
-
- # Begin a transaction; if we bomb out anywhere between here and the COMMIT WORK below, the DB will not be changed.
- session = DBConn().session()
+ Logger.log(["installing changes", u.pkg.changes_file])
# Ensure that we have all the hashes we need below.
u.ensure_hashes()
return
# Add the .dsc file to the DB first
- for newfile in u.pkg.files.keys():
+ for newfile, entry in u.pkg.files.items():
if entry["type"] == "dsc":
dsc_component, dsc_location_id = add_dsc_to_db(u, newfile, session)
# Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
- for newfile in u.pkg.files.keys():
+ for newfile, entry in u.pkg.files.items():
if entry["type"] == "deb":
add_deb_to_db(u, newfile, session)
# If this is a sourceful diff only upload that is moving
- # cross-component we need to copy the .orig.tar.gz into the new
+ # cross-component we need to copy the .orig files into the new
# component too for the same reasons as above.
- #
- if u.pkg.changes["architecture"].has_key("source") and u.pkg.orig_tar_id and \
- u.pkg.orig_tar_location != dsc_location_id:
-
- oldf = get_poolfile_by_id(u.pkg.orig_tar_id, session)
- old_filename = os.path.join(oldf.location.path, oldf.filename)
- old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
- 'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
-
- new_filename = os.path.join(utils.poolify(u.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
-
- # TODO: Care about size/md5sum collisions etc
- (found, newf) = check_poolfile(new_filename, file_size, file_md5sum, dsc_location_id, session)
-
- if newf is None:
- utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
- newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
-
- # TODO: Check that there's only 1 here
- source = get_sources_from_name(u.pkg.changes["source"], u.pkg.changes["version"])[0]
- dscf = get_dscfiles(source_id = source.source_id, poolfile_id=u.pkg.orig_tar_id, session=session)[0]
- dscf.poolfile_id = newf.file_id
- session.add(dscf)
- session.flush()
+ if u.pkg.changes["architecture"].has_key("source"):
+ for orig_file in u.pkg.orig_files.keys():
+ if not u.pkg.orig_files[orig_file].has_key("id"):
+ continue # Skip if it's not in the pool
+ orig_file_id = u.pkg.orig_files[orig_file]["id"]
+ if u.pkg.orig_files[orig_file]["location"] == dsc_location_id:
+ continue # Skip if the location didn't change
+
+ # Do the move
+ oldf = get_poolfile_by_id(orig_file_id, session)
+ old_filename = os.path.join(oldf.location.path, oldf.filename)
+ old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
+ 'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
+
+ new_filename = os.path.join(utils.poolify(u.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
+
+ # TODO: Care about size/md5sum collisions etc
+            (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
+
+ if newf is None:
+ utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
+ newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
+
+ # TODO: Check that there's only 1 here
+            source = get_sources_from_name(u.pkg.changes["source"], u.pkg.changes["version"], session=session)[0]
+ dscf = get_dscfiles(source_id=source.source_id, poolfile_id=orig_file_id, session=session)[0]
+ dscf.poolfile_id = newf.file_id
+ session.add(dscf)
+ session.flush()
# Install the files into the pool
for newfile, entry in u.pkg.files.items():
# Copy the .changes file across for suite which need it.
copy_changes = {}
copy_dot_dak = {}
- for suite_name in changes["distribution"].keys():
+ for suite_name in u.pkg.changes["distribution"].keys():
if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""
# and the .dak file...
os.unlink(dest)
os.symlink(src, dest)
- # Update last_used on any non-upload .orig.tar.gz symlink
- if u.pkg.orig_tar_id:
+ # Update last_used on any non-uploaded .orig symlink
+ for orig_file in u.pkg.orig_files.keys():
# Determine the .orig.tar.gz file name
- for dsc_file in u.pkg.dsc_files.keys():
- if dsc_file.endswith(".orig.tar.gz"):
- u.pkg.orig_tar_gz = os.path.join(dest_dir, dsc_file)
+ if not u.pkg.orig_files[orig_file].has_key("id"):
+ continue # Skip files not in the pool
+ # XXX: do we really want to update the orig_files dict here
+ # instead of using a temporary variable?
+ u.pkg.orig_files[orig_file]["path"] = os.path.join(dest_dir, orig_file)
# Remove it from the list of packages for later processing by apt-ftparchive
- qb = get_queue_build(u.pkg.orig_tar_gz, suite.suite_id, session)
+ qb = get_queue_build(u.pkg.orig_files[orig_file]["path"], suite.suite_id, session)
if qb:
qb.in_queue = False
qb.last_used = now_date
summarystats.accept_count += 1
################################################################################
-### XXX: UP TO HERE
-def stable_install(u, summary, short_summary, fromsuite_name="proposed-updates"):
+def stable_install(u, session, summary, short_summary, fromsuite_name="proposed-updates"):
summarystats = SummaryStats()
fromsuite_name = fromsuite_name.lower()
fromsuite = get_suite(fromsuite_name)
tosuite = get_suite(tosuite_name)
- # Begin a transaction; if we bomb out anywhere between here and
- # the COMMIT WORK below, the DB won't be changed.
- session = DBConn().session()
-
# Add the source to stable (and remove it from proposed-updates)
for newfile, entry in u.pkg.files.items():
if entry["type"] == "dsc":
################################################################################
-def process_it(changes_file, stable_queue=None, log_urgency=True):
+def process_it(changes_file, stable_queue, log_urgency, session):
cnf = Config()
u = Upload()
# overwrite_checks should not be performed if installing to stable
overwrite_checks = False
- u.load_dot_dak(cfile)
+ u.pkg.load_dot_dak(cfile)
u.update_subst()
if stable_queue:
u.pkg.changes_file = old
- u.accepted_checks(overwrite_checks)
- action(u, stable_queue, log_urgency)
+ u.accepted_checks(overwrite_checks, session)
+ action(u, stable_queue, log_urgency, session)
# Restore CWD
os.chdir(u.prevdir)
# Sort the .changes files so that we process sourceful ones first
changes_files.sort(utils.changes_compare)
+
# Process the changes files
for changes_file in changes_files:
print "\n" + changes_file
- process_it(changes_file, stable_queue, log_urgency)
+ session = DBConn().session()
+ process_it(changes_file, stable_queue, log_urgency, session)
+ session.close()
if summarystats.accept_count:
sets = "set"