4 Installs Debian packages from queue/accepted into the pool
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
8 @copyright: 2009 Joerg Jaspert <joerg@debian.org>
9 @license: GNU General Public License version 2 or later
12 # This program is free software; you can redistribute it and/or modify
13 # it under the terms of the GNU General Public License as published by
14 # the Free Software Foundation; either version 2 of the License, or
15 # (at your option) any later version.
17 # This program is distributed in the hope that it will be useful,
18 # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 # GNU General Public License for more details.
22 # You should have received a copy of the GNU General Public License
23 # along with this program; if not, write to the Free Software
24 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
26 ###############################################################################
28 # Cartman: "I'm trying to make the best of a bad situation, I don't
29 # need to hear crap from a bunch of hippy freaks living in
30 # denial. Screw you guys, I'm going home."
32 # Kyle: "But Cartman, we're trying to..."
34 # Cartman: "uhh.. screw you guys... home."
36 ###############################################################################
42 from datetime import datetime
44 import apt_pkg, commands
46 from daklib import daklog
47 from daklib.queue import *
48 from daklib import utils
49 from daklib.dbconn import *
50 from daklib.binary import copy_temporary_contents
51 from daklib.dak_exceptions import *
52 from daklib.regexes import re_default_answer, re_issource, re_fdnic
53 from daklib.urgencylog import UrgencyLog
54 from daklib.summarystats import SummaryStats
55 from daklib.config import Config
57 ###############################################################################
62 ###############################################################################
# Initialize config and connection to db
# NOTE(review): this chunk appears to be the *body* of an init() function --
# the enclosing "def init():" line and the creation of `cnf` (a Config
# instance) and the DB connection are not visible in this view; confirm
# against the full file.  `main()` below calls init() and uses its return
# value as the list of .changes files to process.
#
# Command-line option table for apt_pkg.ParseCommandLine:
# (short flag, long flag, config-tree key[, "HasArg" if it takes a value]).
Arguments = [('a',"automatic","Dinstall::Options::Automatic"),
('h',"help","Dinstall::Options::Help"),
('n',"no-action","Dinstall::Options::No-Action"),
('p',"no-lock", "Dinstall::Options::No-Lock"),
('s',"no-mail", "Dinstall::Options::No-Mail"),
('d',"directory", "Dinstall::Options::Directory", "HasArg")]
# Make sure every option key exists in the config tree so later lookups
# (e.g. Options["No-Action"]) never KeyError, defaulting each to "".
for i in ["automatic", "help", "no-action", "no-lock", "no-mail",
"version", "directory"]:
if not cnf.has_key("Dinstall::Options::%s" % (i)):
cnf["Dinstall::Options::%s" % (i)] = ""
# Parse argv; everything that is not an option is a .changes filename.
changes_files = apt_pkg.ParseCommandLine(cnf.Cnf, Arguments, sys.argv)
# `Options` is a view onto the Dinstall::Options subtree, used globally.
Options = cnf.SubTree("Dinstall::Options")
# If we have a directory flag, use it to find our files
if cnf["Dinstall::Options::Directory"] != "":
# Note that we clobber the list of files we were given in this case
# so warn if the user has done both
if len(changes_files) > 0:
utils.warn("Directory provided so ignoring files given on command line")
changes_files = utils.get_changes_files(cnf["Dinstall::Options::Directory"])
# NOTE(review): the trailing "return changes_files" expected here is not
# visible in this view of the file.
100 ###############################################################################
def usage (exit_code=0):
    """Print the command-line help text for 'dak process-accepted'.

    :param exit_code: process exit status to use (presumably passed to a
        sys.exit() call that is not visible in this view -- confirm).
    """
print """Usage: dak process-accepted [OPTION]... [CHANGES]...
-a, --automatic automatic run
-h, --help show this help and exit.
-n, --no-action don't do anything
-p, --no-lock don't check lockfile !! for cron.daily only !!
-s, --no-mail don't send any mail
-V, --version display the version number and exit"""
112 ###############################################################################
def action (u, stable_queue=None, log_urgency=True, session=None):
    # Decide what to do with upload `u`: reject, install to the pool, or
    # install to stable (when stable_queue is set).  May prompt the operator
    # unless running with --automatic / --no-action.
    #
    # NOTE(review): several lines are missing from this view (the bodies
    # that pre-select `answer` under Options["Automatic"], the else-branch
    # structure, and the dispatch on the final answer), so the control flow
    # annotated below is partially reconstructed -- confirm against the
    # full file.
(summary, short_summary) = u.build_summaries()
pi = u.package_info()
# `answer` starts as a sentinel that cannot match any prompt.
(prompt, answer) = ("", "XXX")
if Options["No-Action"] or Options["Automatic"]:
# Any accumulated rejection reasons mean the upload cannot be installed.
if len(u.rejects) > 0:
print "REJECT\n" + pi
prompt = "[R]eject, Skip, Quit ?"
if Options["Automatic"]:
print "INSTALL to " + ", ".join(u.pkg.changes["distribution"].keys())
prompt = "[I]nstall, Skip, Quit ?"
if Options["Automatic"]:
# Keep asking until the reply is one of the letters in the prompt.
while prompt.find(answer) == -1:
answer = utils.our_raw_input(prompt)
m = re_default_answer.match(prompt)
# Only the first character matters, case-insensitively.
answer = answer[:1].upper()
Logger.log(["unaccepted", u.pkg.changes_file])
# NOTE(review): this call passes (u, summary, short_summary, stable_queue,
# log_urgency) but stable_install below is defined as
# (u, session, summary, short_summary, fromsuite_name) -- the arguments do
# not line up.  Likely a real bug; verify against the full file/VCS history.
stable_install(u, summary, short_summary, stable_queue, log_urgency)
install(u, session, log_urgency)
153 ###############################################################################
def add_poolfile(filename, datadict, location_id, session):
    """Create a PoolFile row for `filename` and add it to the session.

    :param filename: pool-relative file name
    :param datadict: dict with "size", "md5sum", "sha1sum", "sha256sum"
    :param location_id: id of the archive location the file lives in
    :param session: SQLAlchemy session (object is added, not committed)

    NOTE(review): callers use the return value (e.g. `poolfile.file_id`),
    so a `session.flush()` and `return poolfile` are expected after the
    add(); those lines are not visible in this view -- confirm.
    """
poolfile = PoolFile()
poolfile.filename = filename
poolfile.filesize = datadict["size"]
poolfile.md5sum = datadict["md5sum"]
poolfile.sha1sum = datadict["sha1sum"]
poolfile.sha256sum = datadict["sha256sum"]
poolfile.location_id = location_id
session.add(poolfile)
# Flush to get a file id (NB: This is not a commit)
def add_dsc_to_db(u, filename, session):
    # Insert the source package described by the .dsc `filename` of upload
    # `u` into the DB: the source row itself, its pool file, per-suite
    # source associations, the dsc_files rows, and the uploader list.
    # Returns (dsc_component, dsc_location_id) for use by install().
    #
    # NOTE(review): the construction of `source` (presumably
    # `source = DBSource()`), several session.add()/flush() calls, and the
    # creation of `dscfile` are among the lines missing from this view --
    # the annotations below describe only what is visible.
entry = u.pkg.files[filename]
source.source = u.pkg.dsc["source"]
source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
source.install_date = datetime.now().date()
dsc_component = entry["component"]
dsc_location_id = entry["location id"]
# DM-Upload-Allowed is an opt-in flag in the .dsc; anything but "yes" is False.
source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")
# Set up a new poolfile if necessary
if not entry.has_key("files id") or not entry["files id"]:
filename = entry["pool name"] + filename
poolfile = add_poolfile(filename, entry, dsc_location_id, session)
entry["files id"] = poolfile.file_id
source.poolfile_id = entry["files id"]
# Associate the source with every suite the upload targets.
for suite_name in u.pkg.changes["distribution"].keys():
sa = SrcAssociation()
sa.source_id = source.source_id
sa.suite_id = get_suite(suite_name).suite_id
# Add the source files to the DB (files and dsc_files)
dscfile.source_id = source.source_id
dscfile.poolfile_id = entry["files id"]
# One DSCFile row per file listed in the .dsc (orig tarball, diff, ...).
for dsc_file, dentry in u.pkg.dsc_files.items():
df.source_id = source.source_id
# If the .orig tarball is already in the pool, it's
# files id is stored in dsc_files by check_dsc().
files_id = dentry.get("files id", None)
# Find the entry in the files hash
# TODO: Bail out here properly
for f, e in u.pkg.files.items():
filename = dfentry["pool name"] + dsc_file
# Check whether an identical file already exists in the pool.
(found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
# FIXME: needs to check for -1/-2 and or handle exception
if found and obj is not None:
files_id = obj.file_id
# If still not found, add it
# HACK: Force sha1sum etc into dentry
dentry["sha1sum"] = dfentry["sha1sum"]
dentry["sha256sum"] = dfentry["sha256sum"]
poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
files_id = poolfile.file_id
df.poolfile_id = files_id
# Add the src_uploaders to the DB
# The maintainer always counts as an uploader; Uploaders: is optional.
uploader_ids = [source.maintainer_id]
if u.pkg.dsc.has_key("uploaders"):
for up in u.pkg.dsc["uploaders"].split(","):
uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)
# De-duplicate uploaders (added_ids tracks what we have already inserted).
for up in uploader_ids:
if added_ids.has_key(up):
utils.warn("Already saw uploader %s for source %s" % (up, source.source))
su.maintainer_id = up
su.source_id = source.source_id
return dsc_component, dsc_location_id
def add_deb_to_db(u, filename, session):
    # NOTE(review): the opening triple-quote of this function's docstring
    # (and the `bin = DBBinary()` construction plus trailing
    # session.add/flush lines) are among the lines missing from this view.
Contrary to what you might expect, this routine deals with both
debs and udebs. That info is in 'dbtype', whilst 'type' is
'deb' for both of them
entry = u.pkg.files[filename]
bin.package = entry["package"]
bin.version = entry["version"]
bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
bin.arch_id = get_architecture(entry["architecture"], session).arch_id
bin.binarytype = entry["dbtype"]
# Compute the pool-relative name and, lazily, the location/poolfile rows.
filename = entry["pool name"] + filename
fullpath = os.path.join(cnf["Dir::Pool"], filename)
if not entry.get("location id", None):
entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], utils.where_am_i(), session).location_id
if not entry.get("files id", None):
poolfile = add_poolfile(filename, entry, entry["location id"], session)
entry["files id"] = poolfile.file_id
bin.poolfile_id = entry["files id"]
# Every binary must map to exactly one source row; anything else is fatal.
bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
if len(bin_sources) != 1:
raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
(bin.package, bin.version, bin.architecture.arch_string,
filename, bin.binarytype, u.pkg.changes["fingerprint"])
bin.source_id = bin_sources[0].source_id
# Add and flush object so it has an ID
# Add BinAssociations
# Associate the binary with every suite the upload targets.
for suite_name in u.pkg.changes["distribution"].keys():
ba = BinAssociation()
ba.binary_id = bin.binary_id
ba.suite_id = get_suite(suite_name).suite_id
# Deal with contents - disabled for now
#contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
# print "REJECT\nCould not determine contents of package %s" % bin.package
# raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)
def install(u, session, log_urgency=True):
    # Install an accepted upload `u` into the archive proper:
    #  1. add .dsc and .deb/.udeb metadata to the DB,
    #  2. handle cross-component moves of already-pooled .orig files,
    #  3. move the files into the pool and the .changes into 'done',
    #  4. log urgency, and undo the queue-build symlink bookkeeping.
    #
    # NOTE(review): error-handling branches, commit calls, and several
    # loop bodies are among the lines missing from this view; comments
    # below cover only the visible statements.
summarystats = SummaryStats()
Logger.log(["installing changes", u.pkg.changes_file])
# Ensure that we have all the hashes we need below.
if len(u.rejects) > 0:
# There were errors. Print them and SKIP the changes.
for msg in u.rejects:
# Add the .dsc file to the DB first
for newfile, entry in u.pkg.files.items():
if entry["type"] == "dsc":
# dsc_component/dsc_location_id are reused for .orig handling below.
dsc_component, dsc_location_id = add_dsc_to_db(u, newfile, session)
# Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
for newfile, entry in u.pkg.files.items():
if entry["type"] == "deb":
add_deb_to_db(u, newfile, session)
# If this is a sourceful diff only upload that is moving
# cross-component we need to copy the .orig files into the new
# component too for the same reasons as above.
if u.pkg.changes["architecture"].has_key("source"):
for orig_file in u.pkg.orig_files.keys():
if not u.pkg.orig_files[orig_file].has_key("id"):
continue # Skip if it's not in the pool
orig_file_id = u.pkg.orig_files[orig_file]["id"]
if u.pkg.orig_files[orig_file]["location"] == dsc_location_id:
continue # Skip if the location didn't change
# Look up the existing pool file and snapshot its hashes/size.
oldf = get_poolfile_by_id(orig_file_id, session)
old_filename = os.path.join(oldf.location.path, oldf.filename)
old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
new_filename = os.path.join(utils.poolify(u.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
# TODO: Care about size/md5sum collisions etc
# NOTE(review): file_size / file_md5sum are not assigned in any visible
# line -- presumably set in a missing line; confirm.
(found, newf) = check_poolfile(new_filename, file_size, file_md5sum, dsc_location_id, session)
utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
# TODO: Check that there's only 1 here
# Re-point the dsc_files row at the copied .orig in the new component.
source = get_sources_from_name(u.pkg.changes["source"], u.pkg.changes["version"])[0]
dscf = get_dscfiles(source_id=source.source_id, poolfile_id=orig_file_id, session=session)[0]
dscf.poolfile_id = newf.file_id
# Install the files into the pool
for newfile, entry in u.pkg.files.items():
destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
utils.move(newfile, destination)
Logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
summarystats.accept_bytes += float(entry["size"])
# Copy the .changes file across for suite which need it.
# NOTE(review): copy_changes / copy_dot_dak dict initialisations are not
# visible here; presumably created in a missing line.
for suite_name in u.pkg.changes["distribution"].keys():
if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""
# and the .dak file...
if cnf.has_key("Suite::%s::CopyDotDak" % (suite_name)):
copy_dot_dak[cnf["Suite::%s::CopyDotDak" % (suite_name)]] = ""
for dest in copy_changes.keys():
utils.copy(u.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
for dest in copy_dot_dak.keys():
# The .dak sidecar shares the .changes basename ('.changes' is 8 chars).
utils.copy(u.pkg.changes_file[:-8]+".dak", dest)
# We're done - commit the database changes
# Move the .changes into the 'done' directory
utils.move(u.pkg.changes_file,
os.path.join(cnf["Dir::Queue::Done"], os.path.basename(u.pkg.changes_file)))
# Remove the .dak file
os.unlink(u.pkg.changes_file[:-8] + ".dak")
# Sourceful uploads feed the urgency log used for testing migration.
if u.pkg.changes["architecture"].has_key("source") and log_urgency:
UrgencyLog().log(u.pkg.dsc["source"], u.pkg.dsc["version"], u.pkg.changes["urgency"])
# Our SQL session will automatically start a new transaction after
# Undo the work done in queue.py(accept) to help auto-building
now_date = datetime.now()
for suite_name in u.pkg.changes["distribution"].keys():
if suite_name not in cnf.ValueList("Dinstall::QueueBuildSuites"):
suite = get_suite(suite_name, session)
dest_dir = cnf["Dir::QueueBuild"]
# Security queues get a per-suite subdirectory.
if cnf.FindB("Dinstall::SecurityQueueBuild"):
dest_dir = os.path.join(dest_dir, suite_name)
for newfile, entry in u.pkg.files.items():
dest = os.path.join(dest_dir, newfile)
qb = get_queue_build(dest, suite.suite_id, session)
# Remove it from the list of packages for later processing by apt-ftparchive
qb.last_used = now_date
if not cnf.FindB("Dinstall::SecurityQueueBuild"):
# Update the symlink to point to the new location in the pool
pool_location = utils.poolify(u.pkg.changes["source"], entry["component"])
src = os.path.join(cnf["Dir::Pool"], pool_location, os.path.basename(newfile))
if os.path.islink(dest):
os.symlink(src, dest)
# Update last_used on any non-uploaded .orig symlink
for orig_file in u.pkg.orig_files.keys():
# Determine the .orig.tar.gz file name
if not u.pkg.orig_files[orig_file].has_key("id"):
continue # Skip files not in the pool
# XXX: do we really want to update the orig_files dict here
# instead of using a temporary variable?
u.pkg.orig_files[orig_file]["path"] = os.path.join(dest_dir, orig_file)
# Remove it from the list of packages for later processing by apt-ftparchive
qb = get_queue_build(u.pkg.orig_files[orig_file]["path"], suite.suite_id, session)
qb.last_used = now_date
summarystats.accept_count += 1
481 ################################################################################
def stable_install(u, session, summary, short_summary, fromsuite_name="proposed-updates"):
    # Move an upload from (old)stable-proposed-updates into (old)stable:
    # re-point source/binary suite associations, archive the .changes to
    # the morgue, rewrite the stable ChangeLog, mail the maintainer, and
    # remove the .dak sidecar.
    #
    # NOTE(review): the call site in action() above invokes this as
    # stable_install(u, summary, short_summary, stable_queue, log_urgency),
    # which does not match this signature's parameter order -- likely a
    # real bug; verify against the full file / VCS history.  Several
    # branch bodies and session.delete/add/commit lines are also missing
    # from this view.
summarystats = SummaryStats()
fromsuite_name = fromsuite_name.lower()
tosuite_name = "Stable"
if fromsuite_name == "oldstable-proposed-updates":
tosuite_name = "OldStable"
print "Installing from %s to %s." % (fromsuite_name, tosuite_name)
fromsuite = get_suite(fromsuite_name)
tosuite = get_suite(tosuite_name)
# Add the source to stable (and remove it from proposed-updates)
for newfile, entry in u.pkg.files.items():
if entry["type"] == "dsc":
package = u.pkg.dsc["source"]
# NB: not files[file]["version"], that has no epoch
version = u.pkg.dsc["version"]
source = get_sources_from_name(package, version, session)
utils.fubar("[INTERNAL ERROR] couldn't find '%s' (%s) in source table." % (package, version))
# Remove from old suite
old = session.query(SrcAssociation).filter_by(source_id = source.source_id)
old = old.filter_by(suite_id = fromsuite.suite_id)
# Create the association with the target (stable) suite.
new = SrcAssociation()
new.source_id = source.source_id
new.suite_id = tosuite.suite_id
# Add the binaries to stable (and remove it/them from proposed-updates)
for newfile, entry in u.pkg.files.items():
if entry["type"] == "deb":
package = entry["package"]
version = entry["version"]
architecture = entry["architecture"]
# 'all' packages can satisfy any architecture, hence the list.
binary = get_binaries_from_name(package, version, [architecture, 'all'])
utils.fubar("[INTERNAL ERROR] couldn't find '%s' (%s for %s architecture) in binaries table." % (package, version, architecture))
# Remove from old suite
old = session.query(BinAssociation).filter_by(binary_id = binary.binary_id)
old = old.filter_by(suite_id = fromsuite.suite_id)
new = BinAssociation()
new.binary_id = binary.binary_id
new.suite_id = tosuite.suite_id
# Archive the .changes file in the morgue rather than 'done'.
utils.move(u.pkg.changes_file,
os.path.join(cnf["Dir::Morgue"], 'process-accepted', os.path.basename(u.pkg.changes_file)))
## Update the Stable ChangeLog file
# TODO: URGH - Use a proper tmp file
new_changelog_filename = cnf["Dir::Root"] + cnf["Suite::%s::ChangeLogBase" % (tosuite.suite_name)] + ".ChangeLog"
changelog_filename = cnf["Dir::Root"] + cnf["Suite::%s::ChangeLogBase" % (tosuite.suite_name)] + "ChangeLog"
if os.path.exists(new_changelog_filename):
os.unlink(new_changelog_filename)
# Write a header line per file, then the changelog entry itself.
new_changelog = utils.open_file(new_changelog_filename, 'w')
for newfile, entry in u.pkg.files.items():
if entry["type"] == "deb":
new_changelog.write("%s/%s/binary-%s/%s\n" % (tosuite.suite_name,
entry["architecture"],
elif re_issource.match(newfile):
new_changelog.write("%s/%s/source/%s\n" % (tosuite.suite_name,
new_changelog.write("%s\n" % (newfile))
# Normalise the changes text's line endings before appending.
chop_changes = re_fdnic.sub("\n", u.pkg.changes["changes"])
new_changelog.write(chop_changes + '\n\n')
# Prepend: new entries first, then the old ChangeLog content.
if os.access(changelog_filename, os.R_OK) != 0:
changelog = utils.open_file(changelog_filename)
new_changelog.write(changelog.read())
new_changelog.close()
if os.access(changelog_filename, os.R_OK) != 0:
os.unlink(changelog_filename)
utils.move(new_changelog_filename, changelog_filename)
summarystats.accept_count += 1
# Announce the install by mail unless suppressed (sourceful uploads only).
if not Options["No-Mail"] and u.pkg.changes["architecture"].has_key("source"):
u.Subst["__SUITE__"] = " into %s" % (tosuite)
u.Subst["__SUMMARY__"] = summary
u.Subst["__BCC__"] = "X-DAK: dak process-accepted\nX-Katie: $Revision: 1.18 $"
if cnf.has_key("Dinstall::Bcc"):
u.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])
template = os.path.join(cnf["Dir::Templates"], 'process-accepted.install')
mail_message = utils.TemplateSubst(u.Subst, template)
utils.send_mail(mail_message)
u.announce(short_summary, True)
# Finally remove the .dak file
dot_dak_file = os.path.join(cnf["Suite::%s::CopyDotDak" % (fromsuite.suite_name)],
os.path.basename(u.pkg.changes_file[:-8]+".dak"))
os.unlink(dot_dak_file)
603 ################################################################################
def process_it(changes_file, stable_queue, log_urgency, session):
    # Process one .changes file: load its state, run accepted_checks, then
    # dispatch to action() (which installs, stable-installs, or rejects).
    #
    # NOTE(review): the creation of `u` (an Upload object) and the
    # assignment of `old` are among the lines missing from this view.
overwrite_checks = True
# Absolutize the filename to avoid the requirement of being in the
# same directory as the .changes file.
cfile = os.path.abspath(changes_file)
# And since handling of installs to stable munges with the CWD
# save and restore it.
u.prevdir = os.getcwd()
cfile = os.path.basename(old)
# Stable installs run from inside the queue's CopyDotDak directory.
os.chdir(cnf["Suite::%s::CopyDotDak" % (stable_queue)])
# overwrite_checks should not be performed if installing to stable
overwrite_checks = False
# Restore the upload's state from its .dak sidecar file.
u.pkg.load_dot_dak(cfile)
u.pkg.changes_file = old
u.accepted_checks(overwrite_checks, session)
action(u, stable_queue, log_urgency, session)
638 ###############################################################################
# NOTE(review): this chunk appears to be the *body* of main() -- the
# enclosing "def main():" line, the global declarations, and the
# `stable_queue = None` / `log_urgency` initialisations are not visible
# in this view; confirm against the full file.
summarystats = SummaryStats()
changes_files = init()
# -n/--dry-run invalidates some other options which would involve things happening
if Options["No-Action"]:
Options["Automatic"] = ""
# Check that we aren't going to clash with the daily cron job
if not Options["No-Action"] and os.path.exists("%s/Archive_Maintenance_In_Progress" % (cnf["Dir::Root"])) and not Options["No-Lock"]:
utils.fubar("Archive maintenance in progress. Try again later.")
# If running from within proposed-updates; assume an install to stable
# NOTE(review): os.getenv('PWD') returns None when PWD is unset (e.g. under
# cron without a shell), which would crash on .find() -- worth hardening.
# Check 'oldstable-' first since 'proposed-updates' is a substring of it.
if os.getenv('PWD').find('oldstable-proposed-updates') != -1:
stable_queue = "Oldstable-Proposed-Updates"
elif os.getenv('PWD').find('proposed-updates') != -1:
stable_queue = "Proposed-Updates"
# Obtain lock if not in no-action mode and initialize the log
if not Options["No-Action"]:
lock_fd = os.open(cnf["Dinstall::LockFile"], os.O_RDWR | os.O_CREAT)
# Non-blocking exclusive lock: fail fast if another dinstall holds it.
fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
if errno.errorcode[e.errno] == 'EACCES' or errno.errorcode[e.errno] == 'EAGAIN':
utils.fubar("Couldn't obtain lock; assuming another 'dak process-accepted' is already running.")
Logger = daklog.Logger(cnf, "process-accepted")
if not stable_queue and cnf.get("Dir::UrgencyLog"):
# Initialise UrgencyLog()
# Sort the .changes files so that we process sourceful ones first
changes_files.sort(utils.changes_compare)
# Process the changes files
for changes_file in changes_files:
print "\n" + changes_file
# A fresh DB session per changes file keeps transactions independent.
session = DBConn().session()
process_it(changes_file, stable_queue, log_urgency, session)
# Print and log the run summary.
if summarystats.accept_count:
if summarystats.accept_count > 1:
sys.stderr.write("Installed %d package %s, %s.\n" % (summarystats.accept_count, sets,
utils.size_type(int(summarystats.accept_bytes))))
Logger.log(["total", summarystats.accept_count, summarystats.accept_bytes])
if not Options["No-Action"]:
705 ###############################################################################
707 if __name__ == '__main__':