4 Installs Debian packages from queue/accepted into the pool
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
8 @copyright: 2009 Joerg Jaspert <joerg@debian.org>
9 @license: GNU General Public License version 2 or later
12 # This program is free software; you can redistribute it and/or modify
13 # it under the terms of the GNU General Public License as published by
14 # the Free Software Foundation; either version 2 of the License, or
15 # (at your option) any later version.
17 # This program is distributed in the hope that it will be useful,
18 # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 # GNU General Public License for more details.
22 # You should have received a copy of the GNU General Public License
23 # along with this program; if not, write to the Free Software
24 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
26 ###############################################################################
28 # Cartman: "I'm trying to make the best of a bad situation, I don't
29 # need to hear crap from a bunch of hippy freaks living in
30 # denial. Screw you guys, I'm going home."
32 # Kyle: "But Cartman, we're trying to..."
34 # Cartman: "uhh.. screw you guys... home."
36 ###############################################################################
42 from datetime import datetime
45 from daklib import daklog
46 from daklib.queue import *
47 from daklib import utils
48 from daklib.dbconn import *
49 from daklib.dak_exceptions import *
50 from daklib.regexes import re_default_answer, re_issource, re_fdnic
51 from daklib.urgencylog import UrgencyLog
52 from daklib.summarystats import SummaryStats
53 from daklib.config import Config
55 ###############################################################################
60 ###############################################################################
# Initialize config and connection to db
# Option table mapping short/long command-line flags onto the
# Dinstall::Options::* configuration namespace consumed by apt_pkg.
Arguments = [('a',"automatic","Dinstall::Options::Automatic"),
             ('h',"help","Dinstall::Options::Help"),
             ('n',"no-action","Dinstall::Options::No-Action"),
             ('p',"no-lock", "Dinstall::Options::No-Lock"),
             ('s',"no-mail", "Dinstall::Options::No-Mail"),
             ('d',"directory", "Dinstall::Options::Directory", "HasArg")]

# Ensure every option key exists in the config so that later lookups
# such as Options["No-Action"] cannot fail on a missing key.
for i in ["automatic", "help", "no-action", "no-lock", "no-mail",
          "version", "directory"]:
    if not cnf.has_key("Dinstall::Options::%s" % (i)):
        cnf["Dinstall::Options::%s" % (i)] = ""

changes_files = apt_pkg.ParseCommandLine(cnf.Cnf, Arguments, sys.argv)
Options = cnf.SubTree("Dinstall::Options")

# If we have a directory flag, use it to find our files
if cnf["Dinstall::Options::Directory"] != "":
    # Note that we clobber the list of files we were given in this case
    # so warn if the user has done both
    if len(changes_files) > 0:
        utils.warn("Directory provided so ignoring files given on command line")

    changes_files = utils.get_changes_files(cnf["Dinstall::Options::Directory"])
98 ###############################################################################
def usage (exit_code=0):
    """Print the command-line usage summary for dak process-accepted."""
    print """Usage: dak process-accepted [OPTION]... [CHANGES]...
  -a, --automatic           automatic run
  -h, --help                show this help and exit.
  -n, --no-action           don't do anything
  -p, --no-lock             don't check lockfile !! for cron.daily only !!
  -s, --no-mail             don't send any mail
  -V, --version             display the version number and exit"""
110 ###############################################################################
def action (u, stable_queue=None, log_urgency=True, session=None):
    """
    Decide what to do with upload *u*: reject it, install it (to the
    pool or, when stable_queue is set, to stable), or skip it.
    Interactive unless --automatic / --no-action pre-select an answer.
    """
    (summary, short_summary) = u.build_summaries()
    pi = u.package_info()

    (prompt, answer) = ("", "XXX")
    if Options["No-Action"] or Options["Automatic"]:

    # Any accumulated rejection reasons force the reject prompt.
    if len(u.rejects) > 0:
        print "REJECT\n" + pi
        prompt = "[R]eject, Skip, Quit ?"
        if Options["Automatic"]:

    print "INSTALL to " + ", ".join(u.pkg.changes["distribution"].keys())
    prompt = "[I]nstall, Skip, Quit ?"
    if Options["Automatic"]:

    # Keep prompting until the reply is one of the letters offered in
    # the prompt; re_default_answer extracts the bracketed default.
    while prompt.find(answer) == -1:
        answer = utils.our_raw_input(prompt)
        m = re_default_answer.match(prompt)
        answer = answer[:1].upper()

    Logger.log(["unaccepted", u.pkg.changes_file])
    # NOTE(review): stable_install() is declared below as
    # stable_install(u, session, summary, short_summary, fromsuite_name);
    # this call's argument order does not match -- confirm which side
    # is current.
    stable_install(u, summary, short_summary, stable_queue, log_urgency)
    install(u, session, log_urgency)
151 ###############################################################################
def add_poolfile(filename, datadict, location_id, session):
    """
    Create a PoolFile row for *filename* from the size and checksum
    values in *datadict* and add it to *session*.

    *datadict* must provide the keys "size", "md5sum", "sha1sum" and
    "sha256sum"; *location_id* is the archive location the file
    belongs to.
    """
    poolfile = PoolFile()
    poolfile.filename = filename
    poolfile.filesize = datadict["size"]
    poolfile.md5sum = datadict["md5sum"]
    poolfile.sha1sum = datadict["sha1sum"]
    poolfile.sha256sum = datadict["sha256sum"]
    poolfile.location_id = location_id

    session.add(poolfile)
    # Flush to get a file id (NB: This is not a commit)
def add_dsc_to_db(u, filename, session):
    """
    Record the .dsc (source package) of upload *u* in the database:
    the source row itself, its suite associations, its constituent
    files (files / dsc_files) and its uploaders.

    Returns (dsc_component, dsc_location_id) for later use by the
    caller (e.g. cross-component .orig handling in install()).
    """
    entry = u.pkg.files[filename]

    source.source = u.pkg.dsc["source"]
    source.version = u.pkg.dsc["version"]      # NB: not files[file]["version"], that has no epoch
    source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
    source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
    source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    source.install_date = datetime.now().date()

    dsc_component = entry["component"]
    dsc_location_id = entry["location id"]

    # "Dm-Upload-Allowed: yes" in the .dsc is stored as a boolean flag.
    source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")

    # Set up a new poolfile if necessary
    if not entry.has_key("files id") or not entry["files id"]:
        filename = entry["pool name"] + filename
        poolfile = add_poolfile(filename, entry, dsc_location_id, session)
        entry["files id"] = poolfile.file_id

    source.poolfile_id = entry["files id"]

    # Associate the source with every suite listed in the .changes
    for suite_name in u.pkg.changes["distribution"].keys():
        sa = SrcAssociation()
        sa.source_id = source.source_id
        sa.suite_id = get_suite(suite_name).suite_id

    # Add the source files to the DB (files and dsc_files)
    dscfile.source_id = source.source_id
    dscfile.poolfile_id = entry["files id"]

    for dsc_file, dentry in u.pkg.dsc_files.items():
        df.source_id = source.source_id

        # If the .orig tarball is already in the pool, its
        # files id is stored in dsc_files by check_dsc().
        files_id = dentry.get("files id", None)

        # Find the entry in the files hash
        # TODO: Bail out here properly
        for f, e in u.pkg.files.items():

        filename = dfentry["pool name"] + dsc_file

        (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
        # FIXME: needs to check for -1/-2 and or handle exception
        if found and obj is not None:
            files_id = obj.file_id

        # If still not found, add it
        # HACK: Force sha1sum etc into dentry
        dentry["sha1sum"] = dfentry["sha1sum"]
        dentry["sha256sum"] = dfentry["sha256sum"]
        poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
        files_id = poolfile.file_id

        df.poolfile_id = files_id

    # Add the src_uploaders to the DB
    uploader_ids = [source.maintainer_id]
    if u.pkg.dsc.has_key("uploaders"):
        for up in u.pkg.dsc["uploaders"].split(","):
            uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)

    # Warn on (and presumably skip) duplicate uploader entries.
    for up in uploader_ids:
        if added_ids.has_key(up):
            utils.warn("Already saw uploader %s for source %s" % (up, source.source))

        su.maintainer_id = up
        su.source_id = source.source_id

    return dsc_component, dsc_location_id
268 def add_deb_to_db(u, filename, session):
270 Contrary to what you might expect, this routine deals with both
271 debs and udebs. That info is in 'dbtype', whilst 'type' is
272 'deb' for both of them
275 entry = u.pkg.files[filename]
278 bin.package = entry["package"]
279 bin.version = entry["version"]
280 bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
281 bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
282 bin.arch_id = get_architecture(entry["architecture"], session).arch_id
283 bin.binarytype = entry["dbtype"]
286 filename = entry["pool name"] + filename
287 fullpath = os.path.join(cnf["Dir::Pool"], filename)
288 if not entry.get("location id", None):
289 entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], utils.where_am_i(), session).location_id
291 if not entry.get("files id", None):
292 poolfile = add_poolfile(filename, entry, entry["location id"], session)
293 entry["files id"] = poolfile.file_id
295 bin.poolfile_id = entry["files id"]
298 bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
299 if len(bin_sources) != 1:
300 raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
301 (bin.package, bin.version, bin.architecture.arch_string,
302 filename, bin.binarytype, u.pkg.changes["fingerprint"])
304 bin.source_id = bin_sources[0].source_id
306 # Add and flush object so it has an ID
310 # Add BinAssociations
311 for suite_name in u.pkg.changes["distribution"].keys():
312 ba = BinAssociation()
313 ba.binary_id = bin.binary_id
314 suite = get_suite(suite_name)
315 ba.suite_id = suite.suite_id
317 component_id = bin.poolfile.location.component_id;
318 component_id = bin.poolfile.location.component_id;
320 contents = copy_temporary_contents(bin os.path.basename(filename), None, session)
322 print "REJECT\nCould not determine contents of package %s" % bin.package
324 raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)
def install(u, session, log_urgency=True):
    """
    Install accepted upload *u*: register its source and binaries in
    the database, move its files into the pool, copy/retire the
    .changes and .dak bookkeeping files, log urgency for sourceful
    uploads, and update the auto-build queue state.
    """
    summarystats = SummaryStats()

    Logger.log(["installing changes", u.pkg.changes_file])

    # Ensure that we have all the hashes we need below.
    if len(u.rejects) > 0:
        # There were errors.  Print them and SKIP the changes.
        for msg in u.rejects:

    # Add the .dsc file to the DB first
    for newfile, entry in u.pkg.files.items():
        if entry["type"] == "dsc":
            dsc_component, dsc_location_id = add_dsc_to_db(u, newfile, session)

    # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
    for newfile, entry in u.pkg.files.items():
        if entry["type"] == "deb":
            add_deb_to_db(u, newfile, session)

    # If this is a sourceful diff only upload that is moving
    # cross-component we need to copy the .orig files into the new
    # component too for the same reasons as above.
    if u.pkg.changes["architecture"].has_key("source"):
        for orig_file in u.pkg.orig_files.keys():
            if not u.pkg.orig_files[orig_file].has_key("id"):
                continue # Skip if it's not in the pool
            orig_file_id = u.pkg.orig_files[orig_file]["id"]
            if u.pkg.orig_files[orig_file]["location"] == dsc_location_id:
                continue # Skip if the location didn't change

            # Look up the existing poolfile and snapshot its metadata
            # so a copy can be registered in the new component.
            oldf = get_poolfile_by_id(orig_file_id, session)
            old_filename = os.path.join(oldf.location.path, oldf.filename)
            old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
                       'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}

            new_filename = os.path.join(utils.poolify(u.pkg.changes["source"], dsc_component), os.path.basename(old_filename))

            # TODO: Care about size/md5sum collisions etc
            # NOTE(review): file_size and file_md5sum are not assigned
            # anywhere visible in this function -- confirm where they
            # are meant to come from (old_dat?).
            (found, newf) = check_poolfile(new_filename, file_size, file_md5sum, dsc_location_id, session)

            utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
            newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)

            # TODO: Check that there's only 1 here
            source = get_sources_from_name(u.pkg.changes["source"], u.pkg.changes["version"])[0]
            dscf = get_dscfiles(source_id=source.source_id, poolfile_id=orig_file_id, session=session)[0]
            dscf.poolfile_id = newf.file_id

    # Install the files into the pool
    for newfile, entry in u.pkg.files.items():
        destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
        utils.move(newfile, destination)
        Logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
        summarystats.accept_bytes += float(entry["size"])

    # Copy the .changes file across for suite which need it.
    for suite_name in u.pkg.changes["distribution"].keys():
        if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
            copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""
        # and the .dak file...
        if cnf.has_key("Suite::%s::CopyDotDak" % (suite_name)):
            copy_dot_dak[cnf["Suite::%s::CopyDotDak" % (suite_name)]] = ""

    for dest in copy_changes.keys():
        utils.copy(u.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))

    for dest in copy_dot_dak.keys():
        utils.copy(u.pkg.changes_file[:-8]+".dak", dest)

    # We're done - commit the database changes

    # Move the .changes into the 'done' directory
    utils.move(u.pkg.changes_file,
               os.path.join(cnf["Dir::Queue::Done"], os.path.basename(u.pkg.changes_file)))

    # Remove the .dak file
    os.unlink(u.pkg.changes_file[:-8] + ".dak")

    # Record urgency for sourceful uploads (feeds release-team stats).
    if u.pkg.changes["architecture"].has_key("source") and log_urgency:
        UrgencyLog().log(u.pkg.dsc["source"], u.pkg.dsc["version"], u.pkg.changes["urgency"])

    # Our SQL session will automatically start a new transaction after

    # Undo the work done in queue.py(accept) to help auto-building
    now_date = datetime.now()

    for suite_name in u.pkg.changes["distribution"].keys():
        if suite_name not in cnf.ValueList("Dinstall::QueueBuildSuites"):

        suite = get_suite(suite_name, session)
        dest_dir = cnf["Dir::QueueBuild"]

        # Security builds keep per-suite subdirectories.
        if cnf.FindB("Dinstall::SecurityQueueBuild"):
            dest_dir = os.path.join(dest_dir, suite_name)

        for newfile, entry in u.pkg.files.items():
            dest = os.path.join(dest_dir, newfile)

            qb = get_queue_build(dest, suite.suite_id, session)

            # Remove it from the list of packages for later processing by apt-ftparchive
            qb.last_used = now_date

            if not cnf.FindB("Dinstall::SecurityQueueBuild"):
                # Update the symlink to point to the new location in the pool
                pool_location = utils.poolify(u.pkg.changes["source"], entry["component"])
                src = os.path.join(cnf["Dir::Pool"], pool_location, os.path.basename(newfile))
                if os.path.islink(dest):
                    os.symlink(src, dest)

        # Update last_used on any non-uploaded .orig symlink
        for orig_file in u.pkg.orig_files.keys():
            # Determine the .orig.tar.gz file name
            if not u.pkg.orig_files[orig_file].has_key("id"):
                continue # Skip files not in the pool
            # XXX: do we really want to update the orig_files dict here
            # instead of using a temporary variable?
            u.pkg.orig_files[orig_file]["path"] = os.path.join(dest_dir, orig_file)

            # Remove it from the list of packages for later processing by apt-ftparchive
            qb = get_queue_build(u.pkg.orig_files[orig_file]["path"], suite.suite_id, session)
            qb.last_used = now_date

    summarystats.accept_count += 1
486 ################################################################################
def stable_install(u, session, summary, short_summary, fromsuite_name="proposed-updates"):
    """
    Move upload *u* from a proposed-updates suite into (Old)Stable:
    flip the source and binary suite associations in the database,
    archive the .changes file in the morgue, rewrite the stable
    ChangeLog and send the installation announcement mail.

    NOTE(review): action() calls this as
    stable_install(u, summary, short_summary, stable_queue, log_urgency),
    which does not match this parameter list -- confirm which side is
    out of date.
    """
    summarystats = SummaryStats()

    fromsuite_name = fromsuite_name.lower()
    tosuite_name = "Stable"
    if fromsuite_name == "oldstable-proposed-updates":
        tosuite_name = "OldStable"

    print "Installing from %s to %s." % (fromsuite_name, tosuite_name)

    fromsuite = get_suite(fromsuite_name)
    tosuite = get_suite(tosuite_name)

    # Add the source to stable (and remove it from proposed-updates)
    for newfile, entry in u.pkg.files.items():
        if entry["type"] == "dsc":
            package = u.pkg.dsc["source"]
            # NB: not files[file]["version"], that has no epoch
            version = u.pkg.dsc["version"]

            source = get_sources_from_name(package, version, session)
            utils.fubar("[INTERNAL ERROR] couldn't find '%s' (%s) in source table." % (package, version))

            # Remove from old suite
            old = session.query(SrcAssociation).filter_by(source_id = source.source_id)
            old = old.filter_by(suite_id = fromsuite.suite_id)

            # ...and associate the source with the target suite
            new = SrcAssociation()
            new.source_id = source.source_id
            new.suite_id = tosuite.suite_id

    # Add the binaries to stable (and remove it/them from proposed-updates)
    for newfile, entry in u.pkg.files.items():
        if entry["type"] == "deb":
            package = entry["package"]
            version = entry["version"]
            architecture = entry["architecture"]

            binary = get_binaries_from_name(package, version, [architecture, 'all'])

            utils.fubar("[INTERNAL ERROR] couldn't find '%s' (%s for %s architecture) in binaries table." % (package, version, architecture))

            # Remove from old suite
            old = session.query(BinAssociation).filter_by(binary_id = binary.binary_id)
            old = old.filter_by(suite_id = fromsuite.suite_id)

            # ...and associate the binary with the target suite
            new = BinAssociation()
            new.binary_id = binary.binary_id
            new.suite_id = tosuite.suite_id

    # Archive the .changes file in the morgue
    utils.move(u.pkg.changes_file,
               os.path.join(cnf["Dir::Morgue"], 'process-accepted', os.path.basename(u.pkg.changes_file)))

    ## Update the Stable ChangeLog file
    # TODO: URGH - Use a proper tmp file
    new_changelog_filename = cnf["Dir::Root"] + cnf["Suite::%s::ChangeLogBase" % (tosuite.suite_name)] + ".ChangeLog"
    changelog_filename = cnf["Dir::Root"] + cnf["Suite::%s::ChangeLogBase" % (tosuite.suite_name)] + "ChangeLog"
    if os.path.exists(new_changelog_filename):
        os.unlink(new_changelog_filename)

    new_changelog = utils.open_file(new_changelog_filename, 'w')
    for newfile, entry in u.pkg.files.items():
        if entry["type"] == "deb":
            new_changelog.write("%s/%s/binary-%s/%s\n" % (tosuite.suite_name,
                                                          entry["architecture"],
        elif re_issource.match(newfile):
            new_changelog.write("%s/%s/source/%s\n" % (tosuite.suite_name,
            new_changelog.write("%s\n" % (newfile))

    # Strip "For New Installs Check" boilerplate from the changes text
    chop_changes = re_fdnic.sub("\n", u.pkg.changes["changes"])
    new_changelog.write(chop_changes + '\n\n')

    # Prepend the old changelog contents, when readable
    if os.access(changelog_filename, os.R_OK) != 0:
        changelog = utils.open_file(changelog_filename)
        new_changelog.write(changelog.read())

    new_changelog.close()

    # Atomically replace the old changelog with the rebuilt one
    if os.access(changelog_filename, os.R_OK) != 0:
        os.unlink(changelog_filename)
    utils.move(new_changelog_filename, changelog_filename)

    summarystats.accept_count += 1

    # Announce the installation by mail for sourceful uploads
    if not Options["No-Mail"] and u.pkg.changes["architecture"].has_key("source"):
        u.Subst["__SUITE__"] = " into %s" % (tosuite)
        u.Subst["__SUMMARY__"] = summary
        u.Subst["__BCC__"] = "X-DAK: dak process-accepted"

        if cnf.has_key("Dinstall::Bcc"):
            u.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])

        template = os.path.join(cnf["Dir::Templates"], 'process-accepted.install')

        mail_message = utils.TemplateSubst(u.Subst, template)
        utils.send_mail(mail_message)
        u.announce(short_summary, True)

    # Finally remove the .dak file
    dot_dak_file = os.path.join(cnf["Suite::%s::CopyDotDak" % (fromsuite.suite_name)],
                                os.path.basename(u.pkg.changes_file[:-8]+".dak"))
    os.unlink(dot_dak_file)
608 ################################################################################
def process_it(changes_file, stable_queue, log_urgency, session):
    """
    Process a single .changes file: load the saved upload state from
    its .dak file, run the accepted checks, then hand over to action()
    to reject/install it.
    """
    overwrite_checks = True

    # Absolutize the filename to avoid the requirement of being in the
    # same directory as the .changes file.
    cfile = os.path.abspath(changes_file)

    # And since handling of installs to stable munges with the CWD
    # save and restore it.
    u.prevdir = os.getcwd()

    # Stable installs run from the suite's CopyDotDak directory.
    cfile = os.path.basename(old)
    os.chdir(cnf["Suite::%s::CopyDotDak" % (stable_queue)])
    # overwrite_checks should not be performed if installing to stable
    overwrite_checks = False

    u.pkg.load_dot_dak(cfile)

    u.pkg.changes_file = old
    u.accepted_checks(overwrite_checks, session)
    action(u, stable_queue, log_urgency, session)
643 ###############################################################################
# Main driver: parse options, guard against the daily cron job, take
# the dinstall lock, then process each .changes file in turn.
summarystats = SummaryStats()
changes_files = init()

# -n/--dry-run invalidates some other options which would involve things happening
if Options["No-Action"]:
    Options["Automatic"] = ""

# Check that we aren't going to clash with the daily cron job
if not Options["No-Action"] and os.path.exists("%s/Archive_Maintenance_In_Progress" % (cnf["Dir::Root"])) and not Options["No-Lock"]:
    utils.fubar("Archive maintenance in progress. Try again later.")

# If running from within proposed-updates; assume an install to stable
# NOTE(review): os.getenv('PWD') can return None when PWD is unset,
# which would make .find() raise -- confirm this is acceptable here.
if os.getenv('PWD').find('oldstable-proposed-updates') != -1:
    stable_queue = "Oldstable-Proposed-Updates"
elif os.getenv('PWD').find('proposed-updates') != -1:
    stable_queue = "Proposed-Updates"

# Obtain lock if not in no-action mode and initialize the log
if not Options["No-Action"]:
    lock_fd = os.open(cnf["Dinstall::LockFile"], os.O_RDWR | os.O_CREAT)
    fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
    # EACCES/EAGAIN mean another process already holds the lock
    if errno.errorcode[e.errno] == 'EACCES' or errno.errorcode[e.errno] == 'EAGAIN':
        utils.fubar("Couldn't obtain lock; assuming another 'dak process-accepted' is already running.")

Logger = daklog.Logger(cnf, "process-accepted")
if not stable_queue and cnf.get("Dir::UrgencyLog"):
    # Initialise UrgencyLog()

# Sort the .changes files so that we process sourceful ones first
changes_files.sort(utils.changes_compare)

# Process the changes files
for changes_file in changes_files:
    print "\n" + changes_file
    session = DBConn().session()
    process_it(changes_file, stable_queue, log_urgency, session)

# Print and log a run summary.
# NOTE(review): 'sets' is not assigned anywhere visible here --
# presumably set/sets pluralisation; confirm.
if summarystats.accept_count:
    if summarystats.accept_count > 1:
    sys.stderr.write("Installed %d package %s, %s.\n" % (summarystats.accept_count, sets,
                                                         utils.size_type(int(summarystats.accept_bytes))))
    Logger.log(["total", summarystats.accept_count, summarystats.accept_bytes])

if not Options["No-Action"]:
710 ###############################################################################
# Standard script entry point.
if __name__ == '__main__':