4 Installs Debian packages from queue/accepted into the pool
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
8 @copyright: 2009 Joerg Jaspert <joerg@debian.org>
9 @license: GNU General Public License version 2 or later
12 # This program is free software; you can redistribute it and/or modify
13 # it under the terms of the GNU General Public License as published by
14 # the Free Software Foundation; either version 2 of the License, or
15 # (at your option) any later version.
17 # This program is distributed in the hope that it will be useful,
18 # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 # GNU General Public License for more details.
22 # You should have received a copy of the GNU General Public License
23 # along with this program; if not, write to the Free Software
24 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
26 ###############################################################################
28 # Cartman: "I'm trying to make the best of a bad situation, I don't
29 # need to hear crap from a bunch of hippy freaks living in
30 # denial. Screw you guys, I'm going home."
32 # Kyle: "But Cartman, we're trying to..."
34 # Cartman: "uhh.. screw you guys... home."
36 ###############################################################################
42 from datetime import datetime
45 from daklib import daklog
46 from daklib.queue import *
47 from daklib import utils
48 from daklib.dbconn import *
49 from daklib.dak_exceptions import *
50 from daklib.regexes import re_default_answer, re_issource, re_fdnic
51 from daklib.urgencylog import UrgencyLog
52 from daklib.summarystats import SummaryStats
53 from daklib.config import Config
55 ###############################################################################
60 ###############################################################################
# Command-line / configuration bootstrap for "dak process-accepted".
# NOTE(review): the enclosing `def init():` header is not visible in this
# excerpt (lines appear elided) — `cnf` is presumably a daklib Config
# instance created just above; verify against the full file.
65 # Initialize config and connection to db
# Option table for apt_pkg's getopt-style parser:
# (short flag, long flag, config key[, "HasArg" when the option takes a value]).
69 Arguments = [('a',"automatic","Dinstall::Options::Automatic"),
70 ('h',"help","Dinstall::Options::Help"),
71 ('n',"no-action","Dinstall::Options::No-Action"),
72 ('p',"no-lock", "Dinstall::Options::No-Lock"),
73 ('s',"no-mail", "Dinstall::Options::No-Mail"),
74 ('d',"directory", "Dinstall::Options::Directory", "HasArg")]
# Pre-seed every option key with "" so later truthiness tests on
# Options[...] never hit a missing key (Python 2 dict.has_key).
76 for i in ["automatic", "help", "no-action", "no-lock", "no-mail",
77 "version", "directory"]:
78 if not cnf.has_key("Dinstall::Options::%s" % (i)):
79 cnf["Dinstall::Options::%s" % (i)] = ""
# Non-option arguments are the .changes files to process.
81 changes_files = apt_pkg.ParseCommandLine(cnf.Cnf, Arguments, sys.argv)
82 Options = cnf.SubTree("Dinstall::Options")
87 # If we have a directory flag, use it to find our files
88 if cnf["Dinstall::Options::Directory"] != "":
89 # Note that we clobber the list of files we were given in this case
90 # so warn if the user has done both
91 if len(changes_files) > 0:
92 utils.warn("Directory provided so ignoring files given on command line")
94 changes_files = utils.get_changes_files(cnf["Dinstall::Options::Directory"])
98 ###############################################################################
# Print the command-line help text for "dak process-accepted".
# NOTE(review): the trailing `sys.exit(exit_code)` is not visible in this
# excerpt — presumably elided; confirm against the full file.
100 def usage (exit_code=0):
101 print """Usage: dak process-accepted [OPTION]... [CHANGES]...
102 -a, --automatic automatic run
103 -h, --help show this help and exit.
104 -n, --no-action don't do anything
105 -p, --no-lock don't check lockfile !! for cron.daily only !!
106 -s, --no-mail don't send any mail
107 -V, --version display the version number and exit"""
110 ###############################################################################
# Decide what to do with one upload: show a REJECT or INSTALL summary,
# prompt the operator (unless --automatic preselects the answer), then
# dispatch to stable_install() (proposed-updates path) or install().
# NOTE(review): several lines are elided from this excerpt (the automatic
# answer assignments, the else: branch around original line 125, and the
# answer-dispatch branches before lines 141/144/146).
112 def action (u, stable_queue=None, log_urgency=True, session=None):
113 (summary, short_summary) = u.build_summaries()
114 pi = u.package_info()
116 (prompt, answer) = ("", "XXX")
117 if Options["No-Action"] or Options["Automatic"]:
120 if len(u.rejects) > 0:
121 print "REJECT\n" + pi
122 prompt = "[R]eject, Skip, Quit ?"
123 if Options["Automatic"]:
126 print "INSTALL to " + ", ".join(u.pkg.changes["distribution"].keys())
128 prompt = "[I]nstall, Skip, Quit ?"
129 if Options["Automatic"]:
# Keep prompting until the answer is one of the letters in the prompt;
# re_default_answer extracts the bracketed default letter.
132 while prompt.find(answer) == -1:
133 answer = utils.our_raw_input(prompt)
134 m = re_default_answer.match(prompt)
137 answer = answer[:1].upper()
141 Logger.log(["unaccepted", u.pkg.changes_file])
# NOTE(review): this call passes (u, summary, short_summary, stable_queue,
# log_urgency) but the stable_install defined below takes
# (u, session, summary, short_summary, fromsuite_name) — the argument
# order looks inconsistent; verify against the complete file.
144 stable_install(u, summary, short_summary, stable_queue, log_urgency)
146 install(u, session, log_urgency)
151 ###############################################################################
# Create a PoolFile ORM row for `filename` using the size/hash values in
# `datadict` (keys: "size", "md5sum", "sha1sum", "sha256sum") and attach it
# to `session`. Callers (add_dsc_to_db, add_deb_to_db, install) use the
# returned object's .file_id, so the elided tail presumably does
# session.flush() and returns `poolfile` — confirm against the full file.
152 def add_poolfile(filename, datadict, location_id, session):
153 poolfile = PoolFile()
154 poolfile.filename = filename
155 poolfile.filesize = datadict["size"]
156 poolfile.md5sum = datadict["md5sum"]
157 poolfile.sha1sum = datadict["sha1sum"]
158 poolfile.sha256sum = datadict["sha256sum"]
159 poolfile.location_id = location_id
161 session.add(poolfile)
162 # Flush to get a file id (NB: This is not a commit)
# Register a source upload (.dsc) in the database: create the DBSource row,
# its pool file, per-suite SrcAssociations, DSCFile rows for every file
# listed in the .dsc, and SrcUploaders entries. Returns the component and
# location id of the .dsc so install() can handle cross-component .orig
# copies. NOTE(review): several lines are elided from this excerpt
# (e.g. the `source = DBSource()` construction and the session.add/flush
# calls); the comments below describe only what is visible.
167 def add_dsc_to_db(u, filename, session):
168 entry = u.pkg.files[filename]
171 source.source = u.pkg.dsc["source"]
172 source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
173 source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
174 source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
175 source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
176 source.install_date = datetime.now().date()
178 dsc_component = entry["component"]
179 dsc_location_id = entry["location id"]
181 source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")
183 # Set up a new poolfile if necessary
184 if not entry.has_key("files id") or not entry["files id"]:
# Prefix with the pool directory ("pool name") to get the pool-relative path.
185 filename = entry["pool name"] + filename
186 poolfile = add_poolfile(filename, entry, dsc_location_id, session)
187 entry["files id"] = poolfile.file_id
189 source.poolfile_id = entry["files id"]
# One SrcAssociation per target suite named in the .changes Distribution field.
193 for suite_name in u.pkg.changes["distribution"].keys():
194 sa = SrcAssociation()
195 sa.source_id = source.source_id
196 sa.suite_id = get_suite(suite_name).suite_id
201 # Add the source files to the DB (files and dsc_files)
203 dscfile.source_id = source.source_id
204 dscfile.poolfile_id = entry["files id"]
# For each file listed in the .dsc, find or create its pool file row.
207 for dsc_file, dentry in u.pkg.dsc_files.items():
209 df.source_id = source.source_id
211 # If the .orig tarball is already in the pool, it's
212 # files id is stored in dsc_files by check_dsc().
213 files_id = dentry.get("files id", None)
215 # Find the entry in the files hash
216 # TODO: Bail out here properly
218 for f, e in u.pkg.files.items():
224 filename = dfentry["pool name"] + dsc_file
# Look the file up in the pool by name/size/md5 before creating a new row.
226 (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
227 # FIXME: needs to check for -1/-2 and or handle exception
228 if found and obj is not None:
229 files_id = obj.file_id
231 # If still not found, add it
233 # HACK: Force sha1sum etc into dentry
234 dentry["sha1sum"] = dfentry["sha1sum"]
235 dentry["sha256sum"] = dfentry["sha256sum"]
236 poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
237 files_id = poolfile.file_id
239 df.poolfile_id = files_id
244 # Add the src_uploaders to the DB
# The maintainer always counts as an uploader; Uploaders: is comma-separated.
245 uploader_ids = [source.maintainer_id]
246 if u.pkg.dsc.has_key("uploaders"):
247 for up in u.pkg.dsc["uploaders"].split(","):
249 uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)
# added_ids de-duplicates uploader entries; duplicates only warn.
252 for up in uploader_ids:
253 if added_ids.has_key(up):
254 utils.warn("Already saw uploader %s for source %s" % (up, source.source))
260 su.maintainer_id = up
261 su.source_id = source.source_id
266 return dsc_component, dsc_location_id
# Register one binary package file in the database: DBBinary row, its pool
# file, link to the (unique) source package, and one BinAssociation per
# target suite. NOTE(review): lines are elided from this excerpt (e.g. the
# `bin = DBBinary()` construction and session.add/flush calls).
268 def add_deb_to_db(u, filename, session):
270 Contrary to what you might expect, this routine deals with both
271 debs and udebs. That info is in 'dbtype', whilst 'type' is
272 'deb' for both of them
275 entry = u.pkg.files[filename]
278 bin.package = entry["package"]
279 bin.version = entry["version"]
280 bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
281 bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
282 bin.arch_id = get_architecture(entry["architecture"], session).arch_id
283 bin.binarytype = entry["dbtype"]
# Work out the pool-relative filename and make sure location/pool-file
# rows exist, creating them lazily the first time this file is seen.
286 filename = entry["pool name"] + filename
287 fullpath = os.path.join(cnf["Dir::Pool"], filename)
288 if not entry.get("location id", None):
289 entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], utils.where_am_i(), session).location_id
291 if not entry.get("files id", None):
292 poolfile = add_poolfile(filename, entry, entry["location id"], session)
293 entry["files id"] = poolfile.file_id
295 bin.poolfile_id = entry["files id"]
# A binary must map to exactly one source row, otherwise the upload is
# rejected with NoSourceFieldError (Python 2 raise syntax).
298 bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
299 if len(bin_sources) != 1:
300 raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
301 (bin.package, bin.version, bin.architecture.arch_string,
302 filename, bin.binarytype, u.pkg.changes["fingerprint"])
304 bin.source_id = bin_sources[0].source_id
306 # Add and flush object so it has an ID
310 # Add BinAssociations
311 for suite_name in u.pkg.changes["distribution"].keys():
312 ba = BinAssociation()
313 ba.binary_id = bin.binary_id
314 ba.suite_id = get_suite(suite_name).suite_id
319 # Deal with contents - disabled for now
320 #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
322 # print "REJECT\nCould not determine contents of package %s" % bin.package
324 # raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)
# Main worker: take an accepted upload and install it into the archive —
# database rows first (.dsc, then .deb/.udeb), then move the files into the
# pool, copy .changes/.dak where suites request it, commit, archive the
# .changes into 'done', log urgency, and undo queue-build symlinks.
# NOTE(review): many lines are elided from this excerpt (the early-return
# after printing rejects, session.commit(), the copy_changes/copy_dot_dak
# dict initialisations, and several conditionals); comments below cover
# only what is visible.
327 def install(u, session, log_urgency=True):
329 summarystats = SummaryStats()
333 Logger.log(["installing changes", u.pkg.changes_file])
335 # Ensure that we have all the hashes we need below.
337 if len(u.rejects) > 0:
338 # There were errors. Print them and SKIP the changes.
339 for msg in u.rejects:
343 # Add the .dsc file to the DB first
# dsc_component / dsc_location_id from this call are reused further down
# for the cross-component .orig handling.
344 for newfile, entry in u.pkg.files.items():
345 if entry["type"] == "dsc":
346 dsc_component, dsc_location_id = add_dsc_to_db(u, newfile, session)
348 # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
349 for newfile, entry in u.pkg.files.items():
350 if entry["type"] == "deb":
351 add_deb_to_db(u, newfile, session)
353 # If this is a sourceful diff only upload that is moving
354 # cross-component we need to copy the .orig files into the new
355 # component too for the same reasons as above.
356 if u.pkg.changes["architecture"].has_key("source"):
357 for orig_file in u.pkg.orig_files.keys():
358 if not u.pkg.orig_files[orig_file].has_key("id"):
359 continue # Skip if it's not in the pool
360 orig_file_id = u.pkg.orig_files[orig_file]["id"]
361 if u.pkg.orig_files[orig_file]["location"] == dsc_location_id:
362 continue # Skip if the location didn't change
# Build the new pool path from the old pool file's metadata.
365 oldf = get_poolfile_by_id(orig_file_id, session)
366 old_filename = os.path.join(oldf.location.path, oldf.filename)
367 old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
368 'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
370 new_filename = os.path.join(utils.poolify(u.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
372 # TODO: Care about size/md5sum collisions etc
# NOTE(review): file_size / file_md5sum are not defined in the visible
# lines — presumably assigned in an elided line (from old_dat?); verify.
373 (found, newf) = check_poolfile(new_filename, file_size, file_md5sum, dsc_location_id, session)
376 utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
377 newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
379 # TODO: Check that there's only 1 here
380 source = get_sources_from_name(u.pkg.changes["source"], u.pkg.changes["version"])[0]
381 dscf = get_dscfiles(source_id=source.source_id, poolfile_id=orig_file_id, session=session)[0]
382 dscf.poolfile_id = newf.file_id
386 # Install the files into the pool
387 for newfile, entry in u.pkg.files.items():
388 destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
389 utils.move(newfile, destination)
390 Logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
391 summarystats.accept_bytes += float(entry["size"])
393 # Copy the .changes file across for suite which need it.
# copy_changes / copy_dot_dak are used as sets (value always "") keyed by
# destination path; their initialisation is elided from this excerpt.
396 for suite_name in u.pkg.changes["distribution"].keys():
397 if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
398 copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""
399 # and the .dak file...
400 if cnf.has_key("Suite::%s::CopyDotDak" % (suite_name)):
401 copy_dot_dak[cnf["Suite::%s::CopyDotDak" % (suite_name)]] = ""
403 for dest in copy_changes.keys():
404 utils.copy(u.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
# [:-8] strips the ".changes" suffix to derive the .dak name.
406 for dest in copy_dot_dak.keys():
407 utils.copy(u.pkg.changes_file[:-8]+".dak", dest)
409 # We're done - commit the database changes
412 # Move the .changes into the 'done' directory
413 utils.move(u.pkg.changes_file,
414 os.path.join(cnf["Dir::Queue::Done"], os.path.basename(u.pkg.changes_file)))
416 # Remove the .dak file
417 os.unlink(u.pkg.changes_file[:-8] + ".dak")
419 if u.pkg.changes["architecture"].has_key("source") and log_urgency:
420 UrgencyLog().log(u.pkg.dsc["source"], u.pkg.dsc["version"], u.pkg.changes["urgency"])
422 # Our SQL session will automatically start a new transaction after
425 # Undo the work done in queue.py(accept) to help auto-building
427 now_date = datetime.now()
429 for suite_name in u.pkg.changes["distribution"].keys():
430 if suite_name not in cnf.ValueList("Dinstall::QueueBuildSuites"):
433 suite = get_suite(suite_name, session)
434 dest_dir = cnf["Dir::QueueBuild"]
# Security archives keep one queue-build dir per suite.
436 if cnf.FindB("Dinstall::SecurityQueueBuild"):
437 dest_dir = os.path.join(dest_dir, suite_name)
439 for newfile, entry in u.pkg.files.items():
440 dest = os.path.join(dest_dir, newfile)
442 qb = get_queue_build(dest, suite.suite_id, session)
444 # Remove it from the list of packages for later processing by apt-ftparchive
446 qb.last_used = now_date
450 if not cnf.FindB("Dinstall::SecurityQueueBuild"):
451 # Update the symlink to point to the new location in the pool
452 pool_location = utils.poolify(u.pkg.changes["source"], entry["component"])
453 src = os.path.join(cnf["Dir::Pool"], pool_location, os.path.basename(newfile))
454 if os.path.islink(dest):
456 os.symlink(src, dest)
458 # Update last_used on any non-uploaded .orig symlink
459 for orig_file in u.pkg.orig_files.keys():
460 # Determine the .orig.tar.gz file name
461 if not u.pkg.orig_files[orig_file].has_key("id"):
462 continue # Skip files not in the pool
463 # XXX: do we really want to update the orig_files dict here
464 # instead of using a temporary variable?
465 u.pkg.orig_files[orig_file]["path"] = os.path.join(dest_dir, orig_file)
467 # Remove it from the list of packages for later processing by apt-ftparchive
468 qb = get_queue_build(u.pkg.orig_files[orig_file]["path"], suite.suite_id, session)
471 qb.last_used = now_date
477 summarystats.accept_count += 1
479 ################################################################################
# Promote an upload from (old)stable-proposed-updates into (old)stable:
# move the SrcAssociation and BinAssociations between suites, archive the
# .changes into the morgue, rebuild the suite ChangeLog, mail an
# announcement, and delete the .dak file.
# NOTE(review): several lines are elided (session.delete/add calls, the
# `source = source[0]` style reduction after get_sources_from_name, and
# the try/commit block); see also the mismatched call in action() above,
# which passes arguments in a different order than this signature.
481 def stable_install(u, session, summary, short_summary, fromsuite_name="proposed-updates"):
482 summarystats = SummaryStats()
484 fromsuite_name = fromsuite_name.lower()
485 tosuite_name = "Stable"
486 if fromsuite_name == "oldstable-proposed-updates":
487 tosuite_name = "OldStable"
489 print "Installing from %s to %s." % (fromsuite_name, tosuite_name)
491 fromsuite = get_suite(fromsuite_name)
492 tosuite = get_suite(tosuite_name)
494 # Add the source to stable (and remove it from proposed-updates)
495 for newfile, entry in u.pkg.files.items():
496 if entry["type"] == "dsc":
497 package = u.pkg.dsc["source"]
498 # NB: not files[file]["version"], that has no epoch
499 version = u.pkg.dsc["version"]
# get_sources_from_name returns a list; the elided lines presumably
# check for exactly one match (fubar below) and unwrap it — verify.
501 source = get_sources_from_name(package, version, session)
503 utils.fubar("[INTERNAL ERROR] couldn't find '%s' (%s) in source table." % (package, version))
506 # Remove from old suite
507 old = session.query(SrcAssociation).filter_by(source_id = source.source_id)
508 old = old.filter_by(suite_id = fromsuite.suite_id)
512 new = SrcAssociation()
513 new.source_id = source.source_id
514 new.suite_id = tosuite.suite_id
517 # Add the binaries to stable (and remove it/them from proposed-updates)
518 for newfile, entry in u.pkg.files.items():
519 if entry["type"] == "deb":
520 package = entry["package"]
521 version = entry["version"]
522 architecture = entry["architecture"]
# 'all' is included so arch-independent binaries are found too.
524 binary = get_binaries_from_name(package, version, [architecture, 'all'])
527 utils.fubar("[INTERNAL ERROR] couldn't find '%s' (%s for %s architecture) in binaries table." % (package, version, architecture))
530 # Remove from old suite
531 old = session.query(BinAssociation).filter_by(binary_id = binary.binary_id)
532 old = old.filter_by(suite_id = fromsuite.suite_id)
536 new = BinAssociation()
537 new.binary_id = binary.binary_id
538 new.suite_id = tosuite.suite_id
# Archive the .changes file into the morgue rather than 'done'.
543 utils.move(u.pkg.changes_file,
544 os.path.join(cnf["Dir::Morgue"], 'process-accepted', os.path.basename(u.pkg.changes_file)))
546 ## Update the Stable ChangeLog file
547 # TODO: URGH - Use a proper tmp file
548 new_changelog_filename = cnf["Dir::Root"] + cnf["Suite::%s::ChangeLogBase" % (tosuite.suite_name)] + ".ChangeLog"
549 changelog_filename = cnf["Dir::Root"] + cnf["Suite::%s::ChangeLogBase" % (tosuite.suite_name)] + "ChangeLog"
550 if os.path.exists(new_changelog_filename):
551 os.unlink(new_changelog_filename)
# Write the new entries first, then append the old ChangeLog content.
553 new_changelog = utils.open_file(new_changelog_filename, 'w')
554 for newfile, entry in u.pkg.files.items():
555 if entry["type"] == "deb":
556 new_changelog.write("%s/%s/binary-%s/%s\n" % (tosuite.suite_name,
558 entry["architecture"],
560 elif re_issource.match(newfile):
561 new_changelog.write("%s/%s/source/%s\n" % (tosuite.suite_name,
565 new_changelog.write("%s\n" % (newfile))
# re_fdnic collapses the folded .changes "Changes:" field back to lines.
567 chop_changes = re_fdnic.sub("\n", u.pkg.changes["changes"])
568 new_changelog.write(chop_changes + '\n\n')
570 if os.access(changelog_filename, os.R_OK) != 0:
571 changelog = utils.open_file(changelog_filename)
572 new_changelog.write(changelog.read())
574 new_changelog.close()
# Replace the old ChangeLog with the freshly written one.
576 if os.access(changelog_filename, os.R_OK) != 0:
577 os.unlink(changelog_filename)
578 utils.move(new_changelog_filename, changelog_filename)
580 summarystats.accept_count += 1
# Mail/announce only sourceful uploads, and only when mail is enabled.
582 if not Options["No-Mail"] and u.pkg.changes["architecture"].has_key("source"):
583 u.Subst["__SUITE__"] = " into %s" % (tosuite)
584 u.Subst["__SUMMARY__"] = summary
585 u.Subst["__BCC__"] = "X-DAK: dak process-accepted"
587 if cnf.has_key("Dinstall::Bcc"):
588 u.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])
590 template = os.path.join(cnf["Dir::Templates"], 'process-accepted.install')
592 mail_message = utils.TemplateSubst(u.Subst, template)
593 utils.send_mail(mail_message)
594 u.announce(short_summary, True)
596 # Finally remove the .dak file
597 dot_dak_file = os.path.join(cnf["Suite::%s::CopyDotDak" % (fromsuite.suite_name)],
598 os.path.basename(u.pkg.changes_file[:-8]+".dak"))
599 os.unlink(dot_dak_file)
601 ################################################################################
# Process one .changes file end-to-end: load its stored .dak state, run the
# accepted checks, and hand off to action(). For stable-queue installs it
# chdirs into the queue's CopyDotDak directory and disables overwrite
# checks. NOTE(review): lines are elided (the `u = Upload()` construction,
# the stable_queue conditional around line 618, and the chdir-back/cleanup
# at the end).
603 def process_it(changes_file, stable_queue, log_urgency, session):
607 overwrite_checks = True
609 # Absolutize the filename to avoid the requirement of being in the
610 # same directory as the .changes file.
611 cfile = os.path.abspath(changes_file)
613 # And since handling of installs to stable munges with the CWD
614 # save and restore it.
615 u.prevdir = os.getcwd()
# `old` presumably holds the original cfile path (assignment elided) —
# it is restored into u.pkg.changes_file below; verify.
619 cfile = os.path.basename(old)
620 os.chdir(cnf["Suite::%s::CopyDotDak" % (stable_queue)])
621 # overwrite_checks should not be performed if installing to stable
622 overwrite_checks = False
624 u.pkg.load_dot_dak(cfile)
628 u.pkg.changes_file = old
630 u.accepted_checks(overwrite_checks, session)
631 action(u, stable_queue, log_urgency, session)
636 ###############################################################################
# Body of main(): option handling, daily-cron clash check, stable-queue
# detection from $PWD, lock acquisition, per-file processing, and the final
# summary. NOTE(review): the `def main():` header and several lines (the
# try: around lockf, UrgencyLog initialisation, session commit/close, and
# the final Logger/UrgencyLog close calls) are elided from this excerpt.
642 summarystats = SummaryStats()
643 changes_files = init()
647 # -n/--dry-run invalidates some other options which would involve things happening
648 if Options["No-Action"]:
649 Options["Automatic"] = ""
651 # Check that we aren't going to clash with the daily cron job
653 if not Options["No-Action"] and os.path.exists("%s/Archive_Maintenance_In_Progress" % (cnf["Dir::Root"])) and not Options["No-Lock"]:
654 utils.fubar("Archive maintenance in progress. Try again later.")
656 # If running from within proposed-updates; assume an install to stable
# Order matters: 'oldstable-proposed-updates' also contains
# 'proposed-updates', so it must be tested first.
658 if os.getenv('PWD').find('oldstable-proposed-updates') != -1:
659 stable_queue = "Oldstable-Proposed-Updates"
660 elif os.getenv('PWD').find('proposed-updates') != -1:
661 stable_queue = "Proposed-Updates"
663 # Obtain lock if not in no-action mode and initialize the log
664 if not Options["No-Action"]:
665 lock_fd = os.open(cnf["Dinstall::LockFile"], os.O_RDWR | os.O_CREAT)
# Non-blocking exclusive lock; EACCES/EAGAIN means another instance runs.
667 fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
669 if errno.errorcode[e.errno] == 'EACCES' or errno.errorcode[e.errno] == 'EAGAIN':
670 utils.fubar("Couldn't obtain lock; assuming another 'dak process-accepted' is already running.")
673 Logger = daklog.Logger(cnf, "process-accepted")
674 if not stable_queue and cnf.get("Dir::UrgencyLog"):
675 # Initialise UrgencyLog()
679 # Sort the .changes files so that we process sourceful ones first
680 changes_files.sort(utils.changes_compare)
683 # Process the changes files
684 for changes_file in changes_files:
685 print "\n" + changes_file
686 session = DBConn().session()
687 process_it(changes_file, stable_queue, log_urgency, session)
# Final summary: count, byte total, and a log line for the archive log.
690 if summarystats.accept_count:
692 if summarystats.accept_count > 1:
694 sys.stderr.write("Installed %d package %s, %s.\n" % (summarystats.accept_count, sets,
695 utils.size_type(int(summarystats.accept_bytes))))
696 Logger.log(["total", summarystats.accept_count, summarystats.accept_bytes])
698 if not Options["No-Action"]:
703 ###############################################################################
# Script entry point; the `main()` call is not visible in this excerpt.
705 if __name__ == '__main__':