#!/usr/bin/env python

"""
Installs Debian packages from queue/accepted into the pool

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
@copyright: 2009 Joerg Jaspert <joerg@debian.org>
@license: GNU General Public License version 2 or later
"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

###############################################################################
# Cartman: "I'm trying to make the best of a bad situation, I don't
#           need to hear crap from a bunch of hippy freaks living in
#           denial. Screw you guys, I'm going home."
#
# Kyle: "But Cartman, we're trying to..."
#
# Cartman: "uhh.. screw you guys... home."

###############################################################################

import errno
import fcntl
import os
import sys
from datetime import datetime
import apt_pkg, commands

from daklib import daklog
from daklib.queue import *
from daklib import utils
from daklib.dbconn import *
from daklib.binary import copy_temporary_contents
from daklib.dak_exceptions import *
from daklib.regexes import re_default_answer, re_issource, re_fdnic
from daklib.urgencylog import UrgencyLog
from daklib.summarystats import SummaryStats
from daklib.config import Config

###############################################################################

Options = None
Logger = None

###############################################################################

def init():
    global cnf, Options

    # Initialize config and connection to db
    cnf = Config()
    DBConn()

    Arguments = [('a',"automatic","Dinstall::Options::Automatic"),
                 ('h',"help","Dinstall::Options::Help"),
                 ('n',"no-action","Dinstall::Options::No-Action"),
                 ('p',"no-lock", "Dinstall::Options::No-Lock"),
                 ('s',"no-mail", "Dinstall::Options::No-Mail"),
                 ('d',"directory", "Dinstall::Options::Directory", "HasArg")]
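    # Each switch above is mapped onto a Dinstall::Options::* configuration key
    # by apt_pkg.ParseCommandLine below; e.g. passing -n sets
    # Dinstall::Options::No-Action, which the code below only ever tests for truth.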

    for i in ["automatic", "help", "no-action", "no-lock", "no-mail",
              "version", "directory"]:
        if not cnf.has_key("Dinstall::Options::%s" % (i)):
            cnf["Dinstall::Options::%s" % (i)] = ""

    changes_files = apt_pkg.ParseCommandLine(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.SubTree("Dinstall::Options")

    if Options["Help"]:
        usage()

    # If we have a directory flag, use it to find our files
    if cnf["Dinstall::Options::Directory"] != "":
        # Note that we clobber the list of files we were given in this case
        # so warn if the user has done both
        if len(changes_files) > 0:
            utils.warn("Directory provided so ignoring files given on command line")

        changes_files = utils.get_changes_files(cnf["Dinstall::Options::Directory"])

    return changes_files

###############################################################################

def usage (exit_code=0):
    print """Usage: dak process-accepted [OPTION]... [CHANGES]...
  -a, --automatic           automatic run
  -h, --help                show this help and exit.
  -n, --no-action           don't do anything
  -p, --no-lock             don't check lockfile !! for cron.daily only !!
  -s, --no-mail             don't send any mail
  -V, --version             display the version number and exit"""
    sys.exit(exit_code)
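
# A typical invocation sketch (illustrative; the queue path below is an
# example, not a canonical location):
#
#   dak process-accepted -d /srv/queue/accepted
#   dak process-accepted foo_1.2-1_amd64.changes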

###############################################################################

def action (u, stable_queue=None, log_urgency=True, session=None):
    (summary, short_summary) = u.build_summaries()
    pi = u.package_info()

    (prompt, answer) = ("", "XXX")
    if Options["No-Action"] or Options["Automatic"]:
        answer = 'S'

    if len(u.rejects) > 0:
        print "REJECT\n" + pi
        prompt = "[R]eject, Skip, Quit ?"
        if Options["Automatic"]:
            answer = 'R'
    else:
        print "INSTALL to " + ", ".join(u.pkg.changes["distribution"].keys())
        print pi + summary,
        prompt = "[I]nstall, Skip, Quit ?"
        if Options["Automatic"]:
            answer = 'I'

    while prompt.find(answer) == -1:
        answer = utils.our_raw_input(prompt)
        m = re_default_answer.match(prompt)
        if answer == "":
            answer = m.group(1)
        answer = answer[:1].upper()

    if answer == 'R':
        u.do_reject(0, pi)
        Logger.log(["unaccepted", u.pkg.changes_file])
    elif answer == 'I':
        if stable_queue:
            stable_install(u, session, summary, short_summary, stable_queue)
        else:
            install(u, session, log_urgency)
    elif answer == 'Q':
        sys.exit(0)

###############################################################################
def add_poolfile(filename, datadict, location_id, session):
    poolfile = PoolFile()
    poolfile.filename = filename
    poolfile.filesize = datadict["size"]
    poolfile.md5sum = datadict["md5sum"]
    poolfile.sha1sum = datadict["sha1sum"]
    poolfile.sha256sum = datadict["sha256sum"]
    poolfile.location_id = location_id

    session.add(poolfile)
    # Flush to get a file id (NB: This is not a commit)
    session.flush()

    return poolfile

def add_dsc_to_db(u, filename, session):
    entry = u.pkg.files[filename]

    source = DBSource()
    source.source = u.pkg.dsc["source"]
    source.version = u.pkg.dsc["version"]      # NB: not files[file]["version"], that has no epoch
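    # e.g. (illustrative) the upload ships foo_1.2-1.dsc on disk while the
    # dsc's Version field may read "1:1.2-1"; only the latter carries the epoch.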
    source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
    source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
    source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    source.install_date = datetime.now().date()

    dsc_component = entry["component"]
    dsc_location_id = entry["location id"]

    source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")

    # Set up a new poolfile if necessary
    if not entry.has_key("files id") or not entry["files id"]:
        filename = entry["pool name"] + filename
        poolfile = add_poolfile(filename, entry, dsc_location_id, session)
        entry["files id"] = poolfile.file_id

    source.poolfile_id = entry["files id"]
    session.add(source)
    session.flush()

    for suite_name in u.pkg.changes["distribution"].keys():
        sa = SrcAssociation()
        sa.source_id = source.source_id
        sa.suite_id = get_suite(suite_name).suite_id
        session.add(sa)

    # Add the source files to the DB (files and dsc_files)
    dscfile = DSCFile()
    dscfile.source_id = source.source_id
    dscfile.poolfile_id = entry["files id"]
    session.add(dscfile)

    for dsc_file, dentry in u.pkg.dsc_files.items():
        df = DSCFile()
        df.source_id = source.source_id

        # If the .orig.tar.gz is already in the pool, its
        # files id is stored in dsc_files by check_dsc().
        files_id = dentry.get("files id", None)

        # Find the entry in the files hash
        # TODO: Bail out here properly
        dfentry = None
        for f, e in u.pkg.files.items():
            if f == dsc_file:
                dfentry = e

        if files_id is None:
            filename = dfentry["pool name"] + dsc_file

            (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
            # FIXME: needs to check for -1/-2 and or handle exception
            if found and obj is not None:
                files_id = obj.file_id

        # If still not found, add it
        if files_id is None:
            # HACK: Force sha1sum etc into dentry
            dentry["sha1sum"] = dfentry["sha1sum"]
            dentry["sha256sum"] = dfentry["sha256sum"]
            poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
            files_id = poolfile.file_id

        df.poolfile_id = files_id
        session.add(df)

    # Add the src_uploaders to the DB
    uploader_ids = [source.maintainer_id]
    if u.pkg.dsc.has_key("uploaders"):
        for up in u.pkg.dsc["uploaders"].split(","):
            up = up.strip()
            uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)

    added_ids = {}
    for up in uploader_ids:
        if added_ids.has_key(up):
            utils.warn("Already saw uploader %s for source %s" % (up, source.source))
            continue

        added_ids[up] = 1

        su = SrcUploader()
        su.maintainer_id = up
        su.source_id = source.source_id
        session.add(su)

    return dsc_component, dsc_location_id

def add_deb_to_db(u, filename, session):
    """
    Contrary to what you might expect, this routine deals with both
    debs and udebs. That info is in 'dbtype', whilst 'type' is
    'deb' for both of them.
    """
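    # Illustrative example (assumption, not from a real upload): a file such as
    # foo-udeb_1.2-1_amd64.udeb arrives with entry["type"] == "deb" but
    # entry["dbtype"] == "udeb", and it is dbtype that is stored as
    # bin.binarytype below.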
    entry = u.pkg.files[filename]

    bin = DBBinary()
    bin.package = entry["package"]
    bin.version = entry["version"]
    bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
    bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    bin.arch_id = get_architecture(entry["architecture"], session).arch_id
    bin.binarytype = entry["dbtype"]

    filename = entry["pool name"] + filename
    fullpath = os.path.join(cnf["Dir::Pool"], filename)
    if not entry.get("location id", None):
        entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], utils.where_am_i(), session).location_id

    if not entry.get("files id", None):
        poolfile = add_poolfile(filename, entry, entry["location id"], session)
        entry["files id"] = poolfile.file_id

    bin.poolfile_id = entry["files id"]

    bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
    if len(bin_sources) != 1:
        raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
                                  (bin.package, bin.version, bin.architecture.arch_string,
                                   filename, bin.binarytype, u.pkg.changes["fingerprint"])

    bin.source_id = bin_sources[0].source_id

    # Add and flush object so it has an ID
    session.add(bin)
    session.flush()

    # Add BinAssociations
    for suite_name in u.pkg.changes["distribution"].keys():
        ba = BinAssociation()
        ba.binary_id = bin.binary_id
        ba.suite_id = get_suite(suite_name).suite_id
        session.add(ba)

    # Deal with the contents of the package
    contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
    if not contents:
        print "REJECT\nCould not determine contents of package %s" % bin.package
        raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)

def install(u, session, log_urgency=True):
    summarystats = SummaryStats()

    Logger.log(["installing changes", u.pkg.changes_file])

    # Ensure that we have all the hashes we need below.
    u.ensure_hashes()
    if len(u.rejects) > 0:
        # There were errors. Print them and SKIP the changes.
        for msg in u.rejects:
            utils.warn(msg)
        return

    # Add the .dsc file to the DB first
    for newfile, entry in u.pkg.files.items():
        if entry["type"] == "dsc":
            dsc_component, dsc_location_id = add_dsc_to_db(u, newfile, session)

    # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
    for newfile, entry in u.pkg.files.items():
        if entry["type"] == "deb":
            add_deb_to_db(u, newfile, session)

    # If this is a sourceful diff only upload that is moving
    # cross-component we need to copy the .orig.tar.gz into the new
    # component too for the same reasons as above.
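    # For example (illustrative): a package moving from contrib to main via a
    # sourceful upload whose .orig.tar.gz already lives under pool/contrib/
    # needs that tarball copied under pool/main/ as well.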
    if u.pkg.changes["architecture"].has_key("source") and u.pkg.orig_tar_id and \
       u.pkg.orig_tar_location != dsc_location_id:

        oldf = get_poolfile_by_id(u.pkg.orig_tar_id, session)
        old_filename = os.path.join(oldf.location.path, oldf.filename)
        old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
                   'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}

        new_filename = os.path.join(utils.poolify(u.pkg.changes["source"], dsc_component), os.path.basename(old_filename))

        # TODO: Care about size/md5sum collisions etc
        (found, newf) = check_poolfile(new_filename, oldf.filesize, oldf.md5sum, dsc_location_id, session)

        if newf is None:
            utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
            newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)

        # TODO: Check that there's only 1 here
        source = get_sources_from_name(u.pkg.changes["source"], u.pkg.changes["version"])[0]
        dscf = get_dscfiles(source_id=source.source_id, poolfile_id=u.pkg.orig_tar_id, session=session)[0]
        dscf.poolfile_id = newf.file_id

    # Install the files into the pool
    for newfile, entry in u.pkg.files.items():
        destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
        utils.move(newfile, destination)
        Logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
        summarystats.accept_bytes += float(entry["size"])

    # Copy the .changes file across for suites which need it.
    copy_changes = {}
    copy_dot_dak = {}
    for suite_name in u.pkg.changes["distribution"].keys():
        if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
            copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""
        # and the .dak file...
        if cnf.has_key("Suite::%s::CopyDotDak" % (suite_name)):
            copy_dot_dak[cnf["Suite::%s::CopyDotDak" % (suite_name)]] = ""
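
    # Illustrative configuration for the keys consulted above (assumed values,
    # not taken from a real dak.conf):
    #   Suite::stable::CopyChanges "dists/stable/";
    #   Suite::stable::CopyDotDak "/srv/queue/stable/";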

    for dest in copy_changes.keys():
        utils.copy(u.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))

    for dest in copy_dot_dak.keys():
        utils.copy(u.pkg.changes_file[:-8]+".dak", dest)

    # We're done - commit the database changes
    session.commit()

    # Move the .changes into the 'done' directory
    utils.move(u.pkg.changes_file,
               os.path.join(cnf["Dir::Queue::Done"], os.path.basename(u.pkg.changes_file)))

    # Remove the .dak file
    os.unlink(u.pkg.changes_file[:-8] + ".dak")

    if u.pkg.changes["architecture"].has_key("source") and log_urgency:
        UrgencyLog().log(u.pkg.dsc["source"], u.pkg.dsc["version"], u.pkg.changes["urgency"])
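        # This records the source name, version and urgency of the upload,
        # e.g. (illustrative) source "foo", version "1:1.2-1", urgency "high".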

    # Our SQL session will automatically start a new transaction after
    # the previous commit.

    # Undo the work done in queue.py(accept) to help auto-building
    now_date = datetime.now()

    for suite_name in u.pkg.changes["distribution"].keys():
        if suite_name not in cnf.ValueList("Dinstall::QueueBuildSuites"):
            continue

        suite = get_suite(suite_name, session)
        dest_dir = cnf["Dir::QueueBuild"]

        if cnf.FindB("Dinstall::SecurityQueueBuild"):
            dest_dir = os.path.join(dest_dir, suite_name)
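
        # Illustrative configuration (assumed values, not from a real dak.conf):
        #   Dinstall::QueueBuildSuites { "unstable"; };
        #   Dir::QueueBuild "/srv/queue/buildd/";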

        for newfile, entry in u.pkg.files.items():
            dest = os.path.join(dest_dir, newfile)

            qb = get_queue_build(dest, suite.suite_id, session)

            # Remove it from the list of packages for later processing by apt-ftparchive
            if qb:
                qb.last_used = now_date
                qb.in_queue = False
                session.add(qb)

            if not cnf.FindB("Dinstall::SecurityQueueBuild"):
                # Update the symlink to point to the new location in the pool
                pool_location = utils.poolify(u.pkg.changes["source"], entry["component"])
                src = os.path.join(cnf["Dir::Pool"], pool_location, os.path.basename(newfile))
                if os.path.islink(dest):
                    os.unlink(dest)
                os.symlink(src, dest)
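                # e.g. (illustrative paths) the queue entry
                # <dest_dir>/foo_1.2-1_amd64.deb becomes a symlink to
                # pool/main/f/foo/foo_1.2-1_amd64.deb now that the file has
                # been installed into the pool.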

        # Update last_used on any non-upload .orig.tar.gz symlink
        if u.pkg.orig_tar_id:
            # Determine the .orig.tar.gz file name
            for dsc_file in u.pkg.dsc_files.keys():
                if dsc_file.endswith(".orig.tar.gz"):
                    u.pkg.orig_tar_gz = os.path.join(dest_dir, dsc_file)

            # Remove it from the list of packages for later processing by apt-ftparchive
            qb = get_queue_build(u.pkg.orig_tar_gz, suite.suite_id, session)
            if qb:
                qb.last_used = now_date
                qb.in_queue = False
                session.add(qb)

    session.commit()

    summarystats.accept_count += 1

################################################################################

def stable_install(u, session, summary, short_summary, fromsuite_name="proposed-updates"):
    summarystats = SummaryStats()

    fromsuite_name = fromsuite_name.lower()
    tosuite_name = "Stable"
    if fromsuite_name == "oldstable-proposed-updates":
        tosuite_name = "OldStable"

    print "Installing from %s to %s." % (fromsuite_name, tosuite_name)

    fromsuite = get_suite(fromsuite_name)
    tosuite = get_suite(tosuite_name)

    # Add the source to stable (and remove it from proposed-updates)
    for newfile, entry in u.pkg.files.items():
        if entry["type"] == "dsc":
            package = u.pkg.dsc["source"]
            # NB: not files[file]["version"], that has no epoch
            version = u.pkg.dsc["version"]

            source = get_sources_from_name(package, version, session)
            if len(source) < 1:
                utils.fubar("[INTERNAL ERROR] couldn't find '%s' (%s) in source table." % (package, version))
            source = source[0]

            # Remove from old suite
            old = session.query(SrcAssociation).filter_by(source_id = source.source_id)
            old = old.filter_by(suite_id = fromsuite.suite_id)
            old.delete()

            # Add to new suite
            new = SrcAssociation()
            new.source_id = source.source_id
            new.suite_id = tosuite.suite_id
            session.add(new)

    # Add the binaries to stable (and remove it/them from proposed-updates)
    for newfile, entry in u.pkg.files.items():
        if entry["type"] == "deb":
            package = entry["package"]
            version = entry["version"]
            architecture = entry["architecture"]

            binary = get_binaries_from_name(package, version, [architecture, 'all'])
            if len(binary) < 1:
                utils.fubar("[INTERNAL ERROR] couldn't find '%s' (%s for %s architecture) in binaries table." % (package, version, architecture))
            binary = binary[0]

            # Remove from old suite
            old = session.query(BinAssociation).filter_by(binary_id = binary.binary_id)
            old = old.filter_by(suite_id = fromsuite.suite_id)
            old.delete()

            # Add to new suite
            new = BinAssociation()
            new.binary_id = binary.binary_id
            new.suite_id = tosuite.suite_id
            session.add(new)

    session.commit()

    utils.move(u.pkg.changes_file,
               os.path.join(cnf["Dir::Morgue"], 'process-accepted', os.path.basename(u.pkg.changes_file)))

    ## Update the Stable ChangeLog file
    # TODO: URGH - Use a proper tmp file
    new_changelog_filename = cnf["Dir::Root"] + cnf["Suite::%s::ChangeLogBase" % (tosuite.suite_name)] + ".ChangeLog"
    changelog_filename = cnf["Dir::Root"] + cnf["Suite::%s::ChangeLogBase" % (tosuite.suite_name)] + "ChangeLog"
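    # i.e. the new log is assembled in a dot-file next to the real ChangeLog
    # and only moved over it once it has been written out completely (below).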
    if os.path.exists(new_changelog_filename):
        os.unlink(new_changelog_filename)

    new_changelog = utils.open_file(new_changelog_filename, 'w')
    for newfile, entry in u.pkg.files.items():
        if entry["type"] == "deb":
            new_changelog.write("%s/%s/binary-%s/%s\n" % (tosuite.suite_name,
                                                          entry["component"],
                                                          entry["architecture"],
                                                          newfile))
        elif re_issource.match(newfile):
            new_changelog.write("%s/%s/source/%s\n" % (tosuite.suite_name,
                                                       entry["component"],
                                                       newfile))
        else:
            new_changelog.write("%s\n" % (newfile))

    chop_changes = re_fdnic.sub("\n", u.pkg.changes["changes"])
    new_changelog.write(chop_changes + '\n\n')
    if os.access(changelog_filename, os.R_OK) != 0:
        changelog = utils.open_file(changelog_filename)
        new_changelog.write(changelog.read())

    new_changelog.close()

    if os.access(changelog_filename, os.R_OK) != 0:
        os.unlink(changelog_filename)
    utils.move(new_changelog_filename, changelog_filename)

    summarystats.accept_count += 1

    if not Options["No-Mail"] and u.pkg.changes["architecture"].has_key("source"):
        u.Subst["__SUITE__"] = " into %s" % (tosuite)
        u.Subst["__SUMMARY__"] = summary
        u.Subst["__BCC__"] = "X-DAK: dak process-accepted\nX-Katie: $Revision: 1.18 $"

        if cnf.has_key("Dinstall::Bcc"):
            u.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])

        template = os.path.join(cnf["Dir::Templates"], 'process-accepted.install')

        mail_message = utils.TemplateSubst(u.Subst, template)
        utils.send_mail(mail_message)
        u.announce(short_summary, True)

    # Finally remove the .dak file
    dot_dak_file = os.path.join(cnf["Suite::%s::CopyDotDak" % (fromsuite.suite_name)],
                                os.path.basename(u.pkg.changes_file[:-8]+".dak"))
    os.unlink(dot_dak_file)

################################################################################

def process_it(changes_file, stable_queue, log_urgency, session):
    u = Upload()

    overwrite_checks = True

    # Absolutize the filename to avoid the requirement of being in the
    # same directory as the .changes file.
    cfile = os.path.abspath(changes_file)

    # And since handling of installs to stable munges with the CWD,
    # save and restore it.
    u.prevdir = os.getcwd()

    if stable_queue:
        old = cfile
        cfile = os.path.basename(old)
        os.chdir(cnf["Suite::%s::CopyDotDak" % (stable_queue)])
        # overwrite_checks should not be performed if installing to stable
        overwrite_checks = False

    u.pkg.load_dot_dak(cfile)

    if stable_queue:
        u.pkg.changes_file = old

    u.accepted_checks(overwrite_checks, session)
    action(u, stable_queue, log_urgency, session)

    # Restore the CWD saved above
    os.chdir(u.prevdir)

###############################################################################

def main():
    global Logger

    summarystats = SummaryStats()
    changes_files = init()

    log_urgency = False
    stable_queue = None

    # -n/--dry-run invalidates some other options which would involve things happening
    if Options["No-Action"]:
        Options["Automatic"] = ""

    # Check that we aren't going to clash with the daily cron job
    if not Options["No-Action"] and os.path.exists("%s/Archive_Maintenance_In_Progress" % (cnf["Dir::Root"])) and not Options["No-Lock"]:
        utils.fubar("Archive maintenance in progress. Try again later.")

    # If running from within proposed-updates, assume an install to stable
    if os.getenv('PWD').find('oldstable-proposed-updates') != -1:
        stable_queue = "Oldstable-Proposed-Updates"
    elif os.getenv('PWD').find('proposed-updates') != -1:
        stable_queue = "Proposed-Updates"

    # Obtain lock if not in no-action mode and initialize the log
    if not Options["No-Action"]:
        lock_fd = os.open(cnf["Dinstall::LockFile"], os.O_RDWR | os.O_CREAT)
        try:
            fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
        except IOError, e:
            if errno.errorcode[e.errno] == 'EACCES' or errno.errorcode[e.errno] == 'EAGAIN':
                utils.fubar("Couldn't obtain lock; assuming another 'dak process-accepted' is already running.")
            else:
                raise
        Logger = daklog.Logger(cnf, "process-accepted")

    if not stable_queue and cnf.get("Dir::UrgencyLog"):
        # Initialise UrgencyLog()
        log_urgency = True
        UrgencyLog()

    # Sort the .changes files so that we process sourceful ones first
    changes_files.sort(utils.changes_compare)

    # Process the changes files
    for changes_file in changes_files:
        print "\n" + changes_file
        session = DBConn().session()
        process_it(changes_file, stable_queue, log_urgency, session)

    if summarystats.accept_count:
        sets = "set"
        if summarystats.accept_count > 1:
            sets = "sets"
        sys.stderr.write("Installed %d package %s, %s.\n" % (summarystats.accept_count, sets,
                                                             utils.size_type(int(summarystats.accept_bytes))))
        Logger.log(["total", summarystats.accept_count, summarystats.accept_bytes])

    if not Options["No-Action"]:
        if log_urgency:
            UrgencyLog().close()
        Logger.close()

###############################################################################

if __name__ == '__main__':
    main()