3 # Installs Debian packages from queue/accepted into the pool
4 # Copyright (C) 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 2 of the License, or
9 # (at your option) any later version.
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 ###############################################################################
22 # Cartman: "I'm trying to make the best of a bad situation, I don't
23 # need to hear crap from a bunch of hippy freaks living in
24 # denial. Screw you guys, I'm going home."
26 # Kyle: "But Cartman, we're trying to..."
28 # Cartman: "uhh.. screw you guys... home."
30 ###############################################################################
32 import errno, fcntl, os, sys, time, re
34 from daklib import database
35 from daklib import logging
36 from daklib import queue
37 from daklib import utils
38 from daklib.dak_exceptions import *
40 ###############################################################################
# Non-zero when this run installs from proposed-updates into stable;
# set in main() when the CWD contains 'proposed-updates'.
installing_to_stable = 0
62 ###############################################################################
64 # FIXME: this should go away to some Debian specific file
65 # FIXME: should die if file already exists
68 "Urgency Logger object"
69 def __init__ (self, Cnf):
70 "Initialize a new Urgency Logger object"
72 self.timestamp = time.strftime("%Y%m%d%H%M%S")
73 # Create the log directory if it doesn't exist
74 self.log_dir = Cnf["Dir::UrgencyLog"]
75 if not os.path.exists(self.log_dir) or not os.access(self.log_dir, os.W_OK):
76 utils.warn("UrgencyLog directory %s does not exist or is not writeable, using /srv/ftp.debian.org/tmp/ instead" % (self.log_dir))
77 self.log_dir = '/srv/ftp.debian.org/tmp/'
79 self.log_filename = "%s/.install-urgencies-%s.new" % (self.log_dir, self.timestamp)
80 self.log_file = utils.open_file(self.log_filename, 'w')
83 def log (self, source, version, urgency):
85 self.log_file.write(" ".join([source, version, urgency])+'\n')
        "Close a Logger object"
        # NOTE(review): lines appear to be missing from this excerpt --
        # the "def close (self):" header, a flush/close of self.log_file,
        # and an if/else choosing between the two branches below.  As
        # written, move() followed by unlink() of the same path would
        # always raise OSError; confirm against upstream dak before use.
        new_filename = "%s/install-urgencies-%s" % (self.log_dir, self.timestamp)
        utils.move(self.log_filename, new_filename)
        os.unlink(self.log_filename)
99 ###############################################################################
def reject (str, prefix="Rejected: "):
    "Append one prefixed line to the accumulated global rejection message."
    global reject_message
    line = prefix + str + "\n"
    reject_message += line
# Recheck anything that relies on the database; since that's not
# frozen between accept and our run time.
# NOTE(review): the enclosing "def check():" header and several body
# statements (the "continue"s, and what looks like a
# "propogate[suite] = 1" / "else:" pair) are missing from this excerpt;
# the indentation below is reconstructed -- confirm against upstream.
    for file in files.keys():
        # The .orig.tar.gz can disappear out from under us if it's a
        # duplicate of one in the archive.
        if not files.has_key(file):
            # (missing body -- presumably "continue")

        # Check that the source still exists
        if files[file]["type"] == "deb":
            source_version = files[file]["source version"]
            source_package = files[file]["source package"]
            if not changes["architecture"].has_key("source") \
               and not Upload.source_exists(source_package, source_version, changes["distribution"].keys()):
                reject("no source found for %s %s (%s)." % (source_package, source_version, file))

        # Version and file overwrite checks
        if not installing_to_stable:
            if files[file]["type"] == "deb":
                reject(Upload.check_binary_against_db(file), "")
            elif files[file]["type"] == "dsc":
                reject(Upload.check_source_against_db(file), "")
                (reject_msg, is_in_incoming) = Upload.check_dsc_against_db(file)
                reject(reject_msg, "")

        # propogate in the case it is in the override tables:
        if changes.has_key("propdistribution"):
            for suite in changes["propdistribution"].keys():
                if Upload.in_override_p(files[file]["package"], files[file]["component"], suite, files[file].get("dbtype",""), file):
                    # (missing body -- presumably marks suite in propogate)
                nopropogate[suite] = 1

    for suite in propogate.keys():
        if suite in nopropogate:
            # (missing body -- presumably "continue")
        changes["distribution"][suite] = 1

    for file in files.keys():
        # Check the package is still in the override tables
        for suite in changes["distribution"].keys():
            if not Upload.in_override_p(files[file]["package"], files[file]["component"], suite, files[file].get("dbtype",""), file):
                reject("%s is NEW for %s." % (file, suite))
153 ###############################################################################
# NOTE(review): the "def init():" header, the Options["Help"] -> usage()
# check, the dsc/pkg/Subst assignments and the trailing
# "return changes_files" appear to be missing from this excerpt.
    # Initialise module-wide state from configuration and command line.
    global Cnf, Options, Upload, projectB, changes, dsc, dsc_files, files, pkg, Subst

    Cnf = utils.get_conf()

    Arguments = [('a',"automatic","Dinstall::Options::Automatic"),
                 ('h',"help","Dinstall::Options::Help"),
                 ('n',"no-action","Dinstall::Options::No-Action"),
                 ('p',"no-lock", "Dinstall::Options::No-Lock"),
                 ('s',"no-mail", "Dinstall::Options::No-Mail")]

    # Guarantee the option keys exist so later lookups cannot fail.
    for i in ["automatic", "help", "no-action", "no-lock", "no-mail", "version"]:
        if not Cnf.has_key("Dinstall::Options::%s" % (i)):
            Cnf["Dinstall::Options::%s" % (i)] = ""

    changes_files = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
    Options = Cnf.SubTree("Dinstall::Options")

    # Shared queue.Upload object; the module-level names below alias
    # into its pkg structure.
    Upload = queue.Upload(Cnf)
    projectB = Upload.projectB

    changes = Upload.pkg.changes
    dsc_files = Upload.pkg.dsc_files
    files = Upload.pkg.files
188 ###############################################################################
190 def usage (exit_code=0):
191 print """Usage: dak process-accepted [OPTION]... [CHANGES]...
192 -a, --automatic automatic run
193 -h, --help show this help and exit.
194 -n, --no-action don't do anything
195 -p, --no-lock don't check lockfile !! for cron.daily only !!
196 -s, --no-mail don't send any mail
197 -V, --version display the version number and exit"""
200 ###############################################################################
# NOTE(review): the "def action():" header, the automatic-answer
# assignments, the "else:" introducing the INSTALL branch, the
# default-answer fallback inside the while loop, and the final dispatch
# on the answer are missing from this excerpt; indentation below is
# reconstructed -- confirm against upstream dak.
    (summary, short_summary) = Upload.build_summaries()

    # "XXX" never appears in a prompt, so interaction is forced unless
    # an automatic answer is chosen.
    (prompt, answer) = ("", "XXX")
    if Options["No-Action"] or Options["Automatic"]:
        # (missing body -- presumably sets a default answer)

    if reject_message.find("Rejected") != -1:
        print "REJECT\n" + reject_message,
        prompt = "[R]eject, Skip, Quit ?"
        if Options["Automatic"]:
            # (missing body -- presumably answer = 'R')

        print "INSTALL to " + ", ".join(changes["distribution"].keys())
        print reject_message + summary,
        prompt = "[I]nstall, Skip, Quit ?"
        if Options["Automatic"]:
            # (missing body -- presumably answer = 'I')

    while prompt.find(answer) == -1:
        answer = utils.our_raw_input(prompt)
        m = queue.re_default_answer.match(prompt)
        # (missing -- presumably falls back to the prompt's default answer)
        answer = answer[:1].upper()

    # (missing -- presumably dispatch on the answer; only the install
    # branch survives in this excerpt)
    if not installing_to_stable:
        # (missing body -- presumably install())
        stable_install(summary, short_summary)
238 ###############################################################################
240 # Our reject is not really a reject, but an unaccept, but since a) the
241 # code for that is non-trivial (reopen bugs, unannounce etc.), b) this
# should be extremely rare, for now we'll go with whining at our admin
# NOTE(review): the "def do_reject():" header and an os.close(fd) after
# the os.write() appear to be missing from this excerpt.
    # Send the "unaccept" mail and record the rejection reason on disk.
    Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
    Subst["__REJECT_MESSAGE__"] = reject_message
    Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
    reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-accepted.unaccept")

    # Write the rejection email out as the <foo>.reason file
    reason_filename = os.path.basename(pkg.changes_file[:-8]) + ".reason"
    reject_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename
    # If we fail here someone is probably trying to exploit the race
    # so let's just raise an exception ...
    if os.path.exists(reject_filename):
        os.unlink(reject_filename)
    # O_EXCL makes a file planted between the unlink and the open fail loudly.
    fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
    os.write(fd, reject_mail_message)

    utils.send_mail(reject_mail_message)
    Logger.log(["unaccepted", pkg.changes_file])
265 ###############################################################################
# NOTE(review): the "def install():" header and a number of body lines
# are missing from this excerpt (guard conditions, dict initialisers,
# loop headers, the final install_count update); inline notes mark the
# apparent gaps and the indentation is reconstructed -- confirm against
# upstream dak before relying on this text.
    global install_count, install_bytes

    Logger.log(["installing changes",pkg.changes_file])

    # Begin a transaction; if we bomb out anywhere between here and the COMMIT WORK below, the DB will not be changed.
    projectB.query("BEGIN WORK")

    # Check the hashes are all present: HACK: Can go away once all dak files
    # are known to be newer than the shasum changes
    utils.ensure_hashes(changes, dsc, files, dsc_files)

    # Add the .dsc file to the DB
    for file in files.keys():
        if files[file]["type"] == "dsc":
            package = dsc["source"]
            version = dsc["version"] # NB: not files[file]["version"], that has no epoch
            maintainer = dsc["maintainer"]
            maintainer = maintainer.replace("'", "\\'")
            maintainer_id = database.get_or_set_maintainer_id(maintainer)
            changedby = changes["changed-by"]
            changedby = changedby.replace("'", "\\'")
            changedby_id = database.get_or_set_maintainer_id(changedby)
            fingerprint_id = database.get_or_set_fingerprint_id(dsc["fingerprint"])
            install_date = time.strftime("%Y-%m-%d")
            filename = files[file]["pool name"] + file
            dsc_component = files[file]["component"]
            dsc_location_id = files[file]["location id"]
            if not files[file].has_key("files id") or not files[file]["files id"]:
                files[file]["files id"] = database.set_files_id (filename, files[file]["size"], files[file]["md5sum"], files[file]["sha1sum"], files[file]["sha256sum"], dsc_location_id)
            projectB.query("INSERT INTO source (source, version, maintainer, changedby, file, install_date, sig_fpr) VALUES ('%s', '%s', %d, %d, %d, '%s', %s)"
                           % (package, version, maintainer_id, changedby_id, files[file]["files id"], install_date, fingerprint_id))

            for suite in changes["distribution"].keys():
                suite_id = database.get_suite_id(suite)
                projectB.query("INSERT INTO src_associations (suite, source) VALUES (%d, currval('source_id_seq'))" % (suite_id))

            # Add the source files to the DB (files and dsc_files)
            projectB.query("INSERT INTO dsc_files (source, file) VALUES (currval('source_id_seq'), %d)" % (files[file]["files id"]))
            for dsc_file in dsc_files.keys():
                filename = files[file]["pool name"] + dsc_file
                # If the .orig.tar.gz is already in the pool, its
                # files id is stored in dsc_files by check_dsc().
                files_id = dsc_files[dsc_file].get("files id", None)
                # (missing guard -- presumably "if files_id == None:")
                files_id = database.get_files_id(filename, dsc_files[dsc_file]["size"], dsc_files[dsc_file]["md5sum"], dsc_location_id)
                # FIXME: needs to check for -1/-2 and or handle exception
                # (missing guard -- presumably "if files_id == None:")
                files_id = database.set_files_id (filename, dsc_files[dsc_file]["size"], dsc_files[dsc_file]["md5sum"], files[file]["sha1sum"], files[file]["sha256sum"], dsc_location_id)
                projectB.query("INSERT INTO dsc_files (source, file) VALUES (currval('source_id_seq'), %d)" % (files_id))

            # Add the src_uploaders to the DB
            if dsc.get("dm-upload-allowed", "no") == "yes":
                uploader_ids = [maintainer_id]
                if dsc.has_key("uploaders"):
                    for u in dsc["uploaders"].split(","):
                        u = u.replace("'", "\\'")
                        # (missing line -- the next line is the tail of an
                        # "uploader_ids.append(" call)
                            database.get_or_set_maintainer_id(u))
                for u in uploader_ids:
                    if added_ids.has_key(u):
                        utils.warn("Already saw uploader %s for source %s" % (u, package))
                        # (missing -- presumably "continue" plus marking
                        # u in added_ids)
                    projectB.query("INSERT INTO src_uploaders (source, maintainer) VALUES (currval('source_id_seq'), %d)" % (u))

    # Add the .deb files to the DB
    for file in files.keys():
        if files[file]["type"] == "deb":
            package = files[file]["package"]
            version = files[file]["version"]
            maintainer = files[file]["maintainer"]
            maintainer = maintainer.replace("'", "\\'")
            maintainer_id = database.get_or_set_maintainer_id(maintainer)
            fingerprint_id = database.get_or_set_fingerprint_id(changes["fingerprint"])
            architecture = files[file]["architecture"]
            architecture_id = database.get_architecture_id (architecture)
            type = files[file]["dbtype"]
            source = files[file]["source package"]
            source_version = files[file]["source version"]
            filename = files[file]["pool name"] + file
            if not files[file].has_key("location id") or not files[file]["location id"]:
                files[file]["location id"] = database.get_location_id(Cnf["Dir::Pool"],files[file]["component"],utils.where_am_i())
            if not files[file].has_key("files id") or not files[file]["files id"]:
                files[file]["files id"] = database.set_files_id (filename, files[file]["size"], files[file]["md5sum"], files[file]["location id"])
            source_id = database.get_source_id (source, source_version)
            # (missing guard -- presumably an "if source_id:" with an
            # "else:" for the raise below; note the raise references
            # sig_fpr, which is not defined in this scope -- verify)
            projectB.query("INSERT INTO binaries (package, version, maintainer, source, architecture, file, type, sig_fpr) VALUES ('%s', '%s', %d, %d, %d, %d, '%s', %d)"
                           % (package, version, maintainer_id, source_id, architecture_id, files[file]["files id"], type, fingerprint_id))
            raise NoSourceFieldError, "Unable to find a source id for %s (%s), %s, file %s, type %s, signed by %s" % (package, version, architecture, file, type, sig_fpr)
            for suite in changes["distribution"].keys():
                suite_id = database.get_suite_id(suite)
                projectB.query("INSERT INTO bin_associations (suite, bin) VALUES (%d, currval('binaries_id_seq'))" % (suite_id))

    # If the .orig.tar.gz is in a legacy directory we need to poolify
    # it, so that apt-get source (and anything else that goes by the
    # "Directory:" field in the Sources.gz file) works.
    orig_tar_id = Upload.pkg.orig_tar_id
    orig_tar_location = Upload.pkg.orig_tar_location
    legacy_source_untouchable = Upload.pkg.legacy_source_untouchable
    if orig_tar_id and orig_tar_location == "legacy":
        q = projectB.query("SELECT DISTINCT ON (f.id) l.path, f.filename, f.id as files_id, df.source, df.id as dsc_files_id, f.size, f.md5sum FROM files f, dsc_files df, location l WHERE df.source IN (SELECT source FROM dsc_files WHERE file = %s) AND f.id = df.file AND l.id = f.location AND (l.type = 'legacy' OR l.type = 'legacy-mixed')" % (orig_tar_id))
        # (missing loop header -- "qid" below is otherwise undefined)
        # Is this an old upload superseded by a newer -sa upload? (See check_dsc() for details)
        if legacy_source_untouchable.has_key(qid["files_id"]):
            # (missing body -- presumably "continue")
        # First move the files to the new location
        legacy_filename = qid["path"] + qid["filename"]
        pool_location = utils.poolify (changes["source"], files[file]["component"])
        pool_filename = pool_location + os.path.basename(qid["filename"])
        destination = Cnf["Dir::Pool"] + pool_location
        utils.move(legacy_filename, destination)
        # Then Update the DB's files table
        q = projectB.query("UPDATE files SET filename = '%s', location = '%s' WHERE id = '%s'" % (pool_filename, dsc_location_id, qid["files_id"]))

    # If this is a sourceful diff only upload that is moving non-legacy
    # cross-component we need to copy the .orig.tar.gz into the new
    # component too for the same reasons as above.
    if changes["architecture"].has_key("source") and orig_tar_id and \
       orig_tar_location != "legacy" and orig_tar_location != dsc_location_id:
        q = projectB.query("SELECT l.path, f.filename, f.size, f.md5sum, f.sha1sum, f.sha256sum FROM files f, location l WHERE f.id = %s AND f.location = l.id" % (orig_tar_id))
        ql = q.getresult()[0]
        old_filename = ql[0] + ql[1]
        # (missing -- presumably file_size/file_md5sum/file_sha1sum
        # assignments from ql, used below)
        file_sha256sum = ql[5]
        new_filename = utils.poolify(changes["source"], dsc_component) + os.path.basename(old_filename)
        new_files_id = database.get_files_id(new_filename, file_size, file_md5sum, dsc_location_id)
        if new_files_id == None:
            utils.copy(old_filename, Cnf["Dir::Pool"] + new_filename)
            new_files_id = database.set_files_id(new_filename, file_size, file_md5sum, file_sha1sum, file_sha256sum, dsc_location_id)
            projectB.query("UPDATE dsc_files SET file = %s WHERE source = %s AND file = %s" % (new_files_id, source_id, orig_tar_id))

    # Install the files into the pool
    for file in files.keys():
        destination = Cnf["Dir::Pool"] + files[file]["pool name"] + file
        utils.move(file, destination)
        Logger.log(["installed", file, files[file]["type"], files[file]["size"], files[file]["architecture"]])
        install_bytes += float(files[file]["size"])

    # Copy the .changes file across for suite which need it.
    # (missing -- presumably "copy_changes = {}" / "copy_dot_dak = {}")
    for suite in changes["distribution"].keys():
        if Cnf.has_key("Suite::%s::CopyChanges" % (suite)):
            copy_changes[Cnf["Suite::%s::CopyChanges" % (suite)]] = ""
        # and the .dak file...
        if Cnf.has_key("Suite::%s::CopyDotDak" % (suite)):
            copy_dot_dak[Cnf["Suite::%s::CopyDotDak" % (suite)]] = ""
    for dest in copy_changes.keys():
        utils.copy(pkg.changes_file, Cnf["Dir::Root"] + dest)
    for dest in copy_dot_dak.keys():
        utils.copy(Upload.pkg.changes_file[:-8]+".dak", dest)

    projectB.query("COMMIT WORK")

    # Move the .changes into the 'done' directory
    utils.move (pkg.changes_file,
                os.path.join(Cnf["Dir::Queue::Done"], os.path.basename(pkg.changes_file)))

    # Remove the .dak file
    os.unlink(Upload.pkg.changes_file[:-8]+".dak")

    if changes["architecture"].has_key("source") and Urgency_Logger:
        Urgency_Logger.log(dsc["source"], dsc["version"], changes["urgency"])

    # Undo the work done in queue.py(accept) to help auto-building
    projectB.query("BEGIN WORK")
    for suite in changes["distribution"].keys():
        if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
            # (missing body -- presumably "continue")
        now_date = time.strftime("%Y-%m-%d %H:%M")
        suite_id = database.get_suite_id(suite)
        dest_dir = Cnf["Dir::QueueBuild"]
        if Cnf.FindB("Dinstall::SecurityQueueBuild"):
            dest_dir = os.path.join(dest_dir, suite)
        for file in files.keys():
            dest = os.path.join(dest_dir, file)
            # Remove it from the list of packages for later processing by apt-ftparchive
            projectB.query("UPDATE queue_build SET in_queue = 'f', last_used = '%s' WHERE filename = '%s' AND suite = %s" % (now_date, dest, suite_id))
            if not Cnf.FindB("Dinstall::SecurityQueueBuild"):
                # Update the symlink to point to the new location in the pool
                pool_location = utils.poolify (changes["source"], files[file]["component"])
                src = os.path.join(Cnf["Dir::Pool"], pool_location, os.path.basename(file))
                if os.path.islink(dest):
                    # (missing body -- presumably "os.unlink(dest)")
                os.symlink(src, dest)
        # Update last_used on any non-upload .orig.tar.gz symlink
        # (missing guard -- presumably "if orig_tar_id:")
        # Determine the .orig.tar.gz file name
        for dsc_file in dsc_files.keys():
            if dsc_file.endswith(".orig.tar.gz"):
                orig_tar_gz = os.path.join(dest_dir, dsc_file)
        # Remove it from the list of packages for later processing by apt-ftparchive
        projectB.query("UPDATE queue_build SET in_queue = 'f', last_used = '%s' WHERE filename = '%s' AND suite = %s" % (now_date, orig_tar_gz, suite_id))
    projectB.query("COMMIT WORK")
477 ################################################################################
def stable_install (summary, short_summary):
    # NOTE(review): this excerpt is missing several lines -- the
    # q.getresult()/empty-result checks before each fubar() call, the
    # extraction of source_id/binary_id from the query results, an
    # "else:" in the changelog loop, and (presumably) the final
    # install_count update.  Indentation is reconstructed -- confirm
    # against upstream dak before relying on this text.
    print "Installing to stable."

    # Begin a transaction; if we bomb out anywhere between here and
    # the COMMIT WORK below, the DB won't be changed.
    projectB.query("BEGIN WORK")

    # Add the source to stable (and remove it from proposed-updates)
    for file in files.keys():
        if files[file]["type"] == "dsc":
            package = dsc["source"]
            version = dsc["version"]; # NB: not files[file]["version"], that has no epoch
            q = projectB.query("SELECT id FROM source WHERE source = '%s' AND version = '%s'" % (package, version))
            # (missing -- presumably result check; source_id below is
            # otherwise undefined)
            utils.fubar("[INTERNAL ERROR] couldn't find '%s' (%s) in source table." % (package, version))
            suite_id = database.get_suite_id('proposed-updates')
            projectB.query("DELETE FROM src_associations WHERE suite = '%s' AND source = '%s'" % (suite_id, source_id))
            suite_id = database.get_suite_id('stable')
            projectB.query("INSERT INTO src_associations (suite, source) VALUES ('%s', '%s')" % (suite_id, source_id))

    # Add the binaries to stable (and remove it/them from proposed-updates)
    for file in files.keys():
        if files[file]["type"] == "deb":
            package = files[file]["package"]
            version = files[file]["version"]
            architecture = files[file]["architecture"]
            q = projectB.query("SELECT b.id FROM binaries b, architecture a WHERE b.package = '%s' AND b.version = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all') AND b.architecture = a.id" % (package, version, architecture))
            # (missing -- presumably result check; binary_id below is
            # otherwise undefined)
            utils.fubar("[INTERNAL ERROR] couldn't find '%s' (%s for %s architecture) in binaries table." % (package, version, architecture))
            suite_id = database.get_suite_id('proposed-updates')
            projectB.query("DELETE FROM bin_associations WHERE suite = '%s' AND bin = '%s'" % (suite_id, binary_id))
            suite_id = database.get_suite_id('stable')
            projectB.query("INSERT INTO bin_associations (suite, bin) VALUES ('%s', '%s')" % (suite_id, binary_id))

    projectB.query("COMMIT WORK")

    utils.move (pkg.changes_file, Cnf["Dir::Morgue"] + '/process-accepted/' + os.path.basename(pkg.changes_file))

    ## Update the Stable ChangeLog file
    new_changelog_filename = Cnf["Dir::Root"] + Cnf["Suite::Stable::ChangeLogBase"] + ".ChangeLog"
    changelog_filename = Cnf["Dir::Root"] + Cnf["Suite::Stable::ChangeLogBase"] + "ChangeLog"
    if os.path.exists(new_changelog_filename):
        os.unlink (new_changelog_filename)

    new_changelog = utils.open_file(new_changelog_filename, 'w')
    for file in files.keys():
        if files[file]["type"] == "deb":
            new_changelog.write("stable/%s/binary-%s/%s\n" % (files[file]["component"], files[file]["architecture"], file))
        elif utils.re_issource.match(file):
            new_changelog.write("stable/%s/source/%s\n" % (files[file]["component"], file))
        # (missing "else:" -- the next write presumably covers any other file)
            new_changelog.write("%s\n" % (file))
    chop_changes = queue.re_fdnic.sub("\n", changes["changes"])
    new_changelog.write(chop_changes + '\n\n')
    # Prepend the new entries to any existing ChangeLog content.
    if os.access(changelog_filename, os.R_OK) != 0:
        changelog = utils.open_file(changelog_filename)
        new_changelog.write(changelog.read())
    new_changelog.close()
    if os.access(changelog_filename, os.R_OK) != 0:
        os.unlink(changelog_filename)
    utils.move(new_changelog_filename, changelog_filename)

    if not Options["No-Mail"] and changes["architecture"].has_key("source"):
        Subst["__SUITE__"] = " into stable"
        Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-accepted.install")
        utils.send_mail(mail_message)
        Upload.announce(short_summary, 1)

    # Finally remove the .dak file
    dot_dak_file = os.path.join(Cnf["Suite::Proposed-Updates::CopyDotDak"], os.path.basename(Upload.pkg.changes_file[:-8]+".dak"))
    os.unlink(dot_dak_file)
561 ################################################################################
def process_it (changes_file):
    # NOTE(review): lines appear to be missing from this excerpt -- the
    # reject_message reset and the Upload variable-initialisation plus
    # the check()/action() calls that normally sit between
    # update_subst() and the final chdir.  Confirm against upstream.
    global reject_message

    # Absolutize the filename to avoid the requirement of being in the
    # same directory as the .changes file.
    pkg.changes_file = os.path.abspath(changes_file)

    # And since handling of installs to stable munges with the CWD
    # save and restore it.
    pkg.directory = os.getcwd()

    if installing_to_stable:
        old = Upload.pkg.changes_file
        Upload.pkg.changes_file = os.path.basename(old)
        os.chdir(Cnf["Suite::Proposed-Updates::CopyDotDak"])

    Upload.update_subst()

    if installing_to_stable:
        Upload.pkg.changes_file = old

    # Restore the original CWD for the next changes file.
    os.chdir(pkg.directory)
594 ###############################################################################
# NOTE(review): the "def main():" header, the try/except around
# fcntl.lockf() (the "e" referenced below comes from a lost
# "except IOError, e:"), the "else:" for the __BCC__ assignment, the
# "sets" pluralisation line, and the Logger.close() call are all
# missing from this excerpt; indentation is reconstructed.
    global projectB, Logger, Urgency_Logger, installing_to_stable

    changes_files = init()

    # -n/--dry-run invalidates some other options which would involve things happening
    if Options["No-Action"]:
        Options["Automatic"] = ""

    # Check that we aren't going to clash with the daily cron job
    if not Options["No-Action"] and os.path.exists("%s/Archive_Maintenance_In_Progress" % (Cnf["Dir::Root"])) and not Options["No-Lock"]:
        utils.fubar("Archive maintenance in progress. Try again later.")

    # If running from within proposed-updates; assume an install to stable
    if os.getcwd().find('proposed-updates') != -1:
        installing_to_stable = 1

    # Obtain lock if not in no-action mode and initialize the log
    if not Options["No-Action"]:
        lock_fd = os.open(Cnf["Dinstall::LockFile"], os.O_RDWR | os.O_CREAT)
        # (missing "try:" / "except IOError, e:" around this lockf call)
        fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
        if errno.errorcode[e.errno] == 'EACCES' or errno.errorcode[e.errno] == 'EAGAIN':
            utils.fubar("Couldn't obtain lock; assuming another 'dak process-accepted' is already running.")
        Logger = Upload.Logger = logging.Logger(Cnf, "process-accepted")
        # Urgency logging only makes sense for fresh installs, not
        # proposed-updates -> stable moves.
        if not installing_to_stable and Cnf.get("Dir::UrgencyLog"):
            Urgency_Logger = Urgency_Log(Cnf)

    # Initialize the substitution template mapping global
    bcc = "X-DAK: dak process-accepted\nX-Katie: $Revision: 1.18 $"
    if Cnf.has_key("Dinstall::Bcc"):
        Subst["__BCC__"] = bcc + "\nBcc: %s" % (Cnf["Dinstall::Bcc"])
    # (missing "else:")
        Subst["__BCC__"] = bcc

    # Sort the .changes files so that we process sourceful ones first
    changes_files.sort(utils.changes_compare)

    # Process the changes files
    for changes_file in changes_files:
        print "\n" + changes_file
        process_it (changes_file)

    if install_count > 1:
        # (missing -- presumably 'sets = "sets"'; "sets" below is
        # otherwise undefined in this excerpt)
        sys.stderr.write("Installed %d package %s, %s.\n" % (install_count, sets, utils.size_type(int(install_bytes))))
        Logger.log(["total",install_count,install_bytes])

    if not Options["No-Action"]:
        # (missing -- presumably Logger.close() and a guard on
        # Urgency_Logger being set)
        Urgency_Logger.close()
655 ###############################################################################
657 if __name__ == '__main__':