#!/usr/bin/env python

"""
Installs Debian packages from queue/accepted into the pool

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2000, 2001, 2002, 2003, 2004, 2006  James Troup <james@nocrew.org>
@copyright: 2009  Joerg Jaspert <joerg@debian.org>
@license: GNU General Public License version 2 or later

"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

###############################################################################

#    Cartman: "I'm trying to make the best of a bad situation, I don't
#              need to hear crap from a bunch of hippy freaks living in
#              denial.  Screw you guys, I'm going home."
#
#    Kyle: "But Cartman, we're trying to..."
#
#    Cartman: "uhh.. screw you guys... home."

###############################################################################

import errno
import fcntl
import os
import sys
from datetime import datetime
import apt_pkg

from daklib import daklog
from daklib.queue import *
from daklib import utils
from daklib.dbconn import *
from daklib.dak_exceptions import *
from daklib.regexes import re_default_answer, re_issource, re_fdnic
from daklib.urgencylog import UrgencyLog
from daklib.summarystats import SummaryStats
from daklib.config import Config

###############################################################################

Options = None
Logger = None

###############################################################################

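# init() reads the configuration, opens the database connection, parses the
# command-line options into the Dinstall::Options configuration tree and
# returns the list of .changes files to process.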
def init():
    global Options

    # Initialize config and connection to db
    cnf = Config()
    DBConn()

    Arguments = [('a',"automatic","Dinstall::Options::Automatic"),
                 ('h',"help","Dinstall::Options::Help"),
                 ('n',"no-action","Dinstall::Options::No-Action"),
                 ('p',"no-lock", "Dinstall::Options::No-Lock"),
                 ('s',"no-mail", "Dinstall::Options::No-Mail"),
                 ('d',"directory", "Dinstall::Options::Directory", "HasArg")]

    for i in ["automatic", "help", "no-action", "no-lock", "no-mail",
              "version", "directory"]:
        if not cnf.has_key("Dinstall::Options::%s" % (i)):
            cnf["Dinstall::Options::%s" % (i)] = ""

    changes_files = apt_pkg.ParseCommandLine(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.SubTree("Dinstall::Options")

    if Options["Help"]:
        usage()

    # If we have a directory flag, use it to find our files
    if cnf["Dinstall::Options::Directory"] != "":
        # Note that we clobber the list of files we were given in this case
        # so warn if the user has done both
        if len(changes_files) > 0:
            utils.warn("Directory provided so ignoring files given on command line")

        changes_files = utils.get_changes_files(cnf["Dinstall::Options::Directory"])

    return changes_files

###############################################################################

def usage (exit_code=0):
    print """Usage: dak process-accepted [OPTION]... [CHANGES]...
  -a, --automatic           automatic run
  -h, --help                show this help and exit.
  -n, --no-action           don't do anything
  -p, --no-lock             don't check lockfile !! for cron.daily only !!
  -s, --no-mail             don't send any mail
  -V, --version             display the version number and exit"""
    sys.exit(exit_code)

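# Illustrative invocations (file and directory names are examples only):
#   dak process-accepted foo_1.0-1_amd64.changes
#   dak process-accepted -a -d /srv/queue/accepted
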
###############################################################################

def action (u, stable_queue=None, log_urgency=True, session=None):
    (summary, short_summary) = u.build_summaries()
    pi = u.package_info()

    (prompt, answer) = ("", "XXX")
    if Options["No-Action"] or Options["Automatic"]:
        answer = 'S'

    if len(u.rejects) > 0:
        print "REJECT\n" + pi
        prompt = "[R]eject, Skip, Quit ?"
        if Options["Automatic"]:
            answer = 'R'
    else:
        print "INSTALL to " + ", ".join(u.pkg.changes["distribution"].keys())
        print pi + summary,
        prompt = "[I]nstall, Skip, Quit ?"
        if Options["Automatic"]:
            answer = 'I'

    while prompt.find(answer) == -1:
        answer = utils.our_raw_input(prompt)
        m = re_default_answer.match(prompt)
        if answer == "":
            answer = m.group(1)
        answer = answer[:1].upper()

    if answer == 'R':
        u.do_unaccept()
        Logger.log(["unaccepted", u.pkg.changes_file])
    elif answer == 'I':
        if stable_queue:
            stable_install(u, session, summary, short_summary, stable_queue)
        else:
            install(u, session, log_urgency)
    elif answer == 'Q':
        sys.exit(0)


###############################################################################
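# The helpers below record the accepted upload in the database:
# add_poolfile() inserts a pool file (flushing the session so the new files id
# is available), add_dsc_to_db() handles the source package and its
# constituent files, and add_deb_to_db() handles binary (deb/udeb) packages.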
def add_poolfile(filename, datadict, location_id, session):
    poolfile = PoolFile()
    poolfile.filename = filename
    poolfile.filesize = datadict["size"]
    poolfile.md5sum = datadict["md5sum"]
    poolfile.sha1sum = datadict["sha1sum"]
    poolfile.sha256sum = datadict["sha256sum"]
    poolfile.location_id = location_id

    session.add(poolfile)
    # Flush to get a file id (NB: This is not a commit)
    session.flush()

    return poolfile

def add_dsc_to_db(u, filename, session):
    entry = u.pkg.files[filename]
    source = DBSource()

    source.source = u.pkg.dsc["source"]
    source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
    source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
    source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
    source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    source.install_date = datetime.now().date()

    dsc_component = entry["component"]
    dsc_location_id = entry["location id"]

    source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")

    # Set up a new poolfile if necessary
    if not entry.has_key("files id") or not entry["files id"]:
        filename = entry["pool name"] + filename
        poolfile = add_poolfile(filename, entry, dsc_location_id, session)
        entry["files id"] = poolfile.file_id

    source.poolfile_id = entry["files id"]
    session.add(source)
    session.flush()

    for suite_name in u.pkg.changes["distribution"].keys():
        sa = SrcAssociation()
        sa.source_id = source.source_id
        sa.suite_id = get_suite(suite_name).suite_id
        session.add(sa)

    session.flush()

    # Add the source files to the DB (files and dsc_files)
    dscfile = DSCFile()
    dscfile.source_id = source.source_id
    dscfile.poolfile_id = entry["files id"]
    session.add(dscfile)

    for dsc_file, dentry in u.pkg.dsc_files.items():
        df = DSCFile()
        df.source_id = source.source_id

        # If the .orig tarball is already in the pool, its
        # files id is stored in dsc_files by check_dsc().
        files_id = dentry.get("files id", None)

        # Find the entry in the files hash
        # TODO: Bail out here properly
        dfentry = None
        for f, e in u.pkg.files.items():
            if f == dsc_file:
                dfentry = e
                break

        if files_id is None:
            filename = dfentry["pool name"] + dsc_file

            (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
            # FIXME: needs to check for -1/-2 and or handle exception
            if found and obj is not None:
                files_id = obj.file_id

            # If still not found, add it
            if files_id is None:
                # HACK: Force sha1sum etc into dentry
                dentry["sha1sum"] = dfentry["sha1sum"]
                dentry["sha256sum"] = dfentry["sha256sum"]
                poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
                files_id = poolfile.file_id

        df.poolfile_id = files_id
        session.add(df)

    session.flush()

    # Add the src_uploaders to the DB
    uploader_ids = [source.maintainer_id]
    if u.pkg.dsc.has_key("uploaders"):
        for up in u.pkg.dsc["uploaders"].split(","):
            up = up.strip()
            uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)

    added_ids = {}
    for up in uploader_ids:
        if added_ids.has_key(up):
            utils.warn("Already saw uploader %s for source %s" % (up, source.source))
            continue

        added_ids[up] = 1

        su = SrcUploader()
        su.maintainer_id = up
        su.source_id = source.source_id
        session.add(su)

    session.flush()

    return dsc_component, dsc_location_id

def add_deb_to_db(u, filename, session):
    """
    Contrary to what you might expect, this routine deals with both
    debs and udebs.  That info is in 'dbtype', whilst 'type' is
    'deb' for both of them
    """
    cnf = Config()
    entry = u.pkg.files[filename]

    bin = DBBinary()
    bin.package = entry["package"]
    bin.version = entry["version"]
    bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
    bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    bin.arch_id = get_architecture(entry["architecture"], session).arch_id
    bin.binarytype = entry["dbtype"]

    # Find poolfile id
    filename = entry["pool name"] + filename
    fullpath = os.path.join(cnf["Dir::Pool"], filename)
    if not entry.get("location id", None):
        entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], utils.where_am_i(), session).location_id

    if not entry.get("files id", None):
        poolfile = add_poolfile(filename, entry, entry["location id"], session)
        entry["files id"] = poolfile.file_id

    bin.poolfile_id = entry["files id"]

    # Find source id
    bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
    if len(bin_sources) != 1:
        raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
                                  (bin.package, bin.version, bin.architecture.arch_string,
                                   filename, bin.binarytype, u.pkg.changes["fingerprint"])

    bin.source_id = bin_sources[0].source_id

    # Add and flush object so it has an ID
    session.add(bin)
    session.flush()

    # Add BinAssociations
    for suite_name in u.pkg.changes["distribution"].keys():
        ba = BinAssociation()
        ba.binary_id = bin.binary_id
        suite = get_suite(suite_name)
        ba.suite_id = suite.suite_id

        component_id = bin.poolfile.location.component_id

        contents = copy_temporary_contents(bin, os.path.basename(filename), None, session)
        if not contents:
            print "REJECT\nCould not determine contents of package %s" % bin.package
            session.rollback()
            raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)

        session.add(ba)

    session.flush()

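# install() does the real work for a normal (non-stable) upload: it records
# the source and binary packages in the database, moves the files into the
# pool, copies the .changes/.dak files where configured, commits, and then
# updates the queue/buildd symlinks set up when the upload was accepted.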
def install(u, session, log_urgency=True):
    cnf = Config()
    summarystats = SummaryStats()

    print "Installing."

    Logger.log(["installing changes", u.pkg.changes_file])

    # Ensure that we have all the hashes we need below.
    u.ensure_hashes()
    if len(u.rejects) > 0:
        # There were errors.  Print them and SKIP the changes.
        for msg in u.rejects:
            utils.warn(msg)
        return

    # Add the .dsc file to the DB first
    for newfile, entry in u.pkg.files.items():
        if entry["type"] == "dsc":
            dsc_component, dsc_location_id = add_dsc_to_db(u, newfile, session)

    # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
    for newfile, entry in u.pkg.files.items():
        if entry["type"] == "deb":
            add_deb_to_db(u, newfile, session)

    # If this is a sourceful, diff-only upload that is moving
    # cross-component, we need to copy the .orig files into the new
    # component too.
    if u.pkg.changes["architecture"].has_key("source"):
        for orig_file in u.pkg.orig_files.keys():
            if not u.pkg.orig_files[orig_file].has_key("id"):
                continue # Skip if it's not in the pool
            orig_file_id = u.pkg.orig_files[orig_file]["id"]
            if u.pkg.orig_files[orig_file]["location"] == dsc_location_id:
                continue # Skip if the location didn't change

            # Do the move
            oldf = get_poolfile_by_id(orig_file_id, session)
            old_filename = os.path.join(oldf.location.path, oldf.filename)
            old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
                       'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}

            new_filename = os.path.join(utils.poolify(u.pkg.changes["source"], dsc_component), os.path.basename(old_filename))

            # TODO: Care about size/md5sum collisions etc
            (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)

            if newf is None:
                utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
                newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)

                # TODO: Check that there's only 1 here
                source = get_sources_from_name(u.pkg.changes["source"], u.pkg.changes["version"])[0]
                dscf = get_dscfiles(source_id=source.source_id, poolfile_id=orig_file_id, session=session)[0]
                dscf.poolfile_id = newf.file_id
                session.add(dscf)
                session.flush()

    # Install the files into the pool
    for newfile, entry in u.pkg.files.items():
        destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
        utils.move(newfile, destination)
        Logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
        summarystats.accept_bytes += float(entry["size"])

    # Copy the .changes file across for suites which need it.
    copy_changes = {}
    copy_dot_dak = {}
    for suite_name in u.pkg.changes["distribution"].keys():
        if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
            copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""
        # and the .dak file...
        if cnf.has_key("Suite::%s::CopyDotDak" % (suite_name)):
            copy_dot_dak[cnf["Suite::%s::CopyDotDak" % (suite_name)]] = ""

    for dest in copy_changes.keys():
        utils.copy(u.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))

    for dest in copy_dot_dak.keys():
        utils.copy(u.pkg.changes_file[:-8]+".dak", dest)

    # We're done - commit the database changes
    session.commit()

    # Move the .changes into the 'done' directory
    utils.move(u.pkg.changes_file,
               os.path.join(cnf["Dir::Queue::Done"], os.path.basename(u.pkg.changes_file)))

    # Remove the .dak file
    os.unlink(u.pkg.changes_file[:-8] + ".dak")

    if u.pkg.changes["architecture"].has_key("source") and log_urgency:
        UrgencyLog().log(u.pkg.dsc["source"], u.pkg.dsc["version"], u.pkg.changes["urgency"])

    # Our SQL session will automatically start a new transaction after
    # the last commit

    # Undo the work done in queue.py(accept) to help auto-building
    # from accepted.
    now_date = datetime.now()

    for suite_name in u.pkg.changes["distribution"].keys():
        if suite_name not in cnf.ValueList("Dinstall::QueueBuildSuites"):
            continue

        suite = get_suite(suite_name, session)
        dest_dir = cnf["Dir::QueueBuild"]

        if cnf.FindB("Dinstall::SecurityQueueBuild"):
            dest_dir = os.path.join(dest_dir, suite_name)

        for newfile, entry in u.pkg.files.items():
            dest = os.path.join(dest_dir, newfile)

            qb = get_queue_build(dest, suite.suite_id, session)

            # Remove it from the list of packages for later processing by apt-ftparchive
            if qb:
                qb.last_used = now_date
                qb.in_queue = False
                session.add(qb)

            if not cnf.FindB("Dinstall::SecurityQueueBuild"):
                # Update the symlink to point to the new location in the pool
                pool_location = utils.poolify(u.pkg.changes["source"], entry["component"])
                src = os.path.join(cnf["Dir::Pool"], pool_location, os.path.basename(newfile))
                if os.path.islink(dest):
                    os.unlink(dest)
                os.symlink(src, dest)

        # Update last_used on any non-uploaded .orig symlink
        for orig_file in u.pkg.orig_files.keys():
            # Determine the .orig.tar.gz file name
            if not u.pkg.orig_files[orig_file].has_key("id"):
                continue # Skip files not in the pool
            # XXX: do we really want to update the orig_files dict here
            # instead of using a temporary variable?
            u.pkg.orig_files[orig_file]["path"] = os.path.join(dest_dir, orig_file)

            # Remove it from the list of packages for later processing by apt-ftparchive
            qb = get_queue_build(u.pkg.orig_files[orig_file]["path"], suite.suite_id, session)
            if qb:
                qb.in_queue = False
                qb.last_used = now_date
                session.add(qb)

    session.commit()

    # Finally...
    summarystats.accept_count += 1

################################################################################
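
# stable_install() handles uploads accepted via (old)stable-proposed-updates:
# it moves the source and binary suite associations from the proposed-updates
# suite into (old)stable, moves the .changes file to the morgue, updates the
# suite ChangeLog, sends the install mail and removes the .dak file.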
def stable_install(u, session, summary, short_summary, fromsuite_name="proposed-updates"):
    summarystats = SummaryStats()
    cnf = Config()

    fromsuite_name = fromsuite_name.lower()
    tosuite_name = "Stable"
    if fromsuite_name == "oldstable-proposed-updates":
        tosuite_name = "OldStable"

    print "Installing from %s to %s." % (fromsuite_name, tosuite_name)

    fromsuite = get_suite(fromsuite_name)
    tosuite = get_suite(tosuite_name)

    # Add the source to stable (and remove it from proposed-updates)
    for newfile, entry in u.pkg.files.items():
        if entry["type"] == "dsc":
            package = u.pkg.dsc["source"]
            # NB: not files[file]["version"], that has no epoch
            version = u.pkg.dsc["version"]

            source = get_sources_from_name(package, version, session)
            if len(source) < 1:
                utils.fubar("[INTERNAL ERROR] couldn't find '%s' (%s) in source table." % (package, version))
            source = source[0]

            # Remove from old suite
            old = session.query(SrcAssociation).filter_by(source_id = source.source_id)
            old = old.filter_by(suite_id = fromsuite.suite_id)
            old.delete()

            # Add to new suite
            new = SrcAssociation()
            new.source_id = source.source_id
            new.suite_id = tosuite.suite_id
            session.add(new)

    # Add the binaries to stable (and remove them from proposed-updates)
    for newfile, entry in u.pkg.files.items():
        if entry["type"] == "deb":
            package = entry["package"]
            version = entry["version"]
            architecture = entry["architecture"]

            binary = get_binaries_from_name(package, version, [architecture, 'all'])

            if len(binary) < 1:
                utils.fubar("[INTERNAL ERROR] couldn't find '%s' (%s for %s architecture) in binaries table." % (package, version, architecture))
            binary = binary[0]

            # Remove from old suite
            old = session.query(BinAssociation).filter_by(binary_id = binary.binary_id)
            old = old.filter_by(suite_id = fromsuite.suite_id)
            old.delete()

            # Add to new suite
            new = BinAssociation()
            new.binary_id = binary.binary_id
            new.suite_id = tosuite.suite_id
            session.add(new)

    session.commit()

    utils.move(u.pkg.changes_file,
               os.path.join(cnf["Dir::Morgue"], 'process-accepted', os.path.basename(u.pkg.changes_file)))

    ## Update the Stable ChangeLog file
    # TODO: URGH - Use a proper tmp file
    new_changelog_filename = cnf["Dir::Root"] + cnf["Suite::%s::ChangeLogBase" % (tosuite.suite_name)] + ".ChangeLog"
    changelog_filename = cnf["Dir::Root"] + cnf["Suite::%s::ChangeLogBase" % (tosuite.suite_name)] + "ChangeLog"
    if os.path.exists(new_changelog_filename):
        os.unlink(new_changelog_filename)

    new_changelog = utils.open_file(new_changelog_filename, 'w')
    for newfile, entry in u.pkg.files.items():
        if entry["type"] == "deb":
            new_changelog.write("%s/%s/binary-%s/%s\n" % (tosuite.suite_name,
                                                          entry["component"],
                                                          entry["architecture"],
                                                          newfile))
        elif re_issource.match(newfile):
            new_changelog.write("%s/%s/source/%s\n" % (tosuite.suite_name,
                                                       entry["component"],
                                                       newfile))
        else:
            new_changelog.write("%s\n" % (newfile))

    chop_changes = re_fdnic.sub("\n", u.pkg.changes["changes"])
    new_changelog.write(chop_changes + '\n\n')

    if os.access(changelog_filename, os.R_OK) != 0:
        changelog = utils.open_file(changelog_filename)
        new_changelog.write(changelog.read())

    new_changelog.close()

    if os.access(changelog_filename, os.R_OK) != 0:
        os.unlink(changelog_filename)
    utils.move(new_changelog_filename, changelog_filename)

    summarystats.accept_count += 1

    if not Options["No-Mail"] and u.pkg.changes["architecture"].has_key("source"):
        u.Subst["__SUITE__"] = " into %s" % (tosuite.suite_name)
        u.Subst["__SUMMARY__"] = summary
        u.Subst["__BCC__"] = "X-DAK: dak process-accepted"

        if cnf.has_key("Dinstall::Bcc"):
            u.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])

        template = os.path.join(cnf["Dir::Templates"], 'process-accepted.install')

        mail_message = utils.TemplateSubst(u.Subst, template)
        utils.send_mail(mail_message)
        u.announce(short_summary, True)

    # Finally remove the .dak file
    dot_dak_file = os.path.join(cnf["Suite::%s::CopyDotDak" % (fromsuite.suite_name)],
                                os.path.basename(u.pkg.changes_file[:-8]+".dak"))
    os.unlink(dot_dak_file)

################################################################################

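# process_it() loads a single .changes/.dak pair, runs the accepted-queue
# checks and then hands off to action().  Stable installs are run from the
# proposed-updates CopyDotDak directory, so the CWD is saved and restored.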
def process_it(changes_file, stable_queue, log_urgency, session):
    cnf = Config()
    u = Upload()

    overwrite_checks = True

    # Absolutize the filename to avoid the requirement of being in the
    # same directory as the .changes file.
    cfile = os.path.abspath(changes_file)

    # And since handling of installs to stable munges with the CWD,
    # save and restore it.
    u.prevdir = os.getcwd()

    if stable_queue:
        old = cfile
        cfile = os.path.basename(old)
        os.chdir(cnf["Suite::%s::CopyDotDak" % (stable_queue)])
        # overwrite_checks should not be performed if installing to stable
        overwrite_checks = False

    u.pkg.load_dot_dak(cfile)
    u.update_subst()

    if stable_queue:
        u.pkg.changes_file = old

    u.accepted_checks(overwrite_checks, session)
    action(u, stable_queue, log_urgency, session)

    # Restore CWD
    os.chdir(u.prevdir)

###############################################################################

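# main() refuses to run while archive maintenance is in progress (unless
# --no-lock), takes the dinstall lock unless in no-action mode, and then
# processes each .changes file in its own database session, sourceful
# uploads first.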
def main():
    global Logger

    cnf = Config()
    summarystats = SummaryStats()
    changes_files = init()
    log_urgency = False
    stable_queue = None

    # -n/--no-action invalidates some other options which would involve things happening
    if Options["No-Action"]:
        Options["Automatic"] = ""

    # Check that we aren't going to clash with the daily cron job
    if not Options["No-Action"] and os.path.exists("%s/Archive_Maintenance_In_Progress" % (cnf["Dir::Root"])) and not Options["No-Lock"]:
        utils.fubar("Archive maintenance in progress.  Try again later.")

    # If running from within proposed-updates, assume an install to stable
    if os.getenv('PWD').find('oldstable-proposed-updates') != -1:
        stable_queue = "Oldstable-Proposed-Updates"
    elif os.getenv('PWD').find('proposed-updates') != -1:
        stable_queue = "Proposed-Updates"

    # Obtain lock if not in no-action mode and initialize the log
    if not Options["No-Action"]:
        lock_fd = os.open(cnf["Dinstall::LockFile"], os.O_RDWR | os.O_CREAT)
        try:
            fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
        except IOError, e:
            if errno.errorcode[e.errno] == 'EACCES' or errno.errorcode[e.errno] == 'EAGAIN':
                utils.fubar("Couldn't obtain lock; assuming another 'dak process-accepted' is already running.")
            else:
                raise
        Logger = daklog.Logger(cnf, "process-accepted")
        if not stable_queue and cnf.get("Dir::UrgencyLog"):
            # Initialise UrgencyLog()
            log_urgency = True
            UrgencyLog()

    # Sort the .changes files so that we process sourceful ones first
    changes_files.sort(utils.changes_compare)

    # Process the changes files
    for changes_file in changes_files:
        print "\n" + changes_file
        session = DBConn().session()
        process_it(changes_file, stable_queue, log_urgency, session)
        session.close()

    if summarystats.accept_count:
        sets = "set"
        if summarystats.accept_count > 1:
            sets = "sets"
        sys.stderr.write("Installed %d package %s, %s.\n" % (summarystats.accept_count, sets,
                                                             utils.size_type(int(summarystats.accept_bytes))))
        Logger.log(["total", summarystats.accept_count, summarystats.accept_bytes])

    if not Options["No-Action"]:
        Logger.close()
        if log_urgency:
            UrgencyLog().close()

###############################################################################

if __name__ == '__main__':
    main()