3 """ Checks Debian packages from Incoming """
4 # Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 2 of the License, or
9 # (at your option) any later version.
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 # Originally based on dinstall by Guy Maor <maor@debian.org>
22 ################################################################################
24 # Computer games don't affect kids. I mean if Pacman affected our generation as
25 # kids, we'd all run around in a darkened room munching pills and listening to
29 ################################################################################
31 import commands, errno, fcntl, os, re, shutil, stat, sys, time, tempfile, traceback, tarfile
32 import apt_inst, apt_pkg
33 from debian_bundle import deb822
34 from daklib.dbconn import DBConn
35 from daklib.binary import Binary
36 from daklib import logging
37 from daklib import queue
38 from daklib import utils
39 from daklib.dak_exceptions import *
40 from daklib.regexes import re_valid_version, re_valid_pkg_name, re_changelog_versions, \
41 re_strip_revision, re_strip_srcver, re_spacestrip, \
42 re_isanum, re_no_epoch, re_no_revision, re_taint_free, \
43 re_isadeb, re_extract_src_version, re_issource, re_default_answer
47 ################################################################################
50 ################################################################################
61 # Aliases to the real vars in the Upload class; hysterical raisins.
69 ###############################################################################
72 global Cnf, Options, Upload, changes, dsc, dsc_files, files, pkg
76 Cnf = apt_pkg.newConfiguration()
77 apt_pkg.ReadConfigFileISC(Cnf,utils.which_conf_file())
79 Arguments = [('a',"automatic","Dinstall::Options::Automatic"),
80 ('h',"help","Dinstall::Options::Help"),
81 ('n',"no-action","Dinstall::Options::No-Action"),
82 ('p',"no-lock", "Dinstall::Options::No-Lock"),
83 ('s',"no-mail", "Dinstall::Options::No-Mail")]
85 for i in ["automatic", "help", "no-action", "no-lock", "no-mail",
86 "override-distribution", "version"]:
87 Cnf["Dinstall::Options::%s" % (i)] = ""
89 changes_files = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
90 Options = Cnf.SubTree("Dinstall::Options")
95 Upload = queue.Upload(Cnf)
97 changes = Upload.pkg.changes
99 dsc_files = Upload.pkg.dsc_files
100 files = Upload.pkg.files
105 ################################################################################
107 def usage (exit_code=0):
108 print """Usage: dinstall [OPTION]... [CHANGES]...
109 -a, --automatic automatic run
110 -h, --help show this help and exit.
111 -n, --no-action don't do anything
112 -p, --no-lock don't check lockfile !! for cron.daily only !!
113 -s, --no-mail don't send any mail
114 -V, --version display the version number and exit"""
117 ################################################################################
119 def reject (str, prefix="Rejected: "):
120 global reject_message
122 reject_message += prefix + str + "\n"
124 ################################################################################
128 Create a temporary directory that can be used for unpacking files into for
131 tmpdir = tempfile.mkdtemp()
134 ################################################################################
136 def copy_to_holding(filename):
139 base_filename = os.path.basename(filename)
141 dest = Cnf["Dir::Queue::Holding"] + '/' + base_filename
143 fd = os.open(dest, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0640)
146 # Shouldn't happen, but will if, for example, someone lists a
147 # file twice in the .changes.
148 if errno.errorcode[e.errno] == 'EEXIST':
149 reject("%s: already exists in holding area; can not overwrite." % (base_filename))
154 shutil.copy(filename, dest)
156 # In either case (ENOENT or EACCES) we want to remove the
157 # O_CREAT | O_EXCLed ghost file, so add the file to the list
158 # of 'in holding' even if it's not the real file.
159 if errno.errorcode[e.errno] == 'ENOENT':
160 reject("%s: can not copy to holding area: file not found." % (base_filename))
163 elif errno.errorcode[e.errno] == 'EACCES':
164 reject("%s: can not copy to holding area: read permission denied." % (base_filename))
169 in_holding[base_filename] = ""
171 ################################################################################
177 os.chdir(Cnf["Dir::Queue::Holding"])
178 for f in in_holding.keys():
179 if os.path.exists(f):
180 if f.find('/') != -1:
181 utils.fubar("WTF? clean_holding() got a file ('%s') with / in it!" % (f))
187 ################################################################################
190 filename = pkg.changes_file
192 # Parse the .changes field into a dictionary
194 changes.update(utils.parse_changes(filename))
195 except CantOpenError:
196 reject("%s: can't read file." % (filename))
198 except ParseChangesError, line:
199 reject("%s: parse error, can't grok: %s." % (filename, line))
202 # Parse the Files field from the .changes into another dictionary
204 files.update(utils.build_file_list(changes))
205 except ParseChangesError, line:
206 reject("%s: parse error, can't grok: %s." % (filename, line))
207 except UnknownFormatError, format:
208 reject("%s: unknown format '%s'." % (filename, format))
211 # Check for mandatory fields
212 for i in ("source", "binary", "architecture", "version", "distribution",
213 "maintainer", "files", "changes", "description"):
214 if not changes.has_key(i):
215 reject("%s: Missing mandatory field `%s'." % (filename, i))
216 return 0 # Avoid <undef> errors during later tests
218 # Strip a source version in brackets from the source field
219 if re_strip_srcver.search(changes["source"]):
220 changes["source"] = re_strip_srcver.sub('', changes["source"])
222 # Ensure the source field is a valid package name.
223 if not re_valid_pkg_name.match(changes["source"]):
224 reject("%s: invalid source name '%s'." % (filename, changes["source"]))
226 # Split multi-value fields into a lower-level dictionary
227 for i in ("architecture", "distribution", "binary", "closes"):
228 o = changes.get(i, "")
235 # Fix the Maintainer: field to be RFC822/2047 compatible
237 (changes["maintainer822"], changes["maintainer2047"],
238 changes["maintainername"], changes["maintaineremail"]) = \
239 utils.fix_maintainer (changes["maintainer"])
240 except ParseMaintError, msg:
241 reject("%s: Maintainer field ('%s') failed to parse: %s" \
242 % (filename, changes["maintainer"], msg))
244 # ...likewise for the Changed-By: field if it exists.
246 (changes["changedby822"], changes["changedby2047"],
247 changes["changedbyname"], changes["changedbyemail"]) = \
248 utils.fix_maintainer (changes.get("changed-by", ""))
249 except ParseMaintError, msg:
250 (changes["changedby822"], changes["changedby2047"],
251 changes["changedbyname"], changes["changedbyemail"]) = \
253 reject("%s: Changed-By field ('%s') failed to parse: %s" \
254 % (filename, changes["changed-by"], msg))
256 # Ensure all the values in Closes: are numbers
257 if changes.has_key("closes"):
258 for i in changes["closes"].keys():
259 if re_isanum.match (i) == None:
260 reject("%s: `%s' from Closes field isn't a number." % (filename, i))
263 # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
264 changes["chopversion"] = re_no_epoch.sub('', changes["version"])
265 changes["chopversion2"] = re_no_revision.sub('', changes["chopversion"])
267 # Check there isn't already a changes file of the same name in one
268 # of the queue directories.
269 base_filename = os.path.basename(filename)
270 for d in [ "Accepted", "Byhand", "Done", "New", "ProposedUpdates", "OldProposedUpdates" ]:
271 if os.path.exists(Cnf["Dir::Queue::%s" % (d) ]+'/'+base_filename):
272 reject("%s: a file with this name already exists in the %s directory." % (base_filename, d))
274 # Check the .changes is non-empty
276 reject("%s: nothing to do (Files field is empty)." % (base_filename))
281 ################################################################################
283 def check_distributions():
284 "Check and map the Distribution field of a .changes file."
286 # Handle suite mappings
287 for m in Cnf.ValueList("SuiteMappings"):
290 if mtype == "map" or mtype == "silent-map":
291 (source, dest) = args[1:3]
292 if changes["distribution"].has_key(source):
293 del changes["distribution"][source]
294 changes["distribution"][dest] = 1
295 if mtype != "silent-map":
296 reject("Mapping %s to %s." % (source, dest),"")
297 if changes.has_key("distribution-version"):
298 if changes["distribution-version"].has_key(source):
299 changes["distribution-version"][source]=dest
300 elif mtype == "map-unreleased":
301 (source, dest) = args[1:3]
302 if changes["distribution"].has_key(source):
303 for arch in changes["architecture"].keys():
304 if arch not in Cnf.ValueList("Suite::%s::Architectures" % (source)):
305 reject("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch),"")
306 del changes["distribution"][source]
307 changes["distribution"][dest] = 1
309 elif mtype == "ignore":
311 if changes["distribution"].has_key(suite):
312 del changes["distribution"][suite]
313 reject("Ignoring %s as a target suite." % (suite), "Warning: ")
314 elif mtype == "reject":
316 if changes["distribution"].has_key(suite):
317 reject("Uploads to %s are not accepted." % (suite))
318 elif mtype == "propup-version":
319 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
321 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
322 if changes["distribution"].has_key(args[1]):
323 changes.setdefault("distribution-version", {})
324 for suite in args[2:]: changes["distribution-version"][suite]=suite
326 # Ensure there is (still) a target distribution
327 if changes["distribution"].keys() == []:
328 reject("no valid distribution.")
330 # Ensure target distributions exist
331 for suite in changes["distribution"].keys():
332 if not Cnf.has_key("Suite::%s" % (suite)):
333 reject("Unknown distribution `%s'." % (suite))
335 ################################################################################
340 archive = utils.where_am_i()
341 file_keys = files.keys()
343 # if reprocess is 2 we've already done this and we're checking
344 # things again for the new .orig.tar.gz.
345 # [Yes, I'm fully aware of how disgusting this is]
346 if not Options["No-Action"] and reprocess < 2:
348 os.chdir(pkg.directory)
353 # Check there isn't already a .changes or .dak file of the same name in
354 # the proposed-updates "CopyChanges" or "CopyDotDak" storage directories.
355 # [NB: this check must be done post-suite mapping]
356 base_filename = os.path.basename(pkg.changes_file)
357 dot_dak_filename = base_filename[:-8]+".dak"
358 for suite in changes["distribution"].keys():
359 copychanges = "Suite::%s::CopyChanges" % (suite)
360 if Cnf.has_key(copychanges) and \
361 os.path.exists(Cnf[copychanges]+"/"+base_filename):
362 reject("%s: a file with this name already exists in %s" \
363 % (base_filename, Cnf[copychanges]))
365 copy_dot_dak = "Suite::%s::CopyDotDak" % (suite)
366 if Cnf.has_key(copy_dot_dak) and \
367 os.path.exists(Cnf[copy_dot_dak]+"/"+dot_dak_filename):
368 reject("%s: a file with this name already exists in %s" \
369 % (dot_dak_filename, Cnf[copy_dot_dak]))
375 cursor = DBConn().cursor()
376 # Check for packages that have moved from one component to another
377 # STU: this should probably be changed to not join on architecture, suite tables but instead to used their cached name->id mappings from DBConn
378 cursor.execute("""PREPARE moved_pkg_q AS
379 SELECT c.name FROM binaries b, bin_associations ba, suite s, location l,
380 component c, architecture a, files f
381 WHERE b.package = $1 AND s.suite_name = $2
382 AND (a.arch_string = $3 OR a.arch_string = 'all')
383 AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id
384 AND f.location = l.id
385 AND l.component = c.id
386 AND b.file = f.id""")
389 # Ensure the file does not already exist in one of the accepted directories
390 for d in [ "Accepted", "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
391 if not Cnf.has_key("Dir::Queue::%s" % (d)): continue
392 if os.path.exists(Cnf["Dir::Queue::%s" % (d) ] + '/' + f):
393 reject("%s file already exists in the %s directory." % (f, d))
394 if not re_taint_free.match(f):
395 reject("!!WARNING!! tainted filename: '%s'." % (f))
396 # Check the file is readable
397 if os.access(f, os.R_OK) == 0:
398 # When running in -n, copy_to_holding() won't have
399 # generated the reject_message, so we need to.
400 if Options["No-Action"]:
401 if os.path.exists(f):
402 reject("Can't read `%s'. [permission denied]" % (f))
404 reject("Can't read `%s'. [file not found]" % (f))
405 files[f]["type"] = "unreadable"
407 # If it's byhand skip remaining checks
408 if files[f]["section"] == "byhand" or files[f]["section"][:4] == "raw-":
409 files[f]["byhand"] = 1
410 files[f]["type"] = "byhand"
411 # Checks for a binary package...
412 elif re_isadeb.match(f):
414 files[f]["type"] = "deb"
416 # Extract package control information
417 deb_file = utils.open_file(f)
419 control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
421 reject("%s: debExtractControl() raised %s." % (f, sys.exc_type))
423 # Can't continue, none of the checks on control would work.
427 # Check for mandatory fields
428 for field in [ "Package", "Architecture", "Version" ]:
429 if control.Find(field) == None:
430 reject("%s: No %s field in control." % (f, field))
434 # Ensure the package name matches the one give in the .changes
435 if not changes["binary"].has_key(control.Find("Package", "")):
436 reject("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
438 # Validate the package field
439 package = control.Find("Package")
440 if not re_valid_pkg_name.match(package):
441 reject("%s: invalid package name '%s'." % (f, package))
443 # Validate the version field
444 version = control.Find("Version")
445 if not re_valid_version.match(version):
446 reject("%s: invalid version number '%s'." % (f, version))
448 # Ensure the architecture of the .deb is one we know about.
449 default_suite = Cnf.get("Dinstall::DefaultSuite", "Unstable")
450 architecture = control.Find("Architecture")
451 upload_suite = changes["distribution"].keys()[0]
452 if architecture not in Cnf.ValueList("Suite::%s::Architectures" % (default_suite)) and architecture not in Cnf.ValueList("Suite::%s::Architectures" % (upload_suite)):
453 reject("Unknown architecture '%s'." % (architecture))
455 # Ensure the architecture of the .deb is one of the ones
456 # listed in the .changes.
457 if not changes["architecture"].has_key(architecture):
458 reject("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
460 # Sanity-check the Depends field
461 depends = control.Find("Depends")
463 reject("%s: Depends field is empty." % (f))
465 # Sanity-check the Provides field
466 provides = control.Find("Provides")
468 provide = re_spacestrip.sub('', provides)
470 reject("%s: Provides field is empty." % (f))
471 prov_list = provide.split(",")
472 for prov in prov_list:
473 if not re_valid_pkg_name.match(prov):
474 reject("%s: Invalid Provides field content %s." % (f, prov))
477 # Check the section & priority match those given in the .changes (non-fatal)
478 if control.Find("Section") and files[f]["section"] != "" and files[f]["section"] != control.Find("Section"):
479 reject("%s control file lists section as `%s', but changes file has `%s'." % (f, control.Find("Section", ""), files[f]["section"]), "Warning: ")
480 if control.Find("Priority") and files[f]["priority"] != "" and files[f]["priority"] != control.Find("Priority"):
481 reject("%s control file lists priority as `%s', but changes file has `%s'." % (f, control.Find("Priority", ""), files[f]["priority"]),"Warning: ")
483 files[f]["package"] = package
484 files[f]["architecture"] = architecture
485 files[f]["version"] = version
486 files[f]["maintainer"] = control.Find("Maintainer", "")
487 if f.endswith(".udeb"):
488 files[f]["dbtype"] = "udeb"
489 elif f.endswith(".deb"):
490 files[f]["dbtype"] = "deb"
492 reject("%s is neither a .deb or a .udeb." % (f))
493 files[f]["source"] = control.Find("Source", files[f]["package"])
494 # Get the source version
495 source = files[f]["source"]
497 if source.find("(") != -1:
498 m = re_extract_src_version.match(source)
500 source_version = m.group(2)
501 if not source_version:
502 source_version = files[f]["version"]
503 files[f]["source package"] = source
504 files[f]["source version"] = source_version
506 # Ensure the filename matches the contents of the .deb
507 m = re_isadeb.match(f)
509 file_package = m.group(1)
510 if files[f]["package"] != file_package:
511 reject("%s: package part of filename (%s) does not match package name in the %s (%s)." % (f, file_package, files[f]["dbtype"], files[f]["package"]))
512 epochless_version = re_no_epoch.sub('', control.Find("Version"))
514 file_version = m.group(2)
515 if epochless_version != file_version:
516 reject("%s: version part of filename (%s) does not match package version in the %s (%s)." % (f, file_version, files[f]["dbtype"], epochless_version))
518 file_architecture = m.group(3)
519 if files[f]["architecture"] != file_architecture:
520 reject("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % (f, file_architecture, files[f]["dbtype"], files[f]["architecture"]))
522 # Check for existent source
523 source_version = files[f]["source version"]
524 source_package = files[f]["source package"]
525 if changes["architecture"].has_key("source"):
526 if source_version != changes["version"]:
527 reject("source version (%s) for %s doesn't match changes version %s." % (source_version, f, changes["version"]))
529 # Check in the SQL database
530 if not Upload.source_exists(source_package, source_version, changes["distribution"].keys()):
531 # Check in one of the other directories
532 source_epochless_version = re_no_epoch.sub('', source_version)
533 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
534 if os.path.exists(Cnf["Dir::Queue::Byhand"] + '/' + dsc_filename):
535 files[f]["byhand"] = 1
536 elif os.path.exists(Cnf["Dir::Queue::New"] + '/' + dsc_filename):
540 for myq in ["Accepted", "Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
541 if Cnf.has_key("Dir::Queue::%s" % (myq)):
542 if os.path.exists(Cnf["Dir::Queue::"+myq] + '/' + dsc_filename):
545 if not dsc_file_exists:
546 reject("no source found for %s %s (%s)." % (source_package, source_version, f))
547 # Check the version and for file overwrites
548 reject(Upload.check_binary_against_db(f),"")
550 Binary(f).scan_package()
552 # Checks for a source package...
554 m = re_issource.match(f)
557 files[f]["package"] = m.group(1)
558 files[f]["version"] = m.group(2)
559 files[f]["type"] = m.group(3)
561 # Ensure the source package name matches the Source filed in the .changes
562 if changes["source"] != files[f]["package"]:
563 reject("%s: changes file doesn't say %s for Source" % (f, files[f]["package"]))
565 # Ensure the source version matches the version in the .changes file
566 if files[f]["type"] == "orig.tar.gz":
567 changes_version = changes["chopversion2"]
569 changes_version = changes["chopversion"]
570 if changes_version != files[f]["version"]:
571 reject("%s: should be %s according to changes file." % (f, changes_version))
573 # Ensure the .changes lists source in the Architecture field
574 if not changes["architecture"].has_key("source"):
575 reject("%s: changes file doesn't list `source' in Architecture field." % (f))
577 # Check the signature of a .dsc file
578 if files[f]["type"] == "dsc":
579 dsc["fingerprint"] = utils.check_signature(f, reject)
581 files[f]["architecture"] = "source"
583 # Not a binary or source package? Assume byhand...
585 files[f]["byhand"] = 1
586 files[f]["type"] = "byhand"
588 # Per-suite file checks
589 files[f]["oldfiles"] = {}
590 for suite in changes["distribution"].keys():
592 if files[f].has_key("byhand"):
595 # Handle component mappings
596 for m in Cnf.ValueList("ComponentMappings"):
597 (source, dest) = m.split()
598 if files[f]["component"] == source:
599 files[f]["original component"] = source
600 files[f]["component"] = dest
602 # Ensure the component is valid for the target suite
603 if Cnf.has_key("Suite:%s::Components" % (suite)) and \
604 files[f]["component"] not in Cnf.ValueList("Suite::%s::Components" % (suite)):
605 reject("unknown component `%s' for suite `%s'." % (files[f]["component"], suite))
608 # Validate the component
609 component = files[f]["component"]
610 component_id = DBConn().get_component_id(component)
611 if component_id == -1:
612 reject("file '%s' has unknown component '%s'." % (f, component))
615 # See if the package is NEW
616 if not Upload.in_override_p(files[f]["package"], files[f]["component"], suite, files[f].get("dbtype",""), f):
619 # Validate the priority
620 if files[f]["priority"].find('/') != -1:
621 reject("file '%s' has invalid priority '%s' [contains '/']." % (f, files[f]["priority"]))
623 # Determine the location
624 location = Cnf["Dir::Pool"]
625 location_id = DBConn().get_location_id(location, component, archive)
626 if location_id == -1:
627 reject("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (component, archive))
628 files[f]["location id"] = location_id
630 # Check the md5sum & size against existing files (if any)
631 files[f]["pool name"] = utils.poolify (changes["source"], files[f]["component"])
632 files_id = DBConn().get_files_id(files[f]["pool name"] + f, files[f]["size"], files[f]["md5sum"], files[f]["location id"])
634 reject("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
636 reject("md5sum and/or size mismatch on existing copy of %s." % (f))
637 files[f]["files id"] = files_id
639 # Check for packages that have moved from one component to another
640 files[f]['suite'] = suite
641 cursor.execute("""EXECUTE moved_pkg_q( %(package)s, %(suite)s, %(architecture)s )""", ( files[f] ) )
642 ql = cursor.fetchone()
644 files[f]["othercomponents"] = ql[0][0]
646 # If the .changes file says it has source, it must have source.
647 if changes["architecture"].has_key("source"):
649 reject("no source found and Architecture line in changes mention source.")
651 if not has_binaries and Cnf.FindB("Dinstall::Reject::NoSourceOnly"):
652 reject("source only uploads are not supported.")
654 ###############################################################################
659 # Ensure there is source to check
660 if not changes["architecture"].has_key("source"):
665 for f in files.keys():
666 if files[f]["type"] == "dsc":
668 reject("can not process a .changes file with multiple .dsc's.")
673 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
675 reject("source uploads must contain a dsc file")
678 # Parse the .dsc file
680 dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
681 except CantOpenError:
682 # if not -n copy_to_holding() will have done this for us...
683 if Options["No-Action"]:
684 reject("%s: can't read file." % (dsc_filename))
685 except ParseChangesError, line:
686 reject("%s: parse error, can't grok: %s." % (dsc_filename, line))
687 except InvalidDscError, line:
688 reject("%s: syntax error on line %s." % (dsc_filename, line))
689 # Build up the file list of files mentioned by the .dsc
691 dsc_files.update(utils.build_file_list(dsc, is_a_dsc=1))
692 except NoFilesFieldError:
693 reject("%s: no Files: field." % (dsc_filename))
695 except UnknownFormatError, format:
696 reject("%s: unknown format '%s'." % (dsc_filename, format))
698 except ParseChangesError, line:
699 reject("%s: parse error, can't grok: %s." % (dsc_filename, line))
702 # Enforce mandatory fields
703 for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
704 if not dsc.has_key(i):
705 reject("%s: missing mandatory field `%s'." % (dsc_filename, i))
708 # Validate the source and version fields
709 if not re_valid_pkg_name.match(dsc["source"]):
710 reject("%s: invalid source name '%s'." % (dsc_filename, dsc["source"]))
711 if not re_valid_version.match(dsc["version"]):
712 reject("%s: invalid version number '%s'." % (dsc_filename, dsc["version"]))
714 # Bumping the version number of the .dsc breaks extraction by stable's
715 # dpkg-source. So let's not do that...
716 if dsc["format"] != "1.0":
717 reject("%s: incompatible 'Format' version produced by a broken version of dpkg-dev 1.9.1{3,4}." % (dsc_filename))
719 # Validate the Maintainer field
721 utils.fix_maintainer (dsc["maintainer"])
722 except ParseMaintError, msg:
723 reject("%s: Maintainer field ('%s') failed to parse: %s" \
724 % (dsc_filename, dsc["maintainer"], msg))
726 # Validate the build-depends field(s)
727 for field_name in [ "build-depends", "build-depends-indep" ]:
728 field = dsc.get(field_name)
730 # Check for broken dpkg-dev lossage...
731 if field.startswith("ARRAY"):
732 reject("%s: invalid %s field produced by a broken version of dpkg-dev (1.10.11)" % (dsc_filename, field_name.title()))
734 # Have apt try to parse them...
736 apt_pkg.ParseSrcDepends(field)
738 reject("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
741 # Ensure the version number in the .dsc matches the version number in the .changes
742 epochless_dsc_version = re_no_epoch.sub('', dsc["version"])
743 changes_version = files[dsc_filename]["version"]
744 if epochless_dsc_version != files[dsc_filename]["version"]:
745 reject("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
747 # Ensure there is a .tar.gz in the .dsc file
749 for f in dsc_files.keys():
750 m = re_issource.match(f)
752 reject("%s: %s in Files field not recognised as source." % (dsc_filename, f))
755 if ftype == "orig.tar.gz" or ftype == "tar.gz":
758 reject("%s: no .tar.gz or .orig.tar.gz in 'Files' field." % (dsc_filename))
760 # Ensure source is newer than existing source in target suites
761 reject(Upload.check_source_against_db(dsc_filename),"")
763 (reject_msg, is_in_incoming) = Upload.check_dsc_against_db(dsc_filename)
764 reject(reject_msg, "")
766 if not Options["No-Action"]:
767 copy_to_holding(is_in_incoming)
768 orig_tar_gz = os.path.basename(is_in_incoming)
769 files[orig_tar_gz] = {}
770 files[orig_tar_gz]["size"] = os.stat(orig_tar_gz)[stat.ST_SIZE]
771 files[orig_tar_gz]["md5sum"] = dsc_files[orig_tar_gz]["md5sum"]
772 files[orig_tar_gz]["sha1sum"] = dsc_files[orig_tar_gz]["sha1sum"]
773 files[orig_tar_gz]["sha256sum"] = dsc_files[orig_tar_gz]["sha256sum"]
774 files[orig_tar_gz]["section"] = files[dsc_filename]["section"]
775 files[orig_tar_gz]["priority"] = files[dsc_filename]["priority"]
776 files[orig_tar_gz]["component"] = files[dsc_filename]["component"]
777 files[orig_tar_gz]["type"] = "orig.tar.gz"
782 ################################################################################
784 def get_changelog_versions(source_dir):
785 """Extracts a the source package and (optionally) grabs the
786 version history out of debian/changelog for the BTS."""
788 # Find the .dsc (again)
790 for f in files.keys():
791 if files[f]["type"] == "dsc":
794 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
798 # Create a symlink mirror of the source files in our temporary directory
799 for f in files.keys():
800 m = re_issource.match(f)
802 src = os.path.join(source_dir, f)
803 # If a file is missing for whatever reason, give up.
804 if not os.path.exists(src):
807 if ftype == "orig.tar.gz" and pkg.orig_tar_gz:
809 dest = os.path.join(os.getcwd(), f)
810 os.symlink(src, dest)
812 # If the orig.tar.gz is not a part of the upload, create a symlink to the
815 dest = os.path.join(os.getcwd(), os.path.basename(pkg.orig_tar_gz))
816 os.symlink(pkg.orig_tar_gz, dest)
819 cmd = "dpkg-source -sn -x %s" % (dsc_filename)
820 (result, output) = commands.getstatusoutput(cmd)
822 reject("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
823 reject(utils.prefix_multi_line_string(output, " [dpkg-source output:] "), "")
826 if not Cnf.Find("Dir::Queue::BTSVersionTrack"):
829 # Get the upstream version
830 upstr_version = re_no_epoch.sub('', dsc["version"])
831 if re_strip_revision.search(upstr_version):
832 upstr_version = re_strip_revision.sub('', upstr_version)
834 # Ensure the changelog file exists
835 changelog_filename = "%s-%s/debian/changelog" % (dsc["source"], upstr_version)
836 if not os.path.exists(changelog_filename):
837 reject("%s: debian/changelog not found in extracted source." % (dsc_filename))
840 # Parse the changelog
841 dsc["bts changelog"] = ""
842 changelog_file = utils.open_file(changelog_filename)
843 for line in changelog_file.readlines():
844 m = re_changelog_versions.match(line)
846 dsc["bts changelog"] += line
847 changelog_file.close()
849 # Check we found at least one revision in the changelog
850 if not dsc["bts changelog"]:
851 reject("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
853 ########################################
857 # a) there's no source
858 # or b) reprocess is 2 - we will do this check next time when orig.tar.gz is in 'files'
859 # or c) the orig.tar.gz is MIA
860 if not changes["architecture"].has_key("source") or reprocess == 2 \
861 or pkg.orig_tar_gz == -1:
864 tmpdir = create_tmpdir()
866 # Move into the temporary directory
870 # Get the changelog version history
871 get_changelog_versions(cwd)
873 # Move back and cleanup the temporary tree
876 shutil.rmtree(tmpdir)
878 if errno.errorcode[e.errno] != 'EACCES':
879 utils.fubar("%s: couldn't remove tmp dir for source tree." % (dsc["source"]))
881 reject("%s: source tree could not be cleanly removed." % (dsc["source"]))
882 # We probably have u-r or u-w directories so chmod everything
884 cmd = "chmod -R u+rwx %s" % (tmpdir)
885 result = os.system(cmd)
887 utils.fubar("'%s' failed with result %s." % (cmd, result))
888 shutil.rmtree(tmpdir)
890 utils.fubar("%s: couldn't remove tmp dir for source tree." % (dsc["source"]))
892 ################################################################################
894 # FIXME: should be a debian specific check called from a hook
896 def check_urgency ():
897 if changes["architecture"].has_key("source"):
898 if not changes.has_key("urgency"):
899 changes["urgency"] = Cnf["Urgency::Default"]
900 changes["urgency"] = changes["urgency"].lower()
901 if changes["urgency"] not in Cnf.ValueList("Urgency::Valid"):
902 reject("%s is not a valid urgency; it will be treated as %s by testing." % (changes["urgency"], Cnf["Urgency::Default"]), "Warning: ")
903 changes["urgency"] = Cnf["Urgency::Default"]
905 ################################################################################
908 utils.check_hash(".changes", files, "md5", apt_pkg.md5sum)
909 utils.check_size(".changes", files)
910 utils.check_hash(".dsc", dsc_files, "md5", apt_pkg.md5sum)
911 utils.check_size(".dsc", dsc_files)
913 # This is stupid API, but it'll have to do for now until
914 # we actually have proper abstraction
915 for m in utils.ensure_hashes(changes, dsc, files, dsc_files):
918 ################################################################################
920 # Sanity check the time stamps of files inside debs.
921 # [Files in the near future cause ugly warnings and extreme time
922 # travel can cause errors on extraction]
924 def check_timestamps():
926 def __init__(self, future_cutoff, past_cutoff):
928 self.future_cutoff = future_cutoff
929 self.past_cutoff = past_cutoff
932 self.future_files = {}
933 self.ancient_files = {}
935 def callback(self, Kind,Name,Link,Mode,UID,GID,Size,MTime,Major,Minor):
936 if MTime > self.future_cutoff:
937 self.future_files[Name] = MTime
938 if MTime < self.past_cutoff:
939 self.ancient_files[Name] = MTime
942 future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
943 past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
944 tar = Tar(future_cutoff, past_cutoff)
945 for filename in files.keys():
946 if files[filename]["type"] == "deb":
949 deb_file = utils.open_file(filename)
950 apt_inst.debExtract(deb_file,tar.callback,"control.tar.gz")
953 apt_inst.debExtract(deb_file,tar.callback,"data.tar.gz")
954 except SystemError, e:
955 # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
956 if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
959 apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
962 future_files = tar.future_files.keys()
964 num_future_files = len(future_files)
965 future_file = future_files[0]
966 future_date = tar.future_files[future_file]
967 reject("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
968 % (filename, num_future_files, future_file,
969 time.ctime(future_date)))
971 ancient_files = tar.ancient_files.keys()
973 num_ancient_files = len(ancient_files)
974 ancient_file = ancient_files[0]
975 ancient_date = tar.ancient_files[ancient_file]
976 reject("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
977 % (filename, num_ancient_files, ancient_file,
978 time.ctime(ancient_date)))
980 reject("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
982 ################################################################################
def lookup_uid_from_fingerprint(fpr):
    """
    Return the uid, name, isdm for a given gpg fingerprint

    @param fpr: a 40 byte GPG fingerprint

    @return (uid, name, isdm); (None, None, None) if the fingerprint
            is not known to the database
    """
    cursor = DBConn().cursor()
    # Bind the fingerprint as a query parameter instead of interpolating it
    # into the SQL with "%": string-built SQL is an injection hazard and
    # breaks on any unexpected quoting in the input.
    cursor.execute("SELECT u.uid, u.name, k.debian_maintainer"
                   " FROM fingerprint f JOIN keyrings k ON (f.keyring = k.id), uid u"
                   " WHERE f.uid = u.id AND f.fingerprint = %(fpr)s",
                   {'fpr': fpr})
    qs = cursor.fetchone()
    if qs:
        return qs
    return (None, None, None)
1001 def check_signed_by_key():
1002 """Ensure the .changes is signed by an authorized uploader."""
1004 (uid, uid_name, is_dm) = lookup_uid_from_fingerprint(changes["fingerprint"])
1005 if uid_name == None:
1008 # match claimed name with actual name:
1010 uid, uid_email = changes["fingerprint"], uid
1011 may_nmu, may_sponsor = 1, 1
1012 # XXX by default new dds don't have a fingerprint/uid in the db atm,
1013 # and can't get one in there if we don't allow nmu/sponsorship
1016 may_nmu, may_sponsor = 0, 0
1018 uid_email = "%s@debian.org" % (uid)
1019 may_nmu, may_sponsor = 1, 1
1021 if uid_email in [changes["maintaineremail"], changes["changedbyemail"]]:
1023 elif uid_name in [changes["maintainername"], changes["changedbyname"]]:
1025 if uid_name == "": sponsored = 1
1028 if ("source" in changes["architecture"] and
1029 uid_email and utils.is_email_alias(uid_email)):
1030 sponsor_addresses = utils.gpg_get_key_addresses(changes["fingerprint"])
1031 if (changes["maintaineremail"] not in sponsor_addresses and
1032 changes["changedbyemail"] not in sponsor_addresses):
1033 changes["sponsoremail"] = uid_email
1035 if sponsored and not may_sponsor:
1036 reject("%s is not authorised to sponsor uploads" % (uid))
1038 if not sponsored and not may_nmu:
1040 cursor.execute( "SELECT s.id, s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) WHERE s.source = %(source)s AND s.dm_upload_allowed = 'yes'", changes )
1042 highest_sid, highest_version = None, None
1044 should_reject = True
1046 si = cursor.fetchone()
1050 if highest_version == None or apt_pkg.VersionCompare(si[1], highest_version) == 1:
1052 highest_version = si[1]
1054 if highest_sid == None:
1055 reject("Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version" % changes["source"])
1058 cursor.execute("SELECT m.name FROM maintainer m WHERE m.id IN (SELECT su.maintainer FROM src_uploaders su JOIN source s ON (s.id = su.source) WHERE su.source = %s)" % (highest_sid))
1061 m = cursor.fetchone()
1065 (rfc822, rfc2047, name, email) = utils.fix_maintainer(m[0])
1066 if email == uid_email or name == uid_name:
1070 if should_reject == True:
1071 reject("%s is not in Maintainer or Uploaders of source package %s" % (uid, changes["source"]))
1073 for b in changes["binary"].keys():
1074 for suite in changes["distribution"].keys():
1075 suite_id = DBConn().get_suite_id(suite)
1077 cursor.execute("SELECT DISTINCT s.source FROM source s JOIN binaries b ON (s.id = b.source) JOIN bin_associations ba On (b.id = ba.bin) WHERE b.package = %(package)s AND ba.suite = %(suite)s" , {'package':b, 'suite':suite_id} )
1079 s = cursor.fetchone()
1083 if s[0] != changes["source"]:
1084 reject("%s may not hijack %s from source package %s in suite %s" % (uid, b, s, suite))
1086 for f in files.keys():
1087 if files[f].has_key("byhand"):
1088 reject("%s may not upload BYHAND file %s" % (uid, f))
1089 if files[f].has_key("new"):
1090 reject("%s may not upload NEW file %s" % (uid, f))
1093 ################################################################################
1094 ################################################################################
1096 # If any file of an upload has a recent mtime then chances are good
1097 # the file is still being uploaded.
1099 def upload_too_new():
1101 # Move back to the original directory to get accurate time stamps
1103 os.chdir(pkg.directory)
1104 file_list = pkg.files.keys()
1105 file_list.extend(pkg.dsc_files.keys())
1106 file_list.append(pkg.changes_file)
1109 last_modified = time.time()-os.path.getmtime(f)
1110 if last_modified < int(Cnf["Dinstall::SkipTime"]):
1118 ################################################################################
1121 # changes["distribution"] may not exist in corner cases
1122 # (e.g. unreadable changes files)
1123 if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
1124 changes["distribution"] = {}
1126 (summary, short_summary) = Upload.build_summaries()
1128 # q-unapproved hax0ring
1130 "New": { "is": is_new, "process": acknowledge_new },
1131 "Autobyhand" : { "is" : is_autobyhand, "process": do_autobyhand },
1132 "Byhand" : { "is": is_byhand, "process": do_byhand },
1133 "OldStableUpdate" : { "is": is_oldstableupdate,
1134 "process": do_oldstableupdate },
1135 "StableUpdate" : { "is": is_stableupdate, "process": do_stableupdate },
1136 "Unembargo" : { "is": is_unembargo, "process": queue_unembargo },
1137 "Embargo" : { "is": is_embargo, "process": queue_embargo },
1139 queues = [ "New", "Autobyhand", "Byhand" ]
1140 if Cnf.FindB("Dinstall::SecurityQueueHandling"):
1141 queues += [ "Unembargo", "Embargo" ]
1143 queues += [ "OldStableUpdate", "StableUpdate" ]
1145 (prompt, answer) = ("", "XXX")
1146 if Options["No-Action"] or Options["Automatic"]:
1151 if reject_message.find("Rejected") != -1:
1152 if upload_too_new():
1153 print "SKIP (too new)\n" + reject_message,
1154 prompt = "[S]kip, Quit ?"
1156 print "REJECT\n" + reject_message,
1157 prompt = "[R]eject, Skip, Quit ?"
1158 if Options["Automatic"]:
1163 if queue_info[q]["is"]():
1167 print "%s for %s\n%s%s" % (
1168 qu.upper(), ", ".join(changes["distribution"].keys()),
1169 reject_message, summary),
1170 queuekey = qu[0].upper()
1171 if queuekey in "RQSA":
1173 prompt = "[D]ivert, Skip, Quit ?"
1175 prompt = "[%s]%s, Skip, Quit ?" % (queuekey, qu[1:].lower())
1176 if Options["Automatic"]:
1179 print "ACCEPT\n" + reject_message + summary,
1180 prompt = "[A]ccept, Skip, Quit ?"
1181 if Options["Automatic"]:
1184 while prompt.find(answer) == -1:
1185 answer = utils.our_raw_input(prompt)
1186 m = re_default_answer.match(prompt)
1189 answer = answer[:1].upper()
1192 os.chdir (pkg.directory)
1193 Upload.do_reject(0, reject_message)
1195 accept(summary, short_summary)
1196 remove_from_unchecked()
1197 elif answer == queuekey:
1198 queue_info[qu]["process"](summary, short_summary)
1199 remove_from_unchecked()
def remove_from_unchecked():
    """Delete the upload's files and its .changes from the incoming dir.

    As written, the loop over files.keys() never removed the individual
    upload files; only the .changes file was unlinked.  Remove each file
    of the upload, then the .changes itself.
    """
    os.chdir (pkg.directory)
    for f in files.keys():
        os.unlink(f)
    os.unlink(pkg.changes_file)
1209 ################################################################################
def accept (summary, short_summary):
    # Accept the upload into the archive, then flag any override
    # disparities (section/priority mismatches) for follow-up mail.
    Upload.accept(summary, short_summary)
    Upload.check_override()
1215 ################################################################################
1217 def move_to_dir (dest, perms=0660, changesperms=0664):
1218 utils.move (pkg.changes_file, dest, perms=changesperms)
1219 file_keys = files.keys()
1221 utils.move (f, dest, perms=perms)
1223 ################################################################################
1225 def is_unembargo ():
1226 cursor = DBConn().cursor()
1227 cursor.execute( "SELECT package FROM disembargo WHERE package = %(source)s AND version = %(version)s", changes )
1228 if cursor.fetchone():
1231 oldcwd = os.getcwd()
1232 os.chdir(Cnf["Dir::Queue::Disembargo"])
1233 disdir = os.getcwd()
1236 if pkg.directory == disdir:
1237 if changes["architecture"].has_key("source"):
1238 if Options["No-Action"]: return 1
1240 cursor.execute( "INSERT INTO disembargo (package, version) VALUES ('%(package)s', '%(version)s')",
1242 cursor.execute( "COMMIT" )
1247 def queue_unembargo (summary, short_summary):
1248 print "Moving to UNEMBARGOED holding area."
1249 Logger.log(["Moving to unembargoed", pkg.changes_file])
1251 Upload.dump_vars(Cnf["Dir::Queue::Unembargoed"])
1252 move_to_dir(Cnf["Dir::Queue::Unembargoed"])
1253 Upload.queue_build("unembargoed", Cnf["Dir::Queue::Unembargoed"])
1255 # Check for override disparities
1256 Upload.Subst["__SUMMARY__"] = summary
1257 Upload.check_override()
1259 # Send accept mail, announce to lists, close bugs and check for
1260 # override disparities
1261 if not Cnf["Dinstall::Options::No-Mail"]:
1262 Upload.Subst["__SUITE__"] = ""
1263 mail_message = utils.TemplateSubst(Upload.Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
1264 utils.send_mail(mail_message)
1265 Upload.announce(short_summary, 1)
1267 ################################################################################
1270 # if embargoed queues are enabled always embargo
1273 def queue_embargo (summary, short_summary):
1274 print "Moving to EMBARGOED holding area."
1275 Logger.log(["Moving to embargoed", pkg.changes_file])
1277 Upload.dump_vars(Cnf["Dir::Queue::Embargoed"])
1278 move_to_dir(Cnf["Dir::Queue::Embargoed"])
1279 Upload.queue_build("embargoed", Cnf["Dir::Queue::Embargoed"])
1281 # Check for override disparities
1282 Upload.Subst["__SUMMARY__"] = summary
1283 Upload.check_override()
1285 # Send accept mail, announce to lists, close bugs and check for
1286 # override disparities
1287 if not Cnf["Dinstall::Options::No-Mail"]:
1288 Upload.Subst["__SUITE__"] = ""
1289 mail_message = utils.TemplateSubst(Upload.Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
1290 utils.send_mail(mail_message)
1291 Upload.announce(short_summary, 1)
1293 ################################################################################
def is_stableupdate ():
    """Return 1 if this upload must be held for proposed-updates.

    Sourceful uploads whose source/version is already in proposed-updates
    do not need to be held again.
    """
    if not changes["distribution"].has_key("proposed-updates"):
        return 0

    if not changes["architecture"].has_key("source"):
        pusuite = DBConn().get_suite_id("proposed-updates")
        cursor = DBConn().cursor()
        # Fix: the version placeholder was written as '%(version)s' (inside
        # single quotes); the DB driver quotes bound parameters itself, so
        # the extra quotes corrupt the comparison value.
        cursor.execute( """SELECT 1 FROM source s
                           JOIN src_associations sa ON (s.id = sa.source)
                           WHERE s.source = %(source)s
                           AND s.version = %(version)s
                           AND sa.suite = %(suite)d""",
                        {'source' : changes['source'],
                         'version' : changes['version'],
                         'suite' : pusuite})
        if cursor.fetchone():
            # source is already in proposed-updates so no need to hold
            return 0

    return 1
1317 def do_stableupdate (summary, short_summary):
1318 print "Moving to PROPOSED-UPDATES holding area."
1319 Logger.log(["Moving to proposed-updates", pkg.changes_file])
1321 Upload.dump_vars(Cnf["Dir::Queue::ProposedUpdates"])
1322 move_to_dir(Cnf["Dir::Queue::ProposedUpdates"], perms=0664)
1324 # Check for override disparities
1325 Upload.Subst["__SUMMARY__"] = summary
1326 Upload.check_override()
1328 ################################################################################
def is_oldstableupdate ():
    """Return 1 if this upload must be held for oldstable-proposed-updates.

    Mirrors is_stableupdate for the oldstable suite.
    """
    if not changes["distribution"].has_key("oldstable-proposed-updates"):
        return 0

    if not changes["architecture"].has_key("source"):
        pusuite = DBConn().get_suite_id("oldstable-proposed-updates")
        cursor = DBConn().cursor()
        # Fixes: the SQL literal previously opened with FOUR quotes
        # (leaving a stray '"' at the start of the statement), used a
        # positional %d with a dict parameter set, and never passed the
        # 'suite' value at all.  Use named placeholders throughout,
        # consistent with is_stableupdate.
        cursor.execute( """SELECT 1 FROM source s
                           JOIN src_associations sa ON (s.id = sa.source)
                           WHERE s.source = %(source)s
                           AND s.version = %(version)s
                           AND sa.suite = %(suite)d""",
                        {'source' : changes['source'],
                         'version' : changes['version'],
                         'suite' : pusuite})
        if cursor.fetchone():
            # source is already in oldstable-proposed-updates; no need to hold
            return 0

    return 1
1350 def do_oldstableupdate (summary, short_summary):
1351 print "Moving to OLDSTABLE-PROPOSED-UPDATES holding area."
1352 Logger.log(["Moving to oldstable-proposed-updates", pkg.changes_file])
1354 Upload.dump_vars(Cnf["Dir::Queue::OldProposedUpdates"])
1355 move_to_dir(Cnf["Dir::Queue::OldProposedUpdates"], perms=0664)
1357 # Check for override disparities
1358 Upload.Subst["__SUMMARY__"] = summary
1359 Upload.check_override()
1361 ################################################################################
1363 def is_autobyhand ():
1366 for f in files.keys():
1367 if files[f].has_key("byhand"):
1370 # filename is of form "PKG_VER_ARCH.EXT" where PKG, VER and ARCH
1371 # don't contain underscores, and ARCH doesn't contain dots.
1372 # further VER matches the .changes Version:, and ARCH should be in
1373 # the .changes Architecture: list.
1374 if f.count("_") < 2:
1378 (pckg, ver, archext) = f.split("_", 2)
1379 if archext.count(".") < 1 or changes["version"] != ver:
1383 ABH = Cnf.SubTree("AutomaticByHandPackages")
1384 if not ABH.has_key(pckg) or \
1385 ABH["%s::Source" % (pckg)] != changes["source"]:
1386 print "not match %s %s" % (pckg, changes["source"])
1390 (arch, ext) = archext.split(".", 1)
1391 if arch not in changes["architecture"]:
1395 files[f]["byhand-arch"] = arch
1396 files[f]["byhand-script"] = ABH["%s::Script" % (pckg)]
1398 return any_auto and all_auto
1400 def do_autobyhand (summary, short_summary):
1401 print "Attempting AUTOBYHAND."
1403 for f in files.keys():
1405 if not files[f].has_key("byhand"):
1407 if not files[f].has_key("byhand-script"):
1411 os.system("ls -l %s" % byhandfile)
1412 result = os.system("%s %s %s %s %s" % (
1413 files[f]["byhand-script"], byhandfile,
1414 changes["version"], files[f]["byhand-arch"],
1415 os.path.abspath(pkg.changes_file)))
1417 os.unlink(byhandfile)
1420 print "Error processing %s, left as byhand." % (f)
1424 do_byhand(summary, short_summary)
1426 accept(summary, short_summary)
1428 ################################################################################
1431 for f in files.keys():
1432 if files[f].has_key("byhand"):
1436 def do_byhand (summary, short_summary):
1437 print "Moving to BYHAND holding area."
1438 Logger.log(["Moving to byhand", pkg.changes_file])
1440 Upload.dump_vars(Cnf["Dir::Queue::Byhand"])
1441 move_to_dir(Cnf["Dir::Queue::Byhand"])
1443 # Check for override disparities
1444 Upload.Subst["__SUMMARY__"] = summary
1445 Upload.check_override()
1447 ################################################################################
1450 for f in files.keys():
1451 if files[f].has_key("new"):
1455 def acknowledge_new (summary, short_summary):
1456 Subst = Upload.Subst
1458 print "Moving to NEW holding area."
1459 Logger.log(["Moving to new", pkg.changes_file])
1461 Upload.dump_vars(Cnf["Dir::Queue::New"])
1462 move_to_dir(Cnf["Dir::Queue::New"])
1464 if not Options["No-Mail"]:
1465 print "Sending new ack."
1466 Subst["__SUMMARY__"] = summary
1467 new_ack_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.new")
1468 utils.send_mail(new_ack_message)
1470 ################################################################################
1472 # reprocess is necessary for the case of foo_1.2-1 and foo_1.2-2 in
1473 # Incoming. -1 will reference the .orig.tar.gz, but -2 will not.
1474 # Upload.check_dsc_against_db() can find the .orig.tar.gz but it will
1475 # not have processed it during its checks of -2. If -1 has been
1476 # deleted or otherwise not checked by 'dak process-unchecked', the
1477 # .orig.tar.gz will not have been checked at all. To get round this,
1478 # we force the .orig.tar.gz into the .changes structure and reprocess
1479 # the .changes file.
1481 def process_it (changes_file):
1482 global reprocess, reject_message
1484 # Reset some globals
1487 # Some defaults in case we can't fully process the .changes file
1488 changes["maintainer2047"] = Cnf["Dinstall::MyEmailAddress"]
1489 changes["changedby2047"] = Cnf["Dinstall::MyEmailAddress"]
1492 # Absolutize the filename to avoid the requirement of being in the
1493 # same directory as the .changes file.
1494 pkg.changes_file = os.path.abspath(changes_file)
1496 # Remember where we are so we can come back after cd-ing into the
1497 # holding directory.
1498 pkg.directory = os.getcwd()
1501 # If this is the Real Thing(tm), copy things into a private
1502 # holding directory first to avoid replaceable file races.
1503 if not Options["No-Action"]:
1504 os.chdir(Cnf["Dir::Queue::Holding"])
1505 copy_to_holding(pkg.changes_file)
1506 # Relativize the filename so we use the copy in holding
1507 # rather than the original...
1508 pkg.changes_file = os.path.basename(pkg.changes_file)
1509 changes["fingerprint"] = utils.check_signature(pkg.changes_file, reject)
1510 if changes["fingerprint"]:
1511 valid_changes_p = check_changes()
1516 check_distributions()
1518 valid_dsc_p = check_dsc()
1524 check_signed_by_key()
1525 Upload.update_subst(reject_message)
1531 traceback.print_exc(file=sys.stderr)
1534 # Restore previous WD
1535 os.chdir(pkg.directory)
1537 ###############################################################################
1540 global Cnf, Options, Logger
1542 changes_files = init()
1544 # -n/--dry-run invalidates some other options which would involve things happening
1545 if Options["No-Action"]:
1546 Options["Automatic"] = ""
1548 # Ensure all the arguments we were given are .changes files
1549 for f in changes_files:
1550 if not f.endswith(".changes"):
1551 utils.warn("Ignoring '%s' because it's not a .changes file." % (f))
1552 changes_files.remove(f)
1554 if changes_files == []:
1555 utils.fubar("Need at least one .changes file as an argument.")
1557 # Check that we aren't going to clash with the daily cron job
1559 if not Options["No-Action"] and os.path.exists("%s/daily.lock" % (Cnf["Dir::Lock"])) and not Options["No-Lock"]:
1560 utils.fubar("Archive maintenance in progress. Try again later.")
1562 # Obtain lock if not in no-action mode and initialize the log
1564 if not Options["No-Action"]:
1565 lock_fd = os.open(Cnf["Dinstall::LockFile"], os.O_RDWR | os.O_CREAT)
1567 fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
1569 if errno.errorcode[e.errno] == 'EACCES' or errno.errorcode[e.errno] == 'EAGAIN':
1570 utils.fubar("Couldn't obtain lock; assuming another 'dak process-unchecked' is already running.")
1573 Logger = Upload.Logger = logging.Logger(Cnf, "process-unchecked")
1575 # debian-{devel-,}-changes@lists.debian.org toggles writes access based on this header
1576 bcc = "X-DAK: dak process-unchecked\nX-Katie: $Revision: 1.65 $"
1577 if Cnf.has_key("Dinstall::Bcc"):
1578 Upload.Subst["__BCC__"] = bcc + "\nBcc: %s" % (Cnf["Dinstall::Bcc"])
1580 Upload.Subst["__BCC__"] = bcc
1583 # Sort the .changes files so that we process sourceful ones first
1584 changes_files.sort(utils.changes_compare)
1586 # Process the changes files
1587 for changes_file in changes_files:
1588 print "\n" + changes_file
1590 process_it (changes_file)
1592 if not Options["No-Action"]:
1595 accept_count = Upload.accept_count
1596 accept_bytes = Upload.accept_bytes
1599 if accept_count > 1:
1601 print "Accepted %d package %s, %s." % (accept_count, sets, utils.size_type(int(accept_bytes)))
1602 Logger.log(["total",accept_count,accept_bytes])
1604 if not Options["No-Action"]:
1607 ################################################################################
1609 if __name__ == '__main__':