3 """ Checks Debian packages from Incoming """
4 # Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 2 of the License, or
9 # (at your option) any later version.
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 # Originally based on dinstall by Guy Maor <maor@debian.org>
22 ################################################################################
24 # Computer games don't affect kids. I mean if Pacman affected our generation as
25 # kids, we'd all run around in a darkened room munching pills and listening to
29 ################################################################################
43 import apt_inst, apt_pkg
44 from debian_bundle import deb822
45 from daklib.dbconn import DBConn
46 from daklib.binary import Binary
47 from daklib import logging
48 from daklib import queue
49 from daklib import utils
50 from daklib.dak_exceptions import *
51 from daklib.regexes import re_valid_version, re_valid_pkg_name, re_changelog_versions, \
52 re_strip_revision, re_strip_srcver, re_spacestrip, \
53 re_isanum, re_no_epoch, re_no_revision, re_taint_free, \
54 re_isadeb, re_extract_src_version, re_issource, re_default_answer
58 ################################################################################
61 ################################################################################
72 # Aliases to the real vars in the Upload class; hysterical raisins.
80 ###############################################################################
# init() fragment: the enclosing `def` line is not visible in this chunk.
# Loads the dak configuration, parses dinstall's command-line options and
# binds module-level aliases into the Upload object's package data.
# NOTE(review): each line of this listing carries its original file line
# number; gaps in the numbering mark lines elided from this view.
83 global Cnf, Options, Upload, changes, dsc, dsc_files, files, pkg
87 Cnf = apt_pkg.newConfiguration()
88 apt_pkg.ReadConfigFileISC(Cnf,utils.which_conf_file())
# Option spec: (short flag, long flag, config key[, "HasArg"]).
90 Arguments = [('a',"automatic","Dinstall::Options::Automatic"),
91 ('h',"help","Dinstall::Options::Help"),
92 ('n',"no-action","Dinstall::Options::No-Action"),
93 ('p',"no-lock", "Dinstall::Options::No-Lock"),
94 ('s',"no-mail", "Dinstall::Options::No-Mail"),
95 ('d',"directory", "Dinstall::Options::Directory", "HasArg")]
# Default every supported option to "" so later lookups never KeyError.
97 for i in ["automatic", "help", "no-action", "no-lock", "no-mail",
98 "override-distribution", "version", "directory"]:
99 Cnf["Dinstall::Options::%s" % (i)] = ""
101 changes_files = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
102 Options = Cnf.SubTree("Dinstall::Options")
107 # If we have a directory flag, use it to find our files
108 if Cnf["Dinstall::Options::Directory"] != "":
109 # Note that we clobber the list of files we were given in this case
110 # so warn if the user has done both
111 if len(changes_files) > 0:
112 utils.warn("Directory provided so ignoring files given on command line")
114 changes_files = utils.get_changes_files(Cnf["Dinstall::Options::Directory"])
116 Upload = queue.Upload(Cnf)
# Aliases into Upload.pkg — kept for "hysterical raisins" per the comment
# above this fragment.
118 changes = Upload.pkg.changes
120 dsc_files = Upload.pkg.dsc_files
121 files = Upload.pkg.files
126 ################################################################################
# usage(): print dinstall's command-line help (Python 2 print statement).
# Presumably followed by sys.exit(exit_code) — that line is elided from
# this view; TODO confirm against the full file.
128 def usage (exit_code=0):
129 print """Usage: dinstall [OPTION]... [CHANGES]...
130 -a, --automatic automatic run
131 -h, --help show this help and exit.
132 -n, --no-action don't do anything
133 -p, --no-lock don't check lockfile !! for cron.daily only !!
134 -s, --no-mail don't send any mail
135 -V, --version display the version number and exit"""
138 ################################################################################
def reject (str, prefix="Rejected: "):
    """Accumulate one rejection (or, with a different prefix, warning)
    line into the module-global reject_message buffer.

    @param str: the message text (name shadows the builtin; kept for
                backward compatibility with keyword callers)
    @param prefix: leading tag, e.g. "Rejected: " or "Warning: "
    """
    global reject_message
    reject_message = "%s%s%s\n" % (reject_message, prefix, str)
145 ################################################################################
# create_tmpdir() fragment: the def line, the docstring delimiters and
# the return statement are elided from this view; only part of the
# docstring text and the mkdtemp() call survive.
149 Create a temporary directory that can be used for unpacking files into for
152 tmpdir = tempfile.mkdtemp()
155 ################################################################################
# copy_to_holding(): copy one upload file into Dir::Queue::Holding,
# refusing to clobber an existing copy (O_CREAT|O_EXCL ghost file),
# then record it in the module-global in_holding map so clean_holding()
# can remove it later.
# NOTE(review): the `try:`/`except OSError` wrapper lines around os.open
# and shutil.copy are elided from this view (`e` below is presumably the
# caught OSError — confirm against the full file).
157 def copy_to_holding(filename):
160 base_filename = os.path.basename(filename)
162 dest = Cnf["Dir::Queue::Holding"] + '/' + base_filename
# 0640: group-readable, world-inaccessible (Python 2 octal literal).
164 fd = os.open(dest, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0640)
167 # Shouldn't happen, but will if, for example, someone lists a
168 # file twice in the .changes.
169 if errno.errorcode[e.errno] == 'EEXIST':
170 reject("%s: already exists in holding area; can not overwrite." % (base_filename))
175 shutil.copy(filename, dest)
177 # In either case (ENOENT or EACCES) we want to remove the
178 # O_CREAT | O_EXCLed ghost file, so add the file to the list
179 # of 'in holding' even if it's not the real file.
180 if errno.errorcode[e.errno] == 'ENOENT':
181 reject("%s: can not copy to holding area: file not found." % (base_filename))
184 elif errno.errorcode[e.errno] == 'EACCES':
185 reject("%s: can not copy to holding area: read permission denied." % (base_filename))
190 in_holding[base_filename] = ""
192 ################################################################################
# clean_holding() fragment (def line elided): walk the files recorded in
# in_holding and remove them from the holding directory.  The actual
# removal call is elided from this view; the visible guard refuses any
# entry containing '/' since those should only ever be basenames.
198 os.chdir(Cnf["Dir::Queue::Holding"])
199 for f in in_holding.keys():
200 if os.path.exists(f):
201 if f.find('/') != -1:
202 utils.fubar("WTF? clean_holding() got a file ('%s') with / in it!" % (f))
208 ################################################################################
# check_changes() fragment (def line elided): parse and sanity-check the
# .changes file — mandatory fields, source name, multi-value field
# splitting, maintainer/changed-by parsing, Closes numbers, and duplicate
# filenames in the queue directories.  All failures are accumulated via
# reject() rather than raised.
# NOTE(review): the `try:` lines preceding the except clauses below are
# elided from this view.
211 filename = pkg.changes_file
213 # Parse the .changes field into a dictionary
215 changes.update(utils.parse_changes(filename))
216 except CantOpenError:
217 reject("%s: can't read file." % (filename))
219 except ParseChangesError, line:
220 reject("%s: parse error, can't grok: %s." % (filename, line))
222 except ChangesUnicodeError:
223 reject("%s: changes file not proper utf-8" % (filename))
226 # Parse the Files field from the .changes into another dictionary
228 files.update(utils.build_file_list(changes))
229 except ParseChangesError, line:
230 reject("%s: parse error, can't grok: %s." % (filename, line))
231 except UnknownFormatError, format:
232 reject("%s: unknown format '%s'." % (filename, format))
235 # Check for mandatory fields
236 for i in ("source", "binary", "architecture", "version", "distribution",
237 "maintainer", "files", "changes", "description"):
238 if not changes.has_key(i):
239 reject("%s: Missing mandatory field `%s'." % (filename, i))
240 return 0 # Avoid <undef> errors during later tests
242 # Strip a source version in brackets from the source field
243 if re_strip_srcver.search(changes["source"]):
244 changes["source"] = re_strip_srcver.sub('', changes["source"])
246 # Ensure the source field is a valid package name.
247 if not re_valid_pkg_name.match(changes["source"]):
248 reject("%s: invalid source name '%s'." % (filename, changes["source"]))
250 # Split multi-value fields into a lower-level dictionary
251 for i in ("architecture", "distribution", "binary", "closes"):
252 o = changes.get(i, "")
# (loop body that rebuilds each field as a dict is elided from this view)
259 # Fix the Maintainer: field to be RFC822/2047 compatible
261 (changes["maintainer822"], changes["maintainer2047"],
262 changes["maintainername"], changes["maintaineremail"]) = \
263 utils.fix_maintainer (changes["maintainer"])
264 except ParseMaintError, msg:
265 reject("%s: Maintainer field ('%s') failed to parse: %s" \
266 % (filename, changes["maintainer"], msg))
268 # ...likewise for the Changed-By: field if it exists.
270 (changes["changedby822"], changes["changedby2047"],
271 changes["changedbyname"], changes["changedbyemail"]) = \
272 utils.fix_maintainer (changes.get("changed-by", ""))
273 except ParseMaintError, msg:
274 (changes["changedby822"], changes["changedby2047"],
275 changes["changedbyname"], changes["changedbyemail"]) = \
277 reject("%s: Changed-By field ('%s') failed to parse: %s" \
278 % (filename, changes["changed-by"], msg))
280 # Ensure all the values in Closes: are numbers
281 if changes.has_key("closes"):
282 for i in changes["closes"].keys():
283 if re_isanum.match (i) == None:
284 reject("%s: `%s' from Closes field isn't a number." % (filename, i))
287 # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
288 changes["chopversion"] = re_no_epoch.sub('', changes["version"])
289 changes["chopversion2"] = re_no_revision.sub('', changes["chopversion"])
291 # Check there isn't already a changes file of the same name in one
292 # of the queue directories.
293 base_filename = os.path.basename(filename)
294 for d in [ "Accepted", "Byhand", "Done", "New", "ProposedUpdates", "OldProposedUpdates" ]:
295 if os.path.exists(Cnf["Dir::Queue::%s" % (d) ]+'/'+base_filename):
296 reject("%s: a file with this name already exists in the %s directory." % (base_filename, d))
298 # Check the .changes is non-empty
300 reject("%s: nothing to do (Files field is empty)." % (base_filename))
305 ################################################################################
# NOTE(review): lines splitting each SuiteMappings entry into `args` /
# `mtype` / `suite` are elided from this view; `database` is referenced
# below but no `database` import is visible in this chunk — confirm it
# is imported in the full file.
307 def check_distributions():
308 "Check and map the Distribution field of a .changes file."
310 # Handle suite mappings
311 for m in Cnf.ValueList("SuiteMappings"):
314 if mtype == "map" or mtype == "silent-map":
315 (source, dest) = args[1:3]
316 if changes["distribution"].has_key(source):
317 del changes["distribution"][source]
318 changes["distribution"][dest] = 1
319 if mtype != "silent-map":
320 reject("Mapping %s to %s." % (source, dest),"")
321 if changes.has_key("distribution-version"):
322 if changes["distribution-version"].has_key(source):
323 changes["distribution-version"][source]=dest
324 elif mtype == "map-unreleased":
325 (source, dest) = args[1:3]
326 if changes["distribution"].has_key(source):
327 for arch in changes["architecture"].keys():
328 if arch not in database.get_suite_architectures(source):
329 reject("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch),"")
330 del changes["distribution"][source]
331 changes["distribution"][dest] = 1
333 elif mtype == "ignore":
335 if changes["distribution"].has_key(suite):
336 del changes["distribution"][suite]
337 reject("Ignoring %s as a target suite." % (suite), "Warning: ")
338 elif mtype == "reject":
340 if changes["distribution"].has_key(suite):
341 reject("Uploads to %s are not accepted." % (suite))
342 elif mtype == "propup-version":
343 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes
345 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
346 if changes["distribution"].has_key(args[1]):
347 changes.setdefault("distribution-version", {})
348 for suite in args[2:]: changes["distribution-version"][suite]=suite
350 # Ensure there is (still) a target distribution
351 if changes["distribution"].keys() == []:
352 reject("no valid distribution.")
354 # Ensure target distributions exist
355 for suite in changes["distribution"].keys():
356 if not Cnf.has_key("Suite::%s" % (suite)):
357 reject("Unknown distribution `%s'." % (suite))
359 ################################################################################
# check_files() fragment (def line elided): per-file validation of every
# entry in the upload — duplicate/taint/readability checks, full control
# data validation for .deb/.udeb binaries, filename-vs-control
# consistency, source-existence checks, and per-suite component /
# override / pool-location checks.  Many `try:` / `else:` / assignment
# lines are elided from this view (visible in the line-number gaps).
364 archive = utils.where_am_i()
365 file_keys = files.keys()
367 # if reprocess is 2 we've already done this and we're checking
368 # things again for the new .orig.tar.gz.
369 # [Yes, I'm fully aware of how disgusting this is]
370 if not Options["No-Action"] and reprocess < 2:
372 os.chdir(pkg.directory)
377 # Check there isn't already a .changes or .dak file of the same name in
378 # the proposed-updates "CopyChanges" or "CopyDotDak" storage directories.
379 # [NB: this check must be done post-suite mapping]
380 base_filename = os.path.basename(pkg.changes_file)
381 dot_dak_filename = base_filename[:-8]+".dak"
382 for suite in changes["distribution"].keys():
383 copychanges = "Suite::%s::CopyChanges" % (suite)
384 if Cnf.has_key(copychanges) and \
385 os.path.exists(Cnf[copychanges]+"/"+base_filename):
386 reject("%s: a file with this name already exists in %s" \
387 % (base_filename, Cnf[copychanges]))
389 copy_dot_dak = "Suite::%s::CopyDotDak" % (suite)
390 if Cnf.has_key(copy_dot_dak) and \
391 os.path.exists(Cnf[copy_dot_dak]+"/"+dot_dak_filename):
392 reject("%s: a file with this name already exists in %s" \
393 % (dot_dak_filename, Cnf[copy_dot_dak]))
399 cursor = DBConn().cursor()
400 # Check for packages that have moved from one component to another
401 # STU: this should probably be changed to not join on architecture, suite tables but instead to used their cached name->id mappings from DBConn
402 cursor.execute("""PREPARE moved_pkg_q AS
403 SELECT c.name FROM binaries b, bin_associations ba, suite s, location l,
404 component c, architecture a, files f
405 WHERE b.package = $1 AND s.suite_name = $2
406 AND (a.arch_string = $3 OR a.arch_string = 'all')
407 AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id
408 AND f.location = l.id
409 AND l.component = c.id
410 AND b.file = f.id""")
# Per-file loop (the `for f in file_keys:` header is elided from view).
413 # Ensure the file does not already exist in one of the accepted directories
414 for d in [ "Accepted", "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
415 if not Cnf.has_key("Dir::Queue::%s" % (d)): continue
416 if os.path.exists(Cnf["Dir::Queue::%s" % (d) ] + '/' + f):
417 reject("%s file already exists in the %s directory." % (f, d))
418 if not re_taint_free.match(f):
419 reject("!!WARNING!! tainted filename: '%s'." % (f))
420 # Check the file is readable
421 if os.access(f, os.R_OK) == 0:
422 # When running in -n, copy_to_holding() won't have
423 # generated the reject_message, so we need to.
424 if Options["No-Action"]:
425 if os.path.exists(f):
426 reject("Can't read `%s'. [permission denied]" % (f))
428 reject("Can't read `%s'. [file not found]" % (f))
429 files[f]["type"] = "unreadable"
431 # If it's byhand skip remaining checks
432 if files[f]["section"] == "byhand" or files[f]["section"][:4] == "raw-":
433 files[f]["byhand"] = 1
434 files[f]["type"] = "byhand"
435 # Checks for a binary package...
436 elif re_isadeb.match(f):
438 files[f]["type"] = "deb"
440 # Extract package control information
441 deb_file = utils.open_file(f)
443 control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
445 reject("%s: debExtractControl() raised %s." % (f, sys.exc_type))
447 # Can't continue, none of the checks on control would work.
451 # Check for mandatory fields
452 for field in [ "Package", "Architecture", "Version" ]:
453 if control.Find(field) == None:
454 reject("%s: No %s field in control." % (f, field))
458 # Ensure the package name matches the one give in the .changes
459 if not changes["binary"].has_key(control.Find("Package", "")):
460 reject("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
462 # Validate the package field
463 package = control.Find("Package")
464 if not re_valid_pkg_name.match(package):
465 reject("%s: invalid package name '%s'." % (f, package))
467 # Validate the version field
468 version = control.Find("Version")
469 if not re_valid_version.match(version):
470 reject("%s: invalid version number '%s'." % (f, version))
472 # Ensure the architecture of the .deb is one we know about.
473 default_suite = Cnf.get("Dinstall::DefaultSuite", "Unstable")
474 architecture = control.Find("Architecture")
475 upload_suite = changes["distribution"].keys()[0]
476 if architecture not in database.get_suite_architectures(default_suite) and architecture not in database.get_suite_architectures(upload_suite):
477 reject("Unknown architecture '%s'." % (architecture))
479 # Ensure the architecture of the .deb is one of the ones
480 # listed in the .changes.
481 if not changes["architecture"].has_key(architecture):
482 reject("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
484 # Sanity-check the Depends field
485 depends = control.Find("Depends")
487 reject("%s: Depends field is empty." % (f))
489 # Sanity-check the Provides field
490 provides = control.Find("Provides")
492 provide = re_spacestrip.sub('', provides)
494 reject("%s: Provides field is empty." % (f))
495 prov_list = provide.split(",")
496 for prov in prov_list:
497 if not re_valid_pkg_name.match(prov):
498 reject("%s: Invalid Provides field content %s." % (f, prov))
501 # Check the section & priority match those given in the .changes (non-fatal)
502 if control.Find("Section") and files[f]["section"] != "" and files[f]["section"] != control.Find("Section"):
503 reject("%s control file lists section as `%s', but changes file has `%s'." % (f, control.Find("Section", ""), files[f]["section"]), "Warning: ")
504 if control.Find("Priority") and files[f]["priority"] != "" and files[f]["priority"] != control.Find("Priority"):
505 reject("%s control file lists priority as `%s', but changes file has `%s'." % (f, control.Find("Priority", ""), files[f]["priority"]),"Warning: ")
507 files[f]["package"] = package
508 files[f]["architecture"] = architecture
509 files[f]["version"] = version
510 files[f]["maintainer"] = control.Find("Maintainer", "")
511 if f.endswith(".udeb"):
512 files[f]["dbtype"] = "udeb"
513 elif f.endswith(".deb"):
514 files[f]["dbtype"] = "deb"
516 reject("%s is neither a .deb or a .udeb." % (f))
517 files[f]["source"] = control.Find("Source", files[f]["package"])
518 # Get the source version
519 source = files[f]["source"]
# "pkg (1.2-3)" form: split package and version out of the Source field.
521 if source.find("(") != -1:
522 m = re_extract_src_version.match(source)
524 source_version = m.group(2)
525 if not source_version:
526 source_version = files[f]["version"]
527 files[f]["source package"] = source
528 files[f]["source version"] = source_version
530 # Ensure the filename matches the contents of the .deb
531 m = re_isadeb.match(f)
533 file_package = m.group(1)
534 if files[f]["package"] != file_package:
535 reject("%s: package part of filename (%s) does not match package name in the %s (%s)." % (f, file_package, files[f]["dbtype"], files[f]["package"]))
536 epochless_version = re_no_epoch.sub('', control.Find("Version"))
538 file_version = m.group(2)
539 if epochless_version != file_version:
540 reject("%s: version part of filename (%s) does not match package version in the %s (%s)." % (f, file_version, files[f]["dbtype"], epochless_version))
542 file_architecture = m.group(3)
543 if files[f]["architecture"] != file_architecture:
544 reject("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % (f, file_architecture, files[f]["dbtype"], files[f]["architecture"]))
546 # Check for existent source
547 source_version = files[f]["source version"]
548 source_package = files[f]["source package"]
549 if changes["architecture"].has_key("source"):
550 if source_version != changes["version"]:
551 reject("source version (%s) for %s doesn't match changes version %s." % (source_version, f, changes["version"]))
553 # Check in the SQL database
554 if not Upload.source_exists(source_package, source_version, changes["distribution"].keys()):
555 # Check in one of the other directories
556 source_epochless_version = re_no_epoch.sub('', source_version)
557 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
558 if os.path.exists(Cnf["Dir::Queue::Byhand"] + '/' + dsc_filename):
559 files[f]["byhand"] = 1
560 elif os.path.exists(Cnf["Dir::Queue::New"] + '/' + dsc_filename):
564 for myq in ["Accepted", "Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
565 if Cnf.has_key("Dir::Queue::%s" % (myq)):
566 if os.path.exists(Cnf["Dir::Queue::"+myq] + '/' + dsc_filename):
569 if not dsc_file_exists:
570 reject("no source found for %s %s (%s)." % (source_package, source_version, f))
571 # Check the version and for file overwrites
572 reject(Upload.check_binary_against_db(f),"")
574 Binary(f).scan_package()
576 # Checks for a source package...
578 m = re_issource.match(f)
581 files[f]["package"] = m.group(1)
582 files[f]["version"] = m.group(2)
583 files[f]["type"] = m.group(3)
585 # Ensure the source package name matches the Source filed in the .changes
586 if changes["source"] != files[f]["package"]:
587 reject("%s: changes file doesn't say %s for Source" % (f, files[f]["package"]))
589 # Ensure the source version matches the version in the .changes file
590 if files[f]["type"] == "orig.tar.gz":
591 changes_version = changes["chopversion2"]
593 changes_version = changes["chopversion"]
594 if changes_version != files[f]["version"]:
595 reject("%s: should be %s according to changes file." % (f, changes_version))
597 # Ensure the .changes lists source in the Architecture field
598 if not changes["architecture"].has_key("source"):
599 reject("%s: changes file doesn't list `source' in Architecture field." % (f))
601 # Check the signature of a .dsc file
602 if files[f]["type"] == "dsc":
603 dsc["fingerprint"] = utils.check_signature(f, reject)
605 files[f]["architecture"] = "source"
607 # Not a binary or source package? Assume byhand...
609 files[f]["byhand"] = 1
610 files[f]["type"] = "byhand"
612 # Per-suite file checks
613 files[f]["oldfiles"] = {}
614 for suite in changes["distribution"].keys():
616 if files[f].has_key("byhand"):
619 # Handle component mappings
620 for m in Cnf.ValueList("ComponentMappings"):
621 (source, dest) = m.split()
622 if files[f]["component"] == source:
623 files[f]["original component"] = source
624 files[f]["component"] = dest
626 # Ensure the component is valid for the target suite
# NOTE(review): "Suite:%s::Components" (single colon) looks like a typo
# for "Suite::%s::Components" — compare the ValueList key on the next
# line.  If so, the has_key guard never matches and this component
# validation is effectively disabled.
627 if Cnf.has_key("Suite:%s::Components" % (suite)) and \
628 files[f]["component"] not in Cnf.ValueList("Suite::%s::Components" % (suite)):
629 reject("unknown component `%s' for suite `%s'." % (files[f]["component"], suite))
632 # Validate the component
633 component = files[f]["component"]
634 component_id = DBConn().get_component_id(component)
635 if component_id == -1:
636 reject("file '%s' has unknown component '%s'." % (f, component))
639 # See if the package is NEW
640 if not Upload.in_override_p(files[f]["package"], files[f]["component"], suite, files[f].get("dbtype",""), f):
643 # Validate the priority
644 if files[f]["priority"].find('/') != -1:
645 reject("file '%s' has invalid priority '%s' [contains '/']." % (f, files[f]["priority"]))
647 # Determine the location
648 location = Cnf["Dir::Pool"]
649 location_id = DBConn().get_location_id(location, component, archive)
650 if location_id == -1:
651 reject("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (component, archive))
652 files[f]["location id"] = location_id
654 # Check the md5sum & size against existing files (if any)
655 files[f]["pool name"] = utils.poolify (changes["source"], files[f]["component"])
656 files_id = DBConn().get_files_id(files[f]["pool name"] + f, files[f]["size"], files[f]["md5sum"], files[f]["location id"])
658 reject("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
660 reject("md5sum and/or size mismatch on existing copy of %s." % (f))
661 files[f]["files id"] = files_id
663 # Check for packages that have moved from one component to another
664 files[f]['suite'] = suite
665 cursor.execute("""EXECUTE moved_pkg_q( %(package)s, %(suite)s, %(architecture)s )""", ( files[f] ) )
666 ql = cursor.fetchone()
# NOTE(review): fetchone() returns a single row, so ql[0][0] indexes into
# the first column's value — verify this is intended (the guard line
# between fetchone() and this is elided from view).
668 files[f]["othercomponents"] = ql[0][0]
670 # If the .changes file says it has source, it must have source.
671 if changes["architecture"].has_key("source"):
673 reject("no source found and Architecture line in changes mention source.")
675 if not has_binaries and Cnf.FindB("Dinstall::Reject::NoSourceOnly"):
676 reject("source only uploads are not supported.")
678 ###############################################################################
# check_dsc() fragment (def line elided): parse and validate the .dsc of
# a source upload — mandatory fields, name/version syntax, Format 1.0
# enforcement, maintainer and build-depends parsing, .dsc/.changes
# version agreement, tarball presence, and database cross-checks.  If
# the orig.tar.gz lives in Incoming rather than the upload, it is pulled
# into holding and registered in `files`.  `try:` lines preceding the
# except clauses below are elided from this view.
683 # Ensure there is source to check
684 if not changes["architecture"].has_key("source"):
689 for f in files.keys():
690 if files[f]["type"] == "dsc":
692 reject("can not process a .changes file with multiple .dsc's.")
697 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
699 reject("source uploads must contain a dsc file")
702 # Parse the .dsc file
704 dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
705 except CantOpenError:
706 # if not -n copy_to_holding() will have done this for us...
707 if Options["No-Action"]:
708 reject("%s: can't read file." % (dsc_filename))
709 except ParseChangesError, line:
710 reject("%s: parse error, can't grok: %s." % (dsc_filename, line))
711 except InvalidDscError, line:
712 reject("%s: syntax error on line %s." % (dsc_filename, line))
713 except ChangesUnicodeError:
714 reject("%s: dsc file not proper utf-8." % (dsc_filename))
716 # Build up the file list of files mentioned by the .dsc
718 dsc_files.update(utils.build_file_list(dsc, is_a_dsc=1))
719 except NoFilesFieldError:
720 reject("%s: no Files: field." % (dsc_filename))
722 except UnknownFormatError, format:
723 reject("%s: unknown format '%s'." % (dsc_filename, format))
725 except ParseChangesError, line:
726 reject("%s: parse error, can't grok: %s." % (dsc_filename, line))
729 # Enforce mandatory fields
730 for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
731 if not dsc.has_key(i):
732 reject("%s: missing mandatory field `%s'." % (dsc_filename, i))
735 # Validate the source and version fields
736 if not re_valid_pkg_name.match(dsc["source"]):
737 reject("%s: invalid source name '%s'." % (dsc_filename, dsc["source"]))
738 if not re_valid_version.match(dsc["version"]):
739 reject("%s: invalid version number '%s'." % (dsc_filename, dsc["version"]))
741 # Bumping the version number of the .dsc breaks extraction by stable's
742 # dpkg-source. So let's not do that...
743 if dsc["format"] != "1.0":
744 reject("%s: incompatible 'Format' version produced by a broken version of dpkg-dev 1.9.1{3,4}." % (dsc_filename))
746 # Validate the Maintainer field
748 utils.fix_maintainer (dsc["maintainer"])
749 except ParseMaintError, msg:
750 reject("%s: Maintainer field ('%s') failed to parse: %s" \
751 % (dsc_filename, dsc["maintainer"], msg))
753 # Validate the build-depends field(s)
754 for field_name in [ "build-depends", "build-depends-indep" ]:
755 field = dsc.get(field_name)
757 # Check for broken dpkg-dev lossage...
758 if field.startswith("ARRAY"):
759 reject("%s: invalid %s field produced by a broken version of dpkg-dev (1.10.11)" % (dsc_filename, field_name.title()))
761 # Have apt try to parse them...
763 apt_pkg.ParseSrcDepends(field)
765 reject("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
768 # Ensure the version number in the .dsc matches the version number in the .changes
769 epochless_dsc_version = re_no_epoch.sub('', dsc["version"])
770 changes_version = files[dsc_filename]["version"]
771 if epochless_dsc_version != files[dsc_filename]["version"]:
772 reject("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
774 # Ensure there is a .tar.gz in the .dsc file
776 for f in dsc_files.keys():
777 m = re_issource.match(f)
779 reject("%s: %s in Files field not recognised as source." % (dsc_filename, f))
782 if ftype == "orig.tar.gz" or ftype == "tar.gz":
785 reject("%s: no .tar.gz or .orig.tar.gz in 'Files' field." % (dsc_filename))
787 # Ensure source is newer than existing source in target suites
788 reject(Upload.check_source_against_db(dsc_filename),"")
# is_in_incoming: path of an orig.tar.gz already present in Incoming
# rather than in this upload (per check_dsc_against_db's contract here).
790 (reject_msg, is_in_incoming) = Upload.check_dsc_against_db(dsc_filename)
791 reject(reject_msg, "")
793 if not Options["No-Action"]:
794 copy_to_holding(is_in_incoming)
795 orig_tar_gz = os.path.basename(is_in_incoming)
796 files[orig_tar_gz] = {}
797 files[orig_tar_gz]["size"] = os.stat(orig_tar_gz)[stat.ST_SIZE]
798 files[orig_tar_gz]["md5sum"] = dsc_files[orig_tar_gz]["md5sum"]
799 files[orig_tar_gz]["sha1sum"] = dsc_files[orig_tar_gz]["sha1sum"]
800 files[orig_tar_gz]["sha256sum"] = dsc_files[orig_tar_gz]["sha256sum"]
801 files[orig_tar_gz]["section"] = files[dsc_filename]["section"]
802 files[orig_tar_gz]["priority"] = files[dsc_filename]["priority"]
803 files[orig_tar_gz]["component"] = files[dsc_filename]["component"]
804 files[orig_tar_gz]["type"] = "orig.tar.gz"
809 ################################################################################
# Several interior lines (loop exits, `else:` branches, the failure
# `return`s) are elided from this view.
811 def get_changelog_versions(source_dir):
812 """Extract the source package and (optionally) grab the
813 version history out of debian/changelog for the BTS."""
815 # Find the .dsc (again)
817 for f in files.keys():
818 if files[f]["type"] == "dsc":
821 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
825 # Create a symlink mirror of the source files in our temporary directory
826 for f in files.keys():
827 m = re_issource.match(f)
829 src = os.path.join(source_dir, f)
830 # If a file is missing for whatever reason, give up.
831 if not os.path.exists(src):
834 if ftype == "orig.tar.gz" and pkg.orig_tar_gz:
836 dest = os.path.join(os.getcwd(), f)
837 os.symlink(src, dest)
839 # If the orig.tar.gz is not a part of the upload, create a symlink to the
842 dest = os.path.join(os.getcwd(), os.path.basename(pkg.orig_tar_gz))
843 os.symlink(pkg.orig_tar_gz, dest)
# -sn: do not copy the .orig tarball; extraction happens in the tmpdir.
846 cmd = "dpkg-source -sn -x %s" % (dsc_filename)
847 (result, output) = commands.getstatusoutput(cmd)
849 reject("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
850 reject(utils.prefix_multi_line_string(output, " [dpkg-source output:] "), "")
# BTS version tracking is optional; bail out early when unconfigured.
853 if not Cnf.Find("Dir::Queue::BTSVersionTrack"):
856 # Get the upstream version
857 upstr_version = re_no_epoch.sub('', dsc["version"])
858 if re_strip_revision.search(upstr_version):
859 upstr_version = re_strip_revision.sub('', upstr_version)
861 # Ensure the changelog file exists
862 changelog_filename = "%s-%s/debian/changelog" % (dsc["source"], upstr_version)
863 if not os.path.exists(changelog_filename):
864 reject("%s: debian/changelog not found in extracted source." % (dsc_filename))
867 # Parse the changelog
868 dsc["bts changelog"] = ""
869 changelog_file = utils.open_file(changelog_filename)
870 for line in changelog_file.readlines():
871 m = re_changelog_versions.match(line)
873 dsc["bts changelog"] += line
874 changelog_file.close()
876 # Check we found at least one revision in the changelog
877 if not dsc["bts changelog"]:
878 reject("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
880 ########################################
# check_source() fragment (def line elided): extract the source tree in
# a temporary directory to harvest changelog versions, then tear the
# tree down — retrying the rmtree after a chmod -R u+rwx pass, since
# extracted sources often contain unreadable/unwritable directories.
# The `try:`/`except OSError, e:` wrappers around the rmtree calls are
# elided from this view (`e` below is presumably the caught OSError).
884 # a) there's no source
885 # or b) reprocess is 2 - we will do this check next time when orig.tar.gz is in 'files'
886 # or c) the orig.tar.gz is MIA
887 if not changes["architecture"].has_key("source") or reprocess == 2 \
888 or pkg.orig_tar_gz == -1:
891 tmpdir = create_tmpdir()
893 # Move into the temporary directory
897 # Get the changelog version history
898 get_changelog_versions(cwd)
900 # Move back and cleanup the temporary tree
903 shutil.rmtree(tmpdir)
905 if errno.errorcode[e.errno] != 'EACCES':
906 utils.fubar("%s: couldn't remove tmp dir for source tree." % (dsc["source"]))
908 reject("%s: source tree could not be cleanly removed." % (dsc["source"]))
909 # We probably have u-r or u-w directories so chmod everything
911 cmd = "chmod -R u+rwx %s" % (tmpdir)
912 result = os.system(cmd)
914 utils.fubar("'%s' failed with result %s." % (cmd, result))
915 shutil.rmtree(tmpdir)
917 utils.fubar("%s: couldn't remove tmp dir for source tree." % (dsc["source"]))
919 ################################################################################
921 # FIXME: should be a debian specific check called from a hook
def check_urgency ():
    """Validate and normalise the Urgency field of a source upload.

    A missing urgency is filled in from Urgency::Default; the value is
    lower-cased; an urgency not listed in Urgency::Valid produces a
    non-fatal warning via reject() and is replaced with the default.
    Binary-only uploads (no "source" architecture) are left untouched.
    """
    # Urgency only matters when source is part of the upload.
    if "source" in changes["architecture"]:
        # `in` instead of the deprecated dict.has_key (works on Python
        # 2.2+ and keeps the code forward-portable).
        if "urgency" not in changes:
            changes["urgency"] = Cnf["Urgency::Default"]
        changes["urgency"] = changes["urgency"].lower()
        if changes["urgency"] not in Cnf.ValueList("Urgency::Valid"):
            reject("%s is not a valid urgency; it will be treated as %s by testing." % (changes["urgency"], Cnf["Urgency::Default"]), "Warning: ")
            changes["urgency"] = Cnf["Urgency::Default"]
932 ################################################################################
    # Verify the md5 checksums and sizes declared in the .changes and the
    # .dsc, then ensure the additional hash fields (sha1/sha256) are
    # present and consistent.
    # NOTE(review): the "def check_hashes ():" header is not visible in
    # this chunk.
    utils.check_hash(".changes", files, "md5", apt_pkg.md5sum)
    utils.check_size(".changes", files)
    utils.check_hash(".dsc", dsc_files, "md5", apt_pkg.md5sum)
    utils.check_size(".dsc", dsc_files)

    # This is stupid API, but it'll have to do for now until
    # we actually have proper abstraction
    for m in utils.ensure_hashes(changes, dsc, files, dsc_files):
        # NOTE(review): the loop body (presumably reject(m)) is not
        # visible in this chunk.
945 ################################################################################
947 # Sanity check the time stamps of files inside debs.
948 # [Files in the near future cause ugly warnings and extreme time
949 # travel can cause errors on extraction]
def check_timestamps():
    # Sanity check the time stamps of files inside the debs: files dated
    # in the (near) future cause ugly warnings, extreme time travel can
    # break extraction.
    # NOTE(review): the "class Tar:" header enclosing the two methods
    # below, and its reset() method, are not visible in this chunk.
        def __init__(self, future_cutoff, past_cutoff):
            # Cutoffs are Unix timestamps; members outside
            # [past_cutoff, future_cutoff] are collected per name.
            self.future_cutoff = future_cutoff
            self.past_cutoff = past_cutoff
            self.future_files = {}
            self.ancient_files = {}

        # apt_inst extraction callback: record the mtime of any member
        # outside the accepted window.
        def callback(self, Kind,Name,Link,Mode,UID,GID,Size,MTime,Major,Minor):
            if MTime > self.future_cutoff:
                self.future_files[Name] = MTime
            if MTime < self.past_cutoff:
                self.ancient_files[Name] = MTime

    # Grace period ahead of "now" and the oldest acceptable year both come
    # from configuration.
    future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
    past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
    tar = Tar(future_cutoff, past_cutoff)
    for filename in files.keys():
        if files[filename]["type"] == "deb":
            # NOTE(review): the try: wrapping the extraction below is not
            # visible in this chunk.
            deb_file = utils.open_file(filename)
            apt_inst.debExtract(deb_file,tar.callback,"control.tar.gz")
            apt_inst.debExtract(deb_file,tar.callback,"data.tar.gz")
            except SystemError, e:
                # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
                if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
                    # NOTE(review): the guard body (re-raising the error)
                    # is not visible in this chunk.
                apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")

            # Report one example from each offending set.
            future_files = tar.future_files.keys()
            num_future_files = len(future_files)
            future_file = future_files[0]
            future_date = tar.future_files[future_file]
            reject("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
                   % (filename, num_future_files, future_file,
                      time.ctime(future_date)))

            ancient_files = tar.ancient_files.keys()
            num_ancient_files = len(ancient_files)
            ancient_file = ancient_files[0]
            ancient_date = tar.ancient_files[ancient_file]
            reject("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
                   % (filename, num_ancient_files, ancient_file,
                      time.ctime(ancient_date)))
            reject("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1009 ################################################################################
def lookup_uid_from_fingerprint(fpr):
    """
    Return the uid,name,isdm for a given gpg fingerprint

    @param fpr: a 40 byte GPG fingerprint

    @return (uid, name, isdm)
    """
    # NOTE(review): fpr is interpolated straight into the SQL; it comes
    # from gpgv output rather than the uploader directly, but a bound
    # parameter would still be safer -- confirm.
    cursor = DBConn().cursor()
    cursor.execute( "SELECT u.uid, u.name, k.debian_maintainer FROM fingerprint f JOIN keyrings k ON (f.keyring=k.id), uid u WHERE f.uid = u.id AND f.fingerprint = '%s'" % (fpr))
    qs = cursor.fetchone()
    # NOTE(review): the branch returning the fetched row on success is not
    # visible in this chunk; an unknown fingerprint yields three Nones.
    return (None, None, None)
def check_signed_by_key():
    """Ensure the .changes is signed by an authorized uploader."""
    (uid, uid_name, is_dm) = lookup_uid_from_fingerprint(changes["fingerprint"])
    if uid_name == None:
        # NOTE(review): the fallback for a fingerprint with no uid in the
        # database is not visible in this chunk.

    # match claimed name with actual name:
    # NOTE(review): the conditions selecting between the three
    # (uid_email, may_nmu, may_sponsor) assignments below are not visible
    # in this chunk.
    uid, uid_email = changes["fingerprint"], uid
    may_nmu, may_sponsor = 1, 1
    # XXX by default new dds don't have a fingerprint/uid in the db atm,
    # and can't get one in there if we don't allow nmu/sponsorship
    may_nmu, may_sponsor = 0, 0
    uid_email = "%s@debian.org" % (uid)
    may_nmu, may_sponsor = 1, 1

    # Sponsorship detection: the signer matches neither Maintainer nor
    # Changed-By.
    if uid_email in [changes["maintaineremail"], changes["changedbyemail"]]:
    elif uid_name in [changes["maintainername"], changes["changedbyname"]]:
        if uid_name == "": sponsored = 1

    # Record the sponsor address for sourceful uploads signed with an
    # email alias that is not in Maintainer/Changed-By.
    if ("source" in changes["architecture"] and
        uid_email and utils.is_email_alias(uid_email)):
        sponsor_addresses = utils.gpg_get_key_addresses(changes["fingerprint"])
        if (changes["maintaineremail"] not in sponsor_addresses and
            changes["changedbyemail"] not in sponsor_addresses):
            changes["sponsoremail"] = uid_email

    if sponsored and not may_sponsor:
        reject("%s is not authorised to sponsor uploads" % (uid))

    # Debian Maintainer (DM) restrictions: the source must carry
    # DM-Upload-Allowed: yes in its most recent version and the signer
    # must appear in Maintainer/Uploaders.
    # NOTE(review): the "cursor = DBConn().cursor()" setup is not visible
    # in this chunk.
    if not sponsored and not may_nmu:
        cursor.execute( "SELECT s.id, s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) WHERE s.source = %(source)s AND s.dm_upload_allowed = 'yes'", changes )

        highest_sid, highest_version = None, None

        should_reject = True
        # NOTE(review): the loop advancing the cursor over all result rows
        # is not visible in this chunk.
        si = cursor.fetchone()
        if highest_version == None or apt_pkg.VersionCompare(si[1], highest_version) == 1:
            highest_version = si[1]

        if highest_sid == None:
            reject("Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version" % changes["source"])

        cursor.execute("SELECT m.name FROM maintainer m WHERE m.id IN (SELECT su.maintainer FROM src_uploaders su JOIN source s ON (s.id = su.source) WHERE su.source = %s)" % (highest_sid))

        m = cursor.fetchone()

        (rfc822, rfc2047, name, email) = utils.fix_maintainer(m[0])
        if email == uid_email or name == uid_name:
            # NOTE(review): clearing should_reject here is not visible in
            # this chunk.

        if should_reject == True:
            reject("%s is not in Maintainer or Uploaders of source package %s" % (uid, changes["source"]))

        # A DM may not take over ("hijack") a binary that another source
        # package currently provides in the target suite.
        for b in changes["binary"].keys():
            for suite in changes["distribution"].keys():
                suite_id = DBConn().get_suite_id(suite)

                cursor.execute("SELECT DISTINCT s.source FROM source s JOIN binaries b ON (s.id = b.source) JOIN bin_associations ba On (b.id = ba.bin) WHERE b.package = %(package)s AND ba.suite = %(suite)s" , {'package':b, 'suite':suite_id} )

                s = cursor.fetchone()
                if s[0] != changes["source"]:
                    reject("%s may not hijack %s from source package %s in suite %s" % (uid, b, s, suite))

        # DMs may not upload files that are BYHAND or NEW.
        for f in files.keys():
            if files[f].has_key("byhand"):
                reject("%s may not upload BYHAND file %s" % (uid, f))
            if files[f].has_key("new"):
                reject("%s may not upload NEW file %s" % (uid, f))
1120 ################################################################################
1121 ################################################################################
1123 # If any file of an upload has a recent mtime then chances are good
1124 # the file is still being uploaded.
def upload_too_new():
    # Heuristic for "still being uploaded": any file of the upload
    # modified within the last Dinstall::SkipTime seconds counts as too
    # new.
    # Move back to the original directory to get accurate time stamps
    os.chdir(pkg.directory)
    file_list = pkg.files.keys()
    file_list.extend(pkg.dsc_files.keys())
    file_list.append(pkg.changes_file)
    # NOTE(review): the "for f in file_list:" header, the too_new flag
    # handling and the chdir back to the holding area are not visible in
    # this chunk.
    last_modified = time.time()-os.path.getmtime(f)
    if last_modified < int(Cnf["Dinstall::SkipTime"]):
1145 ################################################################################
    # Decide (interactively or automatically) what to do with the upload:
    # reject, accept, or divert to one of the special queues.
    # NOTE(review): the "def action ():" header is not visible in this
    # chunk.
    # changes["distribution"] may not exist in corner cases
    # (e.g. unreadable changes files)
    if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
        changes["distribution"] = {}

    (summary, short_summary) = Upload.build_summaries()

    # q-unapproved hax0ring
    # Queue name -> {membership test, handler}; evaluation order is given
    # by the "queues" list below.
    # NOTE(review): the dict's opening "queue_info = {" line is not
    # visible in this chunk.
        "New": { "is": is_new, "process": acknowledge_new },
        "Autobyhand" : { "is" : is_autobyhand, "process": do_autobyhand },
        "Byhand" : { "is": is_byhand, "process": do_byhand },
        "OldStableUpdate" : { "is": is_oldstableupdate,
                              "process": do_oldstableupdate },
        "StableUpdate" : { "is": is_stableupdate, "process": do_stableupdate },
        "Unembargo" : { "is": is_unembargo, "process": queue_unembargo },
        "Embargo" : { "is": is_embargo, "process": queue_embargo },

    queues = [ "New", "Autobyhand", "Byhand" ]
    if Cnf.FindB("Dinstall::SecurityQueueHandling"):
        queues += [ "Unembargo", "Embargo" ]
    queues += [ "OldStableUpdate", "StableUpdate" ]

    (prompt, answer) = ("", "XXX")
    if Options["No-Action"] or Options["Automatic"]:
        # NOTE(review): the automatic default-answer assignment is not
        # visible in this chunk.

    if reject_message.find("Rejected") != -1:
        if upload_too_new():
            print "SKIP (too new)\n" + reject_message,
            prompt = "[S]kip, Quit ?"
        # NOTE(review): the else branch separating the two prompts is not
        # visible in this chunk.
        print "REJECT\n" + reject_message,
        prompt = "[R]eject, Skip, Quit ?"
        if Options["Automatic"]:

    # First matching queue wins; its initial letter becomes the default
    # action key unless it clashes with R/Q/S/A.
    if queue_info[q]["is"]():
        print "%s for %s\n%s%s" % (
            qu.upper(), ", ".join(changes["distribution"].keys()),
            reject_message, summary),
        queuekey = qu[0].upper()
        if queuekey in "RQSA":
            prompt = "[D]ivert, Skip, Quit ?"
        prompt = "[%s]%s, Skip, Quit ?" % (queuekey, qu[1:].lower())
        if Options["Automatic"]:
        print "ACCEPT\n" + reject_message + summary,
        prompt = "[A]ccept, Skip, Quit ?"
        if Options["Automatic"]:

    # Keep prompting until the answer matches one of the offered keys;
    # an empty answer takes the bracketed default.
    while prompt.find(answer) == -1:
        answer = utils.our_raw_input(prompt)
        m = re_default_answer.match(prompt)
        answer = answer[:1].upper()

    # Dispatch on the chosen action.
    # NOTE(review): the if/elif chain around the branches below is only
    # partially visible in this chunk.
    os.chdir (pkg.directory)
    Upload.do_reject(0, reject_message)
    accept(summary, short_summary)
    remove_from_unchecked()
    elif answer == queuekey:
        queue_info[qu]["process"](summary, short_summary)
        remove_from_unchecked()
def remove_from_unchecked():
    # Delete the upload from the incoming directory: every listed file,
    # then the .changes itself.
    # NOTE(review): the loop body (presumably os.unlink(f)) is not visible
    # in this chunk.
    os.chdir (pkg.directory)
    for f in files.keys():
    os.unlink(pkg.changes_file)
1236 ################################################################################
def accept (summary, short_summary):
    """Accept the upload through the shared Upload queue object, then
    report any override disparities it finds."""
    Upload.accept(summary, short_summary)
    Upload.check_override()
1242 ################################################################################
def move_to_dir (dest, perms=0660, changesperms=0664):
    # Move the .changes (mode changesperms) and every file of the upload
    # (mode perms) into dest.
    # NOTE(review): the "for f in file_keys:" loop header is not visible
    # in this chunk.
    utils.move (pkg.changes_file, dest, perms=changesperms)
    file_keys = files.keys()
    utils.move (f, dest, perms=perms)
1250 ################################################################################
def is_unembargo ():
    # An upload is "unembargoed" when it is already recorded in the
    # disembargo table, or when a sourceful upload arrives via the
    # disembargo queue directory (in which case it gets recorded).
    cursor = DBConn().cursor()
    cursor.execute( "SELECT package FROM disembargo WHERE package = %(source)s AND version = %(version)s", changes )
    if cursor.fetchone():
        # NOTE(review): the "return 1" for an already-disembargoed package
        # is not visible in this chunk.

    # Resolve the disembargo queue directory so paths compare reliably.
    oldcwd = os.getcwd()
    os.chdir(Cnf["Dir::Queue::Disembargo"])
    disdir = os.getcwd()
    # NOTE(review): the chdir back to oldcwd is not visible in this chunk.

    if pkg.directory == disdir:
        if changes["architecture"].has_key("source"):
            if Options["No-Action"]: return 1

            # NOTE(review): the parameter-dict continuation of this INSERT
            # is not visible; also the quoted '%(package)s' placeholders
            # look like double quoting on top of the driver's own -- confirm.
            cursor.execute( "INSERT INTO disembargo (package, version) VALUES ('%(package)s', '%(version)s')",
            cursor.execute( "COMMIT" )
1274 def queue_unembargo (summary, short_summary):
1275 print "Moving to UNEMBARGOED holding area."
1276 Logger.log(["Moving to unembargoed", pkg.changes_file])
1278 Upload.dump_vars(Cnf["Dir::Queue::Unembargoed"])
1279 move_to_dir(Cnf["Dir::Queue::Unembargoed"])
1280 Upload.queue_build("unembargoed", Cnf["Dir::Queue::Unembargoed"])
1282 # Check for override disparities
1283 Upload.Subst["__SUMMARY__"] = summary
1284 Upload.check_override()
1286 # Send accept mail, announce to lists, close bugs and check for
1287 # override disparities
1288 if not Cnf["Dinstall::Options::No-Mail"]:
1289 Upload.Subst["__SUITE__"] = ""
1290 mail_message = utils.TemplateSubst(Upload.Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
1291 utils.send_mail(mail_message)
1292 Upload.announce(short_summary, 1)
1294 ################################################################################
1297 # if embargoed queues are enabled always embargo
1300 def queue_embargo (summary, short_summary):
1301 print "Moving to EMBARGOED holding area."
1302 Logger.log(["Moving to embargoed", pkg.changes_file])
1304 Upload.dump_vars(Cnf["Dir::Queue::Embargoed"])
1305 move_to_dir(Cnf["Dir::Queue::Embargoed"])
1306 Upload.queue_build("embargoed", Cnf["Dir::Queue::Embargoed"])
1308 # Check for override disparities
1309 Upload.Subst["__SUMMARY__"] = summary
1310 Upload.check_override()
1312 # Send accept mail, announce to lists, close bugs and check for
1313 # override disparities
1314 if not Cnf["Dinstall::Options::No-Mail"]:
1315 Upload.Subst["__SUITE__"] = ""
1316 mail_message = utils.TemplateSubst(Upload.Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
1317 utils.send_mail(mail_message)
1318 Upload.announce(short_summary, 1)
1320 ################################################################################
def is_stableupdate ():
    """Return 1 if this upload targets proposed-updates and must be held
    for approval, 0 otherwise.

    Sourceful uploads are always held.  Binary-only uploads are held only
    when the matching source is not yet present in proposed-updates.
    """
    if not changes["distribution"].has_key("proposed-updates"):
        return 0

    if not changes["architecture"].has_key("source"):
        pusuite = DBConn().get_suite_id("proposed-updates")
        cursor = DBConn().cursor()
        # Bound parameters must NOT be wrapped in quotes: the DB driver
        # quotes values itself, so '%(version)s' would produce a
        # doubly-quoted literal that can never match.
        cursor.execute( """SELECT 1 FROM source s
                           JOIN src_associations sa ON (s.id = sa.source)
                           WHERE s.source = %(source)s
                           AND s.version = %(version)s
                           AND sa.suite = %(suite)d""",
                        {'source' : changes['source'],
                         'version' : changes['version'],
                         'suite' : pusuite})
        if cursor.fetchone():
            # source is already in proposed-updates so no need to hold
            return 0

    return 1
1344 def do_stableupdate (summary, short_summary):
1345 print "Moving to PROPOSED-UPDATES holding area."
1346 Logger.log(["Moving to proposed-updates", pkg.changes_file])
1348 Upload.dump_vars(Cnf["Dir::Queue::ProposedUpdates"])
1349 move_to_dir(Cnf["Dir::Queue::ProposedUpdates"], perms=0664)
1351 # Check for override disparities
1352 Upload.Subst["__SUMMARY__"] = summary
1353 Upload.check_override()
1355 ################################################################################
def is_oldstableupdate ():
    """Return 1 if this upload targets oldstable-proposed-updates and must
    be held for approval, 0 otherwise.

    Mirrors is_stableupdate(): sourceful uploads are always held;
    binary-only uploads are held unless the source is already present.

    Fixes over the previous version: the SQL string opened with four
    quotes (embedding a stray '"' at the start of the statement), and the
    suite used a positional "%d" placeholder that cannot be satisfied by
    the dict parameter mapping -- it must be "%(suite)d".
    """
    if not changes["distribution"].has_key("oldstable-proposed-updates"):
        return 0

    if not changes["architecture"].has_key("source"):
        pusuite = DBConn().get_suite_id("oldstable-proposed-updates")
        cursor = DBConn().cursor()
        cursor.execute( """SELECT 1 FROM source s
                           JOIN src_associations sa ON (s.id = sa.source)
                           WHERE s.source = %(source)s
                           AND s.version = %(version)s
                           AND sa.suite = %(suite)d""",
                        {'source' : changes['source'],
                         'version' : changes['version'],
                         'suite' : pusuite})
        if cursor.fetchone():
            # source is already in oldstable-proposed-updates; no hold
            return 0

    return 1
1377 def do_oldstableupdate (summary, short_summary):
1378 print "Moving to OLDSTABLE-PROPOSED-UPDATES holding area."
1379 Logger.log(["Moving to oldstable-proposed-updates", pkg.changes_file])
1381 Upload.dump_vars(Cnf["Dir::Queue::OldProposedUpdates"])
1382 move_to_dir(Cnf["Dir::Queue::OldProposedUpdates"], perms=0664)
1384 # Check for override disparities
1385 Upload.Subst["__SUMMARY__"] = summary
1386 Upload.check_override()
1388 ################################################################################
def is_autobyhand ():
    # An upload is AUTOBYHAND only when every BYHAND file matches an entry
    # in the AutomaticByHandPackages config tree.
    # NOTE(review): the any_auto/all_auto flag initialisation and the
    # continue/flag-clearing bodies of the guards below are not visible in
    # this chunk.
    for f in files.keys():
        if files[f].has_key("byhand"):

            # filename is of form "PKG_VER_ARCH.EXT" where PKG, VER and ARCH
            # don't contain underscores, and ARCH doesn't contain dots.
            # further VER matches the .changes Version:, and ARCH should be in
            # the .changes Architecture: list.
            if f.count("_") < 2:

            (pckg, ver, archext) = f.split("_", 2)
            if archext.count(".") < 1 or changes["version"] != ver:

            ABH = Cnf.SubTree("AutomaticByHandPackages")
            if not ABH.has_key(pckg) or \
               ABH["%s::Source" % (pckg)] != changes["source"]:
                print "not match %s %s" % (pckg, changes["source"])

            (arch, ext) = archext.split(".", 1)
            if arch not in changes["architecture"]:

            # Remember how to process this file later.
            files[f]["byhand-arch"] = arch
            files[f]["byhand-script"] = ABH["%s::Script" % (pckg)]

    return any_auto and all_auto
def do_autobyhand (summary, short_summary):
    # Run the configured script for every AUTOBYHAND file; if anything is
    # left unprocessed the upload falls back to the manual BYHAND queue,
    # otherwise it is accepted.
    print "Attempting AUTOBYHAND."
    # NOTE(review): the broken flag initialisation, the continue bodies of
    # the guards, the byhandfile assignment, and the result check are not
    # visible in this chunk.
    for f in files.keys():
        if not files[f].has_key("byhand"):
        if not files[f].has_key("byhand-script"):

        os.system("ls -l %s" % byhandfile)
        # NOTE(review): filenames are interpolated into a shell command
        # here; they have passed earlier taint checks, but confirm.
        result = os.system("%s %s %s %s %s" % (
            files[f]["byhand-script"], byhandfile,
            changes["version"], files[f]["byhand-arch"],
            os.path.abspath(pkg.changes_file)))
        os.unlink(byhandfile)
        print "Error processing %s, left as byhand." % (f)

    do_byhand(summary, short_summary)
    accept(summary, short_summary)
1455 ################################################################################
    # An upload is BYHAND when any of its files carries the "byhand" mark.
    # NOTE(review): the "def is_byhand ():" header and the return
    # statements are not visible in this chunk.
    for f in files.keys():
        if files[f].has_key("byhand"):
1463 def do_byhand (summary, short_summary):
1464 print "Moving to BYHAND holding area."
1465 Logger.log(["Moving to byhand", pkg.changes_file])
1467 Upload.dump_vars(Cnf["Dir::Queue::Byhand"])
1468 move_to_dir(Cnf["Dir::Queue::Byhand"])
1470 # Check for override disparities
1471 Upload.Subst["__SUMMARY__"] = summary
1472 Upload.check_override()
1474 ################################################################################
    # An upload is NEW when any of its files carries the "new" mark.
    # NOTE(review): the "def is_new ():" header and the return statements
    # are not visible in this chunk.
    for f in files.keys():
        if files[f].has_key("new"):
1482 def acknowledge_new (summary, short_summary):
1483 Subst = Upload.Subst
1485 print "Moving to NEW holding area."
1486 Logger.log(["Moving to new", pkg.changes_file])
1488 Upload.dump_vars(Cnf["Dir::Queue::New"])
1489 move_to_dir(Cnf["Dir::Queue::New"])
1491 if not Options["No-Mail"]:
1492 print "Sending new ack."
1493 Subst["__SUMMARY__"] = summary
1494 new_ack_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.new")
1495 utils.send_mail(new_ack_message)
1497 ################################################################################
1499 # reprocess is necessary for the case of foo_1.2-1 and foo_1.2-2 in
1500 # Incoming. -1 will reference the .orig.tar.gz, but -2 will not.
1501 # Upload.check_dsc_against_db() can find the .orig.tar.gz but it will
1502 # not have processed it during it's checks of -2. If -1 has been
1503 # deleted or otherwise not checked by 'dak process-unchecked', the
1504 # .orig.tar.gz will not have been checked at all. To get round this,
1505 # we force the .orig.tar.gz into the .changes structure and reprocess
1506 # the .changes file.
def process_it (changes_file):
    # Run the full check pipeline over one .changes file, from inside the
    # holding directory, and restore the original cwd afterwards.
    global reprocess, reject_message

    # Reset some globals
    # NOTE(review): the actual reset calls are not visible in this chunk.

    # Some defaults in case we can't fully process the .changes file
    changes["maintainer2047"] = Cnf["Dinstall::MyEmailAddress"]
    changes["changedby2047"] = Cnf["Dinstall::MyEmailAddress"]

    # Absolutize the filename to avoid the requirement of being in the
    # same directory as the .changes file.
    pkg.changes_file = os.path.abspath(changes_file)

    # Remember where we are so we can come back after cd-ing into the
    # holding directory.
    pkg.directory = os.getcwd()

    # NOTE(review): the try/finally structure wrapping the body below is
    # not visible in this chunk.
    # If this is the Real Thing(tm), copy things into a private
    # holding directory first to avoid replacable file races.
    if not Options["No-Action"]:
        os.chdir(Cnf["Dir::Queue::Holding"])
        copy_to_holding(pkg.changes_file)
        # Relativize the filename so we use the copy in holding
        # rather than the original...
        pkg.changes_file = os.path.basename(pkg.changes_file)

    # Signature check gates everything else; each subsequent check only
    # runs when the previous stage validated.
    changes["fingerprint"] = utils.check_signature(pkg.changes_file, reject)
    if changes["fingerprint"]:
        valid_changes_p = check_changes()
    check_distributions()
    valid_dsc_p = check_dsc()
    check_signed_by_key()
    Upload.update_subst(reject_message)
    traceback.print_exc(file=sys.stderr)

    # Restore previous WD
    os.chdir(pkg.directory)
1564 ###############################################################################
    # Program entry point: parse arguments, take the archive lock, and
    # feed each .changes file through process_it().
    # NOTE(review): the "def main ():" header is not visible in this chunk.
    global Cnf, Options, Logger

    changes_files = init()

    # -n/--dry-run invalidates some other options which would involve things happening
    if Options["No-Action"]:
        Options["Automatic"] = ""

    # Ensure all the arguments we were given are .changes files
    # NOTE(review): removing items from a list while iterating it skips
    # the element following each removal -- iterating a copy would be
    # safer; confirm before changing.
    for f in changes_files:
        if not f.endswith(".changes"):
            utils.warn("Ignoring '%s' because it's not a .changes file." % (f))
            changes_files.remove(f)

    if changes_files == []:
        utils.fubar("Need at least one .changes file as an argument.")

    # Check that we aren't going to clash with the daily cron job
    if not Options["No-Action"] and os.path.exists("%s/daily.lock" % (Cnf["Dir::Lock"])) and not Options["No-Lock"]:
        utils.fubar("Archive maintenance in progress. Try again later.")

    # Obtain lock if not in no-action mode and initialize the log
    if not Options["No-Action"]:
        lock_fd = os.open(Cnf["Dinstall::LockFile"], os.O_RDWR | os.O_CREAT)
        # NOTE(review): the try/except IOError wrapping this non-blocking
        # lock attempt (binding e) is not visible in this chunk.
        fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
        if errno.errorcode[e.errno] == 'EACCES' or errno.errorcode[e.errno] == 'EAGAIN':
            utils.fubar("Couldn't obtain lock; assuming another 'dak process-unchecked' is already running.")
        Logger = Upload.Logger = logging.Logger(Cnf, "process-unchecked")

    # debian-{devel-,}-changes@lists.debian.org toggles writes access based on this header
    bcc = "X-DAK: dak process-unchecked\nX-Katie: $Revision: 1.65 $"
    # NOTE(review): the else separating the two __BCC__ assignments is not
    # visible in this chunk.
    if Cnf.has_key("Dinstall::Bcc"):
        Upload.Subst["__BCC__"] = bcc + "\nBcc: %s" % (Cnf["Dinstall::Bcc"])
        Upload.Subst["__BCC__"] = bcc

    # Sort the .changes files so that we process sourceful ones first
    changes_files.sort(utils.changes_compare)

    # Process the changes files
    for changes_file in changes_files:
        print "\n" + changes_file
        process_it (changes_file)

    # Summarise what was accepted this run.
    if not Options["No-Action"]:
        accept_count = Upload.accept_count
        accept_bytes = Upload.accept_bytes

    # NOTE(review): the pluralisation branches setting "sets" are not
    # visible in this chunk.
    if accept_count > 1:
        print "Accepted %d package %s, %s." % (accept_count, sets, utils.size_type(int(accept_bytes)))
        Logger.log(["total",accept_count,accept_bytes])

    # NOTE(review): the lock release / Logger.close() body of this final
    # guard is not visible in this chunk.
    if not Options["No-Action"]:
1634 ################################################################################
# Script entry point.  The guard previously had no body in this file (a
# dangling "if" is a syntax error); restore the conventional call.
if __name__ == '__main__':
    main()