3 """ Checks Debian packages from Incoming """
4 # Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 2 of the License, or
9 # (at your option) any later version.
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 # Originally based on dinstall by Guy Maor <maor@debian.org>
22 ################################################################################
24 # Computer games don't affect kids. I mean if Pacman affected our generation as
25 # kids, we'd all run around in a darkened room munching pills and listening to
29 ################################################################################
45 from debian_bundle import deb822
46 from daklib.dbconn import DBConn
47 from daklib.binary import Binary
48 from daklib import logging
49 from daklib import queue
50 from daklib import utils
51 from daklib.dak_exceptions import *
52 from daklib.regexes import re_valid_version, re_valid_pkg_name, re_changelog_versions, \
53 re_strip_revision, re_strip_srcver, re_spacestrip, \
54 re_isanum, re_no_epoch, re_no_revision, re_taint_free, \
55 re_isadeb, re_extract_src_version, re_issource, re_default_answer
59 ################################################################################
62 ################################################################################
73 # Aliases to the real vars in the Upload class; hysterical raisins.
81 ###############################################################################
# init(): load the apt-style dak configuration, parse the command line,
# and bind the module-level convenience aliases (changes, dsc, dsc_files,
# files, pkg) to the state held by the queue.Upload object.
# NOTE(review): the `def init():` header itself is not shown in this view.
84 global Cnf, Options, Upload, changes, dsc, dsc_files, files, pkg
88 Cnf = apt_pkg.newConfiguration()
89 apt_pkg.ReadConfigFileISC(Cnf,utils.which_conf_file())
# Each switch maps onto a Dinstall::Options::* configuration entry;
# only --directory takes an argument.
91 Arguments = [('a',"automatic","Dinstall::Options::Automatic"),
92 ('h',"help","Dinstall::Options::Help"),
93 ('n',"no-action","Dinstall::Options::No-Action"),
94 ('p',"no-lock", "Dinstall::Options::No-Lock"),
95 ('s',"no-mail", "Dinstall::Options::No-Mail"),
96 ('d',"directory", "Dinstall::Options::Directory", "HasArg")]
# Pre-seed every option with "" so later lookups never KeyError.
98 for i in ["automatic", "help", "no-action", "no-lock", "no-mail",
99 "override-distribution", "version", "directory"]:
100 Cnf["Dinstall::Options::%s" % (i)] = ""
102 changes_files = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
103 Options = Cnf.SubTree("Dinstall::Options")
108 # If we have a directory flag, use it to find our files
109 if Cnf["Dinstall::Options::Directory"] != "":
110 # Note that we clobber the list of files we were given in this case
111 # so warn if the user has done both
112 if len(changes_files) > 0:
113 utils.warn("Directory provided so ignoring files given on command line")
115 changes_files = utils.get_changes_files(Cnf["Dinstall::Options::Directory"])
117 Upload = queue.Upload(Cnf)
# Aliases into Upload.pkg: mutating these dicts mutates the Upload state.
119 changes = Upload.pkg.changes
121 dsc_files = Upload.pkg.dsc_files
122 files = Upload.pkg.files
127 ################################################################################
127 ################################################################################
129 def usage (exit_code=0):
# Print the command-line help text.
# NOTE(review): this is expected to terminate via sys.exit(exit_code)
# after printing (the exit call is not visible here) -- confirm.
130 print """Usage: dinstall [OPTION]... [CHANGES]...
131 -a, --automatic automatic run
132 -h, --help show this help and exit.
133 -n, --no-action don't do anything
134 -p, --no-lock don't check lockfile !! for cron.daily only !!
135 -s, --no-mail don't send any mail
136 -V, --version display the version number and exit"""
139 ################################################################################
def reject (str, prefix="Rejected: "):
    """Accumulate one rejection/warning line into the global reject_message.

    str    -- the reason text (parameter name shadows the builtin, but is
              kept so the externally visible signature is unchanged)
    prefix -- prepended tag; callers pass "" for bare informational lines
              and "Warning: " for non-fatal issues
    """
    global reject_message
    reject_message = "%s%s%s\n" % (reject_message, prefix, str)
146 ################################################################################
150 Create a temporary directory that can be used for unpacking files into for
153 tmpdir = tempfile.mkdtemp()
156 ################################################################################
158 def copy_to_holding(filename):
# Copy one upload file into Dir::Queue::Holding, using an O_CREAT|O_EXCL
# open first so that duplicates are detected atomically.  Records the
# file in the module-level in_holding dict either way.
# NOTE(review): the surrounding try/except OSError scaffolding is not
# visible in this view; the errno checks below belong to those handlers.
161 base_filename = os.path.basename(filename)
163 dest = Cnf["Dir::Queue::Holding"] + '/' + base_filename
# 0640: group-readable but not world-readable (octal, Python 2 literal).
165 fd = os.open(dest, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0640)
168 # Shouldn't happen, but will if, for example, someone lists a
169 # file twice in the .changes.
170 if errno.errorcode[e.errno] == 'EEXIST':
171 reject("%s: already exists in holding area; can not overwrite." % (base_filename))
176 shutil.copy(filename, dest)
178 # In either case (ENOENT or EACCES) we want to remove the
179 # O_CREAT | O_EXCLed ghost file, so add the file to the list
180 # of 'in holding' even if it's not the real file.
181 if errno.errorcode[e.errno] == 'ENOENT':
182 reject("%s: can not copy to holding area: file not found." % (base_filename))
185 elif errno.errorcode[e.errno] == 'EACCES':
186 reject("%s: can not copy to holding area: read permission denied." % (base_filename))
# Mark the file as present in holding so clean_holding() removes it later.
191 in_holding[base_filename] = ""
193 ################################################################################
# clean_holding(): remove everything recorded in in_holding from the
# holding directory.  (def header not shown in this view.)
199 os.chdir(Cnf["Dir::Queue::Holding"])
200 for f in in_holding.keys():
201 if os.path.exists(f):
# Defensive: a path component in a holding entry would let the unlink
# escape the holding directory, so abort hard instead.
202 if f.find('/') != -1:
203 utils.fubar("WTF? clean_holding() got a file ('%s') with / in it!" % (f))
209 ################################################################################
# check_changes(): parse the .changes file into the global 'changes' and
# 'files' dicts and validate its fields.  Every problem is recorded via
# reject(); a hard parse failure returns early.
# NOTE(review): the `def check_changes():` header and the try: lines that
# pair with the except clauses below are not visible in this view.
212 filename = pkg.changes_file
214 # Parse the .changes field into a dictionary
216 changes.update(utils.parse_changes(filename))
217 except CantOpenError:
218 reject("%s: can't read file." % (filename))
220 except ParseChangesError, line:
221 reject("%s: parse error, can't grok: %s." % (filename, line))
223 except ChangesUnicodeError:
224 reject("%s: changes file not proper utf-8" % (filename))
227 # Parse the Files field from the .changes into another dictionary
229 files.update(utils.build_file_list(changes))
230 except ParseChangesError, line:
231 reject("%s: parse error, can't grok: %s." % (filename, line))
232 except UnknownFormatError, format:
233 reject("%s: unknown format '%s'." % (filename, format))
236 # Check for mandatory fields
237 for i in ("source", "binary", "architecture", "version", "distribution",
238 "maintainer", "files", "changes", "description"):
239 if not changes.has_key(i):
240 reject("%s: Missing mandatory field `%s'." % (filename, i))
241 return 0 # Avoid <undef> errors during later tests
243 # Strip a source version in brackets from the source field
244 if re_strip_srcver.search(changes["source"]):
245 changes["source"] = re_strip_srcver.sub('', changes["source"])
247 # Ensure the source field is a valid package name.
248 if not re_valid_pkg_name.match(changes["source"]):
249 reject("%s: invalid source name '%s'." % (filename, changes["source"]))
251 # Split multi-value fields into a lower-level dictionary
252 for i in ("architecture", "distribution", "binary", "closes"):
253 o = changes.get(i, "")
260 # Fix the Maintainer: field to be RFC822/2047 compatible
262 (changes["maintainer822"], changes["maintainer2047"],
263 changes["maintainername"], changes["maintaineremail"]) = \
264 utils.fix_maintainer (changes["maintainer"])
265 except ParseMaintError, msg:
266 reject("%s: Maintainer field ('%s') failed to parse: %s" \
267 % (filename, changes["maintainer"], msg))
269 # ...likewise for the Changed-By: field if it exists.
271 (changes["changedby822"], changes["changedby2047"],
272 changes["changedbyname"], changes["changedbyemail"]) = \
273 utils.fix_maintainer (changes.get("changed-by", ""))
274 except ParseMaintError, msg:
275 (changes["changedby822"], changes["changedby2047"],
276 changes["changedbyname"], changes["changedbyemail"]) = \
278 reject("%s: Changed-By field ('%s') failed to parse: %s" \
279 % (filename, changes["changed-by"], msg))
281 # Ensure all the values in Closes: are numbers
282 if changes.has_key("closes"):
283 for i in changes["closes"].keys():
284 if re_isanum.match (i) == None:
285 reject("%s: `%s' from Closes field isn't a number." % (filename, i))
288 # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
289 changes["chopversion"] = re_no_epoch.sub('', changes["version"])
290 changes["chopversion2"] = re_no_revision.sub('', changes["chopversion"])
292 # Check there isn't already a changes file of the same name in one
293 # of the queue directories.
294 base_filename = os.path.basename(filename)
295 for d in [ "Accepted", "Byhand", "Done", "New", "ProposedUpdates", "OldProposedUpdates" ]:
296 if os.path.exists(Cnf["Dir::Queue::%s" % (d) ]+'/'+base_filename):
297 reject("%s: a file with this name already exists in the %s directory." % (base_filename, d))
299 # Check the .changes is non-empty
301 reject("%s: nothing to do (Files field is empty)." % (base_filename))
306 ################################################################################
306 ################################################################################
308 def check_distributions():
309 "Check and map the Distribution field of a .changes file."
311 # Handle suite mappings
# Each SuiteMappings entry is a whitespace-separated rule whose first
# word is the mapping type.  NOTE(review): the lines that split the
# entry into `args`/`mtype` are not visible in this view -- presumably
# `args = m.split(); mtype = args[0]` -- confirm.
312 for m in Cnf.ValueList("SuiteMappings"):
315 if mtype == "map" or mtype == "silent-map":
316 (source, dest) = args[1:3]
317 if changes["distribution"].has_key(source):
318 del changes["distribution"][source]
319 changes["distribution"][dest] = 1
320 if mtype != "silent-map":
321 reject("Mapping %s to %s." % (source, dest),"")
# Keep any distribution-version entry consistent with the mapping.
322 if changes.has_key("distribution-version"):
323 if changes["distribution-version"].has_key(source):
324 changes["distribution-version"][source]=dest
325 elif mtype == "map-unreleased":
326 (source, dest) = args[1:3]
327 if changes["distribution"].has_key(source):
# Only remap when at least one upload architecture is not yet
# part of the source suite.
328 for arch in changes["architecture"].keys():
329 if arch not in DBConn().get_suite_architectures(source):
330 reject("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch),"")
331 del changes["distribution"][source]
332 changes["distribution"][dest] = 1
334 elif mtype == "ignore":
336 if changes["distribution"].has_key(suite):
337 del changes["distribution"][suite]
338 reject("Ignoring %s as a target suite." % (suite), "Warning: ")
339 elif mtype == "reject":
341 if changes["distribution"].has_key(suite):
342 reject("Uploads to %s are not accepted." % (suite))
343 elif mtype == "propup-version":
344 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
346 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
347 if changes["distribution"].has_key(args[1]):
348 changes.setdefault("distribution-version", {})
349 for suite in args[2:]: changes["distribution-version"][suite]=suite
351 # Ensure there is (still) a target distribution
352 if changes["distribution"].keys() == []:
353 reject("no valid distribution.")
355 # Ensure target distributions exist
356 for suite in changes["distribution"].keys():
357 if not Cnf.has_key("Suite::%s" % (suite)):
358 reject("Unknown distribution `%s'." % (suite))
360 ################################################################################
# check_files(): validate every file listed in the .changes -- existence
# and readability, .deb control-field sanity, filename/metadata agreement,
# per-suite component and pool-location checks, and source/binary
# cross-checks.  All problems are accumulated through reject().
# NOTE(review): the `def check_files():` header and several try:/else:
# lines paired with the clauses below are not visible in this view.
365 archive = utils.where_am_i()
366 file_keys = files.keys()
368 # if reprocess is 2 we've already done this and we're checking
369 # things again for the new .orig.tar.gz.
370 # [Yes, I'm fully aware of how disgusting this is]
371 if not Options["No-Action"] and reprocess < 2:
373 os.chdir(pkg.directory)
378 # Check there isn't already a .changes or .dak file of the same name in
379 # the proposed-updates "CopyChanges" or "CopyDotDak" storage directories.
380 # [NB: this check must be done post-suite mapping]
381 base_filename = os.path.basename(pkg.changes_file)
382 dot_dak_filename = base_filename[:-8]+".dak"
383 for suite in changes["distribution"].keys():
384 copychanges = "Suite::%s::CopyChanges" % (suite)
385 if Cnf.has_key(copychanges) and \
386 os.path.exists(Cnf[copychanges]+"/"+base_filename):
387 reject("%s: a file with this name already exists in %s" \
388 % (base_filename, Cnf[copychanges]))
390 copy_dot_dak = "Suite::%s::CopyDotDak" % (suite)
391 if Cnf.has_key(copy_dot_dak) and \
392 os.path.exists(Cnf[copy_dot_dak]+"/"+dot_dak_filename):
393 reject("%s: a file with this name already exists in %s" \
394 % (dot_dak_filename, Cnf[copy_dot_dak]))
400 cursor = DBConn().cursor()
401 # Check for packages that have moved from one component to another
402 # STU: this should probably be changed to not join on architecture, suite tables but instead to used their cached name->id mappings from DBConn
403 cursor.execute("""PREPARE moved_pkg_q(text,text,text) AS
404 SELECT c.name FROM binaries b, bin_associations ba, suite s, location l,
405 component c, architecture a, files f
406 WHERE b.package = $1 AND s.suite_name = $2
407 AND (a.arch_string = $3 OR a.arch_string = 'all')
408 AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id
409 AND f.location = l.id
410 AND l.component = c.id
411 AND b.file = f.id""")
414 # Ensure the file does not already exist in one of the accepted directories
415 for d in [ "Accepted", "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
416 if not Cnf.has_key("Dir::Queue::%s" % (d)): continue
417 if os.path.exists(Cnf["Dir::Queue::%s" % (d) ] + '/' + f):
418 reject("%s file already exists in the %s directory." % (f, d))
419 if not re_taint_free.match(f):
420 reject("!!WARNING!! tainted filename: '%s'." % (f))
421 # Check the file is readable
422 if os.access(f, os.R_OK) == 0:
423 # When running in -n, copy_to_holding() won't have
424 # generated the reject_message, so we need to.
425 if Options["No-Action"]:
426 if os.path.exists(f):
427 reject("Can't read `%s'. [permission denied]" % (f))
429 reject("Can't read `%s'. [file not found]" % (f))
430 files[f]["type"] = "unreadable"
432 # If it's byhand skip remaining checks
433 if files[f]["section"] == "byhand" or files[f]["section"][:4] == "raw-":
434 files[f]["byhand"] = 1
435 files[f]["type"] = "byhand"
436 # Checks for a binary package...
437 elif re_isadeb.match(f):
439 files[f]["type"] = "deb"
441 # Extract package control information
442 deb_file = utils.open_file(f)
444 control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
446 reject("%s: debExtractControl() raised %s." % (f, sys.exc_type))
448 # Can't continue, none of the checks on control would work.
452 # Check for mandatory fields
453 for field in [ "Package", "Architecture", "Version" ]:
454 if control.Find(field) == None:
455 reject("%s: No %s field in control." % (f, field))
459 # Ensure the package name matches the one given in the .changes
460 if not changes["binary"].has_key(control.Find("Package", "")):
461 reject("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
463 # Validate the package field
464 package = control.Find("Package")
465 if not re_valid_pkg_name.match(package):
466 reject("%s: invalid package name '%s'." % (f, package))
468 # Validate the version field
469 version = control.Find("Version")
470 if not re_valid_version.match(version):
471 reject("%s: invalid version number '%s'." % (f, version))
473 # Ensure the architecture of the .deb is one we know about.
474 default_suite = Cnf.get("Dinstall::DefaultSuite", "Unstable")
475 architecture = control.Find("Architecture")
476 upload_suite = changes["distribution"].keys()[0]
477 if architecture not in DBConn().get_suite_architectures(default_suite) and architecture not in DBConn().get_suite_architectures(upload_suite):
478 reject("Unknown architecture '%s'." % (architecture))
480 # Ensure the architecture of the .deb is one of the ones
481 # listed in the .changes.
482 if not changes["architecture"].has_key(architecture):
483 reject("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
485 # Sanity-check the Depends field
486 depends = control.Find("Depends")
488 reject("%s: Depends field is empty." % (f))
490 # Sanity-check the Provides field
491 provides = control.Find("Provides")
493 provide = re_spacestrip.sub('', provides)
495 reject("%s: Provides field is empty." % (f))
496 prov_list = provide.split(",")
497 for prov in prov_list:
498 if not re_valid_pkg_name.match(prov):
499 reject("%s: Invalid Provides field content %s." % (f, prov))
502 # Check the section & priority match those given in the .changes (non-fatal)
503 if control.Find("Section") and files[f]["section"] != "" and files[f]["section"] != control.Find("Section"):
504 reject("%s control file lists section as `%s', but changes file has `%s'." % (f, control.Find("Section", ""), files[f]["section"]), "Warning: ")
505 if control.Find("Priority") and files[f]["priority"] != "" and files[f]["priority"] != control.Find("Priority"):
506 reject("%s control file lists priority as `%s', but changes file has `%s'." % (f, control.Find("Priority", ""), files[f]["priority"]),"Warning: ")
508 files[f]["package"] = package
509 files[f]["architecture"] = architecture
510 files[f]["version"] = version
511 files[f]["maintainer"] = control.Find("Maintainer", "")
512 if f.endswith(".udeb"):
513 files[f]["dbtype"] = "udeb"
514 elif f.endswith(".deb"):
515 files[f]["dbtype"] = "deb"
517 reject("%s is neither a .deb or a .udeb." % (f))
518 files[f]["source"] = control.Find("Source", files[f]["package"])
519 # Get the source version
520 source = files[f]["source"]
# A Source field of the form "name (version)" carries an explicit
# source version; otherwise the binary's own version is used.
522 if source.find("(") != -1:
523 m = re_extract_src_version.match(source)
525 source_version = m.group(2)
526 if not source_version:
527 source_version = files[f]["version"]
528 files[f]["source package"] = source
529 files[f]["source version"] = source_version
531 # Ensure the filename matches the contents of the .deb
532 m = re_isadeb.match(f)
534 file_package = m.group(1)
535 if files[f]["package"] != file_package:
536 reject("%s: package part of filename (%s) does not match package name in the %s (%s)." % (f, file_package, files[f]["dbtype"], files[f]["package"]))
537 epochless_version = re_no_epoch.sub('', control.Find("Version"))
539 file_version = m.group(2)
540 if epochless_version != file_version:
541 reject("%s: version part of filename (%s) does not match package version in the %s (%s)." % (f, file_version, files[f]["dbtype"], epochless_version))
543 file_architecture = m.group(3)
544 if files[f]["architecture"] != file_architecture:
545 reject("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % (f, file_architecture, files[f]["dbtype"], files[f]["architecture"]))
547 # Check for existent source
548 source_version = files[f]["source version"]
549 source_package = files[f]["source package"]
550 if changes["architecture"].has_key("source"):
551 if source_version != changes["version"]:
552 reject("source version (%s) for %s doesn't match changes version %s." % (source_version, f, changes["version"]))
554 # Check in the SQL database
555 if not Upload.source_exists(source_package, source_version, changes["distribution"].keys()):
556 # Check in one of the other directories
557 source_epochless_version = re_no_epoch.sub('', source_version)
558 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
559 if os.path.exists(Cnf["Dir::Queue::Byhand"] + '/' + dsc_filename):
560 files[f]["byhand"] = 1
561 elif os.path.exists(Cnf["Dir::Queue::New"] + '/' + dsc_filename):
565 for myq in ["Accepted", "Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
566 if Cnf.has_key("Dir::Queue::%s" % (myq)):
567 if os.path.exists(Cnf["Dir::Queue::"+myq] + '/' + dsc_filename):
570 if not dsc_file_exists:
571 reject("no source found for %s %s (%s)." % (source_package, source_version, f))
572 # Check the version and for file overwrites
573 reject(Upload.check_binary_against_db(f),"")
575 Binary(f).scan_package()
577 # Checks for a source package...
579 m = re_issource.match(f)
582 files[f]["package"] = m.group(1)
583 files[f]["version"] = m.group(2)
584 files[f]["type"] = m.group(3)
586 # Ensure the source package name matches the Source field in the .changes
587 if changes["source"] != files[f]["package"]:
588 reject("%s: changes file doesn't say %s for Source" % (f, files[f]["package"]))
590 # Ensure the source version matches the version in the .changes file
591 if files[f]["type"] == "orig.tar.gz":
592 changes_version = changes["chopversion2"]
594 changes_version = changes["chopversion"]
595 if changes_version != files[f]["version"]:
596 reject("%s: should be %s according to changes file." % (f, changes_version))
598 # Ensure the .changes lists source in the Architecture field
599 if not changes["architecture"].has_key("source"):
600 reject("%s: changes file doesn't list `source' in Architecture field." % (f))
602 # Check the signature of a .dsc file
603 if files[f]["type"] == "dsc":
604 dsc["fingerprint"] = utils.check_signature(f, reject)
606 files[f]["architecture"] = "source"
608 # Not a binary or source package? Assume byhand...
610 files[f]["byhand"] = 1
611 files[f]["type"] = "byhand"
613 # Per-suite file checks
614 files[f]["oldfiles"] = {}
615 for suite in changes["distribution"].keys():
617 if files[f].has_key("byhand"):
620 # Handle component mappings
621 for m in Cnf.ValueList("ComponentMappings"):
622 (source, dest) = m.split()
623 if files[f]["component"] == source:
624 files[f]["original component"] = source
625 files[f]["component"] = dest
627 # Ensure the component is valid for the target suite
# NOTE(review): "Suite:%s::Components" below is missing a colon
# (elsewhere the key is "Suite::%s::Components"), so has_key() can
# never match and this per-suite component check is silently
# skipped -- confirm and fix.
628 if Cnf.has_key("Suite:%s::Components" % (suite)) and \
629 files[f]["component"] not in Cnf.ValueList("Suite::%s::Components" % (suite)):
630 reject("unknown component `%s' for suite `%s'." % (files[f]["component"], suite))
633 # Validate the component
634 component = files[f]["component"]
635 component_id = DBConn().get_component_id(component)
636 if component_id == -1:
637 reject("file '%s' has unknown component '%s'." % (f, component))
640 # See if the package is NEW
641 if not Upload.in_override_p(files[f]["package"], files[f]["component"], suite, files[f].get("dbtype",""), f):
644 # Validate the priority
645 if files[f]["priority"].find('/') != -1:
646 reject("file '%s' has invalid priority '%s' [contains '/']." % (f, files[f]["priority"]))
648 # Determine the location
649 location = Cnf["Dir::Pool"]
650 location_id = DBConn().get_location_id(location, component, archive)
651 if location_id == -1:
652 reject("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (component, archive))
653 files[f]["location id"] = location_id
655 # Check the md5sum & size against existing files (if any)
656 files[f]["pool name"] = utils.poolify (changes["source"], files[f]["component"])
657 files_id = DBConn().get_files_id(files[f]["pool name"] + f, files[f]["size"], files[f]["md5sum"], files[f]["location id"])
659 reject("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
661 reject("md5sum and/or size mismatch on existing copy of %s." % (f))
662 files[f]["files id"] = files_id
664 # Check for packages that have moved from one component to another
665 files[f]['suite'] = suite
666 cursor.execute("""EXECUTE moved_pkg_q( %(package)s, %(suite)s, %(architecture)s )""", ( files[f] ) )
667 ql = cursor.fetchone()
# NOTE(review): fetchone() returns a single row tuple, so ql[0][0]
# indexes the first *character* of the first column; presumably
# ql[0] was intended -- verify against the cursor API in use.
669 files[f]["othercomponents"] = ql[0][0]
671 # If the .changes file says it has source, it must have source.
672 if changes["architecture"].has_key("source"):
674 reject("no source found and Architecture line in changes mention source.")
676 if not has_binaries and Cnf.FindB("Dinstall::Reject::NoSourceOnly"):
677 reject("source only uploads are not supported.")
679 ###############################################################################
# check_dsc(): locate and parse the upload's .dsc, validate its mandatory
# fields, format, maintainer and build-depends, cross-check its version
# against the .changes, and pull in an out-of-upload orig.tar.gz when the
# database check says one is needed.
# NOTE(review): the `def check_dsc():` header and the try:/else: lines
# paired with the except clauses below are not visible in this view.
684 # Ensure there is source to check
685 if not changes["architecture"].has_key("source"):
690 for f in files.keys():
691 if files[f]["type"] == "dsc":
693 reject("can not process a .changes file with multiple .dsc's.")
698 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
700 reject("source uploads must contain a dsc file")
703 # Parse the .dsc file
705 dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
706 except CantOpenError:
707 # if not -n copy_to_holding() will have done this for us...
708 if Options["No-Action"]:
709 reject("%s: can't read file." % (dsc_filename))
710 except ParseChangesError, line:
711 reject("%s: parse error, can't grok: %s." % (dsc_filename, line))
712 except InvalidDscError, line:
713 reject("%s: syntax error on line %s." % (dsc_filename, line))
714 except ChangesUnicodeError:
715 reject("%s: dsc file not proper utf-8." % (dsc_filename))
717 # Build up the file list of files mentioned by the .dsc
719 dsc_files.update(utils.build_file_list(dsc, is_a_dsc=1))
720 except NoFilesFieldError:
721 reject("%s: no Files: field." % (dsc_filename))
723 except UnknownFormatError, format:
724 reject("%s: unknown format '%s'." % (dsc_filename, format))
726 except ParseChangesError, line:
727 reject("%s: parse error, can't grok: %s." % (dsc_filename, line))
730 # Enforce mandatory fields
731 for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
732 if not dsc.has_key(i):
733 reject("%s: missing mandatory field `%s'." % (dsc_filename, i))
736 # Validate the source and version fields
737 if not re_valid_pkg_name.match(dsc["source"]):
738 reject("%s: invalid source name '%s'." % (dsc_filename, dsc["source"]))
739 if not re_valid_version.match(dsc["version"]):
740 reject("%s: invalid version number '%s'." % (dsc_filename, dsc["version"]))
742 # Bumping the version number of the .dsc breaks extraction by stable's
743 # dpkg-source. So let's not do that...
744 if dsc["format"] != "1.0":
745 reject("%s: incompatible 'Format' version produced by a broken version of dpkg-dev 1.9.1{3,4}." % (dsc_filename))
747 # Validate the Maintainer field
749 utils.fix_maintainer (dsc["maintainer"])
750 except ParseMaintError, msg:
751 reject("%s: Maintainer field ('%s') failed to parse: %s" \
752 % (dsc_filename, dsc["maintainer"], msg))
754 # Validate the build-depends field(s)
755 for field_name in [ "build-depends", "build-depends-indep" ]:
756 field = dsc.get(field_name)
758 # Check for broken dpkg-dev lossage...
759 if field.startswith("ARRAY"):
760 reject("%s: invalid %s field produced by a broken version of dpkg-dev (1.10.11)" % (dsc_filename, field_name.title()))
762 # Have apt try to parse them...
764 apt_pkg.ParseSrcDepends(field)
766 reject("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
769 # Ensure the version number in the .dsc matches the version number in the .changes
770 epochless_dsc_version = re_no_epoch.sub('', dsc["version"])
771 changes_version = files[dsc_filename]["version"]
772 if epochless_dsc_version != files[dsc_filename]["version"]:
773 reject("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
775 # Ensure there is a .tar.gz in the .dsc file
777 for f in dsc_files.keys():
778 m = re_issource.match(f)
780 reject("%s: %s in Files field not recognised as source." % (dsc_filename, f))
783 if ftype == "orig.tar.gz" or ftype == "tar.gz":
786 reject("%s: no .tar.gz or .orig.tar.gz in 'Files' field." % (dsc_filename))
788 # Ensure source is newer than existing source in target suites
789 reject(Upload.check_source_against_db(dsc_filename),"")
# check_dsc_against_db() may report that the orig.tar.gz lives in the
# incoming directory rather than the upload; pull it into holding and
# synthesise a files[] entry for it from the .dsc's checksums.
791 (reject_msg, is_in_incoming) = Upload.check_dsc_against_db(dsc_filename)
792 reject(reject_msg, "")
794 if not Options["No-Action"]:
795 copy_to_holding(is_in_incoming)
796 orig_tar_gz = os.path.basename(is_in_incoming)
797 files[orig_tar_gz] = {}
798 files[orig_tar_gz]["size"] = os.stat(orig_tar_gz)[stat.ST_SIZE]
799 files[orig_tar_gz]["md5sum"] = dsc_files[orig_tar_gz]["md5sum"]
800 files[orig_tar_gz]["sha1sum"] = dsc_files[orig_tar_gz]["sha1sum"]
801 files[orig_tar_gz]["sha256sum"] = dsc_files[orig_tar_gz]["sha256sum"]
802 files[orig_tar_gz]["section"] = files[dsc_filename]["section"]
803 files[orig_tar_gz]["priority"] = files[dsc_filename]["priority"]
804 files[orig_tar_gz]["component"] = files[dsc_filename]["component"]
805 files[orig_tar_gz]["type"] = "orig.tar.gz"
810 ################################################################################
812 def get_changelog_versions(source_dir):
813 """Extract the source package and (optionally) grab the
814 version history out of debian/changelog for the BTS."""
816 # Find the .dsc (again)
818 for f in files.keys():
819 if files[f]["type"] == "dsc":
822 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
826 # Create a symlink mirror of the source files in our temporary directory
827 for f in files.keys():
828 m = re_issource.match(f)
830 src = os.path.join(source_dir, f)
831 # If a file is missing for whatever reason, give up.
832 if not os.path.exists(src):
# Skip the upload's orig.tar.gz when the real one was located
# elsewhere (pkg.orig_tar_gz set): the elsewhere copy is linked below.
835 if ftype == "orig.tar.gz" and pkg.orig_tar_gz:
837 dest = os.path.join(os.getcwd(), f)
838 os.symlink(src, dest)
840 # If the orig.tar.gz is not a part of the upload, create a symlink to the
843 dest = os.path.join(os.getcwd(), os.path.basename(pkg.orig_tar_gz))
844 os.symlink(pkg.orig_tar_gz, dest)
# Unpack without applying the diff interactively: -sn keeps no copy of
# the orig, -x extracts.  commands.getstatusoutput is Python 2 only.
847 cmd = "dpkg-source -sn -x %s" % (dsc_filename)
848 (result, output) = commands.getstatusoutput(cmd)
850 reject("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
851 reject(utils.prefix_multi_line_string(output, " [dpkg-source output:] "), "")
# Changelog harvesting is optional: only done when BTS version tracking
# is configured.
854 if not Cnf.Find("Dir::Queue::BTSVersionTrack"):
857 # Get the upstream version
858 upstr_version = re_no_epoch.sub('', dsc["version"])
859 if re_strip_revision.search(upstr_version):
860 upstr_version = re_strip_revision.sub('', upstr_version)
862 # Ensure the changelog file exists
863 changelog_filename = "%s-%s/debian/changelog" % (dsc["source"], upstr_version)
864 if not os.path.exists(changelog_filename):
865 reject("%s: debian/changelog not found in extracted source." % (dsc_filename))
868 # Parse the changelog
869 dsc["bts changelog"] = ""
870 changelog_file = utils.open_file(changelog_filename)
871 for line in changelog_file.readlines():
872 m = re_changelog_versions.match(line)
874 dsc["bts changelog"] += line
875 changelog_file.close()
877 # Check we found at least one revision in the changelog
878 if not dsc["bts changelog"]:
879 reject("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
881 ########################################
# check_source(): unpack the upload's source tree in a throw-away tmpdir
# (via get_changelog_versions) and then remove the tree again, fixing up
# permissions if the first rmtree fails.
# NOTE(review): the `def check_source():` header and the bail-out return
# after the condition below are not visible in this view.
885 # a) there's no source
886 # or b) reprocess is 2 - we will do this check next time when orig.tar.gz is in 'files'
887 # or c) the orig.tar.gz is MIA
888 if not changes["architecture"].has_key("source") or reprocess == 2 \
889 or pkg.orig_tar_gz == -1:
892 tmpdir = create_tmpdir()
894 # Move into the temporary directory
898 # Get the changelog version history
899 get_changelog_versions(cwd)
901 # Move back and cleanup the temporary tree
904 shutil.rmtree(tmpdir)
# Extracted trees can contain unreadable/unwritable directories; only
# EACCES is recoverable (by the chmod fallback below), anything else
# is fatal.
906 if errno.errorcode[e.errno] != 'EACCES':
907 utils.fubar("%s: couldn't remove tmp dir for source tree." % (dsc["source"]))
909 reject("%s: source tree could not be cleanly removed." % (dsc["source"]))
910 # We probably have u-r or u-w directories so chmod everything
912 cmd = "chmod -R u+rwx %s" % (tmpdir)
913 result = os.system(cmd)
915 utils.fubar("'%s' failed with result %s." % (cmd, result))
916 shutil.rmtree(tmpdir)
918 utils.fubar("%s: couldn't remove tmp dir for source tree." % (dsc["source"]))
920 ################################################################################
922 # FIXME: should be a debian specific check called from a hook
def check_urgency ():
    """Normalise and validate the Urgency field of a source upload.

    Binary-only uploads are left untouched.  A missing urgency gets the
    configured default; an unrecognised value triggers a warning and is
    also replaced by the default.
    """
    # Urgency only matters when the upload contains source.
    if not changes["architecture"].has_key("source"):
        return
    default_urgency = Cnf["Urgency::Default"]
    if not changes.has_key("urgency"):
        changes["urgency"] = default_urgency
    # Urgency values are compared case-insensitively.
    lowered = changes["urgency"].lower()
    changes["urgency"] = lowered
    if lowered not in Cnf.ValueList("Urgency::Valid"):
        reject("%s is not a valid urgency; it will be treated as %s by testing." % (lowered, default_urgency), "Warning: ")
        changes["urgency"] = default_urgency
933 ################################################################################
# NOTE(review): fragment of check_hashes() — the def line (orig. 935) and the
# loop body (orig. 944, presumably "reject(m)") are elided from this listing.
# Verify md5sums and sizes declared in the .changes and .dsc against the
# files actually present.
936 utils.check_hash(".changes", files, "md5", apt_pkg.md5sum)
937 utils.check_size(".changes", files)
938 utils.check_hash(".dsc", dsc_files, "md5", apt_pkg.md5sum)
939 utils.check_size(".dsc", dsc_files)
941 # This is stupid API, but it'll have to do for now until
942 # we actually have proper abstraction
# ensure_hashes yields human-readable problem strings, one per defect.
943 for m in utils.ensure_hashes(changes, dsc, files, dsc_files):
946 ################################################################################
948 # Sanity check the time stamps of files inside debs.
949 # [Files in the near future cause ugly warnings and extreme time
950 # travel can cause errors on extraction]
# NOTE(review): several structural lines (the enclosing "class Tar" header,
# try/except wrappers, and the reset() method header) are elided from this
# damaged listing; comments describe only what is visible.
952 def check_timestamps():
954 def __init__(self, future_cutoff, past_cutoff):
956 self.future_cutoff = future_cutoff
957 self.past_cutoff = past_cutoff
960 self.future_files = {}
961 self.ancient_files = {}
# apt_inst extraction callback: record every archive member whose mtime
# falls outside the [past_cutoff, future_cutoff] window.
963 def callback(self, Kind,Name,Link,Mode,UID,GID,Size,MTime,Major,Minor):
964 if MTime > self.future_cutoff:
965 self.future_files[Name] = MTime
966 if MTime < self.past_cutoff:
967 self.ancient_files[Name] = MTime
# Cutoffs come from configuration: a grace interval into the future, and a
# fixed oldest acceptable year parsed via strptime("%Y").
970 future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
971 past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
972 tar = Tar(future_cutoff, past_cutoff)
973 for filename in files.keys():
974 if files[filename]["type"] == "deb":
977 deb_file = utils.open_file(filename)
# Scan both the control and data members of each .deb.
978 apt_inst.debExtract(deb_file,tar.callback,"control.tar.gz")
981 apt_inst.debExtract(deb_file,tar.callback,"data.tar.gz")
982 except SystemError, e:
983 # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
984 if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
987 apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
# Report a count plus one example file per category in the rejection.
990 future_files = tar.future_files.keys()
992 num_future_files = len(future_files)
993 future_file = future_files[0]
994 future_date = tar.future_files[future_file]
995 reject("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
996 % (filename, num_future_files, future_file,
997 time.ctime(future_date)))
999 ancient_files = tar.ancient_files.keys()
1001 num_ancient_files = len(ancient_files)
1002 ancient_file = ancient_files[0]
1003 ancient_date = tar.ancient_files[ancient_file]
1004 reject("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1005 % (filename, num_ancient_files, ancient_file,
1006 time.ctime(ancient_date)))
# Catch-all rejection when extraction itself blows up (elided except line).
1008 reject("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1010 ################################################################################
def lookup_uid_from_fingerprint(fpr):
    """
    Return the uid, name, isdm for a given gpg fingerprint.

    @param fpr: a 40 byte GPG fingerprint
    @return: (uid, name, isdm) row from the database, or
             (None, None, None) when the fingerprint is unknown
    """
    cursor = DBConn().cursor()
    # Fixed: the fingerprint was previously %-interpolated straight into the
    # SQL string ("... = '%s'" % fpr), which is an injection/quoting hazard.
    # Pass it as a bound parameter and let the driver do the quoting.
    cursor.execute("SELECT u.uid, u.name, k.debian_maintainer"
                   " FROM fingerprint f JOIN keyrings k ON (f.keyring = k.id), uid u"
                   " WHERE f.uid = u.id AND f.fingerprint = %(fpr)s",
                   {'fpr': fpr})
    qs = cursor.fetchone()
    if qs:
        return qs
    return (None, None, None)
# NOTE(review): many structural lines (if/else/try/while headers, returns)
# are elided from this damaged listing; comments describe only what is visible.
1029 def check_signed_by_key():
1030 """Ensure the .changes is signed by an authorized uploader."""
# Map the signing key to a database identity and its DM (Debian
# Maintainer) flag.
1032 (uid, uid_name, is_dm) = lookup_uid_from_fingerprint(changes["fingerprint"])
1033 if uid_name == None:
1036 # match claimed name with actual name:
# Key not associated with a uid: fall back to the raw fingerprint.
1038 uid, uid_email = changes["fingerprint"], uid
1039 may_nmu, may_sponsor = 1, 1
1040 # XXX by default new dds don't have a fingerprint/uid in the db atm,
1041 # and can't get one in there if we don't allow nmu/sponsorship
# DM keys may neither NMU nor sponsor.
1044 may_nmu, may_sponsor = 0, 0
1046 uid_email = "%s@debian.org" % (uid)
1047 may_nmu, may_sponsor = 1, 1
# An upload is "sponsored" when the signer matches neither Maintainer nor
# Changed-By identities of the .changes.
1049 if uid_email in [changes["maintaineremail"], changes["changedbyemail"]]:
1051 elif uid_name in [changes["maintainername"], changes["changedbyname"]]:
1053 if uid_name == "": sponsored = 1
# Record the sponsor address when the signer's alias addresses do not
# appear in the .changes at all.
1056 if ("source" in changes["architecture"] and
1057 uid_email and utils.is_email_alias(uid_email)):
1058 sponsor_addresses = utils.gpg_get_key_addresses(changes["fingerprint"])
1059 if (changes["maintaineremail"] not in sponsor_addresses and
1060 changes["changedbyemail"] not in sponsor_addresses):
1061 changes["sponsoremail"] = uid_email
1063 if sponsored and not may_sponsor:
1064 reject("%s is not authorised to sponsor uploads" % (uid))
# DM path: the signer may neither sponsor nor NMU, so verify the DM upload
# rules against the database.
1066 if not sponsored and not may_nmu:
1068 cursor.execute( "SELECT s.id, s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) WHERE s.source = %(source)s AND s.dm_upload_allowed = 'yes'", changes )
1070 highest_sid, highest_version = None, None
1072 should_reject = True
1074 si = cursor.fetchone()
# Track the most recent version that carries DM-Upload-Allowed: yes.
1078 if highest_version == None or apt_pkg.VersionCompare(si[1], highest_version) == 1:
1080 highest_version = si[1]
1082 if highest_sid == None:
1083 reject("Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version" % changes["source"])
# NOTE(review): this query %-interpolates highest_sid into the SQL string
# instead of binding it as a parameter; an integer id, but still
# inconsistent with the parameterized queries above.
1086 cursor.execute("SELECT m.name FROM maintainer m WHERE m.id IN (SELECT su.maintainer FROM src_uploaders su JOIN source s ON (s.id = su.source) WHERE su.source = %s)" % (highest_sid))
1089 m = cursor.fetchone()
# Accept the DM when they appear in Maintainer/Uploaders of that version.
1093 (rfc822, rfc2047, name, email) = utils.fix_maintainer(m[0])
1094 if email == uid_email or name == uid_name:
1098 if should_reject == True:
1099 reject("%s is not in Maintainer or Uploaders of source package %s" % (uid, changes["source"]))
# A DM may not hijack binary package names owned by a different source.
1101 for b in changes["binary"].keys():
1102 for suite in changes["distribution"].keys():
1103 suite_id = DBConn().get_suite_id(suite)
1105 cursor.execute("SELECT DISTINCT s.source FROM source s JOIN binaries b ON (s.id = b.source) JOIN bin_associations ba On (b.id = ba.bin) WHERE b.package = %(package)s AND ba.suite = %(suite)s" , {'package':b, 'suite':suite_id} )
1107 s = cursor.fetchone()
1111 if s[0] != changes["source"]:
1112 reject("%s may not hijack %s from source package %s in suite %s" % (uid, b, s, suite))
# DMs may not upload BYHAND or NEW files.
1114 for f in files.keys():
1115 if files[f].has_key("byhand"):
1116 reject("%s may not upload BYHAND file %s" % (uid, f))
1117 if files[f].has_key("new"):
1118 reject("%s may not upload NEW file %s" % (uid, f))
1121 ################################################################################
1122 ################################################################################
1124 # If any file of an upload has a recent mtime then chances are good
1125 # the file is still being uploaded.
# NOTE(review): fragment — the try/finally wrapper, the loop header over
# file_list, and the return lines are elided from this damaged listing.
1127 def upload_too_new():
1129 # Move back to the original directory to get accurate time stamps
1131 os.chdir(pkg.directory)
# Consider every file of the upload: binaries, source parts, and the
# .changes itself.
1132 file_list = pkg.files.keys()
1133 file_list.extend(pkg.dsc_files.keys())
1134 file_list.append(pkg.changes_file)
# A file modified less than Dinstall::SkipTime seconds ago is presumed to
# still be in transit.
1137 last_modified = time.time()-os.path.getmtime(f)
1138 if last_modified < int(Cnf["Dinstall::SkipTime"]):
1146 ################################################################################
# NOTE(review): fragment of the action() dispatcher — the def line, the
# queue_info dict assignment, loop headers and several branches are elided
# from this damaged listing.
1149 # changes["distribution"] may not exist in corner cases
1150 # (e.g. unreadable changes files)
1151 if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
1152 changes["distribution"] = {}
1154 (summary, short_summary) = Upload.build_summaries()
1156 # q-unapproved hax0ring
# Queue dispatch table: each entry pairs a predicate ("is") with the
# handler that diverts the upload into that queue ("process").
1158 "New": { "is": is_new, "process": acknowledge_new },
1159 "Autobyhand" : { "is" : is_autobyhand, "process": do_autobyhand },
1160 "Byhand" : { "is": is_byhand, "process": do_byhand },
1161 "OldStableUpdate" : { "is": is_oldstableupdate,
1162 "process": do_oldstableupdate },
1163 "StableUpdate" : { "is": is_stableupdate, "process": do_stableupdate },
1164 "Unembargo" : { "is": is_unembargo, "process": queue_unembargo },
1165 "Embargo" : { "is": is_embargo, "process": queue_embargo },
# Security queues only participate when enabled in configuration.
1167 queues = [ "New", "Autobyhand", "Byhand" ]
1168 if Cnf.FindB("Dinstall::SecurityQueueHandling"):
1169 queues += [ "Unembargo", "Embargo" ]
1171 queues += [ "OldStableUpdate", "StableUpdate" ]
1173 (prompt, answer) = ("", "XXX")
1174 if Options["No-Action"] or Options["Automatic"]:
# A "Rejected" marker in the accumulated reject_message decides between
# skip (file still uploading) and outright rejection.
1179 if reject_message.find("Rejected") != -1:
1180 if upload_too_new():
1181 print "SKIP (too new)\n" + reject_message,
1182 prompt = "[S]kip, Quit ?"
1184 print "REJECT\n" + reject_message,
1185 prompt = "[R]eject, Skip, Quit ?"
1186 if Options["Automatic"]:
# First queue whose predicate fires wins.
1191 if queue_info[q]["is"]():
1195 print "%s for %s\n%s%s" % (
1196 qu.upper(), ", ".join(changes["distribution"].keys()),
1197 reject_message, summary),
# Derive the prompt hot-key from the queue name, avoiding clashes with
# the reserved R/Q/S/A keys.
1198 queuekey = qu[0].upper()
1199 if queuekey in "RQSA":
1201 prompt = "[D]ivert, Skip, Quit ?"
1203 prompt = "[%s]%s, Skip, Quit ?" % (queuekey, qu[1:].lower())
1204 if Options["Automatic"]:
1207 print "ACCEPT\n" + reject_message + summary,
1208 prompt = "[A]ccept, Skip, Quit ?"
1209 if Options["Automatic"]:
# Interactive loop: keep asking until the answer is one of the prompt's
# letters; an empty answer takes the [bracketed] default.
1212 while prompt.find(answer) == -1:
1213 answer = utils.our_raw_input(prompt)
1214 m = re_default_answer.match(prompt)
1217 answer = answer[:1].upper()
1220 os.chdir (pkg.directory)
1221 Upload.do_reject(0, reject_message)
1223 accept(summary, short_summary)
1224 remove_from_unchecked()
1225 elif answer == queuekey:
1226 queue_info[qu]["process"](summary, short_summary)
1227 remove_from_unchecked()
def remove_from_unchecked():
    """Delete the processed upload (all files plus the .changes) from the
    unchecked queue directory."""
    os.chdir(pkg.directory)
    # Remove each upload file first, then the .changes itself.
    for f in files.keys():
        os.unlink(f)
    os.unlink(pkg.changes_file)
1237 ################################################################################
# Accept the upload: delegate installation to the shared queue.Upload
# implementation, then check for override disparities.
1239 def accept (summary, short_summary):
1240 Upload.accept(summary, short_summary)
1241 Upload.check_override()
1243 ################################################################################
1245 def move_to_dir (dest, perms=0660, changesperms=0664):
1246 utils.move (pkg.changes_file, dest, perms=changesperms)
1247 file_keys = files.keys()
1249 utils.move (f, dest, perms=perms)
1251 ################################################################################
# NOTE(review): fragment — return lines and an if/indent level (orig.
# 1257-1258, 1262-1263, 1267, 1269) are elided from this damaged listing.
1253 def is_unembargo ():
# Already recorded as disembargoed?  (predicate presumably short-circuits
# here -- the return line is elided).
1254 cursor = DBConn().cursor()
1255 cursor.execute( "SELECT package FROM disembargo WHERE package = %(source)s AND version = %(version)s", changes )
1256 if cursor.fetchone():
# Resolve the configured disembargo queue to a canonical path for
# comparison with the upload's directory.
1259 oldcwd = os.getcwd()
1260 os.chdir(Cnf["Dir::Queue::Disembargo"])
1261 disdir = os.getcwd()
1264 if pkg.directory == disdir:
1265 if changes["architecture"].has_key("source"):
1266 if Options["No-Action"]: return 1
# NOTE(review): the INSERT quotes its placeholders ('%(package)s') -- with
# a parameter-binding driver the values would be double-quoted; verify
# against the DB layer in use.
1268 cursor.execute( "INSERT INTO disembargo (package, version) VALUES ('%(package)s', '%(version)s')",
1270 cursor.execute( "COMMIT" )
1275 def queue_unembargo (summary, short_summary):
1276 print "Moving to UNEMBARGOED holding area."
1277 Logger.log(["Moving to unembargoed", pkg.changes_file])
1279 Upload.dump_vars(Cnf["Dir::Queue::Unembargoed"])
1280 move_to_dir(Cnf["Dir::Queue::Unembargoed"])
1281 Upload.queue_build("unembargoed", Cnf["Dir::Queue::Unembargoed"])
1283 # Check for override disparities
1284 Upload.Subst["__SUMMARY__"] = summary
1285 Upload.check_override()
1287 # Send accept mail, announce to lists, close bugs and check for
1288 # override disparities
1289 if not Cnf["Dinstall::Options::No-Mail"]:
1290 Upload.Subst["__SUITE__"] = ""
1291 mail_message = utils.TemplateSubst(Upload.Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
1292 utils.send_mail(mail_message)
1293 Upload.announce(short_summary, 1)
1295 ################################################################################
1298 # if embargoed queues are enabled always embargo
1301 def queue_embargo (summary, short_summary):
1302 print "Moving to EMBARGOED holding area."
1303 Logger.log(["Moving to embargoed", pkg.changes_file])
1305 Upload.dump_vars(Cnf["Dir::Queue::Embargoed"])
1306 move_to_dir(Cnf["Dir::Queue::Embargoed"])
1307 Upload.queue_build("embargoed", Cnf["Dir::Queue::Embargoed"])
1309 # Check for override disparities
1310 Upload.Subst["__SUMMARY__"] = summary
1311 Upload.check_override()
1313 # Send accept mail, announce to lists, close bugs and check for
1314 # override disparities
1315 if not Cnf["Dinstall::Options::No-Mail"]:
1316 Upload.Subst["__SUITE__"] = ""
1317 mail_message = utils.TemplateSubst(Upload.Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
1318 utils.send_mail(mail_message)
1319 Upload.announce(short_summary, 1)
1321 ################################################################################
def is_stableupdate ():
    """Return 1 if the upload should be held in proposed-updates, 0 otherwise.

    A binary-only upload targeted at proposed-updates is let through when
    its source is already present there; everything else for that suite is
    held.
    """
    if not changes["distribution"].has_key("proposed-updates"):
        return 0

    if not changes["architecture"].has_key("source"):
        pusuite = DBConn().get_suite_id("proposed-updates")
        cursor = DBConn().cursor()
        # Fixed: the version placeholder was quoted ('%(version)s') -- the
        # driver quotes bound parameters itself, so the comparison could
        # never match -- and the suite placeholder used the invalid
        # pyformat code %(suite)d instead of %(suite)s.
        cursor.execute("""SELECT 1 FROM source s
                           JOIN src_associations sa ON (s.id = sa.source)
                           WHERE s.source = %(source)s
                             AND s.version = %(version)s
                             AND sa.suite = %(suite)s""",
                       {'source' : changes['source'],
                        'version' : changes['version'],
                        'suite' : pusuite})
        if cursor.fetchone():
            # source is already in proposed-updates so no need to hold
            return 0

    return 1
1345 def do_stableupdate (summary, short_summary):
1346 print "Moving to PROPOSED-UPDATES holding area."
1347 Logger.log(["Moving to proposed-updates", pkg.changes_file])
1349 Upload.dump_vars(Cnf["Dir::Queue::ProposedUpdates"])
1350 move_to_dir(Cnf["Dir::Queue::ProposedUpdates"], perms=0664)
1352 # Check for override disparities
1353 Upload.Subst["__SUMMARY__"] = summary
1354 Upload.check_override()
1356 ################################################################################
def is_oldstableupdate ():
    """Return 1 if the upload should be held in oldstable-proposed-updates,
    0 otherwise.

    Mirrors is_stableupdate(): a binary-only upload is let through when its
    source is already in the suite; everything else is held.
    """
    if not changes["distribution"].has_key("oldstable-proposed-updates"):
        return 0

    if not changes["architecture"].has_key("source"):
        pusuite = DBConn().get_suite_id("oldstable-proposed-updates")
        cursor = DBConn().cursor()
        # Fixed: the query string opened with four quote characters
        # (""""SELECT ...), leaving a stray '"' at the start of the SQL,
        # and the suite used a positional %d placeholder although a dict
        # of named parameters is passed.  Now matches is_stableupdate().
        cursor.execute("""SELECT 1 FROM source s
                           JOIN src_associations sa ON (s.id = sa.source)
                           WHERE s.source = %(source)s
                             AND s.version = %(version)s
                             AND sa.suite = %(suite)s""",
                       {'source' : changes['source'],
                        'version' : changes['version'],
                        'suite' : pusuite})
        if cursor.fetchone():
            # source is already in oldstable-proposed-updates; no need to hold
            return 0

    return 1
1378 def do_oldstableupdate (summary, short_summary):
1379 print "Moving to OLDSTABLE-PROPOSED-UPDATES holding area."
1380 Logger.log(["Moving to oldstable-proposed-updates", pkg.changes_file])
1382 Upload.dump_vars(Cnf["Dir::Queue::OldProposedUpdates"])
1383 move_to_dir(Cnf["Dir::Queue::OldProposedUpdates"], perms=0664)
1385 # Check for override disparities
1386 Upload.Subst["__SUMMARY__"] = summary
1387 Upload.check_override()
1389 ################################################################################
# NOTE(review): fragment — the any_auto/all_auto initialisation, the
# "continue"/flag-setting lines and indentation structure are elided from
# this damaged listing.
1391 def is_autobyhand ():
# Examine every BYHAND file and decide whether ALL of them can be
# processed automatically via AutomaticByHandPackages configuration.
1394 for f in files.keys():
1395 if files[f].has_key("byhand"):
1398 # filename is of form "PKG_VER_ARCH.EXT" where PKG, VER and ARCH
1399 # don't contain underscores, and ARCH doesn't contain dots.
1400 # further VER matches the .changes Version:, and ARCH should be in
1401 # the .changes Architecture: list.
1402 if f.count("_") < 2:
1406 (pckg, ver, archext) = f.split("_", 2)
1407 if archext.count(".") < 1 or changes["version"] != ver:
# The package must be registered for automatic byhand processing and its
# configured source must match this upload's source.
1411 ABH = Cnf.SubTree("AutomaticByHandPackages")
1412 if not ABH.has_key(pckg) or \
1413 ABH["%s::Source" % (pckg)] != changes["source"]:
1414 print "not match %s %s" % (pckg, changes["source"])
1418 (arch, ext) = archext.split(".", 1)
1419 if arch not in changes["architecture"]:
# Stash the parsed arch and the configured handler script for
# do_autobyhand() to use.
1423 files[f]["byhand-arch"] = arch
1424 files[f]["byhand-script"] = ABH["%s::Script" % (pckg)]
# True only when at least one byhand file exists AND every one is automatable.
1426 return any_auto and all_auto
# NOTE(review): fragment — "continue" lines, the byhandfile assignment and
# the result check / broken flag handling are elided from this damaged
# listing.
1428 def do_autobyhand (summary, short_summary):
1429 print "Attempting AUTOBYHAND."
1431 for f in files.keys():
# Skip non-byhand files and byhand files without a configured handler.
1433 if not files[f].has_key("byhand"):
1435 if not files[f].has_key("byhand-script"):
1439 os.system("ls -l %s" % byhandfile)
# NOTE(review): the handler invocation builds a shell command by string
# interpolation of file names -- shell metacharacters in a filename would
# be interpreted; earlier checks presumably constrain names, but verify.
1440 result = os.system("%s %s %s %s %s" % (
1441 files[f]["byhand-script"], byhandfile,
1442 changes["version"], files[f]["byhand-arch"],
1443 os.path.abspath(pkg.changes_file)))
# On success the byhand file is consumed by the script, so drop it here.
1445 os.unlink(byhandfile)
1448 print "Error processing %s, left as byhand." % (f)
# Fall back to the manual BYHAND queue when any file failed; otherwise
# the upload can be accepted outright.
1452 do_byhand(summary, short_summary)
1454 accept(summary, short_summary)
1456 ################################################################################
# NOTE(review): fragment of is_byhand() — its def line and return lines are
# elided; visibly it scans the upload for any file flagged "byhand".
1459 for f in files.keys():
1460 if files[f].has_key("byhand"):
1464 def do_byhand (summary, short_summary):
1465 print "Moving to BYHAND holding area."
1466 Logger.log(["Moving to byhand", pkg.changes_file])
1468 Upload.dump_vars(Cnf["Dir::Queue::Byhand"])
1469 move_to_dir(Cnf["Dir::Queue::Byhand"])
1471 # Check for override disparities
1472 Upload.Subst["__SUMMARY__"] = summary
1473 Upload.check_override()
1475 ################################################################################
# NOTE(review): fragment of is_new() — its def line and return lines are
# elided; visibly it scans the upload for any file flagged "new".
1478 for f in files.keys():
1479 if files[f].has_key("new"):
1483 def acknowledge_new (summary, short_summary):
1484 Subst = Upload.Subst
1486 print "Moving to NEW holding area."
1487 Logger.log(["Moving to new", pkg.changes_file])
1489 Upload.dump_vars(Cnf["Dir::Queue::New"])
1490 move_to_dir(Cnf["Dir::Queue::New"])
1492 if not Options["No-Mail"]:
1493 print "Sending new ack."
1494 Subst["__SUMMARY__"] = summary
1495 new_ack_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.new")
1496 utils.send_mail(new_ack_message)
1498 ################################################################################
1500 # reprocess is necessary for the case of foo_1.2-1 and foo_1.2-2 in
1501 # Incoming. -1 will reference the .orig.tar.gz, but -2 will not.
1502 # Upload.check_dsc_against_db() can find the .orig.tar.gz but it will
1503 # not have processed it during it's checks of -2. If -1 has been
1504 # deleted or otherwise not checked by 'dak process-unchecked', the
1505 # .orig.tar.gz will not have been checked at all. To get round this,
1506 # we force the .orig.tar.gz into the .changes structure and reprocess
1507 # the .changes file.
# NOTE(review): fragment — the global-reset calls, try/except wrappers, the
# valid_changes_p/valid_dsc_p branches and the action() call are elided from
# this damaged listing.
1509 def process_it (changes_file):
1510 global reprocess, reject_message
1512 # Reset some globals
1515 # Some defaults in case we can't fully process the .changes file
1516 changes["maintainer2047"] = Cnf["Dinstall::MyEmailAddress"]
1517 changes["changedby2047"] = Cnf["Dinstall::MyEmailAddress"]
1520 # Absolutize the filename to avoid the requirement of being in the
1521 # same directory as the .changes file.
1522 pkg.changes_file = os.path.abspath(changes_file)
1524 # Remember where we are so we can come back after cd-ing into the
1525 # holding directory.
1526 pkg.directory = os.getcwd()
1529 # If this is the Real Thing(tm), copy things into a private
1530 # holding directory first to avoid replacable file races.
1531 if not Options["No-Action"]:
1532 os.chdir(Cnf["Dir::Queue::Holding"])
1533 copy_to_holding(pkg.changes_file)
1534 # Relativize the filename so we use the copy in holding
1535 # rather than the original...
1536 pkg.changes_file = os.path.basename(pkg.changes_file)
# Verify the GPG signature before trusting anything in the file; reject()
# accumulates failure messages.
1537 changes["fingerprint"] = utils.check_signature(pkg.changes_file, reject)
1538 if changes["fingerprint"]:
1539 valid_changes_p = check_changes()
# The individual checks run in sequence; each appends to reject_message.
1544 check_distributions()
1546 valid_dsc_p = check_dsc()
1552 check_signed_by_key()
1553 Upload.update_subst(reject_message)
# Unexpected exceptions are reported but do not kill the whole run.
1559 traceback.print_exc(file=sys.stderr)
1562 # Restore previous WD
1563 os.chdir(pkg.directory)
1565 ###############################################################################
# NOTE(review): fragment of main() — the def line, try/except wrappers and
# several branches (else, sets computation, Logger.close/lock release) are
# elided from this damaged listing.
1568 global Cnf, Options, Logger
1570 changes_files = init()
1572 # -n/--dry-run invalidates some other options which would involve things happening
1573 if Options["No-Action"]:
1574 Options["Automatic"] = ""
1576 # Ensure all the arguments we were given are .changes files
# NOTE(review): list.remove() inside "for f in changes_files" mutates the
# list being iterated, so consecutive non-.changes arguments can be
# skipped -- iterate over a copy (changes_files[:]) instead.
1577 for f in changes_files:
1578 if not f.endswith(".changes"):
1579 utils.warn("Ignoring '%s' because it's not a .changes file." % (f))
1580 changes_files.remove(f)
1582 if changes_files == []:
1583 utils.fubar("Need at least one .changes file as an argument.")
1585 # Check that we aren't going to clash with the daily cron job
1587 if not Options["No-Action"] and os.path.exists("%s/daily.lock" % (Cnf["Dir::Lock"])) and not Options["No-Lock"]:
1588 utils.fubar("Archive maintenance in progress. Try again later.")
1590 # Obtain lock if not in no-action mode and initialize the log
1592 if not Options["No-Action"]:
1593 lock_fd = os.open(Cnf["Dinstall::LockFile"], os.O_RDWR | os.O_CREAT)
# Non-blocking exclusive lock: failure means another instance is running.
1595 fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
1597 if errno.errorcode[e.errno] == 'EACCES' or errno.errorcode[e.errno] == 'EAGAIN':
1598 utils.fubar("Couldn't obtain lock; assuming another 'dak process-unchecked' is already running.")
1601 Logger = Upload.Logger = logging.Logger(Cnf, "process-unchecked")
1603 # debian-{devel-,}-changes@lists.debian.org toggles writes access based on this header
1604 bcc = "X-DAK: dak process-unchecked\nX-Katie: $Revision: 1.65 $"
1605 if Cnf.has_key("Dinstall::Bcc"):
1606 Upload.Subst["__BCC__"] = bcc + "\nBcc: %s" % (Cnf["Dinstall::Bcc"])
1608 Upload.Subst["__BCC__"] = bcc
1611 # Sort the .changes files so that we process sourceful ones first
1612 changes_files.sort(utils.changes_compare)
1614 # Process the changes files
1615 for changes_file in changes_files:
1616 print "\n" + changes_file
1618 process_it (changes_file)
1620 if not Options["No-Action"]:
# Summarise what was accepted across the whole run.
1623 accept_count = Upload.accept_count
1624 accept_bytes = Upload.accept_bytes
1627 if accept_count > 1:
1629 print "Accepted %d package %s, %s." % (accept_count, sets, utils.size_type(int(accept_bytes)))
1630 Logger.log(["total",accept_count,accept_bytes])
1632 if not Options["No-Action"]:
1635 ################################################################################
1637 if __name__ == '__main__':