3 """ Checks Debian packages from Incoming """
4 # Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 2 of the License, or
9 # (at your option) any later version.
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 # Originally based on dinstall by Guy Maor <maor@debian.org>
22 ################################################################################
24 # Computer games don't affect kids. I mean if Pacman affected our generation as
25 # kids, we'd all run around in a darkened room munching pills and listening to
29 ################################################################################
44 from debian_bundle import deb822
45 from daklib.dbconn import DBConn
46 from daklib.binary import Binary
47 from daklib import logging
48 from daklib import queue
49 from daklib import utils
50 from daklib.dak_exceptions import *
51 from daklib.regexes import re_valid_version, re_valid_pkg_name, re_changelog_versions, \
52 re_strip_revision, re_strip_srcver, re_spacestrip, \
53 re_isanum, re_no_epoch, re_no_revision, re_taint_free, \
54 re_isadeb, re_extract_src_version, re_issource, re_default_answer
58 ################################################################################
61 ################################################################################
72 # Aliases to the real vars in the Upload class; hysterical raisins.
80 ###############################################################################
    # Module-level aliases shared by all the check_* functions below;
    # they point into the Upload/queue objects ("hysterical raisins").
    global Cnf, Options, Upload, changes, dsc, dsc_files, files, pkg

    # Load the dak configuration (apt-style ISC configuration file).
    Cnf = apt_pkg.newConfiguration()
    apt_pkg.ReadConfigFileISC(Cnf,utils.which_conf_file())

    # Command-line options understood by this program; each maps onto a
    # Dinstall::Options::* configuration key.
    Arguments = [('a',"automatic","Dinstall::Options::Automatic"),
                 ('h',"help","Dinstall::Options::Help"),
                 ('n',"no-action","Dinstall::Options::No-Action"),
                 ('p',"no-lock", "Dinstall::Options::No-Lock"),
                 ('s',"no-mail", "Dinstall::Options::No-Mail"),
                 ('d',"directory", "Dinstall::Options::Directory", "HasArg")]

    # Pre-seed every option key with "" so later lookups cannot fail.
    for i in ["automatic", "help", "no-action", "no-lock", "no-mail",
              "override-distribution", "version", "directory"]:
        Cnf["Dinstall::Options::%s" % (i)] = ""

    changes_files = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
    Options = Cnf.SubTree("Dinstall::Options")

    # If we have a directory flag, use it to find our files
    if Cnf["Dinstall::Options::Directory"] != "":
        # Note that we clobber the list of files we were given in this case
        # so warn if the user has done both
        if len(changes_files) > 0:
            utils.warn("Directory provided so ignoring files given on command line")

        changes_files = utils.get_changes_files(Cnf["Dinstall::Options::Directory"])

    # The Upload object drives the rest of processing; alias its
    # per-package state dictionaries for convenience.
    Upload = queue.Upload(Cnf)

    changes = Upload.pkg.changes
    dsc_files = Upload.pkg.dsc_files
    files = Upload.pkg.files
126 ################################################################################
def usage (exit_code=0):
    """Print command-line usage information (exit_code is used by the caller when exiting)."""
    print """Usage: dinstall [OPTION]... [CHANGES]...
  -a, --automatic           automatic run
  -h, --help                show this help and exit.
  -n, --no-action           don't do anything
  -p, --no-lock             don't check lockfile !! for cron.daily only !!
  -s, --no-mail             don't send any mail
  -V, --version             display the version number and exit"""
138 ################################################################################
def reject (str, prefix="Rejected: "):
    """Accumulate one rejection/warning line in the global reject_message.

    Pass prefix="" for purely informational notes, or "Warning: " for
    non-fatal problems; the default marks a hard rejection.
    """
    global reject_message

    line = prefix + str + "\n"
    reject_message = reject_message + line
145 ################################################################################
def copy_to_holding(filename):
    """Copy 'filename' into the holding queue directory, recording the
    copy in the in_holding map so it can be cleaned up later."""

    base_filename = os.path.basename(filename)

    dest = Cnf["Dir::Queue::Holding"] + '/' + base_filename
        # O_CREAT|O_EXCL guarantees we never silently clobber a file
        # that is already sitting in the holding area.
        fd = os.open(dest, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0640)
        # Shouldn't happen, but will if, for example, someone lists a
        # file twice in the .changes.
        if errno.errorcode[e.errno] == 'EEXIST':
            reject("%s: already exists in holding area; can not overwrite." % (base_filename))

        shutil.copy(filename, dest)
        # In either case (ENOENT or EACCES) we want to remove the
        # O_CREAT | O_EXCLed ghost file, so add the file to the list
        # of 'in holding' even if it's not the real file.
        if errno.errorcode[e.errno] == 'ENOENT':
            reject("%s: can not copy to holding area: file not found." % (base_filename))
        elif errno.errorcode[e.errno] == 'EACCES':
            reject("%s: can not copy to holding area: read permission denied." % (base_filename))

    # Record the file (by basename) as present in holding.
    in_holding[base_filename] = ""
182 ################################################################################
    # Remove everything we previously copied into the holding area.
    os.chdir(Cnf["Dir::Queue::Holding"])
    for f in in_holding.keys():
        if os.path.exists(f):
            # in_holding keys are basenames; a '/' would let a crafted
            # name escape the holding directory, so treat it as fatal.
            if f.find('/') != -1:
                utils.fubar("WTF? clean_holding() got a file ('%s') with / in it!" % (f))
198 ################################################################################
    # Validate the .changes file itself: parse it, check mandatory
    # fields, normalise multi-value fields and reject duplicates.
    filename = pkg.changes_file

    # Parse the .changes field into a dictionary
        changes.update(utils.parse_changes(filename))
    except CantOpenError:
        reject("%s: can't read file." % (filename))
    except ParseChangesError, line:
        reject("%s: parse error, can't grok: %s." % (filename, line))
    except ChangesUnicodeError:
        reject("%s: changes file not proper utf-8" % (filename))

    # Parse the Files field from the .changes into another dictionary
        files.update(utils.build_file_list(changes))
    except ParseChangesError, line:
        reject("%s: parse error, can't grok: %s." % (filename, line))
    except UnknownFormatError, format:
        reject("%s: unknown format '%s'." % (filename, format))

    # Check for mandatory fields
    for i in ("source", "binary", "architecture", "version", "distribution",
              "maintainer", "files", "changes", "description"):
        if not changes.has_key(i):
            reject("%s: Missing mandatory field `%s'." % (filename, i))
            return 0 # Avoid <undef> errors during later tests

    # Strip a source version in brackets from the source field
    if re_strip_srcver.search(changes["source"]):
        changes["source"] = re_strip_srcver.sub('', changes["source"])

    # Ensure the source field is a valid package name.
    if not re_valid_pkg_name.match(changes["source"]):
        reject("%s: invalid source name '%s'." % (filename, changes["source"]))

    # Split multi-value fields into a lower-level dictionary
    for i in ("architecture", "distribution", "binary", "closes"):
        o = changes.get(i, "")

    # Fix the Maintainer: field to be RFC822/2047 compatible
        (changes["maintainer822"], changes["maintainer2047"],
         changes["maintainername"], changes["maintaineremail"]) = \
         utils.fix_maintainer (changes["maintainer"])
    except ParseMaintError, msg:
        reject("%s: Maintainer field ('%s') failed to parse: %s" \
               % (filename, changes["maintainer"], msg))

    # ...likewise for the Changed-By: field if it exists.
        (changes["changedby822"], changes["changedby2047"],
         changes["changedbyname"], changes["changedbyemail"]) = \
    except ParseMaintError, msg:
        # Fall back to empty values, then record the parse failure.
        (changes["changedby822"], changes["changedby2047"],
         changes["changedbyname"], changes["changedbyemail"]) = \
        reject("%s: Changed-By field ('%s') failed to parse: %s" \
               % (filename, changes["changed-by"], msg))

    # Ensure all the values in Closes: are numbers
    if changes.has_key("closes"):
        for i in changes["closes"].keys():
            if re_isanum.match (i) == None:
                reject("%s: `%s' from Closes field isn't a number." % (filename, i))

    # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
    changes["chopversion"] = re_no_epoch.sub('', changes["version"])
    changes["chopversion2"] = re_no_revision.sub('', changes["chopversion"])

    # Check there isn't already a changes file of the same name in one
    # of the queue directories.
    base_filename = os.path.basename(filename)
    for d in [ "Accepted", "Byhand", "Done", "New", "ProposedUpdates", "OldProposedUpdates" ]:
        if os.path.exists(Cnf["Dir::Queue::%s" % (d) ]+'/'+base_filename):
            reject("%s: a file with this name already exists in the %s directory." % (base_filename, d))

    # Check the .changes is non-empty
        reject("%s: nothing to do (Files field is empty)." % (base_filename))
295 ################################################################################
def check_distributions():
    """Check and map the Distribution field of a .changes file.

    Applies the configured SuiteMappings (map, silent-map, map-unreleased,
    ignore, reject, propup-version), then verifies that at least one
    valid, configured target suite remains.
    """

    # Handle suite mappings
    for m in Cnf.ValueList("SuiteMappings"):
        if mtype == "map" or mtype == "silent-map":
            # Redirect uploads from 'source' suite to 'dest' suite.
            (source, dest) = args[1:3]
            if changes["distribution"].has_key(source):
                del changes["distribution"][source]
                changes["distribution"][dest] = 1
                if mtype != "silent-map":
                    reject("Mapping %s to %s." % (source, dest),"")
            if changes.has_key("distribution-version"):
                if changes["distribution-version"].has_key(source):
                    changes["distribution-version"][source]=dest
        elif mtype == "map-unreleased":
            # Only map if the upload contains an architecture the source
            # suite does not carry (i.e. it is "unreleased" there).
            (source, dest) = args[1:3]
            if changes["distribution"].has_key(source):
                for arch in changes["architecture"].keys():
                    if arch not in DBConn().get_suite_architectures(source):
                        reject("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch),"")
                        del changes["distribution"][source]
                        changes["distribution"][dest] = 1
        elif mtype == "ignore":
            # Silently drop the suite from the target list (warn only).
            if changes["distribution"].has_key(suite):
                del changes["distribution"][suite]
                reject("Ignoring %s as a target suite." % (suite), "Warning: ")
        elif mtype == "reject":
            if changes["distribution"].has_key(suite):
                reject("Uploads to %s are not accepted." % (suite))
        elif mtype == "propup-version":
            # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
            # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
            if changes["distribution"].has_key(args[1]):
                changes.setdefault("distribution-version", {})
                for suite in args[2:]: changes["distribution-version"][suite]=suite

    # Ensure there is (still) a target distribution
    if changes["distribution"].keys() == []:
        reject("no valid distribution.")

    # Ensure target distributions exist
    for suite in changes["distribution"].keys():
        if not Cnf.has_key("Suite::%s" % (suite)):
            reject("Unknown distribution `%s'." % (suite))
349 ################################################################################
    # Per-file checks: set up, copy the upload into holding, and make
    # sure nothing with the same name is already queued elsewhere.
    archive = utils.where_am_i()
    file_keys = files.keys()

    # if reprocess is 2 we've already done this and we're checking
    # things again for the new .orig.tar.gz.
    # [Yes, I'm fully aware of how disgusting this is]
    if not Options["No-Action"] and reprocess < 2:
        os.chdir(pkg.directory)

    # Check there isn't already a .changes or .dak file of the same name in
    # the proposed-updates "CopyChanges" or "CopyDotDak" storage directories.
    # [NB: this check must be done post-suite mapping]
    base_filename = os.path.basename(pkg.changes_file)
    dot_dak_filename = base_filename[:-8]+".dak"
    for suite in changes["distribution"].keys():
        copychanges = "Suite::%s::CopyChanges" % (suite)
        if Cnf.has_key(copychanges) and \
               os.path.exists(Cnf[copychanges]+"/"+base_filename):
            reject("%s: a file with this name already exists in %s" \
                   % (base_filename, Cnf[copychanges]))

        copy_dot_dak = "Suite::%s::CopyDotDak" % (suite)
        if Cnf.has_key(copy_dot_dak) and \
               os.path.exists(Cnf[copy_dot_dak]+"/"+dot_dak_filename):
            reject("%s: a file with this name already exists in %s" \
                   % (dot_dak_filename, Cnf[copy_dot_dak]))
    cursor = DBConn().cursor()
    # Check for packages that have moved from one component to another
    # STU: this should probably be changed to not join on architecture, suite tables but instead to used their cached name->id mappings from DBConn
    # Prepared once here; executed per-file/per-suite further down.
    DBConn().prepare("moved_pkg_q", """
        PREPARE moved_pkg_q(text,text,text) AS
        SELECT c.name FROM binaries b, bin_associations ba, suite s, location l,
        component c, architecture a, files f
        WHERE b.package = $1 AND s.suite_name = $2
        AND (a.arch_string = $3 OR a.arch_string = 'all')
        AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id
        AND f.location = l.id
        AND l.component = c.id
        AND b.file = f.id""")
        # Ensure the file does not already exist in one of the accepted directories
        for d in [ "Accepted", "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
            if not Cnf.has_key("Dir::Queue::%s" % (d)): continue
            if os.path.exists(Cnf["Dir::Queue::%s" % (d) ] + '/' + f):
                reject("%s file already exists in the %s directory." % (f, d))
        # Reject filenames containing shell-unsafe characters.
        if not re_taint_free.match(f):
            reject("!!WARNING!! tainted filename: '%s'." % (f))
        # Check the file is readable
        if os.access(f, os.R_OK) == 0:
            # When running in -n, copy_to_holding() won't have
            # generated the reject_message, so we need to.
            if Options["No-Action"]:
                if os.path.exists(f):
                    reject("Can't read `%s'. [permission denied]" % (f))
                    reject("Can't read `%s'. [file not found]" % (f))
            files[f]["type"] = "unreadable"
        # If it's byhand skip remaining checks
        if files[f]["section"] == "byhand" or files[f]["section"][:4] == "raw-":
            files[f]["byhand"] = 1
            files[f]["type"] = "byhand"
        # Checks for a binary package...
        elif re_isadeb.match(f):
            files[f]["type"] = "deb"

            # Extract package control information
            deb_file = utils.open_file(f)
                control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
                reject("%s: debExtractControl() raised %s." % (f, sys.exc_type))
                # Can't continue, none of the checks on control would work.

            # Check for mandatory "Description:"
                # Value discarded; the subscript fails if Description is
                # absent, which triggers the reject below.
                apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
                reject("%s: Missing Description in binary package" % (f))

            # Check for mandatory fields
            for field in [ "Package", "Architecture", "Version" ]:
                if control.Find(field) == None:
                    reject("%s: No %s field in control." % (f, field))

            # Ensure the package name matches the one give in the .changes
            if not changes["binary"].has_key(control.Find("Package", "")):
                reject("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

            # Validate the package field
            package = control.Find("Package")
            if not re_valid_pkg_name.match(package):
                reject("%s: invalid package name '%s'." % (f, package))

            # Validate the version field
            version = control.Find("Version")
            if not re_valid_version.match(version):
                reject("%s: invalid version number '%s'." % (f, version))

            # Ensure the architecture of the .deb is one we know about.
            default_suite = Cnf.get("Dinstall::DefaultSuite", "Unstable")
            architecture = control.Find("Architecture")
            upload_suite = changes["distribution"].keys()[0]
            if architecture not in DBConn().get_suite_architectures(default_suite) and architecture not in DBConn().get_suite_architectures(upload_suite):
                reject("Unknown architecture '%s'." % (architecture))

            # Ensure the architecture of the .deb is one of the ones
            # listed in the .changes.
            if not changes["architecture"].has_key(architecture):
                reject("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

            # Sanity-check the Depends field
            depends = control.Find("Depends")
                reject("%s: Depends field is empty." % (f))

            # Sanity-check the Provides field
            provides = control.Find("Provides")
                provide = re_spacestrip.sub('', provides)
                    reject("%s: Provides field is empty." % (f))
                prov_list = provide.split(",")
                for prov in prov_list:
                    if not re_valid_pkg_name.match(prov):
                        reject("%s: Invalid Provides field content %s." % (f, prov))

            # Check the section & priority match those given in the .changes (non-fatal)
            if control.Find("Section") and files[f]["section"] != "" and files[f]["section"] != control.Find("Section"):
                reject("%s control file lists section as `%s', but changes file has `%s'." % (f, control.Find("Section", ""), files[f]["section"]), "Warning: ")
            if control.Find("Priority") and files[f]["priority"] != "" and files[f]["priority"] != control.Find("Priority"):
                reject("%s control file lists priority as `%s', but changes file has `%s'." % (f, control.Find("Priority", ""), files[f]["priority"]),"Warning: ")

            # Cache the parsed control data on the files entry.
            files[f]["package"] = package
            files[f]["architecture"] = architecture
            files[f]["version"] = version
            files[f]["maintainer"] = control.Find("Maintainer", "")
            if f.endswith(".udeb"):
                files[f]["dbtype"] = "udeb"
            elif f.endswith(".deb"):
                files[f]["dbtype"] = "deb"
                reject("%s is neither a .deb or a .udeb." % (f))
            files[f]["source"] = control.Find("Source", files[f]["package"])
            # Get the source version
            source = files[f]["source"]
            # A Source field may embed a version: "src (1.2-3)".
            if source.find("(") != -1:
                m = re_extract_src_version.match(source)
                source_version = m.group(2)
            if not source_version:
                source_version = files[f]["version"]
            files[f]["source package"] = source
            files[f]["source version"] = source_version

            # Ensure the filename matches the contents of the .deb
            m = re_isadeb.match(f)
            #  package name
            file_package = m.group(1)
            if files[f]["package"] != file_package:
                reject("%s: package part of filename (%s) does not match package name in the %s (%s)." % (f, file_package, files[f]["dbtype"], files[f]["package"]))
            epochless_version = re_no_epoch.sub('', control.Find("Version"))
            #  version
            file_version = m.group(2)
            if epochless_version != file_version:
                reject("%s: version part of filename (%s) does not match package version in the %s (%s)." % (f, file_version, files[f]["dbtype"], epochless_version))
            #  architecture
            file_architecture = m.group(3)
            if files[f]["architecture"] != file_architecture:
                reject("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % (f, file_architecture, files[f]["dbtype"], files[f]["architecture"]))

            # Check for existent source
            source_version = files[f]["source version"]
            source_package = files[f]["source package"]
            if changes["architecture"].has_key("source"):
                if source_version != changes["version"]:
                    reject("source version (%s) for %s doesn't match changes version %s." % (source_version, f, changes["version"]))
                # Check in the SQL database
                if not Upload.source_exists(source_package, source_version, changes["distribution"].keys()):
                    # Check in one of the other directories
                    source_epochless_version = re_no_epoch.sub('', source_version)
                    dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                    if os.path.exists(Cnf["Dir::Queue::Byhand"] + '/' + dsc_filename):
                        files[f]["byhand"] = 1
                    elif os.path.exists(Cnf["Dir::Queue::New"] + '/' + dsc_filename):
                        # Also look in any of the other configured queues.
                        for myq in ["Accepted", "Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
                            if Cnf.has_key("Dir::Queue::%s" % (myq)):
                                if os.path.exists(Cnf["Dir::Queue::"+myq] + '/' + dsc_filename):
                        if not dsc_file_exists:
                            reject("no source found for %s %s (%s)." % (source_package, source_version, f))
            # Check the version and for file overwrites
            reject(Upload.check_binary_against_db(f),"")

            Binary(f, reject).scan_package()
        # Checks for a source package...
            m = re_issource.match(f)
                # Record the name/version/type parsed from the filename.
                files[f]["package"] = m.group(1)
                files[f]["version"] = m.group(2)
                files[f]["type"] = m.group(3)

                # Ensure the source package name matches the Source field in the .changes
                if changes["source"] != files[f]["package"]:
                    reject("%s: changes file doesn't say %s for Source" % (f, files[f]["package"]))

                # Ensure the source version matches the version in the .changes file
                if files[f]["type"] == "orig.tar.gz":
                    # orig tarballs carry the upstream version only.
                    changes_version = changes["chopversion2"]
                    changes_version = changes["chopversion"]
                if changes_version != files[f]["version"]:
                    reject("%s: should be %s according to changes file." % (f, changes_version))

                # Ensure the .changes lists source in the Architecture field
                if not changes["architecture"].has_key("source"):
                    reject("%s: changes file doesn't list `source' in Architecture field." % (f))

                # Check the signature of a .dsc file
                if files[f]["type"] == "dsc":
                    dsc["fingerprint"] = utils.check_signature(f, reject)

                files[f]["architecture"] = "source"

            # Not a binary or source package?  Assume byhand...
                files[f]["byhand"] = 1
                files[f]["type"] = "byhand"
        # Per-suite file checks
        files[f]["oldfiles"] = {}
        for suite in changes["distribution"].keys():
            # byhand files are handled manually; skip automated checks.
            if files[f].has_key("byhand"):

            # Handle component mappings
            for m in Cnf.ValueList("ComponentMappings"):
                (source, dest) = m.split()
                if files[f]["component"] == source:
                    # Remember where it came from before remapping.
                    files[f]["original component"] = source
                    files[f]["component"] = dest
626 # Ensure the component is valid for the target suite
627 if Cnf.has_key("Suite:%s::Components" % (suite)) and \
628 files[f]["component"] not in Cnf.ValueList("Suite::%s::Components" % (suite)):
629 reject("unknown component `%s' for suite `%s'." % (files[f]["component"], suite))
            # Validate the component
            component = files[f]["component"]
            component_id = DBConn().get_component_id(component)
            # get_component_id() returns -1 for unknown components.
            if component_id == -1:
                reject("file '%s' has unknown component '%s'." % (f, component))

            # See if the package is NEW
            if not Upload.in_override_p(files[f]["package"], files[f]["component"], suite, files[f].get("dbtype",""), f):

            # Validate the priority
            if files[f]["priority"].find('/') != -1:
                reject("file '%s' has invalid priority '%s' [contains '/']." % (f, files[f]["priority"]))

            # Determine the location
            location = Cnf["Dir::Pool"]
            location_id = DBConn().get_location_id(location, component, archive)
            if location_id == -1:
                reject("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (component, archive))
            files[f]["location id"] = location_id

            # Check the md5sum & size against existing files (if any)
            files[f]["pool name"] = utils.poolify (changes["source"], files[f]["component"])
            # get_files_id() sentinel returns distinguish "multiple rows"
            # from "exists but hash/size differ".
            files_id = DBConn().get_files_id(files[f]["pool name"] + f, files[f]["size"], files[f]["md5sum"], files[f]["location id"])
                reject("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
                reject("md5sum and/or size mismatch on existing copy of %s." % (f))
            files[f]["files id"] = files_id

            # Check for packages that have moved from one component to another
            files[f]['suite'] = suite
            cursor.execute("""EXECUTE moved_pkg_q( %(package)s, %(suite)s, %(architecture)s )""", ( files[f] ) )
            ql = cursor.fetchone()
                files[f]["othercomponents"] = ql[0][0]

    # If the .changes file says it has source, it must have source.
    if changes["architecture"].has_key("source"):
            reject("no source found and Architecture line in changes mention source.")

        if not has_binaries and Cnf.FindB("Dinstall::Reject::NoSourceOnly"):
            reject("source only uploads are not supported.")
678 ###############################################################################
    # Validate the .dsc: parse it, enforce mandatory fields, and cross
    # check it against the .changes and the archive database.

    # Ensure there is source to check
    if not changes["architecture"].has_key("source"):

    # Find the unique .dsc in the upload.
    for f in files.keys():
        if files[f]["type"] == "dsc":
                reject("can not process a .changes file with multiple .dsc's.")

    # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        reject("source uploads must contain a dsc file")

    # Parse the .dsc file
        dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
    except CantOpenError:
        # if not -n copy_to_holding() will have done this for us...
        if Options["No-Action"]:
            reject("%s: can't read file." % (dsc_filename))
    except ParseChangesError, line:
        reject("%s: parse error, can't grok: %s." % (dsc_filename, line))
    except InvalidDscError, line:
        reject("%s: syntax error on line %s." % (dsc_filename, line))
    except ChangesUnicodeError:
        reject("%s: dsc file not proper utf-8." % (dsc_filename))

    # Build up the file list of files mentioned by the .dsc
        dsc_files.update(utils.build_file_list(dsc, is_a_dsc=1))
    except NoFilesFieldError:
        reject("%s: no Files: field." % (dsc_filename))
    except UnknownFormatError, format:
        reject("%s: unknown format '%s'." % (dsc_filename, format))
    except ParseChangesError, line:
        reject("%s: parse error, can't grok: %s." % (dsc_filename, line))

    # Enforce mandatory fields
    for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
        if not dsc.has_key(i):
            reject("%s: missing mandatory field `%s'." % (dsc_filename, i))

    # Validate the source and version fields
    if not re_valid_pkg_name.match(dsc["source"]):
        reject("%s: invalid source name '%s'." % (dsc_filename, dsc["source"]))
    if not re_valid_version.match(dsc["version"]):
        reject("%s: invalid version number '%s'." % (dsc_filename, dsc["version"]))

    # Bumping the version number of the .dsc breaks extraction by stable's
    # dpkg-source.  So let's not do that...
    if dsc["format"] != "1.0":
        reject("%s: incompatible 'Format' version produced by a broken version of dpkg-dev 1.9.1{3,4}." % (dsc_filename))

    # Validate the Maintainer field
        utils.fix_maintainer (dsc["maintainer"])
    except ParseMaintError, msg:
        reject("%s: Maintainer field ('%s') failed to parse: %s" \
               % (dsc_filename, dsc["maintainer"], msg))

    # Validate the build-depends field(s)
    for field_name in [ "build-depends", "build-depends-indep" ]:
        field = dsc.get(field_name)
            # Check for broken dpkg-dev lossage...
            if field.startswith("ARRAY"):
                reject("%s: invalid %s field produced by a broken version of dpkg-dev (1.10.11)" % (dsc_filename, field_name.title()))

            # Have apt try to parse them...
                apt_pkg.ParseSrcDepends(field)
                reject("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

    # Ensure the version number in the .dsc matches the version number in the .changes
    epochless_dsc_version = re_no_epoch.sub('', dsc["version"])
    changes_version = files[dsc_filename]["version"]
    if epochless_dsc_version != files[dsc_filename]["version"]:
        reject("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

    # Ensure there is a .tar.gz in the .dsc file
    for f in dsc_files.keys():
        m = re_issource.match(f)
            reject("%s: %s in Files field not recognised as source." % (dsc_filename, f))
            if ftype == "orig.tar.gz" or ftype == "tar.gz":
        reject("%s: no .tar.gz or .orig.tar.gz in 'Files' field." % (dsc_filename))

    # Ensure source is newer than existing source in target suites
    reject(Upload.check_source_against_db(dsc_filename),"")

    # Cross-check the .dsc's file list against the database/incoming;
    # is_in_incoming points at an orig.tar.gz found in incoming.
    (reject_msg, is_in_incoming) = Upload.check_dsc_against_db(dsc_filename)
    reject(reject_msg, "")
        if not Options["No-Action"]:
            copy_to_holding(is_in_incoming)
        # Register the found orig.tar.gz as part of this upload so the
        # rest of the checks treat it like an uploaded file.
        orig_tar_gz = os.path.basename(is_in_incoming)
        files[orig_tar_gz] = {}
        files[orig_tar_gz]["size"] = os.stat(orig_tar_gz)[stat.ST_SIZE]
        files[orig_tar_gz]["md5sum"] = dsc_files[orig_tar_gz]["md5sum"]
        files[orig_tar_gz]["sha1sum"] = dsc_files[orig_tar_gz]["sha1sum"]
        files[orig_tar_gz]["sha256sum"] = dsc_files[orig_tar_gz]["sha256sum"]
        files[orig_tar_gz]["section"] = files[dsc_filename]["section"]
        files[orig_tar_gz]["priority"] = files[dsc_filename]["priority"]
        files[orig_tar_gz]["component"] = files[dsc_filename]["component"]
        files[orig_tar_gz]["type"] = "orig.tar.gz"
809 ################################################################################
def get_changelog_versions(source_dir):
    """Extract the source package and (optionally) grab the
    version history out of debian/changelog for the BTS."""

    # Find the .dsc (again)
    for f in files.keys():
        if files[f]["type"] == "dsc":

    # If there isn't one, we have nothing to do. (We have reject()ed the upload already)

    # Create a symlink mirror of the source files in our temporary directory
    for f in files.keys():
        m = re_issource.match(f)
            src = os.path.join(source_dir, f)
            # If a file is missing for whatever reason, give up.
            if not os.path.exists(src):
            # An existing-archive orig.tar.gz is symlinked separately below.
            if ftype == "orig.tar.gz" and pkg.orig_tar_gz:
            dest = os.path.join(os.getcwd(), f)
            os.symlink(src, dest)

    # If the orig.tar.gz is not a part of the upload, create a symlink to the
        dest = os.path.join(os.getcwd(), os.path.basename(pkg.orig_tar_gz))
        os.symlink(pkg.orig_tar_gz, dest)

    # Extract the source; -sn: don't copy the orig, -x: extract.
    cmd = "dpkg-source -sn -x %s" % (dsc_filename)
    (result, output) = commands.getstatusoutput(cmd)
        reject("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
        reject(utils.prefix_multi_line_string(output, " [dpkg-source output:] "), "")

    # Version tracking for the BTS is optional.
    if not Cnf.Find("Dir::Queue::BTSVersionTrack"):

    # Get the upstream version
    upstr_version = re_no_epoch.sub('', dsc["version"])
    if re_strip_revision.search(upstr_version):
        upstr_version = re_strip_revision.sub('', upstr_version)

    # Ensure the changelog file exists
    changelog_filename = "%s-%s/debian/changelog" % (dsc["source"], upstr_version)
    if not os.path.exists(changelog_filename):
        reject("%s: debian/changelog not found in extracted source." % (dsc_filename))

    # Parse the changelog
    dsc["bts changelog"] = ""
    changelog_file = utils.open_file(changelog_filename)
    for line in changelog_file.readlines():
        # Keep only the "package (version) dist; ..." header lines.
        m = re_changelog_versions.match(line)
            dsc["bts changelog"] += line
    changelog_file.close()

    # Check we found at least one revision in the changelog
    if not dsc["bts changelog"]:
        reject("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
########################################

    # Extract the source into a temporary tree and run the changelog
    # checks over it; then remove the tree again.  Bail out if:
    #    a) there's no source
    # or b) reprocess is 2  - we will do this check next time when orig.tar.gz is in 'files'
    # or c) the orig.tar.gz is MIA
    if not changes["architecture"].has_key("source") or reprocess == 2 \
       or pkg.orig_tar_gz == -1:

        tmpdir = utils.temp_dirname()

    # Move into the temporary directory

    # Get the changelog version history
    get_changelog_versions(cwd)

    # Move back and cleanup the temporary tree
        shutil.rmtree(tmpdir)
        # Anything other than EACCES is unexpected and fatal.
        if errno.errorcode[e.errno] != 'EACCES':
            utils.fubar("%s: couldn't remove tmp dir for source tree." % (dsc["source"]))

        reject("%s: source tree could not be cleanly removed." % (dsc["source"]))
        # We probably have u-r or u-w directories so chmod everything
        # and try again.
        cmd = "chmod -R u+rwx %s" % (tmpdir)
        result = os.system(cmd)
            utils.fubar("'%s' failed with result %s." % (cmd, result))
        shutil.rmtree(tmpdir)
        utils.fubar("%s: couldn't remove tmp dir for source tree." % (dsc["source"]))
919 ################################################################################
921 # FIXME: should be a debian specific check called from a hook
923 def check_urgency ():
924 if changes["architecture"].has_key("source"):
925 if not changes.has_key("urgency"):
926 changes["urgency"] = Cnf["Urgency::Default"]
927 changes["urgency"] = changes["urgency"].lower()
928 if changes["urgency"] not in Cnf.ValueList("Urgency::Valid"):
929 reject("%s is not a valid urgency; it will be treated as %s by testing." % (changes["urgency"], Cnf["Urgency::Default"]), "Warning: ")
930 changes["urgency"] = Cnf["Urgency::Default"]
932 ################################################################################
    # NOTE(review): interior of check_hashes(); the "def" line and the
    # loop body (presumably reject(m)) are not visible in this hunk.
    # Verify MD5 checksums and sizes for every file listed in both the
    # .changes and the .dsc.
    utils.check_hash(".changes", files, "md5", apt_pkg.md5sum)
    utils.check_size(".changes", files)
    utils.check_hash(".dsc", dsc_files, "md5", apt_pkg.md5sum)
    utils.check_size(".dsc", dsc_files)

    # This is stupid API, but it'll have to do for now until
    # we actually have proper abstraction
    for m in utils.ensure_hashes(changes, dsc, files, dsc_files):
945 ################################################################################
# Sanity check the time stamps of files inside debs.
# [Files in the near future cause ugly warnings and extreme time
# travel can cause errors on extraction]

def check_timestamps():
    # NOTE(review): the enclosing "class Tar" statement for the two
    # methods below, and the try/except framing in the loop, are not
    # visible in this hunk; indentation is reconstructed.
    def __init__(self, future_cutoff, past_cutoff):
        # Epoch-second cutoffs beyond which a member mtime is flagged.
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff
        # Offending member name -> mtime.
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind,Name,Link,Mode,UID,GID,Size,MTime,Major,Minor):
        # Invoked once per tar member by apt_inst; records members whose
        # mtime lies outside [past_cutoff, future_cutoff].
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime

    # Grace period into the future and the oldest acceptable year both
    # come from the dak configuration.
    future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
    past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
    tar = Tar(future_cutoff, past_cutoff)
    for filename in files.keys():
        if files[filename]["type"] == "deb":
            deb_file = utils.open_file(filename)
            # Scan both the control and data members of the .deb.
            apt_inst.debExtract(deb_file,tar.callback,"control.tar.gz")
            apt_inst.debExtract(deb_file,tar.callback,"data.tar.gz")
        except SystemError, e:
            # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
            if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
            apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
            # Report one representative offender of each kind (the
            # "if future_files:" / "if ancient_files:" guards are not
            # visible in this hunk).
            future_files = tar.future_files.keys()
            num_future_files = len(future_files)
            future_file = future_files[0]
            future_date = tar.future_files[future_file]
            reject("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
                   % (filename, num_future_files, future_file,
                      time.ctime(future_date)))
            ancient_files = tar.ancient_files.keys()
            num_ancient_files = len(ancient_files)
            ancient_file = ancient_files[0]
            ancient_date = tar.ancient_files[ancient_file]
            reject("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
                   % (filename, num_ancient_files, ancient_file,
                      time.ctime(ancient_date)))
            # Catch-all for extraction failures.
            reject("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1009 ################################################################################
def lookup_uid_from_fingerprint(fpr):
    """
    Return the uid, name and Debian-maintainer flag for a GPG fingerprint.

    @type fpr: string
    @param fpr: a 40 byte GPG fingerprint

    @return: (uid, name, isdm) row from the database, or
             (None, None, False) when the fingerprint is unknown
    """
    cursor = DBConn().cursor()
    # Bind the fingerprint as a query parameter -- as the other queries
    # in this file already do -- instead of interpolating it into the
    # SQL text with %, so a crafted fingerprint string cannot inject SQL.
    cursor.execute( "SELECT u.uid, u.name, k.debian_maintainer FROM fingerprint f JOIN keyrings k ON (f.keyring=k.id), uid u WHERE f.uid = u.id AND f.fingerprint = %(fpr)s", {'fpr': fpr} )
    qs = cursor.fetchone()
    if qs:
        return qs
    return (None, None, False)
def check_signed_by_key():
    """Ensure the .changes is signed by an authorized uploader."""
    # NOTE(review): several framing lines (branch headers such as
    # "if uid == None:", the "while True:" loop headers, break/continue
    # statements and some assignments) are not visible in this hunk;
    # indentation below is reconstructed.

    (uid, uid_name, is_dm) = lookup_uid_from_fingerprint(changes["fingerprint"])
    if uid_name == None:

    # match claimed name with actual name:

    # This is fundamentally broken but need us to refactor how we get
    # the UIDs/Fingerprints in order for us to fix it properly
        uid, uid_email = changes["fingerprint"], uid
        may_nmu, may_sponsor = 1, 1
        # XXX by default new dds don't have a fingerprint/uid in the db atm,
        # and can't get one in there if we don't allow nmu/sponsorship
    elif is_dm is False:
        # If is_dm is False, we allow full upload rights
        uid_email = "%s@debian.org" % (uid)
        may_nmu, may_sponsor = 1, 1
        # Assume limited upload rights unless we've discovered otherwise
        may_nmu, may_sponsor = 0, 0

    # Decide whether this upload is sponsored, based on whether the
    # signer matches the Maintainer/Changed-By fields.
    if uid_email in [changes["maintaineremail"], changes["changedbyemail"]]:
    elif uid_name in [changes["maintainername"], changes["changedbyname"]]:
        if uid_name == "": sponsored = 1

        # Record the sponsor address for sourceful uploads when the
        # signer's alias addresses don't include the maintainer.
        if ("source" in changes["architecture"] and
            uid_email and utils.is_email_alias(uid_email)):
            sponsor_addresses = utils.gpg_get_key_addresses(changes["fingerprint"])
            if (changes["maintaineremail"] not in sponsor_addresses and
                changes["changedbyemail"] not in sponsor_addresses):
                changes["sponsoremail"] = uid_email

    if sponsored and not may_sponsor:
        reject("%s is not authorised to sponsor uploads" % (uid))

    cursor = DBConn().cursor()
    if not sponsored and not may_nmu:
        # DM path: the signer must appear in Maintainer/Uploaders of the
        # most recent DM-Upload-Allowed version of the source package.
        cursor.execute( "SELECT s.id, s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) WHERE s.source = %(source)s AND s.dm_upload_allowed = 'yes'", changes )

        highest_sid, highest_version = None, None

        should_reject = True

        si = cursor.fetchone()
        if highest_version == None or apt_pkg.VersionCompare(si[1], highest_version) == 1:
            highest_version = si[1]

        if highest_sid == None:
            reject("Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version" % changes["source"])
            # NOTE(review): highest_sid comes from the database (an
            # integer id), but it is still %-interpolated into the SQL
            # text here rather than bound as a parameter like the other
            # queries in this function -- worth normalising.
            cursor.execute("SELECT m.name FROM maintainer m WHERE m.id IN (SELECT su.maintainer FROM src_uploaders su JOIN source s ON (s.id = su.source) WHERE su.source = %s)" % (highest_sid))

            m = cursor.fetchone()

            (rfc822, rfc2047, name, email) = utils.fix_maintainer(m[0])
            if email == uid_email or name == uid_name:

        if should_reject == True:
            reject("%s is not in Maintainer or Uploaders of source package %s" % (uid, changes["source"]))

        # A DM may not hijack binaries belonging to another source package.
        for b in changes["binary"].keys():
            for suite in changes["distribution"].keys():
                suite_id = DBConn().get_suite_id(suite)

                cursor.execute("SELECT DISTINCT s.source FROM source s JOIN binaries b ON (s.id = b.source) JOIN bin_associations ba On (b.id = ba.bin) WHERE b.package = %(package)s AND ba.suite = %(suite)s" , {'package':b, 'suite':suite_id} )

                s = cursor.fetchone()

                if s[0] != changes["source"]:
                    reject("%s may not hijack %s from source package %s in suite %s" % (uid, b, s, suite))

        # DMs may not upload files that would be BYHAND or NEW.
        for f in files.keys():
            if files[f].has_key("byhand"):
                reject("%s may not upload BYHAND file %s" % (uid, f))
            if files[f].has_key("new"):
                reject("%s may not upload NEW file %s" % (uid, f))
1126 ################################################################################
1127 ################################################################################
1129 # If any file of an upload has a recent mtime then chances are good
1130 # the file is still being uploaded.
def upload_too_new():
    # Return whether any file of the upload was modified less than
    # Dinstall::SkipTime seconds ago, i.e. may still be being uploaded.
    # NOTE(review): the try/finally framing, the "for f in file_list:"
    # loop header and the return statements are not visible in this hunk.
    # Move back to the original directory to get accurate time stamps
    os.chdir(pkg.directory)
    # Consider every file of the upload plus the .changes itself.
    file_list = pkg.files.keys()
    file_list.extend(pkg.dsc_files.keys())
    file_list.append(pkg.changes_file)
    last_modified = time.time()-os.path.getmtime(f)
    if last_modified < int(Cnf["Dinstall::SkipTime"]):
1151 ################################################################################
    # NOTE(review): interior of action(); the "def" line, the
    # "queue_info = {" opener, several branch headers, the answer
    # assignments ('S'/'R'/'A'/queuekey) and the final 'Q' branch are
    # not visible in this hunk; indentation is reconstructed.
    # changes["distribution"] may not exist in corner cases
    # (e.g. unreadable changes files)
    if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
        changes["distribution"] = {}

    (summary, short_summary) = Upload.build_summaries()

    # q-unapproved hax0ring
    # Map of queue name -> predicate ("is") and handler ("process").
         "New": { "is": is_new, "process": acknowledge_new },
         "Autobyhand" : { "is" : is_autobyhand, "process": do_autobyhand },
         "Byhand" : { "is": is_byhand, "process": do_byhand },
         "OldStableUpdate" : { "is": is_oldstableupdate,
                               "process": do_oldstableupdate },
         "StableUpdate" : { "is": is_stableupdate, "process": do_stableupdate },
         "Unembargo" : { "is": is_unembargo, "process": queue_unembargo },
         "Embargo" : { "is": is_embargo, "process": queue_embargo },

    # Which queues to consider, in priority order; security setups use
    # the embargo queues instead of the stable-update ones.
    queues = [ "New", "Autobyhand", "Byhand" ]
    if Cnf.FindB("Dinstall::SecurityQueueHandling"):
        queues += [ "Unembargo", "Embargo" ]
        queues += [ "OldStableUpdate", "StableUpdate" ]

    (prompt, answer) = ("", "XXX")
    if Options["No-Action"] or Options["Automatic"]:

    # Rejected upload: either skip (still uploading) or offer to reject.
    if reject_message.find("Rejected") != -1:
        if upload_too_new():
            print "SKIP (too new)\n" + reject_message,
            prompt = "[S]kip, Quit ?"
            print "REJECT\n" + reject_message,
            prompt = "[R]eject, Skip, Quit ?"
            if Options["Automatic"]:
        # Otherwise find the first queue whose predicate matches.
        if queue_info[q]["is"]():
            print "%s for %s\n%s%s" % (
                qu.upper(), ", ".join(changes["distribution"].keys()),
                reject_message, summary),
            queuekey = qu[0].upper()
            # Avoid prompt keys that collide with Reject/Quit/Skip/Accept.
            if queuekey in "RQSA":
                prompt = "[D]ivert, Skip, Quit ?"
                prompt = "[%s]%s, Skip, Quit ?" % (queuekey, qu[1:].lower())
            if Options["Automatic"]:
            print "ACCEPT\n" + reject_message + summary,
            prompt = "[A]ccept, Skip, Quit ?"
            if Options["Automatic"]:

    # Ask until the answer matches one of the prompt's choices; an empty
    # answer picks the prompt's default.
    while prompt.find(answer) == -1:
        answer = utils.our_raw_input(prompt)
        m = re_default_answer.match(prompt)
        answer = answer[:1].upper()

        # Dispatch on the chosen action.
        os.chdir (pkg.directory)
        Upload.do_reject(0, reject_message)
        accept(summary, short_summary)
        remove_from_unchecked()
    elif answer == queuekey:
        queue_info[qu]["process"](summary, short_summary)
        remove_from_unchecked()
def remove_from_unchecked():
    # Delete the upload's files (and finally the .changes itself) from
    # the original queue directory.
    # NOTE(review): the per-file os.unlink(f) inside the loop is not
    # visible in this hunk.
    os.chdir (pkg.directory)
    for f in files.keys():
    os.unlink(pkg.changes_file)
1242 ################################################################################
def accept (summary, short_summary):
    """Accept the upload and then check the override file for disparities."""
    Upload.accept(summary, short_summary)
    Upload.check_override()
1248 ################################################################################
def move_to_dir (dest, perms=0660, changesperms=0664):
    # Move the .changes file and every file of the upload into dest,
    # applying the given octal permissions.
    # NOTE(review): the "for f in file_keys:" loop header is not visible
    # in this hunk.
    utils.move (pkg.changes_file, dest, perms=changesperms)
    file_keys = files.keys()
    utils.move (f, dest, perms=perms)
1256 ################################################################################
def is_unembargo ():
    # Decide whether this upload belongs in the unembargoed queue:
    # either it is already recorded in the disembargo table, or it is a
    # sourceful upload sitting in the disembargo queue directory (in
    # which case it is recorded now).
    # NOTE(review): the return statements, the cwd restore and part of
    # the INSERT's argument list are not visible in this hunk.
    cursor = DBConn().cursor()
    cursor.execute( "SELECT package FROM disembargo WHERE package = %(source)s AND version = %(version)s", changes )
    if cursor.fetchone():

    oldcwd = os.getcwd()
    os.chdir(Cnf["Dir::Queue::Disembargo"])
    disdir = os.getcwd()

    if pkg.directory == disdir:
        if changes["architecture"].has_key("source"):
            if Options["No-Action"]: return 1

            # NOTE(review): the placeholders below are wrapped in single
            # quotes; with DB-API parameter binding the driver supplies
            # quoting itself -- verify against the paramstyle in use.
            cursor.execute( "INSERT INTO disembargo (package, version) VALUES ('%(package)s', '%(version)s')",
            cursor.execute( "COMMIT" )
def queue_unembargo (summary, short_summary):
    # Divert the upload to the UNEMBARGOED policy queue, then send the
    # usual accepted mail and announcements unless mail is disabled.
    print "Moving to UNEMBARGOED holding area."
    Logger.log(["Moving to unembargoed", pkg.changes_file])

    Upload.dump_vars(Cnf["Dir::Queue::Unembargoed"])
    move_to_dir(Cnf["Dir::Queue::Unembargoed"])
    Upload.queue_build("unembargoed", Cnf["Dir::Queue::Unembargoed"])

    # Check for override disparities
    Upload.Subst["__SUMMARY__"] = summary
    Upload.check_override()

    # Send accept mail, announce to lists, close bugs and check for
    # override disparities
    if not Cnf["Dinstall::Options::No-Mail"]:
        Upload.Subst["__SUITE__"] = ""
        mail_message = utils.TemplateSubst(Upload.Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
        utils.send_mail(mail_message)
        Upload.announce(short_summary, 1)
1300 ################################################################################
1303 # if embargoed queues are enabled always embargo
def queue_embargo (summary, short_summary):
    # Divert the upload to the EMBARGOED policy queue, then send the
    # usual accepted mail and announcements unless mail is disabled.
    print "Moving to EMBARGOED holding area."
    Logger.log(["Moving to embargoed", pkg.changes_file])

    Upload.dump_vars(Cnf["Dir::Queue::Embargoed"])
    move_to_dir(Cnf["Dir::Queue::Embargoed"])
    Upload.queue_build("embargoed", Cnf["Dir::Queue::Embargoed"])

    # Check for override disparities
    Upload.Subst["__SUMMARY__"] = summary
    Upload.check_override()

    # Send accept mail, announce to lists, close bugs and check for
    # override disparities
    if not Cnf["Dinstall::Options::No-Mail"]:
        Upload.Subst["__SUITE__"] = ""
        mail_message = utils.TemplateSubst(Upload.Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
        utils.send_mail(mail_message)
        Upload.announce(short_summary, 1)
1326 ################################################################################
def is_stableupdate ():
    # Decide whether a proposed-updates upload must be held: hold unless
    # the source is already present in the proposed-updates suite.
    # NOTE(review): the return statements and the tail of the parameter
    # dict (the 'suite': pusuite entry) are not visible in this hunk.
    if not changes["distribution"].has_key("proposed-updates"):

    if not changes["architecture"].has_key("source"):
        pusuite = DBConn().get_suite_id("proposed-updates")
        cursor = DBConn().cursor()
        cursor.execute( """SELECT 1 FROM source s
                           JOIN src_associations sa ON (s.id = sa.source)
                           WHERE s.source = %(source)s
                           AND s.version = %(version)s
                           AND sa.suite = %(suite)s""",
                        {'source' : changes['source'],
                         'version' : changes['version'],
        if cursor.fetchone():
            # source is already in proposed-updates so no need to hold
def do_stableupdate (summary, short_summary):
    # Divert the upload to the proposed-updates holding area for review
    # by the stable release managers.
    print "Moving to PROPOSED-UPDATES holding area."
    Logger.log(["Moving to proposed-updates", pkg.changes_file])

    Upload.dump_vars(Cnf["Dir::Queue::ProposedUpdates"])
    move_to_dir(Cnf["Dir::Queue::ProposedUpdates"], perms=0664)

    # Check for override disparities
    Upload.Subst["__SUMMARY__"] = summary
    Upload.check_override()
1361 ################################################################################
def is_oldstableupdate ():
    # Decide whether an oldstable-proposed-updates upload must be held:
    # hold unless the source is already present in that suite.
    # NOTE(review): the return statements and the tail of the parameter
    # dict (the 'suite': pusuite entry) are not visible in this hunk.
    if not changes["distribution"].has_key("oldstable-proposed-updates"):

    if not changes["architecture"].has_key("source"):
        pusuite = DBConn().get_suite_id("oldstable-proposed-updates")
        cursor = DBConn().cursor()
        cursor.execute( """SELECT 1 FROM source s
                           JOIN src_associations sa ON (s.id = sa.source)
                           WHERE s.source = %(source)s
                           AND s.version = %(version)s
                           AND sa.suite = %(suite)s""",
                        {'source' : changes['source'],
                         'version' : changes['version'],
        if cursor.fetchone():
def do_oldstableupdate (summary, short_summary):
    # Divert the upload to the oldstable-proposed-updates holding area
    # for review by the (old)stable release managers.
    print "Moving to OLDSTABLE-PROPOSED-UPDATES holding area."
    Logger.log(["Moving to oldstable-proposed-updates", pkg.changes_file])

    Upload.dump_vars(Cnf["Dir::Queue::OldProposedUpdates"])
    move_to_dir(Cnf["Dir::Queue::OldProposedUpdates"], perms=0664)

    # Check for override disparities
    Upload.Subst["__SUMMARY__"] = summary
    Upload.check_override()
1394 ################################################################################
def is_autobyhand ():
    # Decide whether ALL byhand files of the upload can be processed
    # automatically via AutomaticByHandPackages config entries.
    # NOTE(review): initialisation of any_auto/all_auto and the
    # continue/flag statements at each rejection point are not visible
    # in this hunk; indentation is reconstructed.
    for f in files.keys():
        if files[f].has_key("byhand"):

            # filename is of form "PKG_VER_ARCH.EXT" where PKG, VER and ARCH
            # don't contain underscores, and ARCH doesn't contain dots.
            # further VER matches the .changes Version:, and ARCH should be in
            # the .changes Architecture: list.
            if f.count("_") < 2:

            (pckg, ver, archext) = f.split("_", 2)
            if archext.count(".") < 1 or changes["version"] != ver:

            # The package must be configured for automatic byhand, for
            # this exact source package.
            ABH = Cnf.SubTree("AutomaticByHandPackages")
            if not ABH.has_key(pckg) or \
               ABH["%s::Source" % (pckg)] != changes["source"]:
                print "not match %s %s" % (pckg, changes["source"])

            (arch, ext) = archext.split(".", 1)
            if arch not in changes["architecture"]:

            # Remember what script to run for this file.
            files[f]["byhand-arch"] = arch
            files[f]["byhand-script"] = ABH["%s::Script" % (pckg)]

    # True only when at least one byhand file exists and all of them
    # are automatable.
    return any_auto and all_auto
def do_autobyhand (summary, short_summary):
    # Run the configured byhand script for each automatable byhand file;
    # anything left over falls back to the manual BYHAND queue, and a
    # fully-processed upload is accepted.
    # NOTE(review): the byhandfile assignment, the result check, the
    # byhandleft bookkeeping and the final if/else framing are not
    # visible in this hunk; indentation is reconstructed.
    print "Attempting AUTOBYHAND."
    for f in files.keys():
        if not files[f].has_key("byhand"):
        if not files[f].has_key("byhand-script"):

            os.system("ls -l %s" % byhandfile)
            # Script contract: script byhandfile version arch changes_file
            result = os.system("%s %s %s %s %s" % (
                    files[f]["byhand-script"], byhandfile,
                    changes["version"], files[f]["byhand-arch"],
                    os.path.abspath(pkg.changes_file)))
            os.unlink(byhandfile)
            print "Error processing %s, left as byhand." % (f)
        do_byhand(summary, short_summary)
        accept(summary, short_summary)
1461 ################################################################################
    # NOTE(review): interior of is_byhand(); the "def" line and the
    # return statements are not visible in this hunk.  True when any
    # file of the upload is flagged byhand.
    for f in files.keys():
        if files[f].has_key("byhand"):
def do_byhand (summary, short_summary):
    # Divert the upload to the BYHAND queue for manual processing.
    print "Moving to BYHAND holding area."
    Logger.log(["Moving to byhand", pkg.changes_file])

    Upload.dump_vars(Cnf["Dir::Queue::Byhand"])
    move_to_dir(Cnf["Dir::Queue::Byhand"])

    # Check for override disparities
    Upload.Subst["__SUMMARY__"] = summary
    Upload.check_override()
1480 ################################################################################
    # NOTE(review): interior of is_new(); the "def" line and the return
    # statements are not visible in this hunk.  True when any file of
    # the upload is flagged new.
    for f in files.keys():
        if files[f].has_key("new"):
1488 def acknowledge_new (summary, short_summary):
1489 Subst = Upload.Subst
1491 print "Moving to NEW holding area."
1492 Logger.log(["Moving to new", pkg.changes_file])
1494 Upload.dump_vars(Cnf["Dir::Queue::New"])
1495 move_to_dir(Cnf["Dir::Queue::New"], perms=0640, changesperms=0644)
1497 if not Options["No-Mail"]:
1498 print "Sending new ack."
1499 Subst["__SUMMARY__"] = summary
1500 new_ack_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.new")
1501 utils.send_mail(new_ack_message)
1503 ################################################################################
# reprocess is necessary for the case of foo_1.2-1 and foo_1.2-2 in
# Incoming. -1 will reference the .orig.tar.gz, but -2 will not.
# Upload.check_dsc_against_db() can find the .orig.tar.gz but it will
# not have processed it during it's checks of -2. If -1 has been
# deleted or otherwise not checked by 'dak process-unchecked', the
# .orig.tar.gz will not have been checked at all. To get round this,
# we force the .orig.tar.gz into the .changes structure and reprocess
# the .changes file.

def process_it (changes_file):
    # Run the whole check pipeline for one .changes file.
    # NOTE(review): the global-reset statements, the try/except framing
    # around the checks, the reprocess loop and the action() call are
    # not visible in this hunk; indentation is reconstructed.
    global reprocess, reject_message

    # Reset some globals

    # Some defaults in case we can't fully process the .changes file
    changes["maintainer2047"] = Cnf["Dinstall::MyEmailAddress"]
    changes["changedby2047"] = Cnf["Dinstall::MyEmailAddress"]

    # Absolutize the filename to avoid the requirement of being in the
    # same directory as the .changes file.
    pkg.changes_file = os.path.abspath(changes_file)

    # Remember where we are so we can come back after cd-ing into the
    # holding directory.
    pkg.directory = os.getcwd()

        # If this is the Real Thing(tm), copy things into a private
        # holding directory first to avoid replacable file races.
        if not Options["No-Action"]:
            os.chdir(Cnf["Dir::Queue::Holding"])
            copy_to_holding(pkg.changes_file)
            # Relativize the filename so we use the copy in holding
            # rather than the original...
            pkg.changes_file = os.path.basename(pkg.changes_file)

        # Verify the OpenPGP signature before trusting any field.
        changes["fingerprint"] = utils.check_signature(pkg.changes_file, reject)
        if changes["fingerprint"]:
            valid_changes_p = check_changes()
                check_distributions()
                valid_dsc_p = check_dsc()
                check_signed_by_key()
        Upload.update_subst(reject_message)
        # Unexpected errors are reported with a full traceback.
        traceback.print_exc(file=sys.stderr)

    # Restore previous WD
    os.chdir(pkg.directory)
1570 ###############################################################################
    # NOTE(review): interior of main(); the "def" line, the lock fubar
    # fallthrough, the clean_holding()/Logger.close() calls and the
    # "sets" pluralisation assignments are not visible in this hunk;
    # indentation is reconstructed.
    global Cnf, Options, Logger

    changes_files = init()

    # -n/--dry-run invalidates some other options which would involve things happening
    if Options["No-Action"]:
        Options["Automatic"] = ""

    # Ensure all the arguments we were given are .changes files
    for f in changes_files:
        if not f.endswith(".changes"):
            utils.warn("Ignoring '%s' because it's not a .changes file." % (f))
            changes_files.remove(f)

    if changes_files == []:
        utils.fubar("Need at least one .changes file as an argument.")

    # Check that we aren't going to clash with the daily cron job

    if not Options["No-Action"] and os.path.exists("%s/daily.lock" % (Cnf["Dir::Lock"])) and not Options["No-Lock"]:
        utils.fubar("Archive maintenance in progress. Try again later.")

    # Obtain lock if not in no-action mode and initialize the log

    if not Options["No-Action"]:
        lock_fd = os.open(Cnf["Dinstall::LockFile"], os.O_RDWR | os.O_CREAT)
        # LOCK_NB: fail immediately if another process-unchecked holds it.
        fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
        if errno.errorcode[e.errno] == 'EACCES' or errno.errorcode[e.errno] == 'EAGAIN':
            utils.fubar("Couldn't obtain lock; assuming another 'dak process-unchecked' is already running.")

        Logger = Upload.Logger = logging.Logger(Cnf, "process-unchecked")

    # debian-{devel-,}-changes@lists.debian.org toggles writes access based on this header
    bcc = "X-DAK: dak process-unchecked\nX-Katie: $Revision: 1.65 $"
    if Cnf.has_key("Dinstall::Bcc"):
        Upload.Subst["__BCC__"] = bcc + "\nBcc: %s" % (Cnf["Dinstall::Bcc"])
        Upload.Subst["__BCC__"] = bcc

    # Sort the .changes files so that we process sourceful ones first
    changes_files.sort(utils.changes_compare)

    # Process the changes files
    for changes_file in changes_files:
        print "\n" + changes_file
        process_it (changes_file)
        if not Options["No-Action"]:

    # Report totals for this run.
    accept_count = Upload.accept_count
    accept_bytes = Upload.accept_bytes
        if accept_count > 1:
        print "Accepted %d package %s, %s." % (accept_count, sets, utils.size_type(int(accept_bytes)))
        Logger.log(["total",accept_count,accept_bytes])

    if not Options["No-Action"]:
1640 ################################################################################
1642 if __name__ == '__main__':