3 """ Checks Debian packages from Incoming """
4 # Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 2 of the License, or
9 # (at your option) any later version.
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 # Originally based on dinstall by Guy Maor <maor@debian.org>
22 ################################################################################
24 # Computer games don't affect kids. I mean if Pacman affected our generation as
25 # kids, we'd all run around in a darkened room munching pills and listening to
29 ################################################################################
43 import apt_inst, apt_pkg
44 from debian_bundle import deb822
45 from daklib.dbconn import DBConn
46 from daklib.binary import Binary
47 from daklib import logging
48 from daklib import queue
49 from daklib import utils
50 from daklib.dak_exceptions import *
51 from daklib.regexes import re_valid_version, re_valid_pkg_name, re_changelog_versions, \
52 re_strip_revision, re_strip_srcver, re_spacestrip, \
53 re_isanum, re_no_epoch, re_no_revision, re_taint_free, \
54 re_isadeb, re_extract_src_version, re_issource, re_default_answer
58 ################################################################################
61 ################################################################################
72 # Aliases to the real vars in the Upload class; hysterical raisins.
80 ###############################################################################
# Fragment of the module's init(): load the dak configuration, parse the
# command line, and bind module-level aliases to the queue.Upload state.
# NOTE(review): sampled dump — the enclosing `def init()` and the lines whose
# original numbers are skipped (84-86, 89, 95, 99, 102-105, 107, 109, e.g. the
# `dsc = Upload.pkg.dsc` alias) are missing from this view; only comments added.
83 global Cnf, Options, Upload, changes, dsc, dsc_files, files, pkg
# Load apt-style (ISC) configuration from the dak config file.
87 Cnf = apt_pkg.newConfiguration()
88 apt_pkg.ReadConfigFileISC(Cnf,utils.which_conf_file())
# Each command-line option maps onto a Dinstall::Options::* config key.
90 Arguments = [('a',"automatic","Dinstall::Options::Automatic"),
91 ('h',"help","Dinstall::Options::Help"),
92 ('n',"no-action","Dinstall::Options::No-Action"),
93 ('p',"no-lock", "Dinstall::Options::No-Lock"),
94 ('s',"no-mail", "Dinstall::Options::No-Mail")]
# Pre-seed every option key with "" so later lookups never fail.
96 for i in ["automatic", "help", "no-action", "no-lock", "no-mail",
97 "override-distribution", "version"]:
98 Cnf["Dinstall::Options::%s" % (i)] = ""
# Leftover positional arguments are the .changes files to process.
100 changes_files = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
101 Options = Cnf.SubTree("Dinstall::Options")
106 Upload = queue.Upload(Cnf)
# Aliases into the Upload's per-package state ("hysterical raisins" — see the
# comment near the top of the file).
108 changes = Upload.pkg.changes
110 dsc_files = Upload.pkg.dsc_files
111 files = Upload.pkg.files
116 ################################################################################
# Print the command-line help text.
# NOTE(review): sampled dump — the lines after original 125 are missing; they
# presumably contain the `sys.exit(exit_code)` that makes the `exit_code`
# parameter meaningful.  Confirm against the full source.
118 def usage (exit_code=0):
119 print """Usage: dinstall [OPTION]... [CHANGES]...
120 -a, --automatic automatic run
121 -h, --help show this help and exit.
122 -n, --no-action don't do anything
123 -p, --no-lock don't check lockfile !! for cron.daily only !!
124 -s, --no-mail don't send any mail
125 -V, --version display the version number and exit"""
128 ################################################################################
def reject (str, prefix="Rejected: "):
    """Accumulate one rejection/warning line onto the global reject_message.

    The message is recorded as prefix + str + newline; callers pass an
    alternate prefix (e.g. "Warning: " or "") to change the severity tag.
    """
    global reject_message
    reject_message = "%s%s%s\n" % (reject_message, prefix, str)
135 ################################################################################
# Fragment of create_tmpdir().
# NOTE(review): sampled dump — the `def create_tmpdir():` line, the rest of the
# docstring, and (presumably) the trailing `return tmpdir` are missing from
# this view; only comments added.
139 Create a temporary directory that can be used for unpacking files into for
# tempfile.mkdtemp() creates the directory with mode 0700, readable only by
# the current user.
142 tmpdir = tempfile.mkdtemp()
145 ################################################################################
# Copy `filename` into the Dir::Queue::Holding area, first reserving the
# destination name with an exclusive create so a file listed twice in the
# .changes (or already present) is caught and reject()ed.
# NOTE(review): sampled dump — the `try:`/`except OSError` scaffolding around
# os.open() and shutil.copy(), and the early `return`s after the reject()
# calls, are missing from this view; only comments added.
147 def copy_to_holding(filename):
150 base_filename = os.path.basename(filename)
152 dest = Cnf["Dir::Queue::Holding"] + '/' + base_filename
# O_CREAT|O_EXCL: atomically fail with EEXIST if the name is already taken.
154 fd = os.open(dest, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0640)
157 # Shouldn't happen, but will if, for example, someone lists a
158 # file twice in the .changes.
159 if errno.errorcode[e.errno] == 'EEXIST':
160 reject("%s: already exists in holding area; can not overwrite." % (base_filename))
165 shutil.copy(filename, dest)
167 # In either case (ENOENT or EACCES) we want to remove the
168 # O_CREAT | O_EXCLed ghost file, so add the file to the list
169 # of 'in holding' even if it's not the real file.
170 if errno.errorcode[e.errno] == 'ENOENT':
171 reject("%s: can not copy to holding area: file not found." % (base_filename))
174 elif errno.errorcode[e.errno] == 'EACCES':
175 reject("%s: can not copy to holding area: read permission denied." % (base_filename))
# Record the file in the holding bookkeeping; the value is unused — the dict
# is used as a set, and clean_holding() later removes everything listed here.
180 in_holding[base_filename] = ""
182 ################################################################################
# Fragment of clean_holding(): remove every file recorded in `in_holding`
# from the holding directory.
# NOTE(review): sampled dump — the `def` line and the actual removal call
# (presumably os.unlink(f)) after the sanity check are missing from this view.
188 os.chdir(Cnf["Dir::Queue::Holding"])
189 for f in in_holding.keys():
190 if os.path.exists(f):
# Paranoia: never remove anything outside the holding directory.
191 if f.find('/') != -1:
192 utils.fubar("WTF? clean_holding() got a file ('%s') with / in it!" % (f))
198 ################################################################################
# check_changes(): parse and sanity-check the .changes file, populating the
# module-global `changes` and `files` dicts via the Upload aliases.
# NOTE(review): sampled dump — the `def` line, the `try:` lines matching the
# visible `except` clauses, the `return` statements after fatal rejects, and
# a few assignment RHSs are missing from this view; only comments added.
201 filename = pkg.changes_file
203 # Parse the .changes field into a dictionary
205 changes.update(utils.parse_changes(filename))
206 except CantOpenError:
207 reject("%s: can't read file." % (filename))
209 except ParseChangesError, line:
210 reject("%s: parse error, can't grok: %s." % (filename, line))
213 # Parse the Files field from the .changes into another dictionary
215 files.update(utils.build_file_list(changes))
216 except ParseChangesError, line:
217 reject("%s: parse error, can't grok: %s." % (filename, line))
218 except UnknownFormatError, format:
219 reject("%s: unknown format '%s'." % (filename, format))
222 # Check for mandatory fields
223 for i in ("source", "binary", "architecture", "version", "distribution",
224 "maintainer", "files", "changes", "description"):
225 if not changes.has_key(i):
226 reject("%s: Missing mandatory field `%s'." % (filename, i))
227 return 0 # Avoid <undef> errors during later tests
229 # Strip a source version in brackets from the source field
230 if re_strip_srcver.search(changes["source"]):
231 changes["source"] = re_strip_srcver.sub('', changes["source"])
233 # Ensure the source field is a valid package name.
234 if not re_valid_pkg_name.match(changes["source"]):
235 reject("%s: invalid source name '%s'." % (filename, changes["source"]))
237 # Split multi-value fields into a lower-level dictionary
238 for i in ("architecture", "distribution", "binary", "closes"):
239 o = changes.get(i, "")
# NOTE(review): the loop body that splits `o` into a per-value sub-dict
# (original lines 240-244) is missing from this dump.
246 # Fix the Maintainer: field to be RFC822/2047 compatible
248 (changes["maintainer822"], changes["maintainer2047"],
249 changes["maintainername"], changes["maintaineremail"]) = \
250 utils.fix_maintainer (changes["maintainer"])
251 except ParseMaintError, msg:
252 reject("%s: Maintainer field ('%s') failed to parse: %s" \
253 % (filename, changes["maintainer"], msg))
255 # ...likewise for the Changed-By: field if it exists.
257 (changes["changedby822"], changes["changedby2047"],
258 changes["changedbyname"], changes["changedbyemail"]) = \
259 utils.fix_maintainer (changes.get("changed-by", ""))
260 except ParseMaintError, msg:
# On parse failure the changedby* fields are reset; the RHS of this
# assignment (original line 263) is missing from this dump.
261 (changes["changedby822"], changes["changedby2047"],
262 changes["changedbyname"], changes["changedbyemail"]) = \
264 reject("%s: Changed-By field ('%s') failed to parse: %s" \
265 % (filename, changes["changed-by"], msg))
267 # Ensure all the values in Closes: are numbers
268 if changes.has_key("closes"):
269 for i in changes["closes"].keys():
270 if re_isanum.match (i) == None:
271 reject("%s: `%s' from Closes field isn't a number." % (filename, i))
274 # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
275 changes["chopversion"] = re_no_epoch.sub('', changes["version"])
276 changes["chopversion2"] = re_no_revision.sub('', changes["chopversion"])
278 # Check there isn't already a changes file of the same name in one
279 # of the queue directories.
280 base_filename = os.path.basename(filename)
281 for d in [ "Accepted", "Byhand", "Done", "New", "ProposedUpdates", "OldProposedUpdates" ]:
282 if os.path.exists(Cnf["Dir::Queue::%s" % (d) ]+'/'+base_filename):
283 reject("%s: a file with this name already exists in the %s directory." % (base_filename, d))
285 # Check the .changes is non-empty
# NOTE(review): the guarding `if not files:` (original line 286) is missing
# from this dump.
287 reject("%s: nothing to do (Files field is empty)." % (base_filename))
292 ################################################################################
294 def check_distributions():
295 "Check and map the Distribution field of a .changes file."
297 # Handle suite mappings
# Each SuiteMappings entry is a whitespace-separated directive; the first
# word is the mapping type, the rest are its arguments.
# NOTE(review): sampled dump — the lines splitting `m` into `args`/`mtype`
# (original 299-300) and the `suite = args[1]` lines before the "ignore" and
# "reject" branches (321, 326) are missing from this view; only comments added.
298 for m in Cnf.ValueList("SuiteMappings"):
# "map"/"silent-map": unconditionally rename a target suite.
301 if mtype == "map" or mtype == "silent-map":
302 (source, dest) = args[1:3]
303 if changes["distribution"].has_key(source):
304 del changes["distribution"][source]
305 changes["distribution"][dest] = 1
306 if mtype != "silent-map":
307 reject("Mapping %s to %s." % (source, dest),"")
308 if changes.has_key("distribution-version"):
309 if changes["distribution-version"].has_key(source):
310 changes["distribution-version"][source]=dest
# "map-unreleased": move to `dest` only if the upload contains an
# architecture the `source` suite does not carry.
311 elif mtype == "map-unreleased":
312 (source, dest) = args[1:3]
313 if changes["distribution"].has_key(source):
314 for arch in changes["architecture"].keys():
315 if arch not in database.get_suite_architectures(source):
316 reject("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch),"")
317 del changes["distribution"][source]
318 changes["distribution"][dest] = 1
# "ignore": silently (with a warning) drop the suite as a target.
320 elif mtype == "ignore":
322 if changes["distribution"].has_key(suite):
323 del changes["distribution"][suite]
324 reject("Ignoring %s as a target suite." % (suite), "Warning: ")
# "reject": refuse the upload outright for this suite.
325 elif mtype == "reject":
327 if changes["distribution"].has_key(suite):
328 reject("Uploads to %s are not accepted." % (suite))
329 elif mtype == "propup-version":
330 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes
332 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
333 if changes["distribution"].has_key(args[1]):
334 changes.setdefault("distribution-version", {})
335 for suite in args[2:]: changes["distribution-version"][suite]=suite
337 # Ensure there is (still) a target distribution
338 if changes["distribution"].keys() == []:
339 reject("no valid distribution.")
341 # Ensure target distributions exist
342 for suite in changes["distribution"].keys():
343 if not Cnf.has_key("Suite::%s" % (suite)):
344 reject("Unknown distribution `%s'." % (suite))
346 ################################################################################
# check_files(): the per-file validation pass over everything listed in the
# .changes Files field — copies files to holding, classifies each as
# deb/udeb/source/byhand, validates control data against the .changes, and
# cross-checks against the queue directories and the database.
# NOTE(review): sampled dump — the `def` line, the main `for f in file_keys:`
# loop header, many `try:`/`else:`/`continue`/`return` lines, and several
# assignments are missing from this view (see skipped original numbers);
# only comments added.  Nesting of the visible lines cannot be fully
# reconstructed from this excerpt.
351 archive = utils.where_am_i()
352 file_keys = files.keys()
354 # if reprocess is 2 we've already done this and we're checking
355 # things again for the new .orig.tar.gz.
356 # [Yes, I'm fully aware of how disgusting this is]
357 if not Options["No-Action"] and reprocess < 2:
359 os.chdir(pkg.directory)
# (copy_to_holding() of each file presumably happens in the missing lines.)
364 # Check there isn't already a .changes or .dak file of the same name in
365 # the proposed-updates "CopyChanges" or "CopyDotDak" storage directories.
366 # [NB: this check must be done post-suite mapping]
367 base_filename = os.path.basename(pkg.changes_file)
368 dot_dak_filename = base_filename[:-8]+".dak"
369 for suite in changes["distribution"].keys():
370 copychanges = "Suite::%s::CopyChanges" % (suite)
371 if Cnf.has_key(copychanges) and \
372 os.path.exists(Cnf[copychanges]+"/"+base_filename):
373 reject("%s: a file with this name already exists in %s" \
374 % (base_filename, Cnf[copychanges]))
376 copy_dot_dak = "Suite::%s::CopyDotDak" % (suite)
377 if Cnf.has_key(copy_dot_dak) and \
378 os.path.exists(Cnf[copy_dot_dak]+"/"+dot_dak_filename):
379 reject("%s: a file with this name already exists in %s" \
380 % (dot_dak_filename, Cnf[copy_dot_dak]))
386 cursor = DBConn().cursor()
387 # Check for packages that have moved from one component to another
388 # STU: this should probably be changed to not join on architecture, suite tables but instead to used their cached name->id mappings from DBConn
# Server-side prepared statement, executed per-file further down.
389 cursor.execute("""PREPARE moved_pkg_q AS
390 SELECT c.name FROM binaries b, bin_associations ba, suite s, location l,
391 component c, architecture a, files f
392 WHERE b.package = $1 AND s.suite_name = $2
393 AND (a.arch_string = $3 OR a.arch_string = 'all')
394 AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id
395 AND f.location = l.id
396 AND l.component = c.id
397 AND b.file = f.id""")
# --- per-file loop body (loop header missing from this dump) ---
400 # Ensure the file does not already exist in one of the accepted directories
401 for d in [ "Accepted", "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
402 if not Cnf.has_key("Dir::Queue::%s" % (d)): continue
403 if os.path.exists(Cnf["Dir::Queue::%s" % (d) ] + '/' + f):
404 reject("%s file already exists in the %s directory." % (f, d))
405 if not re_taint_free.match(f):
406 reject("!!WARNING!! tainted filename: '%s'." % (f))
407 # Check the file is readable
408 if os.access(f, os.R_OK) == 0:
409 # When running in -n, copy_to_holding() won't have
410 # generated the reject_message, so we need to.
411 if Options["No-Action"]:
412 if os.path.exists(f):
413 reject("Can't read `%s'. [permission denied]" % (f))
# (the `else:` branch header for the not-exists case is missing here)
415 reject("Can't read `%s'. [file not found]" % (f))
416 files[f]["type"] = "unreadable"
418 # If it's byhand skip remaining checks
419 if files[f]["section"] == "byhand" or files[f]["section"][:4] == "raw-":
420 files[f]["byhand"] = 1
421 files[f]["type"] = "byhand"
422 # Checks for a binary package...
423 elif re_isadeb.match(f):
425 files[f]["type"] = "deb"
427 # Extract package control information
428 deb_file = utils.open_file(f)
# (the `try:` and `except:` around debExtractControl are missing here)
430 control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
432 reject("%s: debExtractControl() raised %s." % (f, sys.exc_type))
434 # Can't continue, none of the checks on control would work.
438 # Check for mandatory fields
439 for field in [ "Package", "Architecture", "Version" ]:
440 if control.Find(field) == None:
441 reject("%s: No %s field in control." % (f, field))
445 # Ensure the package name matches the one give in the .changes
446 if not changes["binary"].has_key(control.Find("Package", "")):
447 reject("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
449 # Validate the package field
450 package = control.Find("Package")
451 if not re_valid_pkg_name.match(package):
452 reject("%s: invalid package name '%s'." % (f, package))
454 # Validate the version field
455 version = control.Find("Version")
456 if not re_valid_version.match(version):
457 reject("%s: invalid version number '%s'." % (f, version))
459 # Ensure the architecture of the .deb is one we know about.
460 default_suite = Cnf.get("Dinstall::DefaultSuite", "Unstable")
461 architecture = control.Find("Architecture")
462 upload_suite = changes["distribution"].keys()[0]
463 if architecture not in database.get_suite_architectures(default_suite) and architecture not in database.get_suite_architectures(upload_suite):
464 reject("Unknown architecture '%s'." % (architecture))
466 # Ensure the architecture of the .deb is one of the ones
467 # listed in the .changes.
468 if not changes["architecture"].has_key(architecture):
469 reject("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
471 # Sanity-check the Depends field
472 depends = control.Find("Depends")
# (the `if depends == '':` guard is missing from this dump)
474 reject("%s: Depends field is empty." % (f))
476 # Sanity-check the Provides field
477 provides = control.Find("Provides")
# (the `if provides:` guard is missing from this dump)
479 provide = re_spacestrip.sub('', provides)
481 reject("%s: Provides field is empty." % (f))
482 prov_list = provide.split(",")
483 for prov in prov_list:
484 if not re_valid_pkg_name.match(prov):
485 reject("%s: Invalid Provides field content %s." % (f, prov))
488 # Check the section & priority match those given in the .changes (non-fatal)
489 if control.Find("Section") and files[f]["section"] != "" and files[f]["section"] != control.Find("Section"):
490 reject("%s control file lists section as `%s', but changes file has `%s'." % (f, control.Find("Section", ""), files[f]["section"]), "Warning: ")
491 if control.Find("Priority") and files[f]["priority"] != "" and files[f]["priority"] != control.Find("Priority"):
492 reject("%s control file lists priority as `%s', but changes file has `%s'." % (f, control.Find("Priority", ""), files[f]["priority"]),"Warning: ")
# Cache the validated control data on the file entry.
494 files[f]["package"] = package
495 files[f]["architecture"] = architecture
496 files[f]["version"] = version
497 files[f]["maintainer"] = control.Find("Maintainer", "")
498 if f.endswith(".udeb"):
499 files[f]["dbtype"] = "udeb"
500 elif f.endswith(".deb"):
501 files[f]["dbtype"] = "deb"
503 reject("%s is neither a .deb or a .udeb." % (f))
504 files[f]["source"] = control.Find("Source", files[f]["package"])
505 # Get the source version
506 source = files[f]["source"]
# (source_version default assignment is missing from this dump)
508 if source.find("(") != -1:
509 m = re_extract_src_version.match(source)
511 source_version = m.group(2)
512 if not source_version:
513 source_version = files[f]["version"]
514 files[f]["source package"] = source
515 files[f]["source version"] = source_version
517 # Ensure the filename matches the contents of the .deb
518 m = re_isadeb.match(f)
519 # package name
520 file_package = m.group(1)
521 if files[f]["package"] != file_package:
522 reject("%s: package part of filename (%s) does not match package name in the %s (%s)." % (f, file_package, files[f]["dbtype"], files[f]["package"]))
523 epochless_version = re_no_epoch.sub('', control.Find("Version"))
524 # version
525 file_version = m.group(2)
526 if epochless_version != file_version:
527 reject("%s: version part of filename (%s) does not match package version in the %s (%s)." % (f, file_version, files[f]["dbtype"], epochless_version))
528 # architecture
529 file_architecture = m.group(3)
530 if files[f]["architecture"] != file_architecture:
531 reject("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % (f, file_architecture, files[f]["dbtype"], files[f]["architecture"]))
533 # Check for existent source
534 source_version = files[f]["source version"]
535 source_package = files[f]["source package"]
536 if changes["architecture"].has_key("source"):
537 if source_version != changes["version"]:
538 reject("source version (%s) for %s doesn't match changes version %s." % (source_version, f, changes["version"]))
540 # Check in the SQL database
541 if not Upload.source_exists(source_package, source_version, changes["distribution"].keys()):
542 # Check in one of the other directories
543 source_epochless_version = re_no_epoch.sub('', source_version)
544 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
545 if os.path.exists(Cnf["Dir::Queue::Byhand"] + '/' + dsc_filename):
546 files[f]["byhand"] = 1
547 elif os.path.exists(Cnf["Dir::Queue::New"] + '/' + dsc_filename):
# (the "new" flag assignment and dsc_file_exists initialisation are
# missing from this dump)
551 for myq in ["Accepted", "Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
552 if Cnf.has_key("Dir::Queue::%s" % (myq)):
553 if os.path.exists(Cnf["Dir::Queue::"+myq] + '/' + dsc_filename):
# (dsc_file_exists = True and `break` are missing from this dump)
556 if not dsc_file_exists:
557 reject("no source found for %s %s (%s)." % (source_package, source_version, f))
558 # Check the version and for file overwrites
559 reject(Upload.check_binary_against_db(f),"")
561 Binary(f).scan_package()
563 # Checks for a source package...
565 m = re_issource.match(f)
# (the `if m:` guard is missing from this dump)
568 files[f]["package"] = m.group(1)
569 files[f]["version"] = m.group(2)
570 files[f]["type"] = m.group(3)
572 # Ensure the source package name matches the Source filed in the .changes
573 if changes["source"] != files[f]["package"]:
574 reject("%s: changes file doesn't say %s for Source" % (f, files[f]["package"]))
576 # Ensure the source version matches the version in the .changes file
577 if files[f]["type"] == "orig.tar.gz":
578 changes_version = changes["chopversion2"]
# (the `else:` branch header is missing from this dump)
580 changes_version = changes["chopversion"]
581 if changes_version != files[f]["version"]:
582 reject("%s: should be %s according to changes file." % (f, changes_version))
584 # Ensure the .changes lists source in the Architecture field
585 if not changes["architecture"].has_key("source"):
586 reject("%s: changes file doesn't list `source' in Architecture field." % (f))
588 # Check the signature of a .dsc file
589 if files[f]["type"] == "dsc":
590 dsc["fingerprint"] = utils.check_signature(f, reject)
592 files[f]["architecture"] = "source"
594 # Not a binary or source package? Assume byhand...
596 files[f]["byhand"] = 1
597 files[f]["type"] = "byhand"
599 # Per-suite file checks
600 files[f]["oldfiles"] = {}
601 for suite in changes["distribution"].keys():
# Skip byhand files — they are not poolified.
603 if files[f].has_key("byhand"):
606 # Handle component mappings
607 for m in Cnf.ValueList("ComponentMappings"):
608 (source, dest) = m.split()
609 if files[f]["component"] == source:
610 files[f]["original component"] = source
611 files[f]["component"] = dest
613 # Ensure the component is valid for the target suite
# NOTE(review): "Suite:%s::Components" here has a single colon where the
# lookup on the next line uses "Suite::%s::Components" — looks like a
# latent typo that makes this has_key() always false; confirm against
# the full source before relying on this check.
614 if Cnf.has_key("Suite:%s::Components" % (suite)) and \
615 files[f]["component"] not in Cnf.ValueList("Suite::%s::Components" % (suite)):
616 reject("unknown component `%s' for suite `%s'." % (files[f]["component"], suite))
619 # Validate the component
620 component = files[f]["component"]
621 component_id = DBConn().get_component_id(component)
622 if component_id == -1:
623 reject("file '%s' has unknown component '%s'." % (f, component))
626 # See if the package is NEW
627 if not Upload.in_override_p(files[f]["package"], files[f]["component"], suite, files[f].get("dbtype",""), f):
# (files[f]["new"] = 1 is missing from this dump)
630 # Validate the priority
631 if files[f]["priority"].find('/') != -1:
632 reject("file '%s' has invalid priority '%s' [contains '/']." % (f, files[f]["priority"]))
634 # Determine the location
635 location = Cnf["Dir::Pool"]
636 location_id = DBConn().get_location_id(location, component, archive)
637 if location_id == -1:
638 reject("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (component, archive))
639 files[f]["location id"] = location_id
641 # Check the md5sum & size against existing files (if any)
642 files[f]["pool name"] = utils.poolify (changes["source"], files[f]["component"])
643 files_id = DBConn().get_files_id(files[f]["pool name"] + f, files[f]["size"], files[f]["md5sum"], files[f]["location id"])
# get_files_id() conventions (from the visible checks): None means
# multiple matches, -2 means checksum/size mismatch — hedged, confirm
# against DBConn.get_files_id.
645 reject("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
647 reject("md5sum and/or size mismatch on existing copy of %s." % (f))
648 files[f]["files id"] = files_id
650 # Check for packages that have moved from one component to another
651 files[f]['suite'] = suite
652 cursor.execute("""EXECUTE moved_pkg_q( %(package)s, %(suite)s, %(architecture)s )""", ( files[f] ) )
653 ql = cursor.fetchone()
# (the `if ql:` guard is missing from this dump)
655 files[f]["othercomponents"] = ql[0][0]
657 # If the .changes file says it has source, it must have source.
658 if changes["architecture"].has_key("source"):
# (the `if not has_source:`-style guard is missing from this dump)
660 reject("no source found and Architecture line in changes mention source.")
662 if not has_binaries and Cnf.FindB("Dinstall::Reject::NoSourceOnly"):
663 reject("source only uploads are not supported.")
665 ###############################################################################
# check_dsc(): locate, parse and validate the .dsc of a source upload, then
# pull any database-resident .orig.tar.gz into holding so the source tree
# can be extracted.
# NOTE(review): sampled dump — the `def` line, the `try:` lines matching the
# visible `except` clauses, several guards/`return`s, and the dsc_filename
# assignment are missing from this view; only comments added.
670 # Ensure there is source to check
671 if not changes["architecture"].has_key("source"):
# (early return for binary-only uploads presumably in the missing lines)
676 for f in files.keys():
677 if files[f]["type"] == "dsc":
# (duplicate-.dsc detection and dsc_filename = f in the missing lines)
679 reject("can not process a .changes file with multiple .dsc's.")
684 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
686 reject("source uploads must contain a dsc file")
689 # Parse the .dsc file
691 dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
692 except CantOpenError:
693 # if not -n copy_to_holding() will have done this for us...
694 if Options["No-Action"]:
695 reject("%s: can't read file." % (dsc_filename))
696 except ParseChangesError, line:
697 reject("%s: parse error, can't grok: %s." % (dsc_filename, line))
698 except InvalidDscError, line:
699 reject("%s: syntax error on line %s." % (dsc_filename, line))
700 # Build up the file list of files mentioned by the .dsc
702 dsc_files.update(utils.build_file_list(dsc, is_a_dsc=1))
703 except NoFilesFieldError:
704 reject("%s: no Files: field." % (dsc_filename))
706 except UnknownFormatError, format:
707 reject("%s: unknown format '%s'." % (dsc_filename, format))
709 except ParseChangesError, line:
710 reject("%s: parse error, can't grok: %s." % (dsc_filename, line))
713 # Enforce mandatory fields
714 for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
715 if not dsc.has_key(i):
716 reject("%s: missing mandatory field `%s'." % (dsc_filename, i))
719 # Validate the source and version fields
720 if not re_valid_pkg_name.match(dsc["source"]):
721 reject("%s: invalid source name '%s'." % (dsc_filename, dsc["source"]))
722 if not re_valid_version.match(dsc["version"]):
723 reject("%s: invalid version number '%s'." % (dsc_filename, dsc["version"]))
725 # Bumping the version number of the .dsc breaks extraction by stable's
726 # dpkg-source. So let's not do that...
727 if dsc["format"] != "1.0":
728 reject("%s: incompatible 'Format' version produced by a broken version of dpkg-dev 1.9.1{3,4}." % (dsc_filename))
730 # Validate the Maintainer field
732 utils.fix_maintainer (dsc["maintainer"])
733 except ParseMaintError, msg:
734 reject("%s: Maintainer field ('%s') failed to parse: %s" \
735 % (dsc_filename, dsc["maintainer"], msg))
737 # Validate the build-depends field(s)
738 for field_name in [ "build-depends", "build-depends-indep" ]:
739 field = dsc.get(field_name)
# (the `if field:` guard is missing from this dump)
741 # Check for broken dpkg-dev lossage...
742 if field.startswith("ARRAY"):
743 reject("%s: invalid %s field produced by a broken version of dpkg-dev (1.10.11)" % (dsc_filename, field_name.title()))
745 # Have apt try to parse them...
747 apt_pkg.ParseSrcDepends(field)
749 reject("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
752 # Ensure the version number in the .dsc matches the version number in the .changes
753 epochless_dsc_version = re_no_epoch.sub('', dsc["version"])
754 changes_version = files[dsc_filename]["version"]
755 if epochless_dsc_version != files[dsc_filename]["version"]:
756 reject("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
758 # Ensure there is a .tar.gz in the .dsc file
760 for f in dsc_files.keys():
761 m = re_issource.match(f)
# (the not-matched guard and `ftype = m.group(3)` are missing here)
763 reject("%s: %s in Files field not recognised as source." % (dsc_filename, f))
766 if ftype == "orig.tar.gz" or ftype == "tar.gz":
# (has_tar flag handling is missing from this dump)
769 reject("%s: no .tar.gz or .orig.tar.gz in 'Files' field." % (dsc_filename))
771 # Ensure source is newer than existing source in target suites
772 reject(Upload.check_source_against_db(dsc_filename),"")
# check_dsc_against_db() may report an .orig.tar.gz already in incoming;
# in that case it is copied into holding and entered into `files` below.
774 (reject_msg, is_in_incoming) = Upload.check_dsc_against_db(dsc_filename)
775 reject(reject_msg, "")
# (the `if is_in_incoming:` guard is missing from this dump)
777 if not Options["No-Action"]:
778 copy_to_holding(is_in_incoming)
779 orig_tar_gz = os.path.basename(is_in_incoming)
780 files[orig_tar_gz] = {}
781 files[orig_tar_gz]["size"] = os.stat(orig_tar_gz)[stat.ST_SIZE]
782 files[orig_tar_gz]["md5sum"] = dsc_files[orig_tar_gz]["md5sum"]
783 files[orig_tar_gz]["sha1sum"] = dsc_files[orig_tar_gz]["sha1sum"]
784 files[orig_tar_gz]["sha256sum"] = dsc_files[orig_tar_gz]["sha256sum"]
785 files[orig_tar_gz]["section"] = files[dsc_filename]["section"]
786 files[orig_tar_gz]["priority"] = files[dsc_filename]["priority"]
787 files[orig_tar_gz]["component"] = files[dsc_filename]["component"]
788 files[orig_tar_gz]["type"] = "orig.tar.gz"
793 ################################################################################
# NOTE(review): sampled dump — several lines inside this function (guards,
# `continue`/`return` statements, the dsc_filename assignment) are missing
# from this view; only comments/docstring touched.
795 def get_changelog_versions(source_dir):
796 """Extract the source package and (optionally) grab the
797 version history out of debian/changelog for the BTS."""
799 # Find the .dsc (again)
801 for f in files.keys():
802 if files[f]["type"] == "dsc":
# (dsc_filename = f is missing from this dump)
805 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
809 # Create a symlink mirror of the source files in our temporary directory
810 for f in files.keys():
811 m = re_issource.match(f)
# (the `if m:` guard and `ftype = m.group(3)` are missing from this dump)
813 src = os.path.join(source_dir, f)
814 # If a file is missing for whatever reason, give up.
815 if not os.path.exists(src):
# Prefer the database/queue copy of the orig.tar.gz when the upload
# itself does not ship one.
818 if ftype == "orig.tar.gz" and pkg.orig_tar_gz:
820 dest = os.path.join(os.getcwd(), f)
821 os.symlink(src, dest)
823 # If the orig.tar.gz is not a part of the upload, create a symlink to the
# (rest of this comment and its guard are missing from this dump)
826 dest = os.path.join(os.getcwd(), os.path.basename(pkg.orig_tar_gz))
827 os.symlink(pkg.orig_tar_gz, dest)
# Extract the source; -sn = do not copy the .orig around, -x = extract.
830 cmd = "dpkg-source -sn -x %s" % (dsc_filename)
831 (result, output) = commands.getstatusoutput(cmd)
# (the `if (result != 0):` guard is missing from this dump)
833 reject("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
834 reject(utils.prefix_multi_line_string(output, " [dpkg-source output:] "), "")
# BTS version tracking is optional; bail out early when not configured.
837 if not Cnf.Find("Dir::Queue::BTSVersionTrack"):
840 # Get the upstream version
841 upstr_version = re_no_epoch.sub('', dsc["version"])
842 if re_strip_revision.search(upstr_version):
843 upstr_version = re_strip_revision.sub('', upstr_version)
845 # Ensure the changelog file exists
846 changelog_filename = "%s-%s/debian/changelog" % (dsc["source"], upstr_version)
847 if not os.path.exists(changelog_filename):
848 reject("%s: debian/changelog not found in extracted source." % (dsc_filename))
851 # Parse the changelog
852 dsc["bts changelog"] = ""
853 changelog_file = utils.open_file(changelog_filename)
854 for line in changelog_file.readlines():
# Keep only lines that look like changelog version headers.
855 m = re_changelog_versions.match(line)
857 dsc["bts changelog"] += line
858 changelog_file.close()
860 # Check we found at least one revision in the changelog
861 if not dsc["bts changelog"]:
862 reject("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
864 ########################################
# Fragment of check_source(): decide whether the source tree needs to be
# extracted (skipping when there is no source, when reprocess == 2, or when
# the orig.tar.gz is missing), do the extraction in a temp dir, then clean up.
# NOTE(review): sampled dump — the `def` line, the chdir calls around the
# extraction, and the `try:` lines matching the visible error handling are
# missing from this view; only comments added.
868 # a) there's no source
869 # or b) reprocess is 2 - we will do this check next time when orig.tar.gz is in 'files'
870 # or c) the orig.tar.gz is MIA
871 if not changes["architecture"].has_key("source") or reprocess == 2 \
872 or pkg.orig_tar_gz == -1:
# (early return in the missing lines)
875 tmpdir = create_tmpdir()
877 # Move into the temporary directory
881 # Get the changelog version history
882 get_changelog_versions(cwd)
884 # Move back and cleanup the temporary tree
887 shutil.rmtree(tmpdir)
# First rmtree failed: anything other than EACCES is fatal; EACCES is
# retried below after a chmod.
889 if errno.errorcode[e.errno] != 'EACCES':
890 utils.fubar("%s: couldn't remove tmp dir for source tree." % (dsc["source"]))
892 reject("%s: source tree could not be cleanly removed." % (dsc["source"]))
893 # We probably have u-r or u-w directories so chmod everything
# and try again.
895 cmd = "chmod -R u+rwx %s" % (tmpdir)
896 result = os.system(cmd)
# (the `if result != 0:` guard is missing from this dump)
898 utils.fubar("'%s' failed with result %s." % (cmd, result))
899 shutil.rmtree(tmpdir)
# (the `except Exception:` around the retry is missing from this dump)
901 utils.fubar("%s: couldn't remove tmp dir for source tree." % (dsc["source"]))
903 ################################################################################
905 # FIXME: should be a debian specific check called from a hook
def check_urgency ():
    """Normalise and validate the Urgency field of a source upload.

    A missing urgency is filled in from Urgency::Default; the value is
    lower-cased; anything not listed in Urgency::Valid draws a warning and
    is replaced with the default.  Binary-only uploads are left untouched.
    """
    # Only source uploads carry a meaningful urgency.
    if not changes["architecture"].has_key("source"):
        return
    default_urgency = Cnf["Urgency::Default"]
    if not changes.has_key("urgency"):
        changes["urgency"] = default_urgency
    changes["urgency"] = changes["urgency"].lower()
    if changes["urgency"] not in Cnf.ValueList("Urgency::Valid"):
        reject("%s is not a valid urgency; it will be treated as %s by testing." % (changes["urgency"], default_urgency), "Warning: ")
        changes["urgency"] = default_urgency
916 ################################################################################
# Fragment of check_hashes(): verify md5sums and sizes of everything listed
# in the .changes and the .dsc, then validate any extra checksum fields.
# NOTE(review): sampled dump — the `def` line and the loop body (presumably
# `reject(m)` for each message yielded by ensure_hashes) are missing from
# this view; only comments added.
919 utils.check_hash(".changes", files, "md5", apt_pkg.md5sum)
920 utils.check_size(".changes", files)
921 utils.check_hash(".dsc", dsc_files, "md5", apt_pkg.md5sum)
922 utils.check_size(".dsc", dsc_files)
924 # This is stupid API, but it'll have to do for now until
925 # we actually have proper abstraction
926 for m in utils.ensure_hashes(changes, dsc, files, dsc_files):
929 ################################################################################
931 # Sanity check the time stamps of files inside debs.
932 # [Files in the near future cause ugly warnings and extreme time
933 # travel can cause errors on extraction]
def check_timestamps():
    """Sanity-check the timestamps of files inside uploaded debs.

    NOTE(review): the "class Tar:" header for the two methods below, the
    try/except frames around the extraction, and the "if tar.future_files:"
    / "if tar.ancient_files:" guards are missing from this extraction —
    verify against VCS before relying on the control flow.
    """
    # Collects members whose mtimes fall outside the allowed window.
    def __init__(self, future_cutoff, past_cutoff):
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff
        self.future_files = {}
        self.ancient_files = {}

    # apt_inst.debExtract callback: record out-of-range mtimes by member name.
    def callback(self, Kind,Name,Link,Mode,UID,GID,Size,MTime,Major,Minor):
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime

    # Allow a configured grace period into the future; reject anything
    # older than the configured cutoff year.
    future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
    past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
    tar = Tar(future_cutoff, past_cutoff)
    for filename in files.keys():
        if files[filename]["type"] == "deb":
            deb_file = utils.open_file(filename)
            apt_inst.debExtract(deb_file,tar.callback,"control.tar.gz")
            # NOTE(review): a deb_file.seek(0) between extractions and the
            # enclosing "try:" are missing from this extraction.
            apt_inst.debExtract(deb_file,tar.callback,"data.tar.gz")
        except SystemError, e:
            # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
            if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
                # (re-raise / reject path missing)
            apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")

        # Report files dated too far into the future (example + count).
        future_files = tar.future_files.keys()
        num_future_files = len(future_files)
        future_file = future_files[0]
        future_date = tar.future_files[future_file]
        reject("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
               % (filename, num_future_files, future_file,
                  time.ctime(future_date)))

        # Report files dated implausibly far in the past.
        ancient_files = tar.ancient_files.keys()
        num_ancient_files = len(ancient_files)
        ancient_file = ancient_files[0]
        ancient_date = tar.ancient_files[ancient_file]
        reject("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
               % (filename, num_ancient_files, ancient_file,
                  time.ctime(ancient_date)))
        # Catch-all for any other extraction failure.
        reject("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
993 ################################################################################
def lookup_uid_from_fingerprint(fpr):
    # (review: the triple-quote delimiters of the original docstring were
    #  lost in this extraction; its text follows)
    # Return the uid,name,isdm for a given gpg fingerprint
    #
    # @param fpr: a 40 byte GPG fingerprint
    # @return (uid, name, isdm)
    cursor = DBConn().cursor()
    # SECURITY NOTE(review): fpr is interpolated directly into the SQL via
    # %-formatting — should be a parameterised query
    # ("... f.fingerprint = %s", (fpr,)) per DB-API 2.0.
    cursor.execute( "SELECT u.uid, u.name, k.debian_maintainer FROM fingerprint f JOIN keyrings k ON (f.keyring=k.id), uid u WHERE f.uid = u.id AND f.fingerprint = '%s'" % (fpr))
    qs = cursor.fetchone()
    # NOTE(review): an "if qs: return qs" branch appears to be missing here.
    return (None, None, None)
def check_signed_by_key():
    """Ensure the .changes is signed by an authorized uploader."""
    # NOTE(review): this extraction has dropped a number of lines from the
    # function (variable initialisations, early returns, branch and loop
    # bodies); the inline notes below mark the gaps — verify against VCS.
    (uid, uid_name, is_dm) = lookup_uid_from_fingerprint(changes["fingerprint"])
    if uid_name == None:
        # (branch body missing — presumably uid_name = "")

    # match claimed name with actual name:
    # No uid in the database: fall back to the bare fingerprint.
    uid, uid_email = changes["fingerprint"], uid
    may_nmu, may_sponsor = 1, 1
    # XXX by default new dds don't have a fingerprint/uid in the db atm,
    # and can't get one in there if we don't allow nmu/sponsorship
    # DM case: not allowed to NMU or sponsor.
    may_nmu, may_sponsor = 0, 0
    # Bare login name: assume a DD @debian.org address.
    uid_email = "%s@debian.org" % (uid)
    may_nmu, may_sponsor = 1, 1

    # Sponsored means neither Maintainer nor Changed-By matches the
    # signing key's identity.
    if uid_email in [changes["maintaineremail"], changes["changedbyemail"]]:
        # (branch body missing — presumably sponsored = 0)
    elif uid_name in [changes["maintainername"], changes["changedbyname"]]:
        if uid_name == "": sponsored = 1

    # For sponsored sourceful uploads via a role alias, record the
    # sponsor's address unless it already appears in the .changes.
    if ("source" in changes["architecture"] and
        uid_email and utils.is_email_alias(uid_email)):
        sponsor_addresses = utils.gpg_get_key_addresses(changes["fingerprint"])
        if (changes["maintaineremail"] not in sponsor_addresses and
            changes["changedbyemail"] not in sponsor_addresses):
            changes["sponsoremail"] = uid_email

    if sponsored and not may_sponsor:
        reject("%s is not authorised to sponsor uploads" % (uid))

    if not sponsored and not may_nmu:
        # DM upload: the source must carry DM-Upload-Allowed: yes.
        cursor.execute( "SELECT s.id, s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) WHERE s.source = %(source)s AND s.dm_upload_allowed = 'yes'", changes )

        # Find the highest known version of the source package.
        highest_sid, highest_version = None, None
        should_reject = True
        si = cursor.fetchone()
        if highest_version == None or apt_pkg.VersionCompare(si[1], highest_version) == 1:
            highest_version = si[1]

        if highest_sid == None:
            reject("Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version" % changes["source"])

        # The signer must be listed in Maintainer or Uploaders of that version.
        cursor.execute("SELECT m.name FROM maintainer m WHERE m.id IN (SELECT su.maintainer FROM src_uploaders su JOIN source s ON (s.id = su.source) WHERE su.source = %s)" % (highest_sid))
        m = cursor.fetchone()
        (rfc822, rfc2047, name, email) = utils.fix_maintainer(m[0])
        if email == uid_email or name == uid_name:
            # (branch body missing — presumably should_reject = False)

        if should_reject == True:
            reject("%s is not in Maintainer or Uploaders of source package %s" % (uid, changes["source"]))

        # A DM may not hijack a binary owned by a different source package.
        for b in changes["binary"].keys():
            for suite in changes["distribution"].keys():
                suite_id = DBConn().get_suite_id(suite)
                cursor.execute("SELECT DISTINCT s.source FROM source s JOIN binaries b ON (s.id = b.source) JOIN bin_associations ba On (b.id = ba.bin) WHERE b.package = %(package)s AND ba.suite = %(suite)s" , {'package':b, 'suite':suite_id} )
                s = cursor.fetchone()
                if s[0] != changes["source"]:
                    reject("%s may not hijack %s from source package %s in suite %s" % (uid, b, s, suite))

        # DMs may never upload BYHAND or NEW files.
        for f in files.keys():
            if files[f].has_key("byhand"):
                reject("%s may not upload BYHAND file %s" % (uid, f))
            if files[f].has_key("new"):
                reject("%s may not upload NEW file %s" % (uid, f))
1104 ################################################################################
1105 ################################################################################
# If any file of an upload has a recent mtime then chances are good
# the file is still being uploaded.

def upload_too_new():
    """Return whether any of the upload's files was modified too recently.

    NOTE(review): the try/finally frame around the chdir, the loop header
    over file_list and the return statements are missing from this
    extraction.
    """
    # Move back to the original directory to get accurate time stamps
    os.chdir(pkg.directory)
    file_list = pkg.files.keys()
    file_list.extend(pkg.dsc_files.keys())
    file_list.append(pkg.changes_file)
    # ("for f in file_list:" header missing)
    last_modified = time.time()-os.path.getmtime(f)
    if last_modified < int(Cnf["Dinstall::SkipTime"]):
        # (return-1-and-restore-cwd path missing; final chdir back and
        #  "return 0" also missing)
1129 ################################################################################
# (review) Fragment: body of the queue-dispatch routine ("action") — its
# "def" line, the "queue_info = {" opener/closer, and several branch
# bodies are missing from this extraction; the notes below mark the gaps.
# changes["distribution"] may not exist in corner cases
# (e.g. unreadable changes files)
if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
    changes["distribution"] = {}

(summary, short_summary) = Upload.build_summaries()

# q-unapproved hax0ring
# Each entry maps a queue name to a membership predicate ("is") and a
# handler ("process").  NOTE(review): the "queue_info = {" opener and its
# closing brace are missing from this extraction.
    "New": { "is": is_new, "process": acknowledge_new },
    "Autobyhand" : { "is" : is_autobyhand, "process": do_autobyhand },
    "Byhand" : { "is": is_byhand, "process": do_byhand },
    "OldStableUpdate" : { "is": is_oldstableupdate,
                          "process": do_oldstableupdate },
    "StableUpdate" : { "is": is_stableupdate, "process": do_stableupdate },
    "Unembargo" : { "is": is_unembargo, "process": queue_unembargo },
    "Embargo" : { "is": is_embargo, "process": queue_embargo },

# Queues are tried in this order; the security queues only participate
# when Dinstall::SecurityQueueHandling is enabled.
queues = [ "New", "Autobyhand", "Byhand" ]
if Cnf.FindB("Dinstall::SecurityQueueHandling"):
    queues += [ "Unembargo", "Embargo" ]
queues += [ "OldStableUpdate", "StableUpdate" ]

(prompt, answer) = ("", "XXX")
if Options["No-Action"] or Options["Automatic"]:
    # (branch body missing — presumably answer = 'S')

# A rejection wins over everything else.
if reject_message.find("Rejected") != -1:
    if upload_too_new():
        print "SKIP (too new)\n" + reject_message,
        prompt = "[S]kip, Quit ?"
        # ("else:" missing here)
        print "REJECT\n" + reject_message,
        prompt = "[R]eject, Skip, Quit ?"
    if Options["Automatic"]:
        # (branch body missing — presumably answer = 'R')

# Otherwise try each special queue in order.
# NOTE(review): the "for q in queues:" header and "qu = q" assignment are
# missing above this test.
if queue_info[q]["is"]():
    print "%s for %s\n%s%s" % (
        qu.upper(), ", ".join(changes["distribution"].keys()),
        reject_message, summary),
    queuekey = qu[0].upper()
    if queuekey in "RQSA":
        # Avoid prompt keys that collide with Reject/Quit/Skip/Accept.
        prompt = "[D]ivert, Skip, Quit ?"
        # ("else:" missing here)
        prompt = "[%s]%s, Skip, Quit ?" % (queuekey, qu[1:].lower())
    if Options["Automatic"]:
        # (branch body missing)

# Default path: plain accept.
print "ACCEPT\n" + reject_message + summary,
prompt = "[A]ccept, Skip, Quit ?"
if Options["Automatic"]:
    # (branch body missing — presumably answer = 'A')

# Interactive prompt loop; re_default_answer extracts the bracketed default.
while prompt.find(answer) == -1:
    answer = utils.our_raw_input(prompt)
    m = re_default_answer.match(prompt)
    # (default-answer substitution missing)
answer = answer[:1].upper()

# Dispatch on the chosen answer.
# NOTE(review): the "if answer == 'R':" / "elif answer == 'A':" lines are
# missing around the calls below.
os.chdir (pkg.directory)
Upload.do_reject(0, reject_message)
accept(summary, short_summary)
remove_from_unchecked()
elif answer == queuekey:
    queue_info[qu]["process"](summary, short_summary)
    remove_from_unchecked()
def remove_from_unchecked():
    """Delete the upload's files from the original (unchecked) directory."""
    os.chdir (pkg.directory)
    for f in files.keys():
        # NOTE(review): loop body missing in this extraction — presumably
        # os.unlink(f).
    os.unlink(pkg.changes_file)
1220 ################################################################################
def accept (summary, short_summary):
    """Accept the upload via the shared Upload object, then look for
    override disparities."""
    Upload.accept(summary, short_summary)
    Upload.check_override()
1226 ################################################################################
def move_to_dir (dest, perms=0660, changesperms=0664):
    """Move the .changes file and every file of the upload into dest,
    applying changesperms to the .changes and perms to the rest."""
    utils.move (pkg.changes_file, dest, perms=changesperms)
    file_keys = files.keys()
    # NOTE(review): the "for f in file_keys:" header is missing from this
    # extraction.
    utils.move (f, dest, perms=perms)
1234 ################################################################################
def is_unembargo ():
    """Return whether this upload belongs in the unembargoed queue.

    NOTE(review): the early/final return statements, the restore of the
    working directory and the INSERT's parameter dict are missing from
    this extraction.
    """
    cursor = DBConn().cursor()
    cursor.execute( "SELECT package FROM disembargo WHERE package = %(source)s AND version = %(version)s", changes )
    if cursor.fetchone():
        # (presumably "return 1" — already recorded as disembargoed)

    oldcwd = os.getcwd()
    os.chdir(Cnf["Dir::Queue::Disembargo"])
    disdir = os.getcwd()
    # (presumably os.chdir(oldcwd) to restore)

    # Sourceful uploads sitting in the disembargo queue get recorded.
    if pkg.directory == disdir:
        if changes["architecture"].has_key("source"):
            if Options["No-Action"]: return 1

            # NOTE(review): the placeholders are wrapped in single quotes —
            # with DB-API paramstyle substitution that double-quotes the
            # values; should be unquoted %(package)s / %(version)s.
            cursor.execute( "INSERT INTO disembargo (package, version) VALUES ('%(package)s', '%(version)s')",
            cursor.execute( "COMMIT" )
1258 def queue_unembargo (summary, short_summary):
1259 print "Moving to UNEMBARGOED holding area."
1260 Logger.log(["Moving to unembargoed", pkg.changes_file])
1262 Upload.dump_vars(Cnf["Dir::Queue::Unembargoed"])
1263 move_to_dir(Cnf["Dir::Queue::Unembargoed"])
1264 Upload.queue_build("unembargoed", Cnf["Dir::Queue::Unembargoed"])
1266 # Check for override disparities
1267 Upload.Subst["__SUMMARY__"] = summary
1268 Upload.check_override()
1270 # Send accept mail, announce to lists, close bugs and check for
1271 # override disparities
1272 if not Cnf["Dinstall::Options::No-Mail"]:
1273 Upload.Subst["__SUITE__"] = ""
1274 mail_message = utils.TemplateSubst(Upload.Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
1275 utils.send_mail(mail_message)
1276 Upload.announce(short_summary, 1)
1278 ################################################################################
1281 # if embargoed queues are enabled always embargo
1284 def queue_embargo (summary, short_summary):
1285 print "Moving to EMBARGOED holding area."
1286 Logger.log(["Moving to embargoed", pkg.changes_file])
1288 Upload.dump_vars(Cnf["Dir::Queue::Embargoed"])
1289 move_to_dir(Cnf["Dir::Queue::Embargoed"])
1290 Upload.queue_build("embargoed", Cnf["Dir::Queue::Embargoed"])
1292 # Check for override disparities
1293 Upload.Subst["__SUMMARY__"] = summary
1294 Upload.check_override()
1296 # Send accept mail, announce to lists, close bugs and check for
1297 # override disparities
1298 if not Cnf["Dinstall::Options::No-Mail"]:
1299 Upload.Subst["__SUITE__"] = ""
1300 mail_message = utils.TemplateSubst(Upload.Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
1301 utils.send_mail(mail_message)
1302 Upload.announce(short_summary, 1)
1304 ################################################################################
def is_stableupdate ():
    """Return whether this upload must be held for proposed-updates.

    NOTE(review): the return statements and the tail of the query's
    parameter dict are missing from this extraction.
    """
    if not changes["distribution"].has_key("proposed-updates"):
        # (presumably "return 0")

    if not changes["architecture"].has_key("source"):
        # Binary-only upload: hold unless the source is already present
        # in proposed-updates.
        pusuite = DBConn().get_suite_id("proposed-updates")
        cursor = DBConn().cursor()
        # NOTE(review): '%(version)s' is wrapped in quotes inside the SQL —
        # with DB-API paramstyle substitution this double-quotes the value;
        # compare the unquoted %(source)s above it.
        cursor.execute( """SELECT 1 FROM source s
                           JOIN src_associations sa ON (s.id = sa.source)
                           WHERE s.source = %(source)s
                           AND s.version = '%(version)s'
                           AND sa.suite = %(suite)d""",
                        {'source' : changes['source'],
                         'version' : changes['version'],
                         # ("'suite' : pusuite" entry and closing "})" missing)
        if cursor.fetchone():
            # source is already in proposed-updates so no need to hold
            # (presumably "return 0" here, and "return 1" at the end)
1328 def do_stableupdate (summary, short_summary):
1329 print "Moving to PROPOSED-UPDATES holding area."
1330 Logger.log(["Moving to proposed-updates", pkg.changes_file])
1332 Upload.dump_vars(Cnf["Dir::Queue::ProposedUpdates"])
1333 move_to_dir(Cnf["Dir::Queue::ProposedUpdates"], perms=0664)
1335 # Check for override disparities
1336 Upload.Subst["__SUMMARY__"] = summary
1337 Upload.check_override()
1339 ################################################################################
def is_oldstableupdate ():
    """Return whether this upload must be held for oldstable-proposed-updates.

    NOTE(review): the return statements and the tail of the query's
    parameter dict are missing from this extraction.
    """
    if not changes["distribution"].has_key("oldstable-proposed-updates"):
        # (presumably "return 0")

    if not changes["architecture"].has_key("source"):
        pusuite = DBConn().get_suite_id("oldstable-proposed-updates")
        cursor = DBConn().cursor()
        # BUG(review): '""""SELECT' opens a triple-quoted string whose first
        # character is a stray '"', so the SQL sent to the server begins
        # with a literal quote and will fail at runtime; should be
        # '"""SELECT' (compare is_stableupdate above).  Also
        # 'sa.suite = %d' does not match the named-parameter dict passed
        # below — should be %(suite)d.
        cursor.execute( """"SELECT 1 FROM source s
                           JOIN src_associations sa ON (s.id = sa.source)
                           WHERE s.source = %(source)s
                           AND s.version = %(version)s
                           AND sa.suite = %d""",
                        {'source' : changes['source'],
                         'version' : changes['version'],
                         # ("'suite' : pusuite" entry and closing "})" missing)
        if cursor.fetchone():
            # (presumably "return 0" here, and "return 1" at the end)
1361 def do_oldstableupdate (summary, short_summary):
1362 print "Moving to OLDSTABLE-PROPOSED-UPDATES holding area."
1363 Logger.log(["Moving to oldstable-proposed-updates", pkg.changes_file])
1365 Upload.dump_vars(Cnf["Dir::Queue::OldProposedUpdates"])
1366 move_to_dir(Cnf["Dir::Queue::OldProposedUpdates"], perms=0664)
1368 # Check for override disparities
1369 Upload.Subst["__SUMMARY__"] = summary
1370 Upload.check_override()
1372 ################################################################################
def is_autobyhand ():
    """Check whether every BYHAND file can be processed automatically.

    NOTE(review): the any_auto/all_auto initialisation and several
    break/continue branch bodies are missing from this extraction.
    """
    for f in files.keys():
        if files[f].has_key("byhand"):
            # (presumably any_auto = 1 here)

            # filename is of form "PKG_VER_ARCH.EXT" where PKG, VER and ARCH
            # don't contain underscores, and ARCH doesn't contain dots.
            # further VER matches the .changes Version:, and ARCH should be in
            # the .changes Architecture: list.
            if f.count("_") < 2:
                # (bail-out branch missing)

            (pckg, ver, archext) = f.split("_", 2)
            if archext.count(".") < 1 or changes["version"] != ver:
                # (bail-out branch missing)

            # The package must be configured for automatic byhand handling
            # and belong to the matching source package.
            ABH = Cnf.SubTree("AutomaticByHandPackages")
            if not ABH.has_key(pckg) or \
               ABH["%s::Source" % (pckg)] != changes["source"]:
                print "not match %s %s" % (pckg, changes["source"])

            (arch, ext) = archext.split(".", 1)
            if arch not in changes["architecture"]:
                # (bail-out branch missing)

            # Record how this file should be auto-processed.
            files[f]["byhand-arch"] = arch
            files[f]["byhand-script"] = ABH["%s::Script" % (pckg)]

    # Auto-byhand applies only when there was at least one BYHAND file
    # and all of them were matched.
    return any_auto and all_auto
def do_autobyhand (summary, short_summary):
    """Run the configured byhand scripts; fall back to manual BYHAND or
    plain accept depending on what is left.

    NOTE(review): the leftover-file bookkeeping, "continue" branches, the
    "byhandfile = f" assignment and the result check guarding os.unlink
    are missing from this extraction.
    """
    print "Attempting AUTOBYHAND."
    for f in files.keys():
        if not files[f].has_key("byhand"):
            # (continue missing)
        if not files[f].has_key("byhand-script"):
            # (continue missing)

        os.system("ls -l %s" % byhandfile)
        # Invoke the configured script with file, version, arch and the
        # absolute path of the .changes.
        result = os.system("%s %s %s %s %s" % (
                files[f]["byhand-script"], byhandfile,
                changes["version"], files[f]["byhand-arch"],
                os.path.abspath(pkg.changes_file)))
        # Script succeeded: the file has been handled, remove it.
        os.unlink(byhandfile)
        # Script failed: leave the file for manual byhand processing.
        print "Error processing %s, left as byhand." % (f)

    # If anything was left over, divert to BYHAND; otherwise accept.
    do_byhand(summary, short_summary)
    accept(summary, short_summary)
1439 ################################################################################
# (review) Fragment: body of is_byhand() — its "def" line, the "return 1"
# for a hit and the final "return 0" are missing from this extraction.
for f in files.keys():
    if files[f].has_key("byhand"):
        # (presumably "return 1")
1447 def do_byhand (summary, short_summary):
1448 print "Moving to BYHAND holding area."
1449 Logger.log(["Moving to byhand", pkg.changes_file])
1451 Upload.dump_vars(Cnf["Dir::Queue::Byhand"])
1452 move_to_dir(Cnf["Dir::Queue::Byhand"])
1454 # Check for override disparities
1455 Upload.Subst["__SUMMARY__"] = summary
1456 Upload.check_override()
1458 ################################################################################
# (review) Fragment: body of is_new() — its "def" line, the "return 1"
# for a hit and the final "return 0" are missing from this extraction.
for f in files.keys():
    if files[f].has_key("new"):
        # (presumably "return 1")
1466 def acknowledge_new (summary, short_summary):
1467 Subst = Upload.Subst
1469 print "Moving to NEW holding area."
1470 Logger.log(["Moving to new", pkg.changes_file])
1472 Upload.dump_vars(Cnf["Dir::Queue::New"])
1473 move_to_dir(Cnf["Dir::Queue::New"])
1475 if not Options["No-Mail"]:
1476 print "Sending new ack."
1477 Subst["__SUMMARY__"] = summary
1478 new_ack_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.new")
1479 utils.send_mail(new_ack_message)
1481 ################################################################################
1483 # reprocess is necessary for the case of foo_1.2-1 and foo_1.2-2 in
1484 # Incoming. -1 will reference the .orig.tar.gz, but -2 will not.
1485 # Upload.check_dsc_against_db() can find the .orig.tar.gz but it will
1486 # not have processed it during it's checks of -2. If -1 has been
1487 # deleted or otherwise not checked by 'dak process-unchecked', the
1488 # .orig.tar.gz will not have been checked at all. To get round this,
1489 # we force the .orig.tar.gz into the .changes structure and reprocess
1490 # the .changes file.
def process_it (changes_file):
    """Check a single .changes file end-to-end.

    NOTE(review): the try/except frame, the globals reset, the reprocess
    loop and several check calls are missing from this extraction —
    verify against VCS.
    """
    global reprocess, reject_message

    # Reset some globals
    # (reprocess = 1 and the per-upload dict resets are missing here)

    # Some defaults in case we can't fully process the .changes file
    changes["maintainer2047"] = Cnf["Dinstall::MyEmailAddress"]
    changes["changedby2047"] = Cnf["Dinstall::MyEmailAddress"]

    # Absolutize the filename to avoid the requirement of being in the
    # same directory as the .changes file.
    pkg.changes_file = os.path.abspath(changes_file)

    # Remember where we are so we can come back after cd-ing into the
    # holding directory.
    pkg.directory = os.getcwd()

    # If this is the Real Thing(tm), copy things into a private
    # holding directory first to avoid replacable file races.
    if not Options["No-Action"]:
        os.chdir(Cnf["Dir::Queue::Holding"])
        copy_to_holding(pkg.changes_file)
        # Relativize the filename so we use the copy in holding
        # rather than the original...
        pkg.changes_file = os.path.basename(pkg.changes_file)

    changes["fingerprint"] = utils.check_signature(pkg.changes_file, reject)
    if changes["fingerprint"]:
        valid_changes_p = check_changes()
        # (the "while reprocess:" loop and validity guards are missing
        #  around the checks below)
        check_distributions()
        valid_dsc_p = check_dsc()
        # (check_source / check_hashes / check_urgency / check_timestamps
        #  calls are missing here)
        check_signed_by_key()
    Upload.update_subst(reject_message)
    # (the "except:" clause belonging to a missing "try:" is absent; the
    #  traceback dump below implies one)
    traceback.print_exc(file=sys.stderr)

    # Restore previous WD
    os.chdir(pkg.directory)
1548 ###############################################################################
# (review) Fragment: body of main() — the "def main():" line, the
# accept-count pluralisation and the closing Logger/lock teardown are
# missing from this extraction.
global Cnf, Options, Logger

changes_files = init()

# -n/--dry-run invalidates some other options which would involve things happening
if Options["No-Action"]:
    Options["Automatic"] = ""

# Ensure all the arguments we were given are .changes files
# BUG(review): removing elements from changes_files while iterating it
# skips the element that follows each removal — iterate over a copy
# (changes_files[:]) or build a filtered list instead.
for f in changes_files:
    if not f.endswith(".changes"):
        utils.warn("Ignoring '%s' because it's not a .changes file." % (f))
        changes_files.remove(f)

if changes_files == []:
    utils.fubar("Need at least one .changes file as an argument.")

# Check that we aren't going to clash with the daily cron job
if not Options["No-Action"] and os.path.exists("%s/daily.lock" % (Cnf["Dir::Lock"])) and not Options["No-Lock"]:
    utils.fubar("Archive maintenance in progress. Try again later.")

# Obtain lock if not in no-action mode and initialize the log
if not Options["No-Action"]:
    lock_fd = os.open(Cnf["Dinstall::LockFile"], os.O_RDWR | os.O_CREAT)
    # NOTE(review): the try/except IOError frame around the non-blocking
    # lockf call is missing; "e" below implies it.
    fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
    if errno.errorcode[e.errno] == 'EACCES' or errno.errorcode[e.errno] == 'EAGAIN':
        utils.fubar("Couldn't obtain lock; assuming another 'dak process-unchecked' is already running.")
    Logger = Upload.Logger = logging.Logger(Cnf, "process-unchecked")

# debian-{devel-,}-changes@lists.debian.org toggles writes access based on this header
bcc = "X-DAK: dak process-unchecked\nX-Katie: $Revision: 1.65 $"
if Cnf.has_key("Dinstall::Bcc"):
    Upload.Subst["__BCC__"] = bcc + "\nBcc: %s" % (Cnf["Dinstall::Bcc"])
# ("else:" missing here)
Upload.Subst["__BCC__"] = bcc

# Sort the .changes files so that we process sourceful ones first
changes_files.sort(utils.changes_compare)

# Process the changes files
for changes_file in changes_files:
    print "\n" + changes_file
    # (try/finally around the holding-area cleanup missing)
    process_it (changes_file)

if not Options["No-Action"]:
    # (presumably clean_holding() — body missing)

accept_count = Upload.accept_count
accept_bytes = Upload.accept_bytes
if accept_count > 1:
    # NOTE(review): "sets" (the "package(s)" pluralisation) is defined on
    # a line missing from this extraction.
    print "Accepted %d package %s, %s." % (accept_count, sets, utils.size_type(int(accept_bytes)))
    Logger.log(["total",accept_count,accept_bytes])

if not Options["No-Action"]:
    # (presumably Logger.close() and lock release — body missing)
1618 ################################################################################
if __name__ == '__main__':
    # NOTE(review): the call to main() is missing from this extraction.