4 Checks Debian packages from Incoming
5 @contact: Debian FTP Master <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
7 @copyright: 2009 Joerg Jaspert <joerg@debian.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
25 # Originally based on dinstall by Guy Maor <maor@debian.org>
27 ################################################################################
29 # Computer games don't affect kids. I mean if Pacman affected our generation as
30 # kids, we'd all run around in a darkened room munching pills and listening to
34 ################################################################################
49 from debian_bundle import deb822
50 from daklib.dbconn import DBConn
51 from daklib.binary import Binary
52 from daklib import logging
53 from daklib import queue
54 from daklib import utils
55 from daklib.dak_exceptions import *
56 from daklib.regexes import re_valid_version, re_valid_pkg_name, re_changelog_versions, \
57 re_strip_revision, re_strip_srcver, re_spacestrip, \
58 re_isanum, re_no_epoch, re_no_revision, re_taint_free, \
59 re_isadeb, re_extract_src_version, re_issource, re_default_answer
63 ################################################################################
66 ################################################################################
77 # Aliases to the real vars in the Upload class; hysterical raisins.
85 ###############################################################################
    # Body of init(): load dak configuration, parse the command line and set
    # up the module-level globals used by every check function in this file.
    # NOTE(review): the "def init():" header and a few statements (e.g. the
    # "dsc = Upload.pkg.dsc" alias, the return) are elided in this excerpt.
    global Cnf, Options, Upload, changes, dsc, dsc_files, files, pkg

    Cnf = apt_pkg.newConfiguration()
    apt_pkg.ReadConfigFileISC(Cnf,utils.which_conf_file())

    Arguments = [('a',"automatic","Dinstall::Options::Automatic"),
                 ('h',"help","Dinstall::Options::Help"),
                 ('n',"no-action","Dinstall::Options::No-Action"),
                 ('p',"no-lock", "Dinstall::Options::No-Lock"),
                 ('s',"no-mail", "Dinstall::Options::No-Mail"),
                 ('d',"directory", "Dinstall::Options::Directory", "HasArg")]

    # Every option defaults to unset ("") so later FindB/lookups are safe.
    for i in ["automatic", "help", "no-action", "no-lock", "no-mail",
              "override-distribution", "version", "directory"]:
        Cnf["Dinstall::Options::%s" % (i)] = ""

    changes_files = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
    Options = Cnf.SubTree("Dinstall::Options")

    # If we have a directory flag, use it to find our files
    if Cnf["Dinstall::Options::Directory"] != "":
        # Note that we clobber the list of files we were given in this case
        # so warn if the user has done both
        if len(changes_files) > 0:
            utils.warn("Directory provided so ignoring files given on command line")

        changes_files = utils.get_changes_files(Cnf["Dinstall::Options::Directory"])

    Upload = queue.Upload(Cnf)

    # Aliases into Upload.pkg ("hysterical raisins" -- see comment above).
    changes = Upload.pkg.changes
    dsc_files = Upload.pkg.dsc_files
    files = Upload.pkg.files
131 ################################################################################
def usage (exit_code=0):
    """Print the dinstall command-line help.
    NOTE(review): in the full source this presumably exits with exit_code;
    that line falls outside this excerpt -- confirm before relying on it."""
    print """Usage: dinstall [OPTION]... [CHANGES]...
 -a, --automatic automatic run
 -h, --help show this help and exit.
 -n, --no-action don't do anything
 -p, --no-lock don't check lockfile !! for cron.daily only !!
 -s, --no-mail don't send any mail
 -V, --version display the version number and exit"""
143 ################################################################################
def reject (str, prefix="Rejected: "):
    """Accumulate one prefixed line of rejection text onto the module-level
    reject_message buffer (consumed later when the upload is reported)."""
    global reject_message
    reject_message = reject_message + "%s%s\n" % (prefix, str)
150 ################################################################################
def copy_to_holding(filename):
    """Copy one upload file into Dir::Queue::Holding and record it in the
    global in_holding map so clean_holding() can remove it later.

    NOTE(review): the try/except framing around os.open and shutil.copy is
    elided in this excerpt; `e` below is the caught OSError/IOError from
    those elided handlers."""
    base_filename = os.path.basename(filename)
    dest = Cnf["Dir::Queue::Holding"] + '/' + base_filename
    # O_EXCL: never silently overwrite something already in the holding area.
    fd = os.open(dest, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0640)
    # Shouldn't happen, but will if, for example, someone lists a
    # file twice in the .changes.
    if errno.errorcode[e.errno] == 'EEXIST':
        reject("%s: already exists in holding area; can not overwrite." % (base_filename))
    shutil.copy(filename, dest)
    # In either case (ENOENT or EACCES) we want to remove the
    # O_CREAT | O_EXCLed ghost file, so add the file to the list
    # of 'in holding' even if it's not the real file.
    if errno.errorcode[e.errno] == 'ENOENT':
        reject("%s: can not copy to holding area: file not found." % (base_filename))
    elif errno.errorcode[e.errno] == 'EACCES':
        reject("%s: can not copy to holding area: read permission denied." % (base_filename))
    in_holding[base_filename] = ""
187 ################################################################################
    # Body of clean_holding(): remove every file previously registered in
    # in_holding by copy_to_holding().  (The "def clean_holding():" header
    # and the actual unlink call are elided in this excerpt.)
    os.chdir(Cnf["Dir::Queue::Holding"])
    for f in in_holding.keys():
        if os.path.exists(f):
            # Paranoia: holding entries must be bare filenames, never paths.
            if f.find('/') != -1:
                utils.fubar("WTF? clean_holding() got a file ('%s') with / in it!" % (f))
203 ################################################################################
    # Body of check_changes(): parse and sanity-check the .changes file.
    # NOTE(review): the def header and several "try:" / "else:" lines are
    # elided in this excerpt; orphaned "except" clauses below belong to them.
    filename = pkg.changes_file

    # Parse the .changes field into a dictionary
        changes.update(utils.parse_changes(filename))
    except CantOpenError:
        reject("%s: can't read file." % (filename))
    except ParseChangesError, line:
        reject("%s: parse error, can't grok: %s." % (filename, line))
    except ChangesUnicodeError:
        reject("%s: changes file not proper utf-8" % (filename))

    # Parse the Files field from the .changes into another dictionary
        files.update(utils.build_file_list(changes))
    except ParseChangesError, line:
        reject("%s: parse error, can't grok: %s." % (filename, line))
    except UnknownFormatError, format:
        reject("%s: unknown format '%s'." % (filename, format))

    # Check for mandatory fields
    for i in ("source", "binary", "architecture", "version", "distribution",
              "maintainer", "files", "changes", "description"):
        if not changes.has_key(i):
            reject("%s: Missing mandatory field `%s'." % (filename, i))
            return 0 # Avoid <undef> errors during later tests

    # Strip a source version in brackets from the source field
    if re_strip_srcver.search(changes["source"]):
        changes["source"] = re_strip_srcver.sub('', changes["source"])

    # Ensure the source field is a valid package name.
    if not re_valid_pkg_name.match(changes["source"]):
        reject("%s: invalid source name '%s'." % (filename, changes["source"]))

    # Split multi-value fields into a lower-level dictionary
    for i in ("architecture", "distribution", "binary", "closes"):
        o = changes.get(i, "")

    # Fix the Maintainer: field to be RFC822/2047 compatible
        (changes["maintainer822"], changes["maintainer2047"],
         changes["maintainername"], changes["maintaineremail"]) = \
         utils.fix_maintainer (changes["maintainer"])
    except ParseMaintError, msg:
        reject("%s: Maintainer field ('%s') failed to parse: %s" \
               % (filename, changes["maintainer"], msg))

    # ...likewise for the Changed-By: field if it exists.
        (changes["changedby822"], changes["changedby2047"],
         changes["changedbyname"], changes["changedbyemail"]) = \
         utils.fix_maintainer (changes.get("changed-by", ""))
    except ParseMaintError, msg:
        # On failure, reset the changedby* fields (the fallback tuple
        # assigned here is elided in this excerpt) and record the error.
        (changes["changedby822"], changes["changedby2047"],
         changes["changedbyname"], changes["changedbyemail"]) = \
        reject("%s: Changed-By field ('%s') failed to parse: %s" \
               % (filename, changes["changed-by"], msg))

    # Ensure all the values in Closes: are numbers
    if changes.has_key("closes"):
        for i in changes["closes"].keys():
            if re_isanum.match (i) == None:
                reject("%s: `%s' from Closes field isn't a number." % (filename, i))

    # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
    changes["chopversion"] = re_no_epoch.sub('', changes["version"])
    changes["chopversion2"] = re_no_revision.sub('', changes["chopversion"])

    # Check there isn't already a changes file of the same name in one
    # of the queue directories.
    base_filename = os.path.basename(filename)
    for d in [ "Accepted", "Byhand", "Done", "New", "ProposedUpdates", "OldProposedUpdates" ]:
        if os.path.exists(Cnf["Dir::Queue::%s" % (d) ]+'/'+base_filename):
            reject("%s: a file with this name already exists in the %s directory." % (base_filename, d))

    # Check the .changes is non-empty
    # (the emptiness test on `files` is elided in this excerpt)
        reject("%s: nothing to do (Files field is empty)." % (base_filename))
300 ################################################################################
def check_distributions():
    "Check and map the Distribution field of a .changes file."
    # NOTE(review): inside the loop, the lines splitting each mapping into
    # args/mtype/source/suite (and some break/continue statements) are
    # elided in this excerpt.

    # Handle suite mappings
    for m in Cnf.ValueList("SuiteMappings"):
        if mtype == "map" or mtype == "silent-map":
            (source, dest) = args[1:3]
            if changes["distribution"].has_key(source):
                del changes["distribution"][source]
                changes["distribution"][dest] = 1
                if mtype != "silent-map":
                    reject("Mapping %s to %s." % (source, dest),"")
            if changes.has_key("distribution-version"):
                if changes["distribution-version"].has_key(source):
                    changes["distribution-version"][source]=dest
        elif mtype == "map-unreleased":
            (source, dest) = args[1:3]
            if changes["distribution"].has_key(source):
                for arch in changes["architecture"].keys():
                    if arch not in DBConn().get_suite_architectures(source):
                        reject("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch),"")
                        del changes["distribution"][source]
                        changes["distribution"][dest] = 1
        elif mtype == "ignore":
            if changes["distribution"].has_key(suite):
                del changes["distribution"][suite]
                reject("Ignoring %s as a target suite." % (suite), "Warning: ")
        elif mtype == "reject":
            if changes["distribution"].has_key(suite):
                reject("Uploads to %s are not accepted." % (suite))
        elif mtype == "propup-version":
            # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
            #
            # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
            if changes["distribution"].has_key(args[1]):
                changes.setdefault("distribution-version", {})
                for suite in args[2:]: changes["distribution-version"][suite]=suite

    # Ensure there is (still) a target distribution
    if changes["distribution"].keys() == []:
        reject("no valid distribution.")

    # Ensure target distributions exist
    for suite in changes["distribution"].keys():
        if not Cnf.has_key("Suite::%s" % (suite)):
            reject("Unknown distribution `%s'." % (suite))
354 ################################################################################
    # Body of check_files(), part 1: setup and whole-upload checks that run
    # before the per-file loop.  (The def header is elided in this excerpt.)
    archive = utils.where_am_i()
    file_keys = files.keys()

    # if reprocess is 2 we've already done this and we're checking
    # things again for the new .orig.tar.gz.
    # [Yes, I'm fully aware of how disgusting this is]
    if not Options["No-Action"] and reprocess < 2:
        os.chdir(pkg.directory)

    # Check there isn't already a .changes or .dak file of the same name in
    # the proposed-updates "CopyChanges" or "CopyDotDak" storage directories.
    # [NB: this check must be done post-suite mapping]
    base_filename = os.path.basename(pkg.changes_file)
    dot_dak_filename = base_filename[:-8]+".dak"
    for suite in changes["distribution"].keys():
        copychanges = "Suite::%s::CopyChanges" % (suite)
        if Cnf.has_key(copychanges) and \
           os.path.exists(Cnf[copychanges]+"/"+base_filename):
            reject("%s: a file with this name already exists in %s" \
                   % (base_filename, Cnf[copychanges]))

        copy_dot_dak = "Suite::%s::CopyDotDak" % (suite)
        if Cnf.has_key(copy_dot_dak) and \
           os.path.exists(Cnf[copy_dot_dak]+"/"+dot_dak_filename):
            reject("%s: a file with this name already exists in %s" \
                   % (dot_dak_filename, Cnf[copy_dot_dak]))

    cursor = DBConn().cursor()
    # Check for packages that have moved from one component to another
    # STU: this should probably be changed to not join on architecture, suite tables but instead to used their cached name->id mappings from DBConn
    DBConn().prepare("moved_pkg_q", """
    PREPARE moved_pkg_q(text,text,text) AS
    SELECT c.name FROM binaries b, bin_associations ba, suite s, location l,
    component c, architecture a, files f
    WHERE b.package = $1 AND s.suite_name = $2
    AND (a.arch_string = $3 OR a.arch_string = 'all')
    AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id
    AND f.location = l.id
    AND l.component = c.id
    AND b.file = f.id""")
        # Body of check_files(), part 2: per-file checks for binary (.deb /
        # .udeb) packages.  NOTE(review): this sits inside check_files()'s
        # per-file loop ("for f in file_keys:"); the loop header and various
        # try/else lines are elided in this excerpt.
        # Ensure the file does not already exist in one of the accepted directories
        for d in [ "Accepted", "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
            if not Cnf.has_key("Dir::Queue::%s" % (d)): continue
            if os.path.exists(Cnf["Dir::Queue::%s" % (d) ] + '/' + f):
                reject("%s file already exists in the %s directory." % (f, d))
        if not re_taint_free.match(f):
            reject("!!WARNING!! tainted filename: '%s'." % (f))
        # Check the file is readable
        if os.access(f, os.R_OK) == 0:
            # When running in -n, copy_to_holding() won't have
            # generated the reject_message, so we need to.
            if Options["No-Action"]:
                if os.path.exists(f):
                    reject("Can't read `%s'. [permission denied]" % (f))
                    # (else-branch header for the not-found case is elided)
                    reject("Can't read `%s'. [file not found]" % (f))
            files[f]["type"] = "unreadable"
        # If it's byhand skip remaining checks
        if files[f]["section"] == "byhand" or files[f]["section"][:4] == "raw-":
            files[f]["byhand"] = 1
            files[f]["type"] = "byhand"
        # Checks for a binary package...
        elif re_isadeb.match(f):
            files[f]["type"] = "deb"

            # Extract package control information
            deb_file = utils.open_file(f)
            # (try/except framing elided in this excerpt)
            control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
            reject("%s: debExtractControl() raised %s." % (f, sys.exc_type))
            # Can't continue, none of the checks on control would work.

            # Check for mandantory "Description:"
            # (try/except framing elided in this excerpt)
            apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
            reject("%s: Missing Description in binary package" % (f))

            # Check for mandatory fields
            for field in [ "Package", "Architecture", "Version" ]:
                if control.Find(field) == None:
                    reject("%s: No %s field in control." % (f, field))

            # Ensure the package name matches the one give in the .changes
            if not changes["binary"].has_key(control.Find("Package", "")):
                reject("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

            # Validate the package field
            package = control.Find("Package")
            if not re_valid_pkg_name.match(package):
                reject("%s: invalid package name '%s'." % (f, package))

            # Validate the version field
            version = control.Find("Version")
            if not re_valid_version.match(version):
                reject("%s: invalid version number '%s'." % (f, version))

            # Ensure the architecture of the .deb is one we know about.
            default_suite = Cnf.get("Dinstall::DefaultSuite", "Unstable")
            architecture = control.Find("Architecture")
            upload_suite = changes["distribution"].keys()[0]
            if architecture not in DBConn().get_suite_architectures(default_suite) and architecture not in DBConn().get_suite_architectures(upload_suite):
                reject("Unknown architecture '%s'." % (architecture))

            # Ensure the architecture of the .deb is one of the ones
            # listed in the .changes.
            if not changes["architecture"].has_key(architecture):
                reject("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

            # Sanity-check the Depends field
            depends = control.Find("Depends")
            # (the empty-Depends guard is elided in this excerpt)
            reject("%s: Depends field is empty." % (f))

            # Sanity-check the Provides field
            provides = control.Find("Provides")
            # (the Provides-present guard is elided in this excerpt)
            provide = re_spacestrip.sub('', provides)
            reject("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    reject("%s: Invalid Provides field content %s." % (f, prov))

            # Check the section & priority match those given in the .changes (non-fatal)
            if control.Find("Section") and files[f]["section"] != "" and files[f]["section"] != control.Find("Section"):
                reject("%s control file lists section as `%s', but changes file has `%s'." % (f, control.Find("Section", ""), files[f]["section"]), "Warning: ")
            if control.Find("Priority") and files[f]["priority"] != "" and files[f]["priority"] != control.Find("Priority"):
                reject("%s control file lists priority as `%s', but changes file has `%s'." % (f, control.Find("Priority", ""), files[f]["priority"]),"Warning: ")

            files[f]["package"] = package
            files[f]["architecture"] = architecture
            files[f]["version"] = version
            files[f]["maintainer"] = control.Find("Maintainer", "")
            if f.endswith(".udeb"):
                files[f]["dbtype"] = "udeb"
            elif f.endswith(".deb"):
                files[f]["dbtype"] = "deb"
            # (else-branch header elided in this excerpt)
                reject("%s is neither a .deb or a .udeb." % (f))
            files[f]["source"] = control.Find("Source", files[f]["package"])
            # Get the source version
            source = files[f]["source"]
            if source.find("(") != -1:
                m = re_extract_src_version.match(source)
                source_version = m.group(2)
            if not source_version:
                source_version = files[f]["version"]
            files[f]["source package"] = source
            files[f]["source version"] = source_version

            # Ensure the filename matches the contents of the .deb
            m = re_isadeb.match(f)
            # package name part of the filename vs. control's Package
            file_package = m.group(1)
            if files[f]["package"] != file_package:
                reject("%s: package part of filename (%s) does not match package name in the %s (%s)." % (f, file_package, files[f]["dbtype"], files[f]["package"]))
            epochless_version = re_no_epoch.sub('', control.Find("Version"))
            # version part of the filename vs. control's (epochless) Version
            file_version = m.group(2)
            if epochless_version != file_version:
                reject("%s: version part of filename (%s) does not match package version in the %s (%s)." % (f, file_version, files[f]["dbtype"], epochless_version))
            # architecture part of the filename vs. control's Architecture
            file_architecture = m.group(3)
            if files[f]["architecture"] != file_architecture:
                reject("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % (f, file_architecture, files[f]["dbtype"], files[f]["architecture"]))

            # Check for existent source
            source_version = files[f]["source version"]
            source_package = files[f]["source package"]
            if changes["architecture"].has_key("source"):
                if source_version != changes["version"]:
                    reject("source version (%s) for %s doesn't match changes version %s." % (source_version, f, changes["version"]))

                # Check in the SQL database
                if not Upload.source_exists(source_package, source_version, changes["distribution"].keys()):
                    # Check in one of the other directories
                    source_epochless_version = re_no_epoch.sub('', source_version)
                    dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                    if os.path.exists(Cnf["Dir::Queue::Byhand"] + '/' + dsc_filename):
                        files[f]["byhand"] = 1
                    elif os.path.exists(Cnf["Dir::Queue::New"] + '/' + dsc_filename):
                        # (the "new" marking and dsc_file_exists bookkeeping
                        # lines are elided in this excerpt)
                        for myq in ["Accepted", "Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
                            if Cnf.has_key("Dir::Queue::%s" % (myq)):
                                if os.path.exists(Cnf["Dir::Queue::"+myq] + '/' + dsc_filename):
                    if not dsc_file_exists:
                        reject("no source found for %s %s (%s)." % (source_package, source_version, f))
            # Check the version and for file overwrites
            reject(Upload.check_binary_against_db(f),"")

            Binary(f, reject).scan_package()
        # Checks for a source package...
        # NOTE(review): the "elif re_issource.match(f):" branch header is
        # elided in this excerpt; everything below runs for source files.
            m = re_issource.match(f)
            files[f]["package"] = m.group(1)
            files[f]["version"] = m.group(2)
            files[f]["type"] = m.group(3)

            # Ensure the source package name matches the Source filed in the .changes
            if changes["source"] != files[f]["package"]:
                reject("%s: changes file doesn't say %s for Source" % (f, files[f]["package"]))

            # Ensure the source version matches the version in the .changes file
            if files[f]["type"] == "orig.tar.gz":
                changes_version = changes["chopversion2"]
                # (else-branch header elided in this excerpt)
                changes_version = changes["chopversion"]
            if changes_version != files[f]["version"]:
                reject("%s: should be %s according to changes file." % (f, changes_version))

            # Ensure the .changes lists source in the Architecture field
            if not changes["architecture"].has_key("source"):
                reject("%s: changes file doesn't list `source' in Architecture field." % (f))

            # Check the signature of a .dsc file
            if files[f]["type"] == "dsc":
                dsc["fingerprint"] = utils.check_signature(f, reject)

            files[f]["architecture"] = "source"

        # Not a binary or source package? Assume byhand...
        # (the "else:" branch header is elided in this excerpt)
            files[f]["byhand"] = 1
            files[f]["type"] = "byhand"
        # Per-suite file checks
        files[f]["oldfiles"] = {}
        for suite in changes["distribution"].keys():
            # byhand files get none of the per-suite processing below
            # (the body of this guard is elided in this excerpt).
            if files[f].has_key("byhand"):

            # Handle component mappings
            for m in Cnf.ValueList("ComponentMappings"):
                (source, dest) = m.split()
                if files[f]["component"] == source:
                    files[f]["original component"] = source
                    files[f]["component"] = dest
631 # Ensure the component is valid for the target suite
632 if Cnf.has_key("Suite:%s::Components" % (suite)) and \
633 files[f]["component"] not in Cnf.ValueList("Suite::%s::Components" % (suite)):
634 reject("unknown component `%s' for suite `%s'." % (files[f]["component"], suite))
            # Validate the component
            component = files[f]["component"]
            component_id = DBConn().get_component_id(component)
            if component_id == -1:
                reject("file '%s' has unknown component '%s'." % (f, component))

            # See if the package is NEW
            # (the body marking the file NEW is elided in this excerpt)
            if not Upload.in_override_p(files[f]["package"], files[f]["component"], suite, files[f].get("dbtype",""), f):

            # Validate the priority
            if files[f]["priority"].find('/') != -1:
                reject("file '%s' has invalid priority '%s' [contains '/']." % (f, files[f]["priority"]))

            # Determine the location
            location = Cnf["Dir::Pool"]
            location_id = DBConn().get_location_id(location, component, archive)
            if location_id == -1:
                reject("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (component, archive))
            files[f]["location id"] = location_id

            # Check the md5sum & size against existing files (if any)
            files[f]["pool name"] = utils.poolify (changes["source"], files[f]["component"])
            files_id = DBConn().get_files_id(files[f]["pool name"] + f, files[f]["size"], files[f]["md5sum"], files[f]["location id"])
            # (the guards distinguishing the two error cases are elided here)
            reject("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
            reject("md5sum and/or size mismatch on existing copy of %s." % (f))
            files[f]["files id"] = files_id

            # Check for packages that have moved from one component to another
            files[f]['suite'] = suite
            cursor.execute("""EXECUTE moved_pkg_q( %(package)s, %(suite)s, %(architecture)s )""", ( files[f] ) )
            ql = cursor.fetchone()
            # (the ql truthiness guard is elided in this excerpt)
            files[f]["othercomponents"] = ql[0][0]

    # If the .changes file says it has source, it must have source.
    if changes["architecture"].has_key("source"):
        # (the has_source guard is elided in this excerpt)
            reject("no source found and Architecture line in changes mention source.")
        if not has_binaries and Cnf.FindB("Dinstall::Reject::NoSourceOnly"):
            reject("source only uploads are not supported.")
683 ###############################################################################
    # Body of check_dsc(): locate, parse and validate the upload's .dsc.
    # NOTE(review): the def header and several "try:" / guard lines are
    # elided in this excerpt; orphaned "except" clauses belong to them.
    # Ensure there is source to check
    if not changes["architecture"].has_key("source"):

    # Find the .dsc
    for f in files.keys():
        if files[f]["type"] == "dsc":
            # (dsc_filename bookkeeping elided; duplicate .dsc is fatal)
            reject("can not process a .changes file with multiple .dsc's.")

    # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        reject("source uploads must contain a dsc file")

    # Parse the .dsc file
        dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
    except CantOpenError:
        # if not -n copy_to_holding() will have done this for us...
        if Options["No-Action"]:
            reject("%s: can't read file." % (dsc_filename))
    except ParseChangesError, line:
        reject("%s: parse error, can't grok: %s." % (dsc_filename, line))
    except InvalidDscError, line:
        reject("%s: syntax error on line %s." % (dsc_filename, line))
    except ChangesUnicodeError:
        reject("%s: dsc file not proper utf-8." % (dsc_filename))

    # Build up the file list of files mentioned by the .dsc
        dsc_files.update(utils.build_file_list(dsc, is_a_dsc=1))
    except NoFilesFieldError:
        reject("%s: no Files: field." % (dsc_filename))
    except UnknownFormatError, format:
        reject("%s: unknown format '%s'." % (dsc_filename, format))
    except ParseChangesError, line:
        reject("%s: parse error, can't grok: %s." % (dsc_filename, line))

    # Enforce mandatory fields
    for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
        if not dsc.has_key(i):
            reject("%s: missing mandatory field `%s'." % (dsc_filename, i))

    # Validate the source and version fields
    if not re_valid_pkg_name.match(dsc["source"]):
        reject("%s: invalid source name '%s'." % (dsc_filename, dsc["source"]))
    if not re_valid_version.match(dsc["version"]):
        reject("%s: invalid version number '%s'." % (dsc_filename, dsc["version"]))

    # Bumping the version number of the .dsc breaks extraction by stable's
    # dpkg-source. So let's not do that...
    if dsc["format"] != "1.0":
        reject("%s: incompatible 'Format' version produced by a broken version of dpkg-dev 1.9.1{3,4}." % (dsc_filename))

    # Validate the Maintainer field
        utils.fix_maintainer (dsc["maintainer"])
    except ParseMaintError, msg:
        reject("%s: Maintainer field ('%s') failed to parse: %s" \
               % (dsc_filename, dsc["maintainer"], msg))

    # Validate the build-depends field(s)
    for field_name in [ "build-depends", "build-depends-indep" ]:
        field = dsc.get(field_name)
        # (the field-present guard is elided in this excerpt)
            # Check for broken dpkg-dev lossage...
            if field.startswith("ARRAY"):
                reject("%s: invalid %s field produced by a broken version of dpkg-dev (1.10.11)" % (dsc_filename, field_name.title()))

            # Have apt try to parse them...
                apt_pkg.ParseSrcDepends(field)
                reject("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

    # Ensure the version number in the .dsc matches the version number in the .changes
    epochless_dsc_version = re_no_epoch.sub('', dsc["version"])
    changes_version = files[dsc_filename]["version"]
    if epochless_dsc_version != files[dsc_filename]["version"]:
        reject("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

    # Ensure there is a .tar.gz in the .dsc file
    for f in dsc_files.keys():
        m = re_issource.match(f)
        # (the no-match guard is elided in this excerpt)
            reject("%s: %s in Files field not recognised as source." % (dsc_filename, f))
        if ftype == "orig.tar.gz" or ftype == "tar.gz":
        # (the has_tar bookkeeping / guard is elided in this excerpt)
        reject("%s: no .tar.gz or .orig.tar.gz in 'Files' field." % (dsc_filename))

    # Ensure source is newer than existing source in target suites
    reject(Upload.check_source_against_db(dsc_filename),"")

    (reject_msg, is_in_incoming) = Upload.check_dsc_against_db(dsc_filename)
    reject(reject_msg, "")
    # (the is_in_incoming guard is elided in this excerpt)
        if not Options["No-Action"]:
            copy_to_holding(is_in_incoming)
        orig_tar_gz = os.path.basename(is_in_incoming)
        files[orig_tar_gz] = {}
        files[orig_tar_gz]["size"] = os.stat(orig_tar_gz)[stat.ST_SIZE]
        files[orig_tar_gz]["md5sum"] = dsc_files[orig_tar_gz]["md5sum"]
        files[orig_tar_gz]["sha1sum"] = dsc_files[orig_tar_gz]["sha1sum"]
        files[orig_tar_gz]["sha256sum"] = dsc_files[orig_tar_gz]["sha256sum"]
        files[orig_tar_gz]["section"] = files[dsc_filename]["section"]
        files[orig_tar_gz]["priority"] = files[dsc_filename]["priority"]
        files[orig_tar_gz]["component"] = files[dsc_filename]["component"]
        files[orig_tar_gz]["type"] = "orig.tar.gz"
814 ################################################################################
def get_changelog_versions(source_dir):
    """Extract the source package and (optionally) grab the
    version history out of debian/changelog for the BTS.

    NOTE(review): several guard/return lines are elided in this excerpt."""

    # Find the .dsc (again)
    for f in files.keys():
        if files[f]["type"] == "dsc":

    # If there isn't one, we have nothing to do. (We have reject()ed the upload already)

    # Create a symlink mirror of the source files in our temporary directory
    for f in files.keys():
        m = re_issource.match(f)
        # (the match guard is elided in this excerpt)
            src = os.path.join(source_dir, f)
            # If a file is missing for whatever reason, give up.
            if not os.path.exists(src):
            # skip the upload's orig.tar.gz if a pool copy will be used
            if ftype == "orig.tar.gz" and pkg.orig_tar_gz:
            dest = os.path.join(os.getcwd(), f)
            os.symlink(src, dest)

    # If the orig.tar.gz is not a part of the upload, create a symlink to the
    # existing copy.
        dest = os.path.join(os.getcwd(), os.path.basename(pkg.orig_tar_gz))
        os.symlink(pkg.orig_tar_gz, dest)

    # Extract the source
    cmd = "dpkg-source -sn -x %s" % (dsc_filename)
    (result, output) = commands.getstatusoutput(cmd)
    # (the result != 0 guard is elided in this excerpt)
        reject("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
        reject(utils.prefix_multi_line_string(output, " [dpkg-source output:] "), "")

    if not Cnf.Find("Dir::Queue::BTSVersionTrack"):

    # Get the upstream version
    upstr_version = re_no_epoch.sub('', dsc["version"])
    if re_strip_revision.search(upstr_version):
        upstr_version = re_strip_revision.sub('', upstr_version)

    # Ensure the changelog file exists
    changelog_filename = "%s-%s/debian/changelog" % (dsc["source"], upstr_version)
    if not os.path.exists(changelog_filename):
        reject("%s: debian/changelog not found in extracted source." % (dsc_filename))

    # Parse the changelog
    dsc["bts changelog"] = ""
    changelog_file = utils.open_file(changelog_filename)
    for line in changelog_file.readlines():
        m = re_changelog_versions.match(line)
        # (the match guard is elided in this excerpt)
            dsc["bts changelog"] += line
    changelog_file.close()

    # Check we found at least one revision in the changelog
    if not dsc["bts changelog"]:
        reject("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
885 ########################################
    # Body of check_source() (def header elided): extract the source tree in
    # a temp dir, harvest changelog versions, then clean up.  Bail out if:
    # a) there's no source
    # or b) reprocess is 2 - we will do this check next time when orig.tar.gz is in 'files'
    # or c) the orig.tar.gz is MIA
    if not changes["architecture"].has_key("source") or reprocess == 2 \
       or pkg.orig_tar_gz == -1:

    tmpdir = utils.temp_dirname()

    # Move into the temporary directory

    # Get the changelog version history
    get_changelog_versions(cwd)

    # Move back and cleanup the temporary tree
    # (try: framing elided in this excerpt; `e` is the caught OSError)
        shutil.rmtree(tmpdir)
        if errno.errorcode[e.errno] != 'EACCES':
            utils.fubar("%s: couldn't remove tmp dir for source tree." % (dsc["source"]))

        reject("%s: source tree could not be cleanly removed." % (dsc["source"]))
        # We probably have u-r or u-w directories so chmod everything
        # and try again.
        cmd = "chmod -R u+rwx %s" % (tmpdir)
        result = os.system(cmd)
        # (the result != 0 guard is elided in this excerpt)
            utils.fubar("'%s' failed with result %s." % (cmd, result))
        shutil.rmtree(tmpdir)
        utils.fubar("%s: couldn't remove tmp dir for source tree." % (dsc["source"]))
924 ################################################################################
926 # FIXME: should be a debian specific check called from a hook
def check_urgency ():
    """Normalise the .changes Urgency field for source uploads: lower-case
    it, default it when absent, and warn + fall back to Urgency::Default
    when the value is not listed in Urgency::Valid."""
    # Urgency is only meaningful on uploads that include source.
    if not changes["architecture"].has_key("source"):
        return
    default_urgency = Cnf["Urgency::Default"]
    if not changes.has_key("urgency"):
        changes["urgency"] = default_urgency
    changes["urgency"] = changes["urgency"].lower()
    if changes["urgency"] not in Cnf.ValueList("Urgency::Valid"):
        reject("%s is not a valid urgency; it will be treated as %s by testing." % (changes["urgency"], default_urgency), "Warning: ")
        changes["urgency"] = default_urgency
937 ################################################################################
# Fragment of check_hashes(): verify the md5sums and sizes of the files
# listed in the .changes and the .dsc against the files on disk.
# NOTE(review): the enclosing def and the reject() inside the final loop
# are not visible here.
940 utils.check_hash(".changes", files, "md5", apt_pkg.md5sum)
941 utils.check_size(".changes", files)
942 utils.check_hash(".dsc", dsc_files, "md5", apt_pkg.md5sum)
943 utils.check_size(".dsc", dsc_files)
945 # This is stupid API, but it'll have to do for now until
946 # we actually have proper abstraction
# ensure_hashes yields one message per missing/invalid checksum field.
947 for m in utils.ensure_hashes(changes, dsc, files, dsc_files):
950 ################################################################################
952 # Sanity check the time stamps of files inside debs.
953 # [Files in the near future cause ugly warnings and extreme time
954 # travel can cause errors on extraction]
956 def check_timestamps():
# Sanity-check the mtimes of files inside each uploaded .deb: file times
# in the future or before the configured cutoff year are rejected.
# NOTE(review): fragment -- the "class Tar:" header, try/except lines and
# some guards are not visible here.
958 def __init__(self, future_cutoff, past_cutoff):
960 self.future_cutoff = future_cutoff
961 self.past_cutoff = past_cutoff
964 self.future_files = {}
965 self.ancient_files = {}
# Called once per tar member; only Name and MTime are inspected, the
# rest of the header fields are ignored.
967 def callback(self, Kind,Name,Link,Mode,UID,GID,Size,MTime,Major,Minor):
968 if MTime > self.future_cutoff:
969 self.future_files[Name] = MTime
970 if MTime < self.past_cutoff:
971 self.ancient_files[Name] = MTime
# Allowed window: [start of PastCutoffYear, now + FutureTimeTravelGrace].
974 future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
975 past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
976 tar = Tar(future_cutoff, past_cutoff)
977 for filename in files.keys():
978 if files[filename]["type"] == "deb":
981 deb_file = utils.open_file(filename)
# Walk both members of the .deb through the same collecting callback.
982 apt_inst.debExtract(deb_file,tar.callback,"control.tar.gz")
985 apt_inst.debExtract(deb_file,tar.callback,"data.tar.gz")
986 except SystemError, e:
987 # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
988 if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
991 apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
# Report one example of each offending class of file, plus a count.
994 future_files = tar.future_files.keys()
996 num_future_files = len(future_files)
997 future_file = future_files[0]
998 future_date = tar.future_files[future_file]
999 reject("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1000 % (filename, num_future_files, future_file,
1001 time.ctime(future_date)))
1003 ancient_files = tar.ancient_files.keys()
1005 num_ancient_files = len(ancient_files)
1006 ancient_file = ancient_files[0]
1007 ancient_date = tar.ancient_files[ancient_file]
1008 reject("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1009 % (filename, num_ancient_files, ancient_file,
1010 time.ctime(ancient_date)))
# Catch-all: any other extraction failure becomes a reject, not a crash.
1012 reject("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1014 ################################################################################
1016 def lookup_uid_from_fingerprint(fpr):
1018 Return the uid,name,isdm for a given gpg fingerprint
1021 @param fpr: a 40 byte GPG fingerprint
1023 @return: (uid, name, isdm)
# NOTE(review): the fingerprint is spliced into the SQL via %s string
# interpolation instead of a bound parameter.  Safe only while fpr is a
# bare hex fingerprint from gpg; consider parameterising regardless.
1025 cursor = DBConn().cursor()
1026 cursor.execute( "SELECT u.uid, u.name, k.debian_maintainer FROM fingerprint f JOIN keyrings k ON (f.keyring=k.id), uid u WHERE f.uid = u.id AND f.fingerprint = '%s'" % (fpr))
1027 qs = cursor.fetchone()
# No matching key in the database: no uid, no name, not a DM.
# NOTE(review): the branch returning the fetched row is not visible here.
1031 return (None, None, False)
1033 def check_signed_by_key():
1034 """Ensure the .changes is signed by an authorized uploader."""
# NOTE(review): fragment -- several control-flow lines (if/else headers,
# loop bodies, break/continue) are not visible here.
1036 (uid, uid_name, is_dm) = lookup_uid_from_fingerprint(changes["fingerprint"])
1037 if uid_name == None:
1040 # match claimed name with actual name:
# Key not in the database: fall back to treating the raw fingerprint as
# the identity and allow nmu/sponsorship (see XXX below).
1042 # This is fundamentally broken but need us to refactor how we get
1043 # the UIDs/Fingerprints in order for us to fix it properly
1044 uid, uid_email = changes["fingerprint"], uid
1045 may_nmu, may_sponsor = 1, 1
1046 # XXX by default new dds don't have a fingerprint/uid in the db atm,
1047 # and can't get one in there if we don't allow nmu/sponsorship
1048 elif is_dm is False:
1049 # If is_dm is False, we allow full upload rights
1050 uid_email = "%s@debian.org" % (uid)
1051 may_nmu, may_sponsor = 1, 1
1053 # Assume limited upload rights unless we've discovered otherwise
1055 may_nmu, may_sponsor = 0, 0
# The upload counts as sponsored unless the signer matches either the
# Maintainer or the Changed-By field (by email, or failing that by name).
1058 if uid_email in [changes["maintaineremail"], changes["changedbyemail"]]:
1060 elif uid_name in [changes["maintainername"], changes["changedbyname"]]:
1062 if uid_name == "": sponsored = 1
# Record the sponsor address for sourceful uploads where the signer's
# email is a known alias; used later in the accept mails.
1065 if ("source" in changes["architecture"] and
1066 uid_email and utils.is_email_alias(uid_email)):
1067 sponsor_addresses = utils.gpg_get_key_addresses(changes["fingerprint"])
1068 if (changes["maintaineremail"] not in sponsor_addresses and
1069 changes["changedbyemail"] not in sponsor_addresses):
1070 changes["sponsoremail"] = uid_email
1072 if sponsored and not may_sponsor:
1073 reject("%s is not authorised to sponsor uploads" % (uid))
1075 cursor = DBConn().cursor()
# Debian Maintainer path: the most recent version of the source package
# must carry DM-Upload-Allowed: yes, and the signer must appear in its
# Maintainer/Uploaders.
1076 if not sponsored and not may_nmu:
1078 cursor.execute( "SELECT s.id, s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) WHERE s.source = %(source)s AND s.dm_upload_allowed = 'yes'", changes )
1080 highest_sid, highest_version = None, None
1082 should_reject = True
1084 si = cursor.fetchone()
# Track the newest version that allows DM uploads.
1088 if highest_version == None or apt_pkg.VersionCompare(si[1], highest_version) == 1:
1090 highest_version = si[1]
1092 if highest_sid == None:
1093 reject("Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version" % changes["source"])
1096 cursor.execute("SELECT m.name FROM maintainer m WHERE m.id IN (SELECT su.maintainer FROM src_uploaders su JOIN source s ON (s.id = su.source) WHERE su.source = %s)" % (highest_sid))
1099 m = cursor.fetchone()
# Any Maintainer/Uploaders entry matching the signer (by email or name)
# clears the rejection.
1103 (rfc822, rfc2047, name, email) = utils.fix_maintainer(m[0])
1104 if email == uid_email or name == uid_name:
1108 if should_reject == True:
1109 reject("%s is not in Maintainer or Uploaders of source package %s" % (uid, changes["source"]))
# A DM may not take over ("hijack") a binary currently built from a
# different source package in any target suite.
1111 for b in changes["binary"].keys():
1112 for suite in changes["distribution"].keys():
1113 suite_id = DBConn().get_suite_id(suite)
1115 cursor.execute("SELECT DISTINCT s.source FROM source s JOIN binaries b ON (s.id = b.source) JOIN bin_associations ba On (b.id = ba.bin) WHERE b.package = %(package)s AND ba.suite = %(suite)s" , {'package':b, 'suite':suite_id} )
1117 s = cursor.fetchone()
1121 if s[0] != changes["source"]:
1122 reject("%s may not hijack %s from source package %s in suite %s" % (uid, b, s, suite))
# DMs may not upload files that would land in BYHAND or NEW.
1124 for f in files.keys():
1125 if files[f].has_key("byhand"):
1126 reject("%s may not upload BYHAND file %s" % (uid, f))
1127 if files[f].has_key("new"):
1128 reject("%s may not upload NEW file %s" % (uid, f))
1131 ################################################################################
1132 ################################################################################
1134 # If any file of an upload has a recent mtime then chances are good
1135 # the file is still being uploaded.
1137 def upload_too_new():
# True when any file of the upload (files, dsc_files or the .changes
# itself) was modified within the last Dinstall::SkipTime seconds --
# i.e. the upload may still be in progress.
# NOTE(review): fragment -- the per-file loop header, the return
# statements and the surrounding try/finally are not visible here.
1139 # Move back to the original directory to get accurate time stamps
1141 os.chdir(pkg.directory)
1142 file_list = pkg.files.keys()
1143 file_list.extend(pkg.dsc_files.keys())
1144 file_list.append(pkg.changes_file)
1147 last_modified = time.time()-os.path.getmtime(f)
1148 if last_modified < int(Cnf["Dinstall::SkipTime"]):
1156 ################################################################################
# NOTE(review): fragment of action() -- the enclosing def and several
# intermediate lines (else branches, answer assignments) are not visible.
# Decides what to do with a processed upload: reject, accept, or divert
# into one of the holding queues, prompting the operator unless running
# with --automatic.
1159 # changes["distribution"] may not exist in corner cases
1160 # (e.g. unreadable changes files)
1161 if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
1162 changes["distribution"] = {}
1164 (summary, short_summary) = Upload.build_summaries()
# Each queue pairs a predicate ("is") with a handler ("process").
1166 # q-unapproved hax0ring
1168 "New": { "is": is_new, "process": acknowledge_new },
1169 "Autobyhand" : { "is" : is_autobyhand, "process": do_autobyhand },
1170 "Byhand" : { "is": is_byhand, "process": do_byhand },
1171 "OldStableUpdate" : { "is": is_oldstableupdate,
1172 "process": do_oldstableupdate },
1173 "StableUpdate" : { "is": is_stableupdate, "process": do_stableupdate },
1174 "Unembargo" : { "is": is_unembargo, "process": queue_unembargo },
1175 "Embargo" : { "is": is_embargo, "process": queue_embargo },
# Queue order matters: the first queue whose predicate fires wins.
1177 queues = [ "New", "Autobyhand", "Byhand" ]
1178 if Cnf.FindB("Dinstall::SecurityQueueHandling"):
1179 queues += [ "Unembargo", "Embargo" ]
1181 queues += [ "OldStableUpdate", "StableUpdate" ]
1183 (prompt, answer) = ("", "XXX")
1184 if Options["No-Action"] or Options["Automatic"]:
# Rejected upload: skip it if still being uploaded, else offer Reject.
1189 if reject_message.find("Rejected") != -1:
1190 if upload_too_new():
1191 print "SKIP (too new)\n" + reject_message,
1192 prompt = "[S]kip, Quit ?"
1194 print "REJECT\n" + reject_message,
1195 prompt = "[R]eject, Skip, Quit ?"
1196 if Options["Automatic"]:
1201 if queue_info[q]["is"]():
1205 print "%s for %s\n%s%s" % (
1206 qu.upper(), ", ".join(changes["distribution"].keys()),
1207 reject_message, summary),
# Use the queue's first letter as the hotkey, unless it would clash
# with Reject/Quit/Skip/Accept -- then fall back to [D]ivert.
1208 queuekey = qu[0].upper()
1209 if queuekey in "RQSA":
1211 prompt = "[D]ivert, Skip, Quit ?"
1213 prompt = "[%s]%s, Skip, Quit ?" % (queuekey, qu[1:].lower())
1214 if Options["Automatic"]:
1217 print "ACCEPT\n" + reject_message + summary,
1218 prompt = "[A]ccept, Skip, Quit ?"
1219 if Options["Automatic"]:
# Keep prompting until a letter offered in the prompt is entered; an
# empty answer takes the bracketed [default] from the prompt.
1222 while prompt.find(answer) == -1:
1223 answer = utils.our_raw_input(prompt)
1224 m = re_default_answer.match(prompt)
1227 answer = answer[:1].upper()
# Dispatch on the chosen single-letter answer.
1230 os.chdir (pkg.directory)
1231 Upload.do_reject(0, reject_message)
1233 accept(summary, short_summary)
1234 remove_from_unchecked()
1235 elif answer == queuekey:
1236 queue_info[qu]["process"](summary, short_summary)
1237 remove_from_unchecked()
1241 def remove_from_unchecked():
# Delete the upload's files and its .changes from the original
# (unchecked) directory once it has been accepted or diverted elsewhere.
# NOTE(review): the per-file os.unlink inside the loop is not visible in
# this fragment.
1242 os.chdir (pkg.directory)
1243 for f in files.keys():
1245 os.unlink(pkg.changes_file)
1247 ################################################################################
def accept (summary, short_summary):
    """Accept the upload into the archive, then check its overrides.

    Thin wrapper around the global Upload object; both steps may emit
    mails depending on configuration.
    """
    Upload.accept(summary, short_summary)
    Upload.check_override()
1253 ################################################################################
1255 def move_to_dir (dest, perms=0660, changesperms=0664):
# Move the .changes file and every file of the upload into dest,
# applying changesperms to the .changes and perms to the rest.
# NOTE(review): the "for f in file_keys:" loop header is not visible in
# this fragment.
1256 utils.move (pkg.changes_file, dest, perms=changesperms)
1257 file_keys = files.keys()
1259 utils.move (f, dest, perms=perms)
1261 ################################################################################
1263 def is_unembargo ():
# Predicate: should this upload go to the unembargoed queue?  Matches
# uploads already recorded in the disembargo table, or sourceful uploads
# sitting in the disembargo queue directory (which get recorded now).
# NOTE(review): fragment -- the return statements, the os.chdir back to
# oldcwd and the INSERT's parameter dict are not visible here.
1264 cursor = DBConn().cursor()
1265 cursor.execute( "SELECT package FROM disembargo WHERE package = %(source)s AND version = %(version)s", changes )
1266 if cursor.fetchone():
# Resolve the disembargo queue path to compare against pkg.directory.
1269 oldcwd = os.getcwd()
1270 os.chdir(Cnf["Dir::Queue::Disembargo"])
1271 disdir = os.getcwd()
1274 if pkg.directory == disdir:
1275 if changes["architecture"].has_key("source"):
# Dry-run: report a match without writing to the database.
1276 if Options["No-Action"]: return 1
1278 cursor.execute( "INSERT INTO disembargo (package, version) VALUES ('%(package)s', '%(version)s')",
1280 cursor.execute( "COMMIT" )
1285 def queue_unembargo (summary, short_summary):
1286 print "Moving to UNEMBARGOED holding area."
1287 Logger.log(["Moving to unembargoed", pkg.changes_file])
1289 Upload.dump_vars(Cnf["Dir::Queue::Unembargoed"])
1290 move_to_dir(Cnf["Dir::Queue::Unembargoed"])
1291 Upload.queue_build("unembargoed", Cnf["Dir::Queue::Unembargoed"])
1293 # Check for override disparities
1294 Upload.Subst["__SUMMARY__"] = summary
1295 Upload.check_override()
1297 # Send accept mail, announce to lists, close bugs and check for
1298 # override disparities
1299 if not Cnf["Dinstall::Options::No-Mail"]:
1300 Upload.Subst["__SUITE__"] = ""
1301 mail_message = utils.TemplateSubst(Upload.Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
1302 utils.send_mail(mail_message)
1303 Upload.announce(short_summary, 1)
1305 ################################################################################
1308 # if embargoed queues are enabled always embargo
1311 def queue_embargo (summary, short_summary):
1312 print "Moving to EMBARGOED holding area."
1313 Logger.log(["Moving to embargoed", pkg.changes_file])
1315 Upload.dump_vars(Cnf["Dir::Queue::Embargoed"])
1316 move_to_dir(Cnf["Dir::Queue::Embargoed"])
1317 Upload.queue_build("embargoed", Cnf["Dir::Queue::Embargoed"])
1319 # Check for override disparities
1320 Upload.Subst["__SUMMARY__"] = summary
1321 Upload.check_override()
1323 # Send accept mail, announce to lists, close bugs and check for
1324 # override disparities
1325 if not Cnf["Dinstall::Options::No-Mail"]:
1326 Upload.Subst["__SUITE__"] = ""
1327 mail_message = utils.TemplateSubst(Upload.Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
1328 utils.send_mail(mail_message)
1329 Upload.announce(short_summary, 1)
1331 ################################################################################
1333 def is_stableupdate ():
# Predicate: should this upload be held in the proposed-updates queue?
# Only uploads targeting proposed-updates qualify; a source already
# present in that suite need not be held again.
# NOTE(review): fragment -- the return statements and the 'suite' entry
# of the query parameter dict are not visible here.
1334 if not changes["distribution"].has_key("proposed-updates"):
1337 if not changes["architecture"].has_key("source"):
1338 pusuite = DBConn().get_suite_id("proposed-updates")
1339 cursor = DBConn().cursor()
1340 cursor.execute( """SELECT 1 FROM source s
1341 JOIN src_associations sa ON (s.id = sa.source)
1342 WHERE s.source = %(source)s
1343 AND s.version = %(version)s
1344 AND sa.suite = %(suite)s""",
1345 {'source' : changes['source'],
1346 'version' : changes['version'],
1349 if cursor.fetchone():
1350 # source is already in proposed-updates so no need to hold
1355 def do_stableupdate (summary, short_summary):
1356 print "Moving to PROPOSED-UPDATES holding area."
1357 Logger.log(["Moving to proposed-updates", pkg.changes_file])
1359 Upload.dump_vars(Cnf["Dir::Queue::ProposedUpdates"])
1360 move_to_dir(Cnf["Dir::Queue::ProposedUpdates"], perms=0664)
1362 # Check for override disparities
1363 Upload.Subst["__SUMMARY__"] = summary
1364 Upload.check_override()
1366 ################################################################################
1368 def is_oldstableupdate ():
# Predicate: should this upload be held in oldstable-proposed-updates?
# Same logic as is_stableupdate(), against the oldstable suite.
# NOTE(review): fragment -- the return statements and the 'suite' entry
# of the query parameter dict are not visible here.
1369 if not changes["distribution"].has_key("oldstable-proposed-updates"):
1372 if not changes["architecture"].has_key("source"):
1373 pusuite = DBConn().get_suite_id("oldstable-proposed-updates")
1374 cursor = DBConn().cursor()
1375 cursor.execute( """SELECT 1 FROM source s
1376 JOIN src_associations sa ON (s.id = sa.source)
1377 WHERE s.source = %(source)s
1378 AND s.version = %(version)s
1379 AND sa.suite = %(suite)s""",
1380 {'source' : changes['source'],
1381 'version' : changes['version'],
1383 if cursor.fetchone():
1388 def do_oldstableupdate (summary, short_summary):
1389 print "Moving to OLDSTABLE-PROPOSED-UPDATES holding area."
1390 Logger.log(["Moving to oldstable-proposed-updates", pkg.changes_file])
1392 Upload.dump_vars(Cnf["Dir::Queue::OldProposedUpdates"])
1393 move_to_dir(Cnf["Dir::Queue::OldProposedUpdates"], perms=0664)
1395 # Check for override disparities
1396 Upload.Subst["__SUMMARY__"] = summary
1397 Upload.check_override()
1399 ################################################################################
1401 def is_autobyhand ():
# Predicate: can every BYHAND file be processed automatically by a
# script configured under AutomaticByHandPackages?
# NOTE(review): fragment -- the initialisation of any_auto/all_auto and
# the flag-resetting/continue lines on the failure paths are not visible.
1404 for f in files.keys():
1405 if files[f].has_key("byhand"):
1408 # filename is of form "PKG_VER_ARCH.EXT" where PKG, VER and ARCH
1409 # don't contain underscores, and ARCH doesn't contain dots.
1410 # further VER matches the .changes Version:, and ARCH should be in
1411 # the .changes Architecture: list.
1412 if f.count("_") < 2:
1416 (pckg, ver, archext) = f.split("_", 2)
1417 if archext.count(".") < 1 or changes["version"] != ver:
# The package must be configured for auto-byhand and must belong to this
# upload's source package.
1421 ABH = Cnf.SubTree("AutomaticByHandPackages")
1422 if not ABH.has_key(pckg) or \
1423 ABH["%s::Source" % (pckg)] != changes["source"]:
1424 print "not match %s %s" % (pckg, changes["source"])
1428 (arch, ext) = archext.split(".", 1)
1429 if arch not in changes["architecture"]:
# Remember how to process this file later in do_autobyhand().
1433 files[f]["byhand-arch"] = arch
1434 files[f]["byhand-script"] = ABH["%s::Script" % (pckg)]
# True only if there was at least one BYHAND file and all of them
# qualified for automatic processing.
1436 return any_auto and all_auto
1438 def do_autobyhand (summary, short_summary):
# Run the configured byhand script on each auto-byhand file; accept the
# upload if everything was processed, otherwise fall back to the manual
# BYHAND queue.
# NOTE(review): fragment -- several control-flow lines (continue, the
# result check, the byhand_left bookkeeping) are not visible here.
1439 print "Attempting AUTOBYHAND."
1441 for f in files.keys():
1443 if not files[f].has_key("byhand"):
1445 if not files[f].has_key("byhand-script"):
1449 os.system("ls -l %s" % byhandfile)
# Script invocation: SCRIPT FILE VERSION ARCH CHANGES_FILE
1450 result = os.system("%s %s %s %s %s" % (
1451 files[f]["byhand-script"], byhandfile,
1452 changes["version"], files[f]["byhand-arch"],
1453 os.path.abspath(pkg.changes_file)))
1455 os.unlink(byhandfile)
1458 print "Error processing %s, left as byhand." % (f)
# Any leftover byhand file sends the whole upload to manual BYHAND.
1462 do_byhand(summary, short_summary)
1464 accept(summary, short_summary)
1466 ################################################################################
# Fragment of is_byhand(): true when any file is flagged BYHAND.
# NOTE(review): the enclosing def and the return statements are not
# visible here.
1469 for f in files.keys():
1470 if files[f].has_key("byhand"):
1474 def do_byhand (summary, short_summary):
1475 print "Moving to BYHAND holding area."
1476 Logger.log(["Moving to byhand", pkg.changes_file])
1478 Upload.dump_vars(Cnf["Dir::Queue::Byhand"])
1479 move_to_dir(Cnf["Dir::Queue::Byhand"])
1481 # Check for override disparities
1482 Upload.Subst["__SUMMARY__"] = summary
1483 Upload.check_override()
1485 ################################################################################
# Fragment of is_new(): true when any file is flagged NEW (i.e. it has
# no override entry yet).  NOTE(review): the enclosing def and the
# return statements are not visible here.
1488 for f in files.keys():
1489 if files[f].has_key("new"):
1493 def acknowledge_new (summary, short_summary):
1494 Subst = Upload.Subst
1496 print "Moving to NEW holding area."
1497 Logger.log(["Moving to new", pkg.changes_file])
1499 Upload.dump_vars(Cnf["Dir::Queue::New"])
1500 move_to_dir(Cnf["Dir::Queue::New"], perms=0640, changesperms=0644)
1502 if not Options["No-Mail"]:
1503 print "Sending new ack."
1504 Subst["__SUMMARY__"] = summary
1505 new_ack_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.new")
1506 utils.send_mail(new_ack_message)
1508 ################################################################################
1510 # reprocess is necessary for the case of foo_1.2-1 and foo_1.2-2 in
1511 # Incoming. -1 will reference the .orig.tar.gz, but -2 will not.
1512 # Upload.check_dsc_against_db() can find the .orig.tar.gz but it will
1513 # not have processed it during it's checks of -2. If -1 has been
1514 # deleted or otherwise not checked by 'dak process-unchecked', the
1515 # .orig.tar.gz will not have been checked at all. To get round this,
1516 # we force the .orig.tar.gz into the .changes structure and reprocess
1517 # the .changes file.
1519 def process_it (changes_file):
# Process one .changes file end-to-end: copy it into the holding area,
# verify its GPG signature, run the battery of checks, then decide on an
# action.  NOTE(review): fragment -- the reprocess loop, several check
# calls, the action() call and the enclosing try/except are not visible.
1520 global reprocess, reject_message
1522 # Reset some globals
1525 # Some defaults in case we can't fully process the .changes file
1526 changes["maintainer2047"] = Cnf["Dinstall::MyEmailAddress"]
1527 changes["changedby2047"] = Cnf["Dinstall::MyEmailAddress"]
1530 # Absolutize the filename to avoid the requirement of being in the
1531 # same directory as the .changes file.
1532 pkg.changes_file = os.path.abspath(changes_file)
1534 # Remember where we are so we can come back after cd-ing into the
1535 # holding directory.
1536 pkg.directory = os.getcwd()
1539 # If this is the Real Thing(tm), copy things into a private
1540 # holding directory first to avoid replacable file races.
1541 if not Options["No-Action"]:
1542 os.chdir(Cnf["Dir::Queue::Holding"])
1543 copy_to_holding(pkg.changes_file)
1544 # Relativize the filename so we use the copy in holding
1545 # rather than the original...
1546 pkg.changes_file = os.path.basename(pkg.changes_file)
# Signature check first; all further checks only run on a signed upload.
1547 changes["fingerprint"] = utils.check_signature(pkg.changes_file, reject)
1548 if changes["fingerprint"]:
1549 valid_changes_p = check_changes()
1554 check_distributions()
1556 valid_dsc_p = check_dsc()
1562 check_signed_by_key()
1563 Upload.update_subst(reject_message)
# Any unexpected exception is reported but must not kill the run.
1569 traceback.print_exc(file=sys.stderr)
1572 # Restore previous WD
1573 os.chdir(pkg.directory)
1575 ###############################################################################
# NOTE(review): fragment of main() -- the enclosing def and several
# intermediate lines (the else: branches, try: headers, cleanup calls)
# are not visible here.
1578 global Cnf, Options, Logger
1580 changes_files = init()
1582 # -n/--dry-run invalidates some other options which would involve things happening
1583 if Options["No-Action"]:
1584 Options["Automatic"] = ""
1586 # Ensure all the arguments we were given are .changes files
# NOTE(review): list.remove() while iterating the same list skips the
# element after each removal -- two consecutive non-.changes arguments
# would leave the second one in place.
1587 for f in changes_files:
1588 if not f.endswith(".changes"):
1589 utils.warn("Ignoring '%s' because it's not a .changes file." % (f))
1590 changes_files.remove(f)
1592 if changes_files == []:
1593 if Cnf["Dinstall::Options::Directory"] == "":
1594 utils.fubar("Need at least one .changes file as an argument.")
1598 # Check that we aren't going to clash with the daily cron job
1600 if not Options["No-Action"] and os.path.exists("%s/daily.lock" % (Cnf["Dir::Lock"])) and not Options["No-Lock"]:
1601 utils.fubar("Archive maintenance in progress. Try again later.")
1603 # Obtain lock if not in no-action mode and initialize the log
1605 if not Options["No-Action"]:
1606 lock_fd = os.open(Cnf["Dinstall::LockFile"], os.O_RDWR | os.O_CREAT)
1608 fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
# A held lock means another 'dak process-unchecked' is already running.
1610 if errno.errorcode[e.errno] == 'EACCES' or errno.errorcode[e.errno] == 'EAGAIN':
1611 utils.fubar("Couldn't obtain lock; assuming another 'dak process-unchecked' is already running.")
1614 Logger = Upload.Logger = logging.Logger(Cnf, "process-unchecked")
1616 # debian-{devel-,}-changes@lists.debian.org toggles writes access based on this header
1617 bcc = "X-DAK: dak process-unchecked\nX-Katie: $Revision: 1.65 $"
1618 if Cnf.has_key("Dinstall::Bcc"):
1619 Upload.Subst["__BCC__"] = bcc + "\nBcc: %s" % (Cnf["Dinstall::Bcc"])
1621 Upload.Subst["__BCC__"] = bcc
1624 # Sort the .changes files so that we process sourceful ones first
1625 changes_files.sort(utils.changes_compare)
1627 # Process the changes files
1628 for changes_file in changes_files:
1629 print "\n" + changes_file
1631 process_it (changes_file)
# Final accounting: report totals unless this was a dry run.
1633 if not Options["No-Action"]:
1636 accept_count = Upload.accept_count
1637 accept_bytes = Upload.accept_bytes
1640 if accept_count > 1:
1642 print "Accepted %d package %s, %s." % (accept_count, sets, utils.size_type(int(accept_bytes)))
1643 Logger.log(["total",accept_count,accept_bytes])
1645 if not Options["No-Action"]:
1648 ################################################################################
1650 if __name__ == '__main__':