3 """ Checks Debian packages from Incoming """
4 # Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 2 of the License, or
9 # (at your option) any later version.
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 # Originally based on dinstall by Guy Maor <maor@debian.org>
22 ################################################################################
24 # Computer games don't affect kids. I mean if Pacman affected our generation as
25 # kids, we'd all run around in a darkened room munching pills and listening to
29 ################################################################################
44 from debian_bundle import deb822
45 from daklib.dbconn import DBConn
46 from daklib.binary import Binary
47 from daklib import logging
48 from daklib import queue
49 from daklib import utils
50 from daklib.dak_exceptions import *
51 from daklib.regexes import re_valid_version, re_valid_pkg_name, re_changelog_versions, \
52 re_strip_revision, re_strip_srcver, re_spacestrip, \
53 re_isanum, re_no_epoch, re_no_revision, re_taint_free, \
54 re_isadeb, re_extract_src_version, re_issource, re_default_answer
58 ################################################################################
61 ################################################################################
72 # Aliases to the real vars in the Upload class; hysterical raisins.
80 ###############################################################################
# NOTE(review): interior of the command-line initialisation routine; the
# enclosing 'def' line (and the dsc/pkg aliases) are elided in this extract.
global Cnf, Options, Upload, changes, dsc, dsc_files, files, pkg

# Load the dak configuration (apt_pkg-style ISC configuration file).
Cnf = apt_pkg.newConfiguration()
apt_pkg.ReadConfigFileISC(Cnf,utils.which_conf_file())

# Command-line flags, mapped onto Dinstall::Options::* configuration keys.
Arguments = [('a',"automatic","Dinstall::Options::Automatic"),
             ('h',"help","Dinstall::Options::Help"),
             ('n',"no-action","Dinstall::Options::No-Action"),
             ('p',"no-lock", "Dinstall::Options::No-Lock"),
             ('s',"no-mail", "Dinstall::Options::No-Mail"),
             ('d',"directory", "Dinstall::Options::Directory", "HasArg")]

# Pre-seed every option key so later lookups never KeyError
# (empty string means "unset").
for i in ["automatic", "help", "no-action", "no-lock", "no-mail",
          "override-distribution", "version", "directory"]:
    Cnf["Dinstall::Options::%s" % (i)] = ""

# Anything left on the command line after option parsing is a .changes file.
changes_files = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
Options = Cnf.SubTree("Dinstall::Options")

# If we have a directory flag, use it to find our files
if Cnf["Dinstall::Options::Directory"] != "":
    # Note that we clobber the list of files we were given in this case
    # so warn if the user has done both
    if len(changes_files) > 0:
        utils.warn("Directory provided so ignoring files given on command line")

    changes_files = utils.get_changes_files(Cnf["Dinstall::Options::Directory"])

Upload = queue.Upload(Cnf)

# Module-level aliases into the Upload's package state
# ("hysterical raisins" -- see comment above).
changes = Upload.pkg.changes
dsc_files = Upload.pkg.dsc_files
files = Upload.pkg.files
126 ################################################################################
def usage (exit_code=0):
    """Print a brief usage summary for dinstall and terminate the
    process with *exit_code* (0 by default).

    Fix: the function previously fell off the end without ever using
    exit_code; it now exits as its callers (and its name) expect.
    The print(...) form works under both Python 2 and 3."""
    print("""Usage: dinstall [OPTION]... [CHANGES]...
  -a, --automatic           automatic run
  -h, --help                show this help and exit.
  -n, --no-action           don't do anything
  -p, --no-lock             don't check lockfile !! for cron.daily only !!
  -s, --no-mail             don't send any mail
  -V, --version             display the version number and exit""")
    sys.exit(exit_code)
138 ################################################################################
def reject (str, prefix="Rejected: "):
    """Accumulate one rejection line (prefix + message + newline) onto
    the module-global reject_message buffer.

    The parameter is named 'str' (shadowing the builtin) for historical
    compatibility -- callers may pass it by keyword."""
    global reject_message
    reject_message = reject_message + prefix + str + "\n"
145 ################################################################################
def copy_to_holding(filename):
    # Copy 'filename' into the Dir::Queue::Holding area and record it in
    # the in_holding map so clean_holding() can remove it later.
    # NOTE(review): the 'try:' / 'except OSError/IOError' headers around
    # os.open() and shutil.copy() are elided in this extract -- the errno
    # checks below belong inside those (missing) handlers; 'e' is the
    # caught exception.
    base_filename = os.path.basename(filename)

    dest = Cnf["Dir::Queue::Holding"] + '/' + base_filename

    # O_EXCL guarantees we never silently clobber a file already in holding.
    fd = os.open(dest, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0640)

    # Shouldn't happen, but will if, for example, someone lists a
    # file twice in the .changes.
    if errno.errorcode[e.errno] == 'EEXIST':
        reject("%s: already exists in holding area; can not overwrite." % (base_filename))

    shutil.copy(filename, dest)

    # In either case (ENOENT or EACCES) we want to remove the
    # O_CREAT | O_EXCLed ghost file, so add the file to the list
    # of 'in holding' even if it's not the real file.
    if errno.errorcode[e.errno] == 'ENOENT':
        reject("%s: can not copy to holding area: file not found." % (base_filename))

    elif errno.errorcode[e.errno] == 'EACCES':
        reject("%s: can not copy to holding area: read permission denied." % (base_filename))

    in_holding[base_filename] = ""
182 ################################################################################
# NOTE(review): interior of clean_holding(); the 'def' line and the
# actual unlink / chdir-back lines are elided in this extract.
os.chdir(Cnf["Dir::Queue::Holding"])
for f in in_holding.keys():
    if os.path.exists(f):
        # Holding entries are bare basenames; a path separator here
        # means something has gone badly wrong, so bail out hard.
        if f.find('/') != -1:
            utils.fubar("WTF? clean_holding() got a file ('%s') with / in it!" % (f))
198 ################################################################################
# NOTE(review): interior of check_changes(); the 'def' line, the 'try:'
# headers matching the 'except' clauses below, and several 'return'
# statements are elided in this extract.
filename = pkg.changes_file

# Parse the .changes field into a dictionary
changes.update(utils.parse_changes(filename))
except CantOpenError:
    reject("%s: can't read file." % (filename))
except ParseChangesError, line:
    reject("%s: parse error, can't grok: %s." % (filename, line))
except ChangesUnicodeError:
    reject("%s: changes file not proper utf-8" % (filename))

# Parse the Files field from the .changes into another dictionary
files.update(utils.build_file_list(changes))
except ParseChangesError, line:
    reject("%s: parse error, can't grok: %s." % (filename, line))
except UnknownFormatError, format:
    reject("%s: unknown format '%s'." % (filename, format))

# Check for mandatory fields
for i in ("source", "binary", "architecture", "version", "distribution",
          "maintainer", "files", "changes", "description"):
    if not changes.has_key(i):
        reject("%s: Missing mandatory field `%s'." % (filename, i))
        return 0    # Avoid <undef> errors during later tests

# Strip a source version in brackets from the source field
if re_strip_srcver.search(changes["source"]):
    changes["source"] = re_strip_srcver.sub('', changes["source"])

# Ensure the source field is a valid package name.
if not re_valid_pkg_name.match(changes["source"]):
    reject("%s: invalid source name '%s'." % (filename, changes["source"]))

# Split multi-value fields into a lower-level dictionary
# (NOTE(review): the split/assignment lines for 'o' are elided here).
for i in ("architecture", "distribution", "binary", "closes"):
    o = changes.get(i, "")

# Fix the Maintainer: field to be RFC822/2047 compatible
(changes["maintainer822"], changes["maintainer2047"],
 changes["maintainername"], changes["maintaineremail"]) = \
    utils.fix_maintainer (changes["maintainer"])
except ParseMaintError, msg:
    reject("%s: Maintainer field ('%s') failed to parse: %s" \
           % (filename, changes["maintainer"], msg))

# ...likewise for the Changed-By: field if it exists.
(changes["changedby822"], changes["changedby2047"],
 changes["changedbyname"], changes["changedbyemail"]) = \
    utils.fix_maintainer (changes.get("changed-by", ""))
except ParseMaintError, msg:
    # NOTE(review): the fallback tuple assignment is truncated here --
    # its right-hand side is missing before the reject() call.
    (changes["changedby822"], changes["changedby2047"],
     changes["changedbyname"], changes["changedbyemail"]) = \
    reject("%s: Changed-By field ('%s') failed to parse: %s" \
           % (filename, changes["changed-by"], msg))

# Ensure all the values in Closes: are numbers
if changes.has_key("closes"):
    for i in changes["closes"].keys():
        if re_isanum.match (i) == None:
            reject("%s: `%s' from Closes field isn't a number." % (filename, i))

# chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
changes["chopversion"] = re_no_epoch.sub('', changes["version"])
changes["chopversion2"] = re_no_revision.sub('', changes["chopversion"])

# Check there isn't already a changes file of the same name in one
# of the queue directories.
base_filename = os.path.basename(filename)
for d in [ "Accepted", "Byhand", "Done", "New", "ProposedUpdates", "OldProposedUpdates" ]:
    if os.path.exists(Cnf["Dir::Queue::%s" % (d) ]+'/'+base_filename):
        reject("%s: a file with this name already exists in the %s directory." % (base_filename, d))

# Check the .changes is non-empty
# (NOTE(review): the emptiness test guarding this reject is elided).
reject("%s: nothing to do (Files field is empty)." % (base_filename))
295 ################################################################################
def check_distributions():
    "Check and map the Distribution field of a .changes file."

    # NOTE(review): the lines that split each SuiteMappings entry into
    # 'args'/'mtype' and that extract 'suite' for the ignore/reject cases
    # are elided in this extract -- confirm against the full source.

    # Handle suite mappings
    for m in Cnf.ValueList("SuiteMappings"):
        # "map A B" / "silent-map A B": retarget uploads from suite A to B.
        if mtype == "map" or mtype == "silent-map":
            (source, dest) = args[1:3]
            if changes["distribution"].has_key(source):
                del changes["distribution"][source]
                changes["distribution"][dest] = 1
                if mtype != "silent-map":
                    reject("Mapping %s to %s." % (source, dest),"")
            # Keep any distribution-version entry in step with the mapping.
            if changes.has_key("distribution-version"):
                if changes["distribution-version"].has_key(source):
                    changes["distribution-version"][source]=dest
        # "map-unreleased A B": only remap when the upload contains an
        # architecture not yet released in suite A.
        elif mtype == "map-unreleased":
            (source, dest) = args[1:3]
            if changes["distribution"].has_key(source):
                for arch in changes["architecture"].keys():
                    if arch not in DBConn().get_suite_architectures(source):
                        reject("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch),"")
                        del changes["distribution"][source]
                        changes["distribution"][dest] = 1
        # "ignore A": silently drop suite A as a target (with a warning).
        elif mtype == "ignore":
            if changes["distribution"].has_key(suite):
                del changes["distribution"][suite]
                reject("Ignoring %s as a target suite." % (suite), "Warning: ")
        # "reject A": uploads targeting suite A are refused outright.
        elif mtype == "reject":
            if changes["distribution"].has_key(suite):
                reject("Uploads to %s are not accepted." % (suite))
        elif mtype == "propup-version":
            # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes
            # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
            if changes["distribution"].has_key(args[1]):
                changes.setdefault("distribution-version", {})
                for suite in args[2:]: changes["distribution-version"][suite]=suite

    # Ensure there is (still) a target distribution
    if changes["distribution"].keys() == []:
        reject("no valid distribution.")

    # Ensure target distributions exist
    for suite in changes["distribution"].keys():
        if not Cnf.has_key("Suite::%s" % (suite)):
            reject("Unknown distribution `%s'." % (suite))
349 ################################################################################
# NOTE(review): interior of check_files(); the 'def' line, the
# "for f in file_keys:" loop header, several try/except headers,
# else-branches, 'continue'/'break' statements and flag initialisations
# (has_binaries/has_source/dsc_file_exists) are elided in this extract.
# The indentation below mirrors only the visible structure.
archive = utils.where_am_i()
file_keys = files.keys()

# if reprocess is 2 we've already done this and we're checking
# things again for the new .orig.tar.gz.
# [Yes, I'm fully aware of how disgusting this is]
if not Options["No-Action"] and reprocess < 2:
    os.chdir(pkg.directory)

# Check there isn't already a .changes or .dak file of the same name in
# the proposed-updates "CopyChanges" or "CopyDotDak" storage directories.
# [NB: this check must be done post-suite mapping]
base_filename = os.path.basename(pkg.changes_file)
dot_dak_filename = base_filename[:-8]+".dak"
for suite in changes["distribution"].keys():
    copychanges = "Suite::%s::CopyChanges" % (suite)
    if Cnf.has_key(copychanges) and \
           os.path.exists(Cnf[copychanges]+"/"+base_filename):
        reject("%s: a file with this name already exists in %s" \
               % (base_filename, Cnf[copychanges]))

    copy_dot_dak = "Suite::%s::CopyDotDak" % (suite)
    if Cnf.has_key(copy_dot_dak) and \
           os.path.exists(Cnf[copy_dot_dak]+"/"+dot_dak_filename):
        reject("%s: a file with this name already exists in %s" \
               % (dot_dak_filename, Cnf[copy_dot_dak]))

cursor = DBConn().cursor()
# Check for packages that have moved from one component to another
# STU: this should probably be changed to not join on architecture, suite tables but instead to used their cached name->id mappings from DBConn
DBConn().prepare("moved_pkg_q", """
    PREPARE moved_pkg_q(text,text,text) AS
    SELECT c.name FROM binaries b, bin_associations ba, suite s, location l,
    component c, architecture a, files f
    WHERE b.package = $1 AND s.suite_name = $2
      AND (a.arch_string = $3 OR a.arch_string = 'all')
      AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id
      AND f.location = l.id
      AND l.component = c.id
      AND b.file = f.id""")

# NOTE(review): everything below down to the final has-source checks ran
# inside a per-file loop (over file_keys, loop variable 'f') whose header
# is elided here.
    # Ensure the file does not already exist in one of the accepted directories
    for d in [ "Accepted", "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
        if not Cnf.has_key("Dir::Queue::%s" % (d)): continue
        if os.path.exists(Cnf["Dir::Queue::%s" % (d) ] + '/' + f):
            reject("%s file already exists in the %s directory." % (f, d))
    if not re_taint_free.match(f):
        reject("!!WARNING!! tainted filename: '%s'." % (f))
    # Check the file is readable
    if os.access(f, os.R_OK) == 0:
        # When running in -n, copy_to_holding() won't have
        # generated the reject_message, so we need to.
        if Options["No-Action"]:
            if os.path.exists(f):
                reject("Can't read `%s'. [permission denied]" % (f))
            # NOTE(review): the 'else:' between these two rejects is elided.
            reject("Can't read `%s'. [file not found]" % (f))
        files[f]["type"] = "unreadable"
    # If it's byhand skip remaining checks
    if files[f]["section"] == "byhand" or files[f]["section"][:4] == "raw-":
        files[f]["byhand"] = 1
        files[f]["type"] = "byhand"
    # Checks for a binary package...
    elif re_isadeb.match(f):
        files[f]["type"] = "deb"

        # Extract package control information
        # NOTE(review): the try/except around debExtractControl is elided;
        # the reject below belongs to that handler.
        deb_file = utils.open_file(f)
        control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
        reject("%s: debExtractControl() raised %s." % (f, sys.exc_type))
        # Can't continue, none of the checks on control would work.

        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) == None:
                reject("%s: No %s field in control." % (f, field))

        # Ensure the package name matches the one given in the .changes
        if not changes["binary"].has_key(control.Find("Package", "")):
            reject("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            reject("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            reject("%s: invalid version number '%s'." % (f, version))

        # Ensure the architecture of the .deb is one we know about.
        default_suite = Cnf.get("Dinstall::DefaultSuite", "Unstable")
        architecture = control.Find("Architecture")
        upload_suite = changes["distribution"].keys()[0]
        if architecture not in DBConn().get_suite_architectures(default_suite) and architecture not in DBConn().get_suite_architectures(upload_suite):
            reject("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not changes["architecture"].has_key(architecture):
            reject("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

        # Sanity-check the Depends field
        # NOTE(review): the emptiness test guarding this reject is elided.
        depends = control.Find("Depends")
        reject("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        # NOTE(review): guards around the Provides checks are elided too.
        provides = control.Find("Provides")
        provide = re_spacestrip.sub('', provides)
        reject("%s: Provides field is empty." % (f))
        prov_list = provide.split(",")
        for prov in prov_list:
            if not re_valid_pkg_name.match(prov):
                reject("%s: Invalid Provides field content %s." % (f, prov))

        # Check the section & priority match those given in the .changes (non-fatal)
        if control.Find("Section") and files[f]["section"] != "" and files[f]["section"] != control.Find("Section"):
            reject("%s control file lists section as `%s', but changes file has `%s'." % (f, control.Find("Section", ""), files[f]["section"]), "Warning: ")
        if control.Find("Priority") and files[f]["priority"] != "" and files[f]["priority"] != control.Find("Priority"):
            reject("%s control file lists priority as `%s', but changes file has `%s'." % (f, control.Find("Priority", ""), files[f]["priority"]),"Warning: ")

        # Cache the control fields we'll need later.
        files[f]["package"] = package
        files[f]["architecture"] = architecture
        files[f]["version"] = version
        files[f]["maintainer"] = control.Find("Maintainer", "")
        if f.endswith(".udeb"):
            files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            files[f]["dbtype"] = "deb"
        # NOTE(review): the 'else:' before this reject is elided.
        reject("%s is neither a .deb or a .udeb." % (f))
        files[f]["source"] = control.Find("Source", files[f]["package"])
        # Get the source version
        source = files[f]["source"]
        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            source_version = m.group(2)
        if not source_version:
            source_version = files[f]["version"]
        files[f]["source package"] = source
        files[f]["source version"] = source_version

        # Ensure the filename matches the contents of the .deb
        m = re_isadeb.match(f)
        # package name
        file_package = m.group(1)
        if files[f]["package"] != file_package:
            reject("%s: package part of filename (%s) does not match package name in the %s (%s)." % (f, file_package, files[f]["dbtype"], files[f]["package"]))
        epochless_version = re_no_epoch.sub('', control.Find("Version"))
        # version
        file_version = m.group(2)
        if epochless_version != file_version:
            reject("%s: version part of filename (%s) does not match package version in the %s (%s)." % (f, file_version, files[f]["dbtype"], epochless_version))
        # architecture
        file_architecture = m.group(3)
        if files[f]["architecture"] != file_architecture:
            reject("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % (f, file_architecture, files[f]["dbtype"], files[f]["architecture"]))

        # Check for existent source
        source_version = files[f]["source version"]
        source_package = files[f]["source package"]
        if changes["architecture"].has_key("source"):
            if source_version != changes["version"]:
                reject("source version (%s) for %s doesn't match changes version %s." % (source_version, f, changes["version"]))

        # Check in the SQL database
        if not Upload.source_exists(source_package, source_version, changes["distribution"].keys()):
            # Check in one of the other directories
            source_epochless_version = re_no_epoch.sub('', source_version)
            dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
            if os.path.exists(Cnf["Dir::Queue::Byhand"] + '/' + dsc_filename):
                files[f]["byhand"] = 1
            elif os.path.exists(Cnf["Dir::Queue::New"] + '/' + dsc_filename):
                # NOTE(review): this branch's body and the 'else:' that
                # introduces the queue scan below are elided; the scan
                # presumably sets dsc_file_exists on a hit.
                for myq in ["Accepted", "Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
                    if Cnf.has_key("Dir::Queue::%s" % (myq)):
                        if os.path.exists(Cnf["Dir::Queue::"+myq] + '/' + dsc_filename):

            if not dsc_file_exists:
                reject("no source found for %s %s (%s)." % (source_package, source_version, f))
        # Check the version and for file overwrites
        reject(Upload.check_binary_against_db(f),"")

        Binary(f, reject).scan_package()

    # Checks for a source package...
        # NOTE(review): the 'else:' / 'if m:' guards are elided here.
        m = re_issource.match(f)
        files[f]["package"] = m.group(1)
        files[f]["version"] = m.group(2)
        files[f]["type"] = m.group(3)

        # Ensure the source package name matches the Source field in the .changes
        if changes["source"] != files[f]["package"]:
            reject("%s: changes file doesn't say %s for Source" % (f, files[f]["package"]))

        # Ensure the source version matches the version in the .changes file
        if files[f]["type"] == "orig.tar.gz":
            changes_version = changes["chopversion2"]
        # NOTE(review): the 'else:' before this assignment is elided.
            changes_version = changes["chopversion"]
        if changes_version != files[f]["version"]:
            reject("%s: should be %s according to changes file." % (f, changes_version))

        # Ensure the .changes lists source in the Architecture field
        if not changes["architecture"].has_key("source"):
            reject("%s: changes file doesn't list `source' in Architecture field." % (f))

        # Check the signature of a .dsc file
        if files[f]["type"] == "dsc":
            dsc["fingerprint"] = utils.check_signature(f, reject)

        files[f]["architecture"] = "source"

    # Not a binary or source package?  Assume byhand...
        files[f]["byhand"] = 1
        files[f]["type"] = "byhand"

    # Per-suite file checks
    files[f]["oldfiles"] = {}
    for suite in changes["distribution"].keys():
        # Byhand files need no further per-suite validation.
        if files[f].has_key("byhand"):

        # Handle component mappings
        for m in Cnf.ValueList("ComponentMappings"):
            (source, dest) = m.split()
            if files[f]["component"] == source:
                files[f]["original component"] = source
                files[f]["component"] = dest

        # Ensure the component is valid for the target suite
        # NOTE(review): the has_key() lookup uses "Suite:%s::Components"
        # (single colon) while the ValueList uses "Suite::%s::Components" --
        # looks like a typo in the config key; confirm against the full source.
        if Cnf.has_key("Suite:%s::Components" % (suite)) and \
           files[f]["component"] not in Cnf.ValueList("Suite::%s::Components" % (suite)):
            reject("unknown component `%s' for suite `%s'." % (files[f]["component"], suite))

        # Validate the component
        component = files[f]["component"]
        component_id = DBConn().get_component_id(component)
        if component_id == -1:
            reject("file '%s' has unknown component '%s'." % (f, component))

        # See if the package is NEW
        if not Upload.in_override_p(files[f]["package"], files[f]["component"], suite, files[f].get("dbtype",""), f):

        # Validate the priority
        if files[f]["priority"].find('/') != -1:
            reject("file '%s' has invalid priority '%s' [contains '/']." % (f, files[f]["priority"]))

        # Determine the location
        location = Cnf["Dir::Pool"]
        location_id = DBConn().get_location_id(location, component, archive)
        if location_id == -1:
            reject("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (component, archive))
        files[f]["location id"] = location_id

        # Check the md5sum & size against existing files (if any)
        # NOTE(review): the files_id result checks guarding the two rejects
        # below are elided in this extract.
        files[f]["pool name"] = utils.poolify (changes["source"], files[f]["component"])
        files_id = DBConn().get_files_id(files[f]["pool name"] + f, files[f]["size"], files[f]["md5sum"], files[f]["location id"])
        reject("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
        reject("md5sum and/or size mismatch on existing copy of %s." % (f))
        files[f]["files id"] = files_id

        # Check for packages that have moved from one component to another
        files[f]['suite'] = suite
        cursor.execute("""EXECUTE moved_pkg_q( %(package)s, %(suite)s, %(architecture)s )""", ( files[f] ) )
        ql = cursor.fetchone()
        files[f]["othercomponents"] = ql[0][0]

# If the .changes file says it has source, it must have source.
if changes["architecture"].has_key("source"):
    # NOTE(review): the 'if not has_source:' guard is elided here.
    reject("no source found and Architecture line in changes mention source.")

if not has_binaries and Cnf.FindB("Dinstall::Reject::NoSourceOnly"):
    reject("source only uploads are not supported.")
669 ###############################################################################
# NOTE(review): interior of check_dsc(); the 'def' line, 'try:' headers
# matching the 'except' clauses, the dsc_filename bookkeeping and several
# guards/returns are elided in this extract.
# Ensure there is source to check
if not changes["architecture"].has_key("source"):

# Find the .dsc among the upload's files (there must be exactly one).
for f in files.keys():
    if files[f]["type"] == "dsc":
        reject("can not process a .changes file with multiple .dsc's.")

# If there isn't one, we have nothing to do. (We have reject()ed the upload already)
reject("source uploads must contain a dsc file")

# Parse the .dsc file
dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
except CantOpenError:
    # if not -n copy_to_holding() will have done this for us...
    if Options["No-Action"]:
        reject("%s: can't read file." % (dsc_filename))
except ParseChangesError, line:
    reject("%s: parse error, can't grok: %s." % (dsc_filename, line))
except InvalidDscError, line:
    reject("%s: syntax error on line %s." % (dsc_filename, line))
except ChangesUnicodeError:
    reject("%s: dsc file not proper utf-8." % (dsc_filename))

# Build up the file list of files mentioned by the .dsc
dsc_files.update(utils.build_file_list(dsc, is_a_dsc=1))
except NoFilesFieldError:
    reject("%s: no Files: field." % (dsc_filename))
except UnknownFormatError, format:
    reject("%s: unknown format '%s'." % (dsc_filename, format))
except ParseChangesError, line:
    reject("%s: parse error, can't grok: %s." % (dsc_filename, line))

# Enforce mandatory fields
for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
    if not dsc.has_key(i):
        reject("%s: missing mandatory field `%s'." % (dsc_filename, i))

# Validate the source and version fields
if not re_valid_pkg_name.match(dsc["source"]):
    reject("%s: invalid source name '%s'." % (dsc_filename, dsc["source"]))
if not re_valid_version.match(dsc["version"]):
    reject("%s: invalid version number '%s'." % (dsc_filename, dsc["version"]))

# Bumping the version number of the .dsc breaks extraction by stable's
# dpkg-source.  So let's not do that...
if dsc["format"] != "1.0":
    reject("%s: incompatible 'Format' version produced by a broken version of dpkg-dev 1.9.1{3,4}." % (dsc_filename))

# Validate the Maintainer field
utils.fix_maintainer (dsc["maintainer"])
except ParseMaintError, msg:
    reject("%s: Maintainer field ('%s') failed to parse: %s" \
           % (dsc_filename, dsc["maintainer"], msg))

# Validate the build-depends field(s)
for field_name in [ "build-depends", "build-depends-indep" ]:
    field = dsc.get(field_name)
    # Check for broken dpkg-dev lossage...
    if field.startswith("ARRAY"):
        reject("%s: invalid %s field produced by a broken version of dpkg-dev (1.10.11)" % (dsc_filename, field_name.title()))

    # Have apt try to parse them...
    apt_pkg.ParseSrcDepends(field)
    # NOTE(review): the 'except' clause this reject belongs to is elided.
    reject("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

# Ensure the version number in the .dsc matches the version number in the .changes
epochless_dsc_version = re_no_epoch.sub('', dsc["version"])
changes_version = files[dsc_filename]["version"]
if epochless_dsc_version != files[dsc_filename]["version"]:
    reject("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

# Ensure there is a .tar.gz in the .dsc file
# (NOTE(review): the has-tar flag bookkeeping is elided here.)
for f in dsc_files.keys():
    m = re_issource.match(f)
    reject("%s: %s in Files field not recognised as source." % (dsc_filename, f))
    if ftype == "orig.tar.gz" or ftype == "tar.gz":

reject("%s: no .tar.gz or .orig.tar.gz in 'Files' field." % (dsc_filename))

# Ensure source is newer than existing source in target suites
reject(Upload.check_source_against_db(dsc_filename),"")

(reject_msg, is_in_incoming) = Upload.check_dsc_against_db(dsc_filename)
reject(reject_msg, "")
# If the orig.tar.gz referenced by the .dsc is already in incoming, pull
# it into holding and register it under 'files' so later checks see it.
if not Options["No-Action"]:
    copy_to_holding(is_in_incoming)
orig_tar_gz = os.path.basename(is_in_incoming)
files[orig_tar_gz] = {}
files[orig_tar_gz]["size"] = os.stat(orig_tar_gz)[stat.ST_SIZE]
files[orig_tar_gz]["md5sum"] = dsc_files[orig_tar_gz]["md5sum"]
files[orig_tar_gz]["sha1sum"] = dsc_files[orig_tar_gz]["sha1sum"]
files[orig_tar_gz]["sha256sum"] = dsc_files[orig_tar_gz]["sha256sum"]
files[orig_tar_gz]["section"] = files[dsc_filename]["section"]
files[orig_tar_gz]["priority"] = files[dsc_filename]["priority"]
files[orig_tar_gz]["component"] = files[dsc_filename]["component"]
files[orig_tar_gz]["type"] = "orig.tar.gz"
800 ################################################################################
def get_changelog_versions(source_dir):
    """Extracts a the source package and (optionally) grabs the
    version history out of debian/changelog for the BTS."""
    # NOTE(review): several guard lines ('if m:', 'return', 'continue',
    # dsc_filename assignment, the 'if (result != 0):' around the
    # dpkg-source rejects) are elided in this extract.

    # Find the .dsc (again)
    for f in files.keys():
        if files[f]["type"] == "dsc":

    # If there isn't one, we have nothing to do. (We have reject()ed the upload already)

    # Create a symlink mirror of the source files in our temporary directory
    for f in files.keys():
        m = re_issource.match(f)
        src = os.path.join(source_dir, f)
        # If a file is missing for whatever reason, give up.
        if not os.path.exists(src):

        # The upload's own orig.tar.gz is handled below, not symlinked here.
        if ftype == "orig.tar.gz" and pkg.orig_tar_gz:

        dest = os.path.join(os.getcwd(), f)
        os.symlink(src, dest)

    # If the orig.tar.gz is not a part of the upload, create a symlink to the
    # existing copy (pkg.orig_tar_gz) instead.
        dest = os.path.join(os.getcwd(), os.path.basename(pkg.orig_tar_gz))
        os.symlink(pkg.orig_tar_gz, dest)

    # Extract the source: -sn = no copy of orig, -x = extract.
    cmd = "dpkg-source -sn -x %s" % (dsc_filename)
    (result, output) = commands.getstatusoutput(cmd)
    reject("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
    reject(utils.prefix_multi_line_string(output, " [dpkg-source output:] "), "")

    # Version history is only wanted when BTS version tracking is configured.
    if not Cnf.Find("Dir::Queue::BTSVersionTrack"):

    # Get the upstream version
    upstr_version = re_no_epoch.sub('', dsc["version"])
    if re_strip_revision.search(upstr_version):
        upstr_version = re_strip_revision.sub('', upstr_version)

    # Ensure the changelog file exists
    changelog_filename = "%s-%s/debian/changelog" % (dsc["source"], upstr_version)
    if not os.path.exists(changelog_filename):
        reject("%s: debian/changelog not found in extracted source." % (dsc_filename))

    # Parse the changelog
    dsc["bts changelog"] = ""
    changelog_file = utils.open_file(changelog_filename)
    for line in changelog_file.readlines():
        m = re_changelog_versions.match(line)
        dsc["bts changelog"] += line
    changelog_file.close()

    # Check we found at least one revision in the changelog
    if not dsc["bts changelog"]:
        reject("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
########################################
# NOTE(review): interior of the source-extraction check; the 'def' line,
# 'try:'/'except' headers, chdir calls and the guards around the fubar
# calls are elided in this extract.  Skip the whole check when:
# a) there's no source
# or b) reprocess is 2 - we will do this check next time when orig.tar.gz is in 'files'
# or c) the orig.tar.gz is MIA
if not changes["architecture"].has_key("source") or reprocess == 2 \
   or pkg.orig_tar_gz == -1:

tmpdir = utils.temp_dirname()

# Move into the temporary directory
# Get the changelog version history
get_changelog_versions(cwd)

# Move back and cleanup the temporary tree
shutil.rmtree(tmpdir)
# Anything other than a permissions problem is fatal; EACCES gets a
# second chance via the chmod fallback below.
if errno.errorcode[e.errno] != 'EACCES':
    utils.fubar("%s: couldn't remove tmp dir for source tree." % (dsc["source"]))

reject("%s: source tree could not be cleanly removed." % (dsc["source"]))
# We probably have u-r or u-w directories so chmod everything
# and try again.
cmd = "chmod -R u+rwx %s" % (tmpdir)
result = os.system(cmd)
utils.fubar("'%s' failed with result %s." % (cmd, result))
shutil.rmtree(tmpdir)
utils.fubar("%s: couldn't remove tmp dir for source tree." % (dsc["source"]))
910 ################################################################################
912 # FIXME: should be a debian specific check called from a hook
def check_urgency ():
    """Normalise the Urgency field of a source upload.

    Binary-only uploads are left untouched.  A missing urgency gets the
    configured default; the value is lower-cased, and anything outside
    Urgency::Valid is warned about and replaced with the default."""
    # Urgency is only meaningful when the upload carries source.
    if not changes["architecture"].has_key("source"):
        return
    if not changes.has_key("urgency"):
        changes["urgency"] = Cnf["Urgency::Default"]
    changes["urgency"] = changes["urgency"].lower()
    if changes["urgency"] not in Cnf.ValueList("Urgency::Valid"):
        reject("%s is not a valid urgency; it will be treated as %s by testing." % (changes["urgency"], Cnf["Urgency::Default"]), "Warning: ")
        changes["urgency"] = Cnf["Urgency::Default"]
923 ################################################################################
# NOTE(review): the "def check_hashes():" header and the loop body of the
# final for-statement are not visible in this listing.
# Verify that MD5 checksums and sizes of the uploaded files match what the
# .changes and .dsc files advertise.
926 utils.check_hash(".changes", files, "md5", apt_pkg.md5sum)
927 utils.check_size(".changes", files)
928 utils.check_hash(".dsc", dsc_files, "md5", apt_pkg.md5sum)
929 utils.check_size(".dsc", dsc_files)
931 # This is stupid API, but it'll have to do for now until
932 # we actually have proper abstraction
# ensure_hashes yields reject messages for any missing/mismatched hash.
933 for m in utils.ensure_hashes(changes, dsc, files, dsc_files):
936 ################################################################################
938 # Sanity check the time stamps of files inside debs.
939 # [Files in the near future cause ugly warnings and extreme time
940 # travel can cause errors on extraction]
# NOTE(review): this listing is elided -- the "class Tar:" header, try/except
# lines and the "if future_files/ancient_files:" guards are missing; only
# comments are added here.
942 def check_timestamps():
# Tar callback collector: records member names whose mtime falls outside
# the [past_cutoff, future_cutoff] window.
944 def __init__(self, future_cutoff, past_cutoff):
946 self.future_cutoff = future_cutoff
947 self.past_cutoff = past_cutoff
950 self.future_files = {}
951 self.ancient_files = {}
# Signature matches what apt_inst.debExtract passes per tar member.
953 def callback(self, Kind,Name,Link,Mode,UID,GID,Size,MTime,Major,Minor):
954 if MTime > self.future_cutoff:
955 self.future_files[Name] = MTime
956 if MTime < self.past_cutoff:
957 self.ancient_files[Name] = MTime
# Anything newer than now+grace or older than the configured cutoff year
# is considered suspicious.
960 future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
961 past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
962 tar = Tar(future_cutoff, past_cutoff)
963 for filename in files.keys():
964 if files[filename]["type"] == "deb":
967 deb_file = utils.open_file(filename)
968 apt_inst.debExtract(deb_file,tar.callback,"control.tar.gz")
971 apt_inst.debExtract(deb_file,tar.callback,"data.tar.gz")
972 except SystemError, e:
973 # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
974 if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
977 apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
# Report one example of each class of offender in the reject message.
980 future_files = tar.future_files.keys()
982 num_future_files = len(future_files)
983 future_file = future_files[0]
984 future_date = tar.future_files[future_file]
985 reject("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
986 % (filename, num_future_files, future_file,
987 time.ctime(future_date)))
989 ancient_files = tar.ancient_files.keys()
991 num_ancient_files = len(ancient_files)
992 ancient_file = ancient_files[0]
993 ancient_date = tar.ancient_files[ancient_file]
994 reject("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
995 % (filename, num_ancient_files, ancient_file,
996 time.ctime(ancient_date)))
998 reject("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1000 ################################################################################
def lookup_uid_from_fingerprint(fpr):
    """
    Return the uid, name, isdm for a given gpg fingerprint.

    @param fpr: a 40 byte GPG fingerprint

    @return (uid, name, isdm) from the database, or (None, None, False)
            when the fingerprint is unknown.
    """
    cursor = DBConn().cursor()
    # Use a bound parameter instead of string interpolation so the
    # fingerprint can never be interpreted as SQL.
    cursor.execute("SELECT u.uid, u.name, k.debian_maintainer FROM fingerprint f JOIN keyrings k ON (f.keyring=k.id), uid u WHERE f.uid = u.id AND f.fingerprint = %s", (fpr,))
    qs = cursor.fetchone()
    if qs:
        return qs
    return (None, None, False)
# Authorisation check: map the signing key to a known uid and decide
# whether that uid may perform this upload (full member, Debian
# Maintainer, or sponsored upload).
# NOTE(review): this listing is elided (several branches, fetch loops and
# `continue`/`break` lines are missing); only comments are added here.
1019 def check_signed_by_key():
1020 """Ensure the .changes is signed by an authorized uploader."""
1022 (uid, uid_name, is_dm) = lookup_uid_from_fingerprint(changes["fingerprint"])
1023 if uid_name == None:
1026 # match claimed name with actual name:
1028 # This is fundamentally broken but need us to refactor how we get
1029 # the UIDs/Fingerprints in order for us to fix it properly
1030 uid, uid_email = changes["fingerprint"], uid
1031 may_nmu, may_sponsor = 1, 1
1032 # XXX by default new dds don't have a fingerprint/uid in the db atm,
1033 # and can't get one in there if we don't allow nmu/sponsorship
1034 elif is_dm is False:
1035 # If is_dm is False, we allow full upload rights
1036 uid_email = "%s@debian.org" % (uid)
1037 may_nmu, may_sponsor = 1, 1
1039 # Assume limited upload rights unless we've discovered otherwise
1041 may_nmu, may_sponsor = 0, 0
# Sponsorship detection: the signer matches neither Maintainer nor
# Changed-By of the .changes.
1044 if uid_email in [changes["maintaineremail"], changes["changedbyemail"]]:
1046 elif uid_name in [changes["maintainername"], changes["changedbyname"]]:
1048 if uid_name == "": sponsored = 1
1051 if ("source" in changes["architecture"] and
1052 uid_email and utils.is_email_alias(uid_email)):
1053 sponsor_addresses = utils.gpg_get_key_addresses(changes["fingerprint"])
1054 if (changes["maintaineremail"] not in sponsor_addresses and
1055 changes["changedbyemail"] not in sponsor_addresses):
1056 changes["sponsoremail"] = uid_email
1058 if sponsored and not may_sponsor:
1059 reject("%s is not authorised to sponsor uploads" % (uid))
# DM path: the source package must carry DM-Upload-Allowed: yes in its
# most recent version and list this uploader in Maintainer/Uploaders.
1061 if not sponsored and not may_nmu:
1063 cursor.execute( "SELECT s.id, s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) WHERE s.source = %(source)s AND s.dm_upload_allowed = 'yes'", changes )
1065 highest_sid, highest_version = None, None
1067 should_reject = True
1069 si = cursor.fetchone()
1073 if highest_version == None or apt_pkg.VersionCompare(si[1], highest_version) == 1:
1075 highest_version = si[1]
1077 if highest_sid == None:
1078 reject("Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version" % changes["source"])
# NOTE(review): highest_sid is interpolated straight into the SQL below.
# It comes from the database (an integer id), so it is not attacker
# controlled, but a bound parameter would still be safer.
1081 cursor.execute("SELECT m.name FROM maintainer m WHERE m.id IN (SELECT su.maintainer FROM src_uploaders su JOIN source s ON (s.id = su.source) WHERE su.source = %s)" % (highest_sid))
1084 m = cursor.fetchone()
1088 (rfc822, rfc2047, name, email) = utils.fix_maintainer(m[0])
1089 if email == uid_email or name == uid_name:
1093 if should_reject == True:
1094 reject("%s is not in Maintainer or Uploaders of source package %s" % (uid, changes["source"]))
# A DM may not hijack binaries currently built from another source...
1096 for b in changes["binary"].keys():
1097 for suite in changes["distribution"].keys():
1098 suite_id = DBConn().get_suite_id(suite)
1100 cursor.execute("SELECT DISTINCT s.source FROM source s JOIN binaries b ON (s.id = b.source) JOIN bin_associations ba On (b.id = ba.bin) WHERE b.package = %(package)s AND ba.suite = %(suite)s" , {'package':b, 'suite':suite_id} )
1102 s = cursor.fetchone()
1106 if s[0] != changes["source"]:
1107 reject("%s may not hijack %s from source package %s in suite %s" % (uid, b, s, suite))
# ...nor upload BYHAND or NEW files.
1109 for f in files.keys():
1110 if files[f].has_key("byhand"):
1111 reject("%s may not upload BYHAND file %s" % (uid, f))
1112 if files[f].has_key("new"):
1113 reject("%s may not upload NEW file %s" % (uid, f))
1116 ################################################################################
1117 ################################################################################
1119 # If any file of an upload has a recent mtime then chances are good
1120 # the file is still being uploaded.
# NOTE(review): this listing is elided -- the accumulator initialisation,
# the for/try headers around the getmtime call and the final return are
# not visible; only comments are added here.
1122 def upload_too_new():
1124 # Move back to the original directory to get accurate time stamps
1126 os.chdir(pkg.directory)
# Consider every file of the upload: binaries, dsc files, and the
# .changes itself.
1127 file_list = pkg.files.keys()
1128 file_list.extend(pkg.dsc_files.keys())
1129 file_list.append(pkg.changes_file)
1132 last_modified = time.time()-os.path.getmtime(f)
# Anything modified within Dinstall::SkipTime seconds is "too new".
1133 if last_modified < int(Cnf["Dinstall::SkipTime"]):
1141 ################################################################################
# NOTE(review): this listing is elided -- the "def action():" header, the
# queue_info dict opener, several else/break lines and the final
# Quit/Skip handling are not visible; only comments are added here.
1144 # changes["distribution"] may not exist in corner cases
1145 # (e.g. unreadable changes files)
1146 if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
1147 changes["distribution"] = {}
1149 (summary, short_summary) = Upload.build_summaries()
# Dispatch table mapping queue names to a predicate ("is") and a handler
# ("process") -- checked in the order given by `queues` below.
1151 # q-unapproved hax0ring
1153 "New": { "is": is_new, "process": acknowledge_new },
1154 "Autobyhand" : { "is" : is_autobyhand, "process": do_autobyhand },
1155 "Byhand" : { "is": is_byhand, "process": do_byhand },
1156 "OldStableUpdate" : { "is": is_oldstableupdate,
1157 "process": do_oldstableupdate },
1158 "StableUpdate" : { "is": is_stableupdate, "process": do_stableupdate },
1159 "Unembargo" : { "is": is_unembargo, "process": queue_unembargo },
1160 "Embargo" : { "is": is_embargo, "process": queue_embargo },
1162 queues = [ "New", "Autobyhand", "Byhand" ]
1163 if Cnf.FindB("Dinstall::SecurityQueueHandling"):
1164 queues += [ "Unembargo", "Embargo" ]
1166 queues += [ "OldStableUpdate", "StableUpdate" ]
1168 (prompt, answer) = ("", "XXX")
1169 if Options["No-Action"] or Options["Automatic"]:
# A reject message marked "Rejected" wins; a too-new upload is skipped
# instead of being rejected so it can be retried on the next run.
1174 if reject_message.find("Rejected") != -1:
1175 if upload_too_new():
1176 print "SKIP (too new)\n" + reject_message,
1177 prompt = "[S]kip, Quit ?"
1179 print "REJECT\n" + reject_message,
1180 prompt = "[R]eject, Skip, Quit ?"
1181 if Options["Automatic"]:
# First matching queue predicate diverts the upload.
1186 if queue_info[q]["is"]():
1190 print "%s for %s\n%s%s" % (
1191 qu.upper(), ", ".join(changes["distribution"].keys()),
1192 reject_message, summary),
# Avoid single-letter prompts that would clash with the global
# R(eject)/Q(uit)/S(kip)/A(ccept) keys.
1193 queuekey = qu[0].upper()
1194 if queuekey in "RQSA":
1196 prompt = "[D]ivert, Skip, Quit ?"
1198 prompt = "[%s]%s, Skip, Quit ?" % (queuekey, qu[1:].lower())
1199 if Options["Automatic"]:
1202 print "ACCEPT\n" + reject_message + summary,
1203 prompt = "[A]ccept, Skip, Quit ?"
1204 if Options["Automatic"]:
# Interactive loop: keep asking until the answer is one of the offered
# keys; an empty answer takes the prompt's default.
1207 while prompt.find(answer) == -1:
1208 answer = utils.our_raw_input(prompt)
1209 m = re_default_answer.match(prompt)
1212 answer = answer[:1].upper()
1215 os.chdir (pkg.directory)
1216 Upload.do_reject(0, reject_message)
1218 accept(summary, short_summary)
1219 remove_from_unchecked()
1220 elif answer == queuekey:
1221 queue_info[qu]["process"](summary, short_summary)
1222 remove_from_unchecked()
# Delete the processed upload from the unchecked (incoming) directory.
# NOTE(review): the body of the for-loop is not visible in this listing --
# presumably it unlinks each file f; confirm against the full source.
1226 def remove_from_unchecked():
1227 os.chdir (pkg.directory)
1228 for f in files.keys():
1230 os.unlink(pkg.changes_file)
1232 ################################################################################
def accept (summary, short_summary):
    """Accept the upload, then look for override disparities."""
    Upload.accept(summary, short_summary)
    Upload.check_override()
1238 ################################################################################
# Move the .changes file and every file of the upload into dest.
# NOTE(review): the for-loop header over file_keys is not visible in this
# listing; only comments are added here.
# perms applies to the package files, changesperms to the .changes itself.
1240 def move_to_dir (dest, perms=0660, changesperms=0664):
1241 utils.move (pkg.changes_file, dest, perms=changesperms)
1242 file_keys = files.keys()
1244 utils.move (f, dest, perms=perms)
1246 ################################################################################
# Predicate: should this upload go to the unembargoed queue?
# NOTE(review): this listing is elided -- return statements, the dict
# argument of the INSERT and the restore-cwd code are not visible.
1248 def is_unembargo ():
1249 cursor = DBConn().cursor()
# Already recorded as disembargoed -> treat as unembargo again.
1250 cursor.execute( "SELECT package FROM disembargo WHERE package = %(source)s AND version = %(version)s", changes )
1251 if cursor.fetchone():
1254 oldcwd = os.getcwd()
1255 os.chdir(Cnf["Dir::Queue::Disembargo"])
1256 disdir = os.getcwd()
# Sourceful uploads dropped directly into the disembargo queue directory
# are recorded in the disembargo table (unless this is a dry run).
1259 if pkg.directory == disdir:
1260 if changes["architecture"].has_key("source"):
1261 if Options["No-Action"]: return 1
# NOTE(review): the placeholders below are wrapped in quotes inside the
# SQL; with bound parameters the quotes are normally unnecessary --
# verify against the driver's paramstyle.
1263 cursor.execute( "INSERT INTO disembargo (package, version) VALUES ('%(package)s', '%(version)s')",
1265 cursor.execute( "COMMIT" )
1270 def queue_unembargo (summary, short_summary):
1271 print "Moving to UNEMBARGOED holding area."
1272 Logger.log(["Moving to unembargoed", pkg.changes_file])
1274 Upload.dump_vars(Cnf["Dir::Queue::Unembargoed"])
1275 move_to_dir(Cnf["Dir::Queue::Unembargoed"])
1276 Upload.queue_build("unembargoed", Cnf["Dir::Queue::Unembargoed"])
1278 # Check for override disparities
1279 Upload.Subst["__SUMMARY__"] = summary
1280 Upload.check_override()
1282 # Send accept mail, announce to lists, close bugs and check for
1283 # override disparities
1284 if not Cnf["Dinstall::Options::No-Mail"]:
1285 Upload.Subst["__SUITE__"] = ""
1286 mail_message = utils.TemplateSubst(Upload.Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
1287 utils.send_mail(mail_message)
1288 Upload.announce(short_summary, 1)
1290 ################################################################################
1293 # if embargoed queues are enabled, always embargo
1296 def queue_embargo (summary, short_summary):
1297 print "Moving to EMBARGOED holding area."
1298 Logger.log(["Moving to embargoed", pkg.changes_file])
1300 Upload.dump_vars(Cnf["Dir::Queue::Embargoed"])
1301 move_to_dir(Cnf["Dir::Queue::Embargoed"])
1302 Upload.queue_build("embargoed", Cnf["Dir::Queue::Embargoed"])
1304 # Check for override disparities
1305 Upload.Subst["__SUMMARY__"] = summary
1306 Upload.check_override()
1308 # Send accept mail, announce to lists, close bugs and check for
1309 # override disparities
1310 if not Cnf["Dinstall::Options::No-Mail"]:
1311 Upload.Subst["__SUITE__"] = ""
1312 mail_message = utils.TemplateSubst(Upload.Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
1313 utils.send_mail(mail_message)
1314 Upload.announce(short_summary, 1)
1316 ################################################################################
# Predicate: should this upload be held for proposed-updates?
# NOTE(review): this listing is elided -- the return statements and the
# closing 'suite' entry of the query dict are not visible.
1318 def is_stableupdate ():
1319 if not changes["distribution"].has_key("proposed-updates"):
# Binary-only uploads: hold only if the source is not already in p-u.
1322 if not changes["architecture"].has_key("source"):
1323 pusuite = DBConn().get_suite_id("proposed-updates")
1324 cursor = DBConn().cursor()
1325 cursor.execute( """SELECT 1 FROM source s
1326 JOIN src_associations sa ON (s.id = sa.source)
1327 WHERE s.source = %(source)s
1328 AND s.version = %(version)s
1329 AND sa.suite = %(suite)s""",
1330 {'source' : changes['source'],
1331 'version' : changes['version'],
1334 if cursor.fetchone():
1335 # source is already in proposed-updates so no need to hold
1340 def do_stableupdate (summary, short_summary):
1341 print "Moving to PROPOSED-UPDATES holding area."
1342 Logger.log(["Moving to proposed-updates", pkg.changes_file])
1344 Upload.dump_vars(Cnf["Dir::Queue::ProposedUpdates"])
1345 move_to_dir(Cnf["Dir::Queue::ProposedUpdates"], perms=0664)
1347 # Check for override disparities
1348 Upload.Subst["__SUMMARY__"] = summary
1349 Upload.check_override()
1351 ################################################################################
# Predicate: should this upload be held for oldstable-proposed-updates?
# NOTE(review): this listing is elided -- the return statements and the
# closing 'suite' entry of the query dict are not visible.
1353 def is_oldstableupdate ():
1354 if not changes["distribution"].has_key("oldstable-proposed-updates"):
# Binary-only uploads: hold only if the source is not already in o-p-u.
1357 if not changes["architecture"].has_key("source"):
1358 pusuite = DBConn().get_suite_id("oldstable-proposed-updates")
1359 cursor = DBConn().cursor()
1360 cursor.execute( """SELECT 1 FROM source s
1361 JOIN src_associations sa ON (s.id = sa.source)
1362 WHERE s.source = %(source)s
1363 AND s.version = %(version)s
1364 AND sa.suite = %(suite)s""",
1365 {'source' : changes['source'],
1366 'version' : changes['version'],
1368 if cursor.fetchone():
1373 def do_oldstableupdate (summary, short_summary):
1374 print "Moving to OLDSTABLE-PROPOSED-UPDATES holding area."
1375 Logger.log(["Moving to oldstable-proposed-updates", pkg.changes_file])
1377 Upload.dump_vars(Cnf["Dir::Queue::OldProposedUpdates"])
1378 move_to_dir(Cnf["Dir::Queue::OldProposedUpdates"], perms=0664)
1380 # Check for override disparities
1381 Upload.Subst["__SUMMARY__"] = summary
1382 Upload.check_override()
1384 ################################################################################
# Predicate: can every BYHAND file be processed automatically?
# NOTE(review): this listing is elided -- the any_auto/all_auto
# initialisations and the "all_auto = 0; continue" failure branches are
# not visible; only comments are added here.
1386 def is_autobyhand ():
1389 for f in files.keys():
1390 if files[f].has_key("byhand"):
1393 # filename is of form "PKG_VER_ARCH.EXT" where PKG, VER and ARCH
1394 # don't contain underscores, and ARCH doesn't contain dots.
1395 # further VER matches the .changes Version:, and ARCH should be in
1396 # the .changes Architecture: list.
1397 if f.count("_") < 2:
1401 (pckg, ver, archext) = f.split("_", 2)
1402 if archext.count(".") < 1 or changes["version"] != ver:
# The package must be configured for automatic byhand handling and its
# configured Source: must match this upload's source.
1406 ABH = Cnf.SubTree("AutomaticByHandPackages")
1407 if not ABH.has_key(pckg) or \
1408 ABH["%s::Source" % (pckg)] != changes["source"]:
1409 print "not match %s %s" % (pckg, changes["source"])
1413 (arch, ext) = archext.split(".", 1)
1414 if arch not in changes["architecture"]:
# Remember how to process this file later in do_autobyhand.
1418 files[f]["byhand-arch"] = arch
1419 files[f]["byhand-script"] = ABH["%s::Script" % (pckg)]
# True only when there was at least one byhand file and all of them are
# automatable.
1421 return any_auto and all_auto
# Run the configured byhand script for each automatable BYHAND file;
# fall back to the manual BYHAND queue if any script fails.
# NOTE(review): this listing is elided -- the success/failure bookkeeping
# and the branch deciding between do_byhand and accept are not visible.
1423 def do_autobyhand (summary, short_summary):
1424 print "Attempting AUTOBYHAND."
1426 for f in files.keys():
1428 if not files[f].has_key("byhand"):
1430 if not files[f].has_key("byhand-script"):
1434 os.system("ls -l %s" % byhandfile)
# NOTE(review): the script invocation interpolates upload-supplied
# filenames into a shell command; filename sanitising elsewhere
# (re_taint_free) is presumably relied upon -- verify.
1435 result = os.system("%s %s %s %s %s" % (
1436 files[f]["byhand-script"], byhandfile,
1437 changes["version"], files[f]["byhand-arch"],
1438 os.path.abspath(pkg.changes_file)))
1440 os.unlink(byhandfile)
1443 print "Error processing %s, left as byhand." % (f)
1447 do_byhand(summary, short_summary)
1449 accept(summary, short_summary)
1451 ################################################################################
# NOTE(review): the "def is_byhand ():" header and return statements are
# not visible in this listing. Predicate: any file flagged "byhand"?
1454 for f in files.keys():
1455 if files[f].has_key("byhand"):
1459 def do_byhand (summary, short_summary):
1460 print "Moving to BYHAND holding area."
1461 Logger.log(["Moving to byhand", pkg.changes_file])
1463 Upload.dump_vars(Cnf["Dir::Queue::Byhand"])
1464 move_to_dir(Cnf["Dir::Queue::Byhand"])
1466 # Check for override disparities
1467 Upload.Subst["__SUMMARY__"] = summary
1468 Upload.check_override()
1470 ################################################################################
# NOTE(review): the "def is_new ():" header and return statements are not
# visible in this listing. Predicate: any file flagged "new"?
1473 for f in files.keys():
1474 if files[f].has_key("new"):
1478 def acknowledge_new (summary, short_summary):
1479 Subst = Upload.Subst
1481 print "Moving to NEW holding area."
1482 Logger.log(["Moving to new", pkg.changes_file])
1484 Upload.dump_vars(Cnf["Dir::Queue::New"])
1485 move_to_dir(Cnf["Dir::Queue::New"])
1487 if not Options["No-Mail"]:
1488 print "Sending new ack."
1489 Subst["__SUMMARY__"] = summary
1490 new_ack_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.new")
1491 utils.send_mail(new_ack_message)
1493 ################################################################################
1495 # reprocess is necessary for the case of foo_1.2-1 and foo_1.2-2 in
1496 # Incoming. -1 will reference the .orig.tar.gz, but -2 will not.
1497 # Upload.check_dsc_against_db() can find the .orig.tar.gz but it will
1498 # not have processed it during its checks of -2. If -1 has been
1499 # deleted or otherwise not checked by 'dak process-unchecked', the
1500 # .orig.tar.gz will not have been checked at all. To get round this,
1501 # we force the .orig.tar.gz into the .changes structure and reprocess
1502 # the .changes file.
# Process a single .changes file end-to-end: copy it into the holding
# area, verify the signature, then run the chain of checks.
# NOTE(review): this listing is elided -- the globals reset, the try/except
# wrappers around the check chain and several intermediate checks are not
# visible; only comments are added here.
1504 def process_it (changes_file):
1505 global reprocess, reject_message
1507 # Reset some globals
1510 # Some defaults in case we can't fully process the .changes file
1511 changes["maintainer2047"] = Cnf["Dinstall::MyEmailAddress"]
1512 changes["changedby2047"] = Cnf["Dinstall::MyEmailAddress"]
1515 # Absolutize the filename to avoid the requirement of being in the
1516 # same directory as the .changes file.
1517 pkg.changes_file = os.path.abspath(changes_file)
1519 # Remember where we are so we can come back after cd-ing into the
1520 # holding directory.
1521 pkg.directory = os.getcwd()
1524 # If this is the Real Thing(tm), copy things into a private
1525 # holding directory first to avoid replaceable file races.
1526 if not Options["No-Action"]:
1527 os.chdir(Cnf["Dir::Queue::Holding"])
1528 copy_to_holding(pkg.changes_file)
1529 # Relativize the filename so we use the copy in holding
1530 # rather than the original...
1531 pkg.changes_file = os.path.basename(pkg.changes_file)
# No valid signature means none of the content checks below can be
# trusted, so the check chain is gated on the fingerprint.
1532 changes["fingerprint"] = utils.check_signature(pkg.changes_file, reject)
1533 if changes["fingerprint"]:
1534 valid_changes_p = check_changes()
1539 check_distributions()
1541 valid_dsc_p = check_dsc()
1547 check_signed_by_key()
1548 Upload.update_subst(reject_message)
1554 traceback.print_exc(file=sys.stderr)
1557 # Restore previous WD
1558 os.chdir(pkg.directory)
1560 ###############################################################################
# NOTE(review): the "def main():" header, the try around the lock, the
# else branches and the final accept/close lines are not visible in this
# listing; only comments are added here.
1563 global Cnf, Options, Logger
1565 changes_files = init()
1567 # -n/--dry-run invalidates some other options which would involve things happening
1568 if Options["No-Action"]:
1569 Options["Automatic"] = ""
1571 # Ensure all the arguments we were given are .changes files
# NOTE(review): removing from a list while iterating it can skip
# elements when two bad arguments are adjacent -- iterating a copy
# would be safer; confirm against the full source.
1572 for f in changes_files:
1573 if not f.endswith(".changes"):
1574 utils.warn("Ignoring '%s' because it's not a .changes file." % (f))
1575 changes_files.remove(f)
1577 if changes_files == []:
1578 utils.fubar("Need at least one .changes file as an argument.")
1580 # Check that we aren't going to clash with the daily cron job
1582 if not Options["No-Action"] and os.path.exists("%s/daily.lock" % (Cnf["Dir::Lock"])) and not Options["No-Lock"]:
1583 utils.fubar("Archive maintenance in progress. Try again later.")
1585 # Obtain lock if not in no-action mode and initialize the log
1587 if not Options["No-Action"]:
1588 lock_fd = os.open(Cnf["Dinstall::LockFile"], os.O_RDWR | os.O_CREAT)
# Non-blocking exclusive lock: failing with EACCES/EAGAIN means another
# process-unchecked instance already holds it.
1590 fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
1592 if errno.errorcode[e.errno] == 'EACCES' or errno.errorcode[e.errno] == 'EAGAIN':
1593 utils.fubar("Couldn't obtain lock; assuming another 'dak process-unchecked' is already running.")
1596 Logger = Upload.Logger = logging.Logger(Cnf, "process-unchecked")
1598 # debian-{devel-,}-changes@lists.debian.org toggles write access based on this header
1599 bcc = "X-DAK: dak process-unchecked\nX-Katie: $Revision: 1.65 $"
1600 if Cnf.has_key("Dinstall::Bcc"):
1601 Upload.Subst["__BCC__"] = bcc + "\nBcc: %s" % (Cnf["Dinstall::Bcc"])
1603 Upload.Subst["__BCC__"] = bcc
1606 # Sort the .changes files so that we process sourceful ones first
1607 changes_files.sort(utils.changes_compare)
1609 # Process the changes files
1610 for changes_file in changes_files:
1611 print "\n" + changes_file
1613 process_it (changes_file)
1615 if not Options["No-Action"]:
# Summarise what was accepted across the whole run.
1618 accept_count = Upload.accept_count
1619 accept_bytes = Upload.accept_bytes
1622 if accept_count > 1:
1624 print "Accepted %d package %s, %s." % (accept_count, sets, utils.size_type(int(accept_bytes)))
1625 Logger.log(["total",accept_count,accept_bytes])
1627 if not Options["No-Action"]:
1630 ################################################################################
1632 if __name__ == '__main__':