2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
40 import email as modemail
43 from dbconn import DBConn, get_architecture, get_component, get_suite
44 from dak_exceptions import *
45 from textutils import fix_maintainer
46 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
47 re_multi_line_field, re_srchasver, re_verwithext, \
48 re_taint_free, re_gpg_uid, re_re_mark, \
49 re_whitespace_comment, re_issource
51 from srcformats import srcformats
52 from collections import defaultdict
################################################################################

default_config = "/etc/dak/dak.conf"     #: default dak config, defines host properties
default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used

alias_cache = None        #: Cache for email alias checks
key_uid_email_cache = {}  #: Cache for email addresses from gpg key uids

# (hashname, function, earliest_changes_version)
known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
                ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
# Monkeypatch commands.getstatusoutput as it returns a "0" exit code in
# all situations under lenny's Python.
def dak_getstatusoutput(cmd):
    # Run *cmd* through the shell with stderr merged into stdout so the
    # combined text can be returned alongside the real exit status.
    pipe = subprocess.Popen(cmd, shell=True, universal_newlines=True,
                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

    output = "".join(pipe.stdout.readlines())
    # NOTE(review): the rest of this function (waiting for the child and
    # deriving/returning the exit status) is elided in this view.

commands.getstatusoutput = dak_getstatusoutput
82 ################################################################################
    """ Escape html chars """
    # NOTE(review): the enclosing `def` line is elided in this view; the
    # body substitutes characters via the html_escaping lookup table.
    return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
88 ################################################################################
def open_file(filename, mode='r'):
    """
    Open C{file}, return fileobject.

    @type filename: string
    @param filename: path/filename to open

    @type mode: string
    @param mode: open mode

    @rtype: fileobject
    @return: open fileobject

    @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
    """
    # NOTE(review): the try/except wrapper and the `return f` are elided
    # in this view.
        f = open(filename, mode)
        raise CantOpenError, filename
112 ################################################################################
def our_raw_input(prompt=""):
    # Write *prompt* and read a line of input; the elided lines handle
    # EOF (^D) -- only the message write is visible here.
    sys.stdout.write(prompt)
        sys.stderr.write("\nUser interrupt (^D).\n")
125 ################################################################################
def extract_component_from_section(section):
    # Split a "component/section" value into (section, component);
    # sections without a '/' fall under the default component.
    # NOTE(review): the default-component expansion body is elided in
    # this view.
    if section.find('/') != -1:
        component = section.split('/')[0]

    # Expand default component
    if Cnf.has_key("Component::%s" % section):

    return (section, component)
142 ################################################################################
def parse_deb822(contents, signing_rules=0):
    # Parse deb822/RFC822-style *contents* into a dict of lowercased
    # field names; see parse_changes() for the signing_rules values.
    # NOTE(review): many lines of this function are elided in this view;
    # the visible statements are preserved as-is.

    # Split the lines in the input, keeping the linebreaks.
    lines = contents.splitlines(True)
        raise ParseChangesError, "[Empty changes file]"

    # Reindex by line number so we can easily verify the format of
            indexed_lines[index] = line[:-1]

    num_of_lines = len(indexed_lines.keys())

    while index < num_of_lines:
        line = indexed_lines[index]
        if signing_rules == 1:
                # Strict mode: the signed data must end with the PGP
                # signature block.
                if index > num_of_lines:
                    raise InvalidDscError, index
                line = indexed_lines[index]
                if not line.startswith("-----BEGIN PGP SIGNATURE"):
                    raise InvalidDscError, index
        if line.startswith("-----BEGIN PGP SIGNATURE"):
        if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
            if signing_rules == 1:
                # Skip PGP header data up to the first blank line.
                while index < num_of_lines and line != "":
                    line = indexed_lines[index]
        # If we're not inside the signed data, don't process anything
        if signing_rules >= 0 and not inside_signature:
        slf = re_single_line_field.match(line)
            field = slf.groups()[0].lower()
            changes[field] = slf.groups()[1]
            changes[field] += '\n'
        mlf = re_multi_line_field.match(line)
                raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
            if first == 1 and changes[field] != "":
                changes[field] += '\n'
            changes[field] += mlf.groups()[0] + '\n'

    if signing_rules == 1 and inside_signature:
        raise InvalidDscError, index

    changes["filecontents"] = "".join(lines)

    if changes.has_key("source"):
        # Strip the source version in brackets from the source field,
        # put it in the "source-version" field instead.
        srcver = re_srchasver.search(changes["source"])
            changes["source"] = srcver.group(1)
            changes["source-version"] = srcver.group(2)

        raise ParseChangesError, error
232 ################################################################################
def parse_changes(filename, signing_rules=0):
    """
    Parses a changes file and returns a dictionary where each field is a
    key.  The mandatory first argument is the filename of the .changes
    file.

    signing_rules is an optional argument:

      - If signing_rules == -1, no signature is required.
      - If signing_rules == 0 (the default), a signature is required.
      - If signing_rules == 1, it turns on the same strict format checking
        as dpkg-source.

    The rules for (signing_rules == 1)-mode are:

      - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
        followed by any PGP header data and must end with a blank line.

      - The data section must end with a blank line and must be followed by
        "-----BEGIN PGP SIGNATURE-----".
    """
    changes_in = open_file(filename)
    content = changes_in.read()
    # Validate the content is proper UTF-8 before parsing (the try/except
    # wrapper and the file close are elided in this view).
        unicode(content, 'utf-8')
        raise ChangesUnicodeError, "Changes file not proper utf-8"
    return parse_deb822(content, signing_rules)
265 ################################################################################
def hash_key(hashname):
    """Return the files-dict key for a *hashname* checksum, e.g. "md5" -> "md5sum"."""
    return hashname + 'sum'
270 ################################################################################
def create_hash(where, files, hashname, hashfunc):
    """
    create_hash extends the passed files dict with the given hash by
    iterating over all files on disk and passing them to the hashing
    function given.
    """
    # NOTE(review): rejmsg initialisation, the try:, the continue after a
    # failed open and the return are elided in this view.
    for f in files.keys():
            file_handle = open_file(f)
        except CantOpenError:
            rejmsg.append("Could not open file %s for checksumming" % (f))

        files[f][hash_key(hashname)] = hashfunc(file_handle)
def check_hash(where, files, hashname, hashfunc):
    """
    check_hash checks the given hash in the files dict against the actual
    files on disk. The hash values need to be present consistently in
    all file entries. It does not modify its input in any way.
    """
    # NOTE(review): several lines are elided in this view (rejmsg init,
    # the try:, statement continuations of the two rejmsg.append calls,
    # the file close and the return).
    for f in files.keys():
            file_handle = open_file(f)

            # Check for the hash entry, to not trigger a KeyError.
            if not files[f].has_key(hash_key(hashname)):
                rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,

            # Actually check the hash for correctness.
            if hashfunc(file_handle) != files[f][hash_key(hashname)]:
                rejmsg.append("%s: %s check failed in %s" % (f, hashname,
        except CantOpenError:
            # TODO: This happens when the file is in the pool.
            # warn("Cannot open file %s" % f)
def check_size(where, files):
    """
    check_size checks the file sizes in the passed files dict against the
    actual files on disk.
    """
    # NOTE(review): rejmsg init, the os.stat call with its try/except and
    # the return are elided in this view; `entry` is presumably the
    # os.stat result -- confirm.
    for f in files.keys():
            # TODO: This happens when the file is in the pool.
        actual_size = entry[stat.ST_SIZE]
        size = int(files[f]["size"])
        if size != actual_size:
            rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
                          % (f, actual_size, size, where))
352 ################################################################################
def check_dsc_files(dsc_filename, dsc=None, dsc_files=None):
    """
    Verify that the files listed in the Files field of the .dsc are
    those expected given the announced Format.

    @type dsc_filename: string
    @param dsc_filename: path of .dsc file

    @type dsc: dict
    @param dsc: the content of the .dsc parsed by C{parse_changes()}

    @type dsc_files: dict
    @param dsc_files: the file list returned by C{build_file_list()}

    @rtype: list
    @return: all errors detected
    """
    # NOTE(review): several lines are elided in this view (rejmsg init,
    # the lookup-table assignment header, the per-key 'has' bookkeeping,
    # and the return).  `reject(...)` below may also be a typo for
    # rejmsg.append -- confirm against the full source.

    # Parse the file if needed
        dsc = parse_changes(dsc_filename, signing_rules=1);

    if dsc_files is None:
        dsc_files = build_file_list(dsc, is_a_dsc=1)

    # Ensure .dsc lists proper set of source files according to the format
    has = defaultdict(lambda: 0)

    # Maps filename-suffix regexps to the 'has' keys they establish.
        (r'orig.tar.gz', ('orig_tar_gz', 'orig_tar')),
        (r'diff.gz', ('debian_diff',)),
        (r'tar.gz', ('native_tar_gz', 'native_tar')),
        (r'debian\.tar\.(gz|bz2)', ('debian_tar',)),
        (r'orig\.tar\.(gz|bz2)', ('orig_tar',)),
        (r'tar\.(gz|bz2)', ('native_tar',)),
        (r'orig-.+\.tar\.(gz|bz2)', ('more_orig_tar',)),

    for f in dsc_files.keys():
        m = re_issource.match(f)
            rejmsg.append("%s: %s in Files field not recognised as source."

        # Populate 'has' dictionary by resolving keys in lookup table
        for regex, keys in ftype_lookup:
            if re.match(regex, m.group(3)):

            # File does not match anything in lookup table; reject
            reject("%s: unexpected source file '%s'" % (dsc_filename, f))

    # Check for multiple files
    for file_type in ('orig_tar', 'native_tar', 'debian_tar', 'debian_diff'):
        if has[file_type] > 1:
            rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))

    # Source format specific tests
    for format in srcformats:
        if format.re_format.match(dsc['format']):
            # Collect per-format rejection messages for this source format.
                '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)
429 ################################################################################
def check_hash_fields(what, manifest):
    """
    check_hash_fields ensures that there are no checksum fields in the
    given dict that we do not know about.
    """
    # NOTE(review): rejmsg initialisation and the return are elided in
    # this view.
    hashes = map(lambda x: x[0], known_hashes)
    for field in manifest:
        if field.startswith("checksums-"):
            hashname = field.split("-",1)[1]
            if hashname not in hashes:
                rejmsg.append("Unsupported checksum field for %s "\
                              "in %s" % (hashname, what))
447 ################################################################################
def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
    # Ensure the given checksum is available for every file of a .changes:
    # import it from the Checksums-* field when the Format is new enough,
    # otherwise compute it.  NOTE(review): parts of both branches (the
    # else:, the selection of `func`) are elided in this view.
    if format >= version:
        # The version should contain the specified hash.

        # Import hashes from the changes
        rejmsg = parse_checksums(".changes", files, changes, hashname)

        # We need to calculate the hash because it can't possibly
        # be present in the file.
        return func(".changes", files, hashname, hashfunc)
def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
    """
    ensure_dsc_hashes' task is to ensure that each and every *present* hash
    in the dsc is correct, i.e. identical to the changes file and if necessary
    the pool. The latter task is delegated to check_hash.
    """
    # NOTE(review): rejmsg initialisation, the early return when the
    # Checksums field is absent and the final return are elided in this
    # view.
    if not dsc.has_key('Checksums-%s' % (hashname,)):

    # Import hashes from the dsc
    parse_checksums(".dsc", dsc_files, dsc, hashname)

    rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
483 ################################################################################
def parse_checksums(where, files, manifest, hashname):
    # Parse a "Checksums-<hash>" field of *manifest* (lines of
    # "checksum size filename") into the files dict.
    # NOTE(review): rejmsg init, the try/except around tuple unpacking,
    # several continue statements and the return are elided in this view.
    field = 'checksums-%s' % hashname
    if not field in manifest:
    for line in manifest[field].split('\n'):
        clist = line.strip().split(' ')
            checksum, size, checkfile = clist
            rejmsg.append("Cannot parse checksum line [%s]" % (line))
        if not files.has_key(checkfile):
            # TODO: check for the file's entry in the original files dict, not
            # the one modified by (auto)byhand and other weird stuff
            # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
            #               (file, hashname, where))
        if not files[checkfile]["size"] == size:
            rejmsg.append("%s: size differs for files and checksums-%s entry "\
                          "in %s" % (checkfile, hashname, where))
        files[checkfile][hash_key(hashname)] = checksum
    for f in files.keys():
        if not files[f].has_key(hash_key(hashname)):
            # NOTE(review): this message interpolates `checkfile` (the
            # last value from the previous loop) where `f` looks intended.
            rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
516 ################################################################################
# Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
    # Build a per-filename dict from the Files (or Checksums-*) field of a
    # parsed .changes/.dsc.  NOTE(review): several lines are elided in
    # this view (files init, the dsc/changes branch structure, the entry
    # split and the return).

    # Make sure we have a Files: field to parse...
    if not changes.has_key(field):
        raise NoFilesFieldError

    # Make sure we recognise the format of the Files: field
    format = re_verwithext.search(changes.get("format", "0.0"))
        raise UnknownFormatError, "%s" % (changes.get("format","0.0"))

    format = format.groups()
    if format[1] == None:
        format = int(float(format[0])), 0, format[2]
        format = int(format[0]), int(format[1]), format[2]
    if format[2] == None:

        # format = (0,0) are missing format headers of which we still
        # have some in the archive.
        if format != (1,0) and format != (0,0) and \
           format != (3,0,"quilt") and format != (3,0,"native"):
            raise UnknownFormatError, "%s" % (changes.get("format","0.0"))

        if (format < (1,5) or format > (1,8)):
            raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
        if field != "files" and format < (1,8):
            raise UnknownFormatError, "%s" % (changes.get("format","0.0"))

    includes_section = (not is_a_dsc) and field == "files"

    # Parse each entry/line:
    for i in changes[field].split('\n'):
        section = priority = ""
            (md5, size, section, priority, name) = s
            (md5, size, name) = s
            raise ParseChangesError, i

            (section, component) = extract_component_from_section(section)

        files[name] = Dict(size=size, section=section,
                           priority=priority, component=component)
        files[name][hashname] = md5
581 ################################################################################
def send_mail (message, filename=""):
    """sendmail wrapper, takes _either_ a message string or a file as arguments"""
    # NOTE(review): many lines are elided in this view (temp-file
    # bookkeeping, whitelist matching internals, file closes, final
    # cleanup/return).  Visible statements are preserved as-is.

    # If we've been passed a string dump it into a temporary file
        (fd, filename) = tempfile.mkstemp()
        os.write (fd, message)

    # If we have a whitelist, filter the recipient headers through it.
    if Cnf.has_key("Dinstall::MailWhiteList") and \
       Cnf["Dinstall::MailWhiteList"] != "":
        message_in = open_file(filename)
        message_raw = modemail.message_from_file(message_in)

        whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
            for line in whitelist_in:
                if not re_whitespace_comment.match(line):
                    # Lines marked as regexes are compiled directly;
                    # everything else is matched literally.
                    if re_re_mark.match(line):
                        whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
                        whitelist.append(re.compile(re.escape(line.strip())))

        # Check each recipient header against the whitelist.
        fields = ["To", "Bcc", "Cc"]
            value = message_raw.get(field, None)
                for item in value.split(","):
                    (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
                        if not mail_whitelisted:
                            print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])

                # Doesn't have any mail in whitelist so remove the header
                    del message_raw[field]
                    message_raw.replace_header(field, string.join(match, ", "))

        # Change message fields in order if we don't have a To header
        if not message_raw.has_key("To"):
                if message_raw.has_key(field):
                    message_raw[fields[-1]] = message_raw[field]
                    del message_raw[field]

                # Clean up any temporary files
                # and return, as we removed all recipients.
                    os.unlink (filename);

        # NOTE(review): O_EXCL without O_CREAT below has no effect; 0700
        # is an octal mode.
        fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
        os.write (fd, message_raw.as_string(True));

    # Invoke sendmail
    (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
        raise SendmailFailedError, output

    # Clean up any temporary files
663 ################################################################################
def poolify (source, component):
    # Return the pool subdirectory for *source*: "lib*" packages pool
    # under their first four characters, everything else under the first.
    # NOTE(review): two leading lines and the `else:` are elided in this
    # view -- they presumably normalise `component` (e.g. append '/');
    # confirm before relying on the exact return value.
    if source[:3] == "lib":
        return component + source[:4] + '/' + source + '/'
        return component + source[:1] + '/' + source + '/'
673 ################################################################################
def move (src, dest, overwrite = 0, perms = 0664):
    # Move *src* to *dest* (copy + chmod; the unlink of src is elided in
    # this view), creating the destination directory setgid/02775 if
    # needed.  Refuses to clobber an existing file unless *overwrite*.
    # NOTE(review): the umask restore and the `if not overwrite:` guard
    # are elided in this view.
    if os.path.exists(dest) and os.path.isdir(dest):
    dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Moving %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
            fubar("Can't move %s to %s - file already exists." % (src, dest))
            if not os.access(dest, os.W_OK):
                fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
def copy (src, dest, overwrite = 0, perms = 0664):
    # Copy *src* to *dest* with the same directory-creation and
    # no-clobber semantics as move(), but raising exceptions instead of
    # calling fubar().  NOTE(review): the umask restore and the
    # `if not overwrite:` guard are elided in this view.
    if os.path.exists(dest) and os.path.isdir(dest):
    dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Copying %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
            raise FileExistsError
            if not os.access(dest, os.W_OK):
                raise CantOverwriteError
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
720 ################################################################################
    # Body of a host-identity helper whose `def` line is elided in this
    # view: prefers the per-host configured DatabaseHostname, presumably
    # falling back to the resolved hostname (fallback elided).
    res = socket.gethostbyaddr(socket.gethostname())
    database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
    if database_hostname:
        return database_hostname
def which_conf_file ():
    # Return the dak config file to use for this host.
    res = socket.gethostbyaddr(socket.gethostname())
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # NOTE(review): os.path.join discards *homedir* because the second
        # argument is absolute -- this always yields "/etc/dak.conf".
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
            # NOTE(review): this reads default_config, not confpath --
            # looks unintended; confirm before changing.
            apt_pkg.ReadConfigFileISC(Cnf,default_config)

    # We are still in here, so there is no local config file or we do
    # not allow local files. Do the normal stuff.
    if Cnf.get("Config::" + res[0] + "::DakConfig"):
        return Cnf["Config::" + res[0] + "::DakConfig"]

        return default_config
def which_apt_conf_file ():
    # Return the apt config file to use for this host (same structure as
    # which_conf_file above).
    res = socket.gethostbyaddr(socket.gethostname())
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # NOTE(review): os.path.join discards *homedir* (absolute second
        # argument), and ReadConfigFileISC reads default_config rather
        # than confpath -- both look unintended; confirm.
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
            apt_pkg.ReadConfigFileISC(Cnf,default_config)

    if Cnf.get("Config::" + res[0] + "::AptConfig"):
        return Cnf["Config::" + res[0] + "::AptConfig"]

        return default_apt_config
def which_alias_file():
    # Return the per-host forward-alias file when present.
    # NOTE(review): the return statements are elided in this view.
    hostname = socket.gethostbyaddr(socket.gethostname())[0]
    aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
    if os.path.exists(aliasfn):
768 ################################################################################
def TemplateSubst(map, filename):
    """ Perform a substition of template """
    # Replace each key of *map* found in the template text with its value.
    # NOTE(review): the loop header over map's keys, the file close and
    # the return are elided in this view.
    templatefile = open_file(filename)
    template = templatefile.read()
        template = template.replace(x, str(map[x]))
779 ################################################################################
def fubar(msg, exit_code=1):
    # Print a fatal error; the sys.exit(exit_code) call is elided in this
    # view.
    sys.stderr.write("E: %s\n" % (msg))

    # Body of warn(); its `def` line is elided in this view.
    sys.stderr.write("W: %s\n" % (msg))
788 ################################################################################
# Returns the user name with a laughable attempt at rfc822 conformancy
# (read: removing stray periods).
# NOTE(review): the `def` lines of these two one-line helpers (GECOS
# full name, and login name, from the passwd database) are elided in
# this view.
    return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')

    return pwd.getpwuid(os.getuid())[0]
798 ################################################################################
    # Tail of a human-readable size formatter: `c` is the scaled count
    # and `t` its unit suffix.  (The rest of the function is elided in
    # this view.)
    return ("%d%s" % (c, t))
810 ################################################################################
def cc_fix_changes (changes):
    # Canonicalise the architecture field into a dict of arch -> 1.
    # NOTE(review): the guard on `o` and the loop header over its split
    # are elided in this view.
    o = changes.get("architecture", "")
        del changes["architecture"]
    changes["architecture"] = {}
        changes["architecture"][j] = 1
def changes_compare (a, b):
    """ Sort by source name, source version, 'have source', and then by filename """
    # NOTE(review): the try/except wrappers around parse_changes, the
    # early returns after each comparison and the final filename
    # comparison are elided in this view.
        a_changes = parse_changes(a)

        b_changes = parse_changes(b)

    cc_fix_changes (a_changes)
    cc_fix_changes (b_changes)

    # Sort by source name
    a_source = a_changes.get("source")
    b_source = b_changes.get("source")
    q = cmp (a_source, b_source)

    # Sort by source version
    a_version = a_changes.get("version", "0")
    b_version = b_changes.get("version", "0")
    q = apt_pkg.VersionCompare(a_version, b_version)

    # Sort by 'have source'
    a_has_source = a_changes["architecture"].get("source")
    b_has_source = b_changes["architecture"].get("source")
    if a_has_source and not b_has_source:
    elif b_has_source and not a_has_source:

    # Fall back to sort by filename
860 ################################################################################
def find_next_free (dest, too_many=100):
    # Find a free "dest.N"-style filename, raising NoFreeFilenameError
    # after *too_many* attempts.
    # NOTE(review): initialisation of extra/orig_dest, the increment and
    # the return are elided in this view.
    while os.path.exists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
    if extra >= too_many:
        raise NoFreeFilenameError
872 ################################################################################
def result_join (original, sep = '\t'):
    """
    Join the elements of *original* with *sep*, substituting the empty
    string for elements that are None.

    @type original: sequence
    @param original: values to join (items may be None)

    @type sep: string
    @param sep: separator to place between items

    @rtype: string
    @return: the joined result
    """
    # Map None to "" so missing values still occupy a column; avoids the
    # index-based xrange loop and the non-idiomatic `== None` comparison
    # of the previous implementation.
    return sep.join([x if x is not None else "" for x in original])
883 ################################################################################
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    # Prefix every line of *str* with *prefix*; blank lines are skipped
    # unless include_blank_lines is set.
    # NOTE(review): the accumulator initialisation, a possible per-line
    # strip, the trailing-newline strip and the return are elided in this
    # view -- confirm against the full source.
    for line in str.split('\n'):
        if line or include_blank_lines:
            out += "%s%s\n" % (prefix, line)
    # Strip trailing new line
def validate_changes_file_arg(filename, require_changes=1):
    """
    'filename' is either a .changes or .dak file. If 'filename' is a
    .dak file, it's changed to be the corresponding .changes file. The
    function then checks if the .changes file a) exists and b) is
    readable and returns the .changes filename if so. If there's a
    problem, the next action depends on the option 'require_changes'
    argument:

      - If 'require_changes' == -1, errors are ignored and the .changes
        filename is returned.
      - If 'require_changes' == 0, a warning is given and 'None' is returned.
      - If 'require_changes' == 1, a fatal error is raised.
    """
    # NOTE(review): several lines (error init, else: branches, the
    # returns) are elided in this view.
    orig_filename = filename
    if filename.endswith(".dak"):
        filename = filename[:-4]+".changes"

    if not filename.endswith(".changes"):
        error = "invalid file type; not a changes file"
        if not os.access(filename,os.R_OK):
            if os.path.exists(filename):
                error = "permission denied"
                error = "file not found"

        if require_changes == 1:
            fubar("%s: %s." % (orig_filename, error))
        elif require_changes == 0:
            warn("Skipping %s - %s" % (orig_filename, error))
    else: # We only care about the .dak file
939 ################################################################################
    # Body of real_arch(): true for any concrete architecture, i.e.
    # neither "source" nor "all".  (The `def` line is elided in this view.)
    return (arch != "source" and arch != "all")
944 ################################################################################
def join_with_commas_and(list):
    """Render a sequence as English prose, e.g. ["a", "b", "c"] -> "a, b and c"."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    return "%s and %s" % (", ".join(list[:-1]), list[-1])
951 ################################################################################
    # Body of a dependency pretty-printer whose `def` line and loop
    # header are elided in this view: formats each (pkg, version,
    # constraint) atom and joins the alternatives with " |".
        (pkg, version, constraint) = atom
            pp_dep = "%s (%s %s)" % (pkg, constraint, version)
        pp_deps.append(pp_dep)
    return " |".join(pp_deps)
964 ################################################################################
969 ################################################################################
def parse_args(Options):
    """ Handle -a, -c and -s arguments; returns them as SQL constraints """
    # XXX: This should go away and everything which calls it be converted
    #      to use SQLA properly.  For now, we'll just fix it not to use
    #      the old Pg interface though
    # NOTE(review): several guard lines, list initialisations, continue
    # statements and else: branches are elided in this view.
    session = DBConn().session()

    # Process suite
        for suitename in split_args(Options["Suite"]):
            # NOTE(review): if get_suite returns None for an unknown
            # suite, the attribute accesses below raise -- confirm.
            suite = get_suite(suitename, session=session)
            if suite.suite_id is None:
                warn("suite '%s' not recognised." % (suite.suite_name))
                suite_ids_list.append(suite.suite_id)
            con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
            fubar("No valid suite given.")

    # Process component
    if Options["Component"]:
        component_ids_list = []
        for componentname in split_args(Options["Component"]):
            component = get_component(componentname, session=session)
            if component is None:
                warn("component '%s' not recognised." % (componentname))
                component_ids_list.append(component.component_id)
        if component_ids_list:
            con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
            fubar("No valid component given.")

    # Process architecture
    con_architectures = ""
    if Options["Architecture"]:
        for archname in split_args(Options["Architecture"]):
            if archname == "source":
                arch = get_architecture(archname, session=session)
                    warn("architecture '%s' not recognised." % (archname))
                    arch_ids_list.append(arch.arch_id)
            con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
        if not check_source:
            fubar("No valid architecture given.")

    return (con_suites, con_architectures, con_components, check_source)
1033 ################################################################################
# Inspired(tm) by Bryn Keller's print_exc_plus (See
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
# NOTE(review): the enclosing `def` line and several traceback-walking /
# printing lines are elided in this view; this dumps each frame and its
# locals after the standard traceback.
    tb = sys.exc_info()[2]
        frame = frame.f_back
    traceback.print_exc()
        print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
                                               frame.f_code.co_filename,
        for key, value in frame.f_locals.items():
            print "\t%20s = " % key,
                print "<unable to print>"
1060 ################################################################################
def try_with_debug(function):
    # Invoke *function*, presumably dumping an extended traceback on
    # failure -- the entire body is elided in this view.
1070 ################################################################################
def arch_compare_sw (a, b):
    """
    Function for use in sorting lists of architectures.

    Sorts normally except that 'source' dominates all others.
    """
    # NOTE(review): the comparison branches returning -1/0/1 are mostly
    # elided in this view.
    if a == "source" and b == "source":
1088 ################################################################################
def split_args (s, dwim=1):
    """
    Split command line arguments which can be separated by either commas
    or whitespace.  If dwim is set, it will complain about string ending
    in comma since this usually means someone did 'dak ls -a i386, m68k
    foo' or something and the inevitable confusion resulting from 'm68k'
    being treated as an argument is undesirable.
    """
    # NOTE(review): the whitespace-split and comma-split return
    # statements are elided in this view.
    if s.find(",") == -1:
        if s[-1:] == "," and dwim:
            fubar("split_args: found trailing comma, spurious space maybe?")
1106 ################################################################################
def Dict(**kwargs):
    """Convenience constructor: return the keyword arguments as a plain dict."""
    return kwargs
1110 ########################################
def gpgv_get_status_output(cmd, status_read, status_write):
    """
    Our very own version of commands.getouputstatus(), hacked to support
    gpgv's status fd.
    """
    # NOTE(review): large parts of the fork/exec plumbing are elided in
    # this view (the fork itself, fd close/dup bookkeeping, the select
    # loop bodies and the remaining closes).
    cmd = ['/bin/sh', '-c', cmd]
    p2cread, p2cwrite = os.pipe()
    c2pread, c2pwrite = os.pipe()
    errout, errin = os.pipe()

        # Presumably the child after fork: close every inherited fd
        # except the status pipe, then exec -- confirm (fork elided).
        for i in range(3, 256):
            if i != status_write:
        os.execvp(cmd[0], cmd)

    os.dup2(c2pread, c2pwrite)
    os.dup2(errout, errin)

    output = status = ""
        # Multiplex the child's stdout/stderr and the gpgv status fd.
        i, o, e = select.select([c2pwrite, errin, status_read], [], [])
            r = os.read(fd, 8196)
                more_data.append(fd)
                if fd == c2pwrite or fd == errin:
                elif fd == status_read:
                    fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))

    pid, exit_status = os.waitpid(pid, 0)

    os.close(status_write)
    os.close(status_read)

    return output, status, exit_status
1177 ################################################################################
def process_gpgv_output(status):
    # Process the status-fd output
    # Parse gpgv --status-fd text into ({keyword: args}, internal_error);
    # internal_error is a string of malformed-line descriptions (empty on
    # success).  NOTE(review): initialisation of keywords/internal_error,
    # the continue statements and the args assembly are elided in this
    # view.
    for line in status.split('\n'):
        split = line.split()
            internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
        (gnupg, keyword) = split[:2]
        if gnupg != "[GNUPG:]":
            internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
        # Some status tokens may legitimately repeat; everything else is a
        # duplicate error.
        if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
            internal_error += "found duplicate status token ('%s').\n" % (keyword)
        keywords[keyword] = args
    return (keywords, internal_error)
1204 ################################################################################
def retrieve_key (filename, keyserver=None, keyring=None):
    """
    Retrieve the key that signed 'filename' from 'keyserver' and
    add it to 'keyring'.  Returns nothing on success, or an error message
    on error.
    """
    # NOTE(review): the default-argument guards (`if not keyserver:` etc.)
    # and a couple of checks/returns are elided in this view.

    # Defaults for keyserver and keyring
        keyserver = Cnf["Dinstall::KeyServer"]
        keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(filename):
        return "%s: tainted filename" % (filename)

    # Invoke gpgv on the file
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
    (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)
        return internal_error

    if not keywords.has_key("NO_PUBKEY"):
        return "didn't find expected NO_PUBKEY in gpgv status-fd output"

    fingerprint = keywords["NO_PUBKEY"][0]
    # XXX - gpg sucks.  You can't use --secret-keyring=/dev/null as
    # it'll try to create a lockfile in /dev.  A better solution might
    # be a tempfile or something.
    cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
          % (Cnf["Dinstall::SigningKeyring"])
    cmd += " --keyring %s --keyserver %s --recv-key %s" \
           % (keyring, keyserver, fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
        return "'%s' failed with exit code %s" % (cmd, result)
1250 ################################################################################
def gpg_keyring_args(keyrings=None):
    # Build "--keyring X" command-line arguments; falls back to the
    # configured keyrings (the `if not keyrings:` guard is elided in this
    # view).
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    return " ".join(["--keyring %s" % x for x in keyrings])
1258 ################################################################################
def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None):
    """
    Check the signature of a file and return the fingerprint if the
    signature is valid, or 'None' if it is not, together with a list of
    rejection messages (empty on success).

    @type sig_filename: string
    @param sig_filename: filename whose signature should be checked (for a
        detached signature, the signature file itself).

    @type data_filename: string
    @param data_filename: optional name of the file a detached signature
        applies to.

    @type keyrings: list (of strings) or None
    @param keyrings: optional *list* of keyrings to use; defaults to the
        configured Dinstall::GPGKeyring list.

    @param autofetch: None, True or False.  If None, the default behaviour
        from the config (Dinstall::KeyAutoFetch) is used; if true, an
        unknown signing key is fetched via retrieve_key() before checking.

    @rtype: tuple
    @return: (fingerprint, []) when valid, (None, [error messages]) otherwise.
    """
    rejects = []

    # Ensure the filenames contain no shell meta-characters or other
    # badness before they get interpolated into a command line.
    if not re_taint_free.match(sig_filename):
        rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
        return (None, rejects)

    if data_filename and not re_taint_free.match(data_filename):
        rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
        return (None, rejects)

    if not keyrings:
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    # Autofetch the signing key if that's enabled
    if autofetch is None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    if autofetch:
        error_msg = retrieve_key(sig_filename)
        if error_msg:
            rejects.append(error_msg)
            return (None, rejects)

    # Build the command line
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    if internal_error:
        rejects.append("internal error while performing signature check on %s." % (sig_filename))
        # BUGFIX: list.append() takes exactly one argument; the old code
        # still passed the prefix argument of the former reject() callback.
        rejects.append(internal_error)
        rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.")
        return (None, rejects)

    # Now check for obviously bad things in the processed output
    if keywords.has_key("KEYREVOKED"):
        rejects.append("The key used to sign %s has been revoked." % (sig_filename))
    if keywords.has_key("BADSIG"):
        rejects.append("bad signature on %s." % (sig_filename))
    if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
        rejects.append("failed to check signature on %s." % (sig_filename))
    if keywords.has_key("NO_PUBKEY"):
        args = keywords["NO_PUBKEY"]
        key = "UNKNOWN"  # guard: gpgv may emit the token with no arguments
        if len(args) >= 1:
            key = args[0]
        rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
    if keywords.has_key("BADARMOR"):
        rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
    if keywords.has_key("NODATA"):
        rejects.append("no signature found in %s." % (sig_filename))
    if keywords.has_key("EXPKEYSIG"):
        args = keywords["EXPKEYSIG"]
        key = "UNKNOWN"
        if len(args) >= 1:
            key = args[0]
        rejects.append("Signature made by expired key 0x%s" % (key))
    if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
        args = keywords["KEYEXPIRED"]
        expiredate = ""
        if len(args) >= 1:
            timestamp = args[0]
            if timestamp.count("T") == 0:
                # Plain epoch timestamp; render it human-readable.
                try:
                    expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
                except ValueError:
                    expiredate = "unknown (%s)" % (timestamp)
            else:
                # Already an ISO8601-style date string from gpgv; use verbatim.
                expiredate = timestamp
        rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))

    if len(rejects) > 0:
        return (None, rejects)

    # Next check gpgv exited with a zero return code
    if exit_status:
        rejects.append("gpgv failed while checking %s." % (sig_filename))
        # BUGFIX (here and below): dropped the bogus second argument to append().
        if status.strip():
            rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "))
        else:
            rejects.append(prefix_multi_line_string(output, " [GPG output:] "))
        return (None, rejects)

    # Sanity check the good stuff we expect
    if not keywords.has_key("VALIDSIG"):
        rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
    else:
        args = keywords["VALIDSIG"]
        if len(args) < 1:
            rejects.append("internal error while checking signature on %s." % (sig_filename))
        else:
            fingerprint = args[0]
    if not keywords.has_key("GOODSIG"):
        rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
    if not keywords.has_key("SIG_ID"):
        rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))

    # Finally ensure there's not something we don't recognise
    # (builtin dict(**kw) replaces the old Dict() helper; identical result)
    known_keywords = dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
                          SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
                          NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")

    for keyword in keywords.keys():
        if not known_keywords.has_key(keyword):
            rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))

    if len(rejects) > 0:
        return (None, rejects)

    return (fingerprint, [])
1392 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """
    Retrieve email addresses from gpg key uids for a given fingerprint,
    memoizing results in key_uid_email_cache.

    @param fingerprint: key fingerprint to look up.
    @rtype: set
    @return: set of email addresses (empty if gpg fails or no uids match).
    """
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses is not None:
        # Cache hit: reuse the previously extracted address set.
        return addresses
    addresses = set()
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
          % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    # Only trust the output when gpg exited successfully.
    if result == 0:
        for l in output.split('\n'):
            m = re_gpg_uid.match(l)
            if m:
                addresses.add(m.group(1))
    key_uid_email_cache[fingerprint] = addresses
    return addresses
1411 ################################################################################
1413 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """
    Greedily word-wrap 'paragraph' to lines of at most 'max_length'
    characters, prefixing every continuation line with 'prefix'.

    Words longer than max_length are emitted on a line of their own
    (they are never split).

    @rtype: string
    @return: the wrapped text (no trailing newline unless an oversized
        word ended the paragraph).
    """
    s = ""
    line = ""
    words = paragraph.split()

    for word in words:
        word_size = len(word)
        if word_size > max_length:
            # Oversized word: flush any pending line, then put the word
            # on its own line.
            if line:
                s += line + '\n' + prefix
                line = ""
            s += word + '\n' + prefix
        elif not line:
            # First word of a fresh line.
            line = word
        elif len(line) + word_size + 1 > max_length:
            # Word (plus separating space) doesn't fit: start a new line.
            s += line + '\n' + prefix
            line = word
        else:
            line += ' ' + word

    if line:
        s += line

    return s
1444 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.

    Strips 'root' from both paths, then prepends one '../' hop per
    directory component of the link's location.

    Returns fixed 'src'.
    """
    relative_src = src.replace(root, '', 1)
    link_dir = os.path.dirname(dest.replace(root, '', 1))
    hops = '../' * len(link_dir.split('/'))
    return hops + relative_src
1457 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Return a secure and unique filename by pre-creating it.

    If 'directory' is non-null, it will be the directory the file is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pair (fd, name).
    """
    (tfd, tname) = tempfile.mkstemp(suffix, prefix, directory)
    return (tfd, tname)
1471 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix=""):
    """
    Return a secure and unique directory by pre-creating it.

    If 'parent' is non-null, it will be the directory the directory is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pathname to the new directory.
    """
    created = tempfile.mkdtemp(suffix, prefix, parent)
    return created
1485 ################################################################################
def is_email_alias(email):
    """
    Checks if the user part of the email is listed in the alias file.

    The alias file is parsed once and the local parts cached in the
    module-level alias_cache set.

    @rtype: boolean
    """
    global alias_cache
    if alias_cache is None:
        aliasfn = which_alias_file()
        alias_cache = set()
        if aliasfn:
            # Alias file uses the standard "name: target" format; only the
            # local part before ':' matters here.  Close the handle
            # explicitly rather than relying on refcounting.
            f = open(aliasfn)
            try:
                for l in f:
                    alias_cache.add(l.split(':')[0])
            finally:
                f.close()
    uid = email.split('@')[0]
    return uid in alias_cache
1499 ################################################################################
def get_changes_files(dir):
    """
    Takes a directory and lists all .changes files in it (as well as chdir'ing
    to the directory; this is due to broken behaviour on the part of p-u/p-a
    when you're not in the right place).

    Returns a list of filenames.
    """
    try:
        # Much of the rest of p-u/p-a depends on being in the right place
        os.chdir(dir)
        changes_files = [x for x in os.listdir(dir) if x.endswith('.changes')]
    except OSError as e:
        fubar("Failed to read list from directory %s (%s)" % (dir, e))

    return changes_files
1518 ################################################################################
# Module-level bootstrap: build the global apt configuration object and
# populate it from the default dak configuration file.
Cnf = apt_pkg.newConfiguration()
apt_pkg.ReadConfigFileISC(Cnf,default_config)

# If a non-default config file was selected (which_conf_file() consults the
# environment/host), layer its settings on top of the defaults.
if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1528 ###############################################################################