2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
39 import email as modemail
42 from dbconn import DBConn, get_architecture, get_component, get_suite
43 from dak_exceptions import *
44 from textutils import fix_maintainer
45 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
46 re_multi_line_field, re_srchasver, re_taint_free, \
47 re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource, \
50 from formats import parse_format, validate_changes_format
51 from srcformats import get_format_from_string
52 from collections import defaultdict
54 ################################################################################
56 default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
57 default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used
59 alias_cache = None #: Cache for email alias checks
60 key_uid_email_cache = {} #: Cache for email addresses from gpg key uids
62 # (hashname, function, earliest_changes_version)
63 known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
64 ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
66 # Monkeypatch commands.getstatusoutput as it may not return the correct exit
67 # code in lenny's Python. This also affects commands.getoutput and
# Replacement for commands.getstatusoutput() (see comment above): run the
# command through the shell with stderr merged into stdout.  Several original
# lines are elided from this view, so the notes below are partial.
69 def dak_getstatusoutput(cmd):
70 pipe = subprocess.Popen(cmd, shell=True, universal_newlines=True,
71 stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
# Read the child's entire output; blocks until the pipe reaches EOF.
73 output = pipe.stdout.read()
# Mirror commands.getstatusoutput(): drop a single trailing newline.
77 if output[-1:] == '\n':
# Monkeypatch the stdlib name so all callers get the fixed implementation
# (this also affects commands.getoutput, per the comment above).
85 commands.getstatusoutput = dak_getstatusoutput
87 ################################################################################
90 """ Escape html chars """
91 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
93 ################################################################################
95 def open_file(filename, mode='r'):
97 Open C{file}, return fileobject.
99 @type filename: string
100 @param filename: path/filename to open
103 @param mode: open mode
106 @return: open fileobject
108 @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
112 f = open(filename, mode)
# Translate IOError into the project's CantOpenError so callers only have
# to catch one exception type (the enclosing try: is elided in this view).
114 raise CantOpenError, filename
117 ################################################################################
119 def our_raw_input(prompt=""):
123 sys.stdout.write(prompt)
132 sys.stderr.write("\nUser interrupt (^D).\n")
135 ################################################################################
137 def extract_component_from_section(section):
# Split a "component/section" value apart; a section without a slash is
# mapped to a default component via the Cnf lookup below.  Returns the
# (section, component) pair.
140 if section.find('/') != -1:
141 component = section.split('/')[0]
143 # Expand default component
145 if Cnf.has_key("Component::%s" % section):
150 return (section, component)
152 ################################################################################
# Parse RFC822/deb822-style control data (a .changes or .dsc body) into a
# dict of lower-cased field name -> value.  signing_rules selects how PGP
# armour lines are treated (see parse_changes below for the contract).
# Many original lines are elided from this view; comments are partial.
154 def parse_deb822(contents, signing_rules=0):
158 # Split the lines in the input, keeping the linebreaks.
159 lines = contents.splitlines(True)
162 raise ParseChangesError, "[Empty changes file]"
164 # Reindex by line number so we can easily verify the format of
170 indexed_lines[index] = line[:-1]
174 num_of_lines = len(indexed_lines.keys())
177 while index < num_of_lines:
179 line = indexed_lines[index]
# Strict mode: a blank line inside the signed data must be immediately
# followed by the PGP SIGNATURE armour, otherwise the .dsc is malformed.
181 if signing_rules == 1:
183 if index > num_of_lines:
184 raise InvalidDscError, index
185 line = indexed_lines[index]
186 if not line.startswith("-----BEGIN PGP SIGNATURE"):
187 raise InvalidDscError, index
192 if line.startswith("-----BEGIN PGP SIGNATURE"):
194 if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
# Strict mode: skip the PGP header block, which ends at a blank line.
196 if signing_rules == 1:
197 while index < num_of_lines and line != "":
199 line = indexed_lines[index]
201 # If we're not inside the signed data, don't process anything
202 if signing_rules >= 0 and not inside_signature:
# "Field: value" on a single line.
204 slf = re_single_line_field.match(line)
206 field = slf.groups()[0].lower()
207 changes[field] = slf.groups()[1]
211 changes[field] += '\n'
# Continuation line of a multi-line field (leading whitespace).
213 mlf = re_multi_line_field.match(line)
216 raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
217 if first == 1 and changes[field] != "":
218 changes[field] += '\n'
220 changes[field] += mlf.groups()[0] + '\n'
# Strict mode requires the signature to have been closed by now.
224 if signing_rules == 1 and inside_signature:
225 raise InvalidDscError, index
# Keep the raw text around for callers that need it verbatim.
227 changes["filecontents"] = "".join(lines)
229 if changes.has_key("source"):
230 # Strip the source version in brackets from the source field,
231 # put it in the "source-version" field instead.
232 srcver = re_srchasver.search(changes["source"])
234 changes["source"] = srcver.group(1)
235 changes["source-version"] = srcver.group(2)
238 raise ParseChangesError, error
242 ################################################################################
244 def parse_changes(filename, signing_rules=0, dsc_file=0):
246 Parses a changes file and returns a dictionary where each field is a
247 key. The mandatory first argument is the filename of the .changes
250 signing_rules is an optional argument:
252 - If signing_rules == -1, no signature is required.
253 - If signing_rules == 0 (the default), a signature is required.
254 - If signing_rules == 1, it turns on the same strict format checking
257 The rules for (signing_rules == 1)-mode are:
259 - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
260 followed by any PGP header data and must end with a blank line.
262 - The data section must end with a blank line and must be followed by
263 "-----BEGIN PGP SIGNATURE-----".
266 changes_in = open_file(filename)
267 content = changes_in.read()
# Validate the raw bytes decode as UTF-8 before parsing (py2 idiom:
# unicode() raises on invalid input; the try: line is elided here).
270 unicode(content, 'utf-8')
272 raise ChangesUnicodeError, "Changes file not proper utf-8"
273 changes = parse_deb822(content, signing_rules)
277 # Finally ensure that everything needed for .changes is there
278 must_keywords = ('Format', 'Date', 'Source', 'Binary', 'Architecture', 'Version',
279 'Distribution', 'Maintainer', 'Description', 'Changes', 'Files')
# NOTE(review): presumably this mandatory-field check is skipped when
# dsc_file is true (the guard line is elided) — confirm against upstream.
282 for keyword in must_keywords:
283 if not changes.has_key(keyword.lower()):
284 missingfields.append(keyword)
286 if len(missingfields):
287 raise ParseChangesError, "Missing mandantory field(s) in changes file (policy 5.5): %s" % (missingfields)
291 ################################################################################
def hash_key(hashname):
    """Return the files-dict key under which the given hash is stored
    (e.g. "md5" -> "md5sum")."""
    return hashname + "sum"
296 ################################################################################
298 def create_hash(where, files, hashname, hashfunc):
300 create_hash extends the passed files dict with the given hash by
301 iterating over all files on disk and passing them to the hashing
306 for f in files.keys():
308 file_handle = open_file(f)
# Unopenable files are collected as reject messages rather than raised,
# so one bad file does not abort hashing of the rest.
309 except CantOpenError:
310 rejmsg.append("Could not open file %s for checksumming" % (f))
# Store the computed digest under e.g. files[f]["sha1sum"].
313 files[f][hash_key(hashname)] = hashfunc(file_handle)
318 ################################################################################
320 def check_hash(where, files, hashname, hashfunc):
322 check_hash checks the given hash in the files dict against the actual
323 files on disk. The hash values need to be present consistently in
324 all file entries. It does not modify its input in any way.
328 for f in files.keys():
332 file_handle = open_file(f)
334 # Check for the hash entry, to not trigger a KeyError.
335 if not files[f].has_key(hash_key(hashname)):
336 rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
340 # Actually check the hash for correctness.
341 if hashfunc(file_handle) != files[f][hash_key(hashname)]:
342 rejmsg.append("%s: %s check failed in %s" % (f, hashname,
344 except CantOpenError:
345 # TODO: This happens when the file is in the pool.
346 # warn("Cannot open file %s" % f)
353 ################################################################################
355 def check_size(where, files):
357 check_size checks the file sizes in the passed files dict against the
362 for f in files.keys():
367 # TODO: This happens when the file is in the pool.
# Compare the on-disk size (from os.stat, elided above) against the
# declared size; the declared value is a string, hence the int() cast.
371 actual_size = entry[stat.ST_SIZE]
372 size = int(files[f]["size"])
373 if size != actual_size:
374 rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
375 % (f, actual_size, size, where))
378 ################################################################################
380 def check_dsc_files(dsc_filename, dsc=None, dsc_files=None):
382 Verify that the files listed in the Files field of the .dsc are
383 those expected given the announced Format.
385 @type dsc_filename: string
386 @param dsc_filename: path of .dsc file
389 @param dsc: the content of the .dsc parsed by C{parse_changes()}
391 @type dsc_files: dict
392 @param dsc_files: the file list returned by C{build_file_list()}
395 @return: all errors detected
399 # Parse the file if needed
401 dsc = parse_changes(dsc_filename, signing_rules=1, dsc_file=1);
403 if dsc_files is None:
404 dsc_files = build_file_list(dsc, is_a_dsc=1)
406 # Ensure .dsc lists proper set of source files according to the format
408 has = defaultdict(lambda: 0)
# Lookup table mapping filename-suffix regexes to the 'has' counters they
# increment.  Order matters: first match wins.
# NOTE(review): the first three patterns use unescaped dots, so they match
# more loosely than the escaped variants below — confirm this is intended.
411 (r'orig.tar.gz', ('orig_tar_gz', 'orig_tar')),
412 (r'diff.gz', ('debian_diff',)),
413 (r'tar.gz', ('native_tar_gz', 'native_tar')),
414 (r'debian\.tar\.(gz|bz2)', ('debian_tar',)),
415 (r'orig\.tar\.(gz|bz2)', ('orig_tar',)),
416 (r'tar\.(gz|bz2)', ('native_tar',)),
417 (r'orig-.+\.tar\.(gz|bz2)', ('more_orig_tar',)),
420 for f in dsc_files.keys():
421 m = re_issource.match(f)
423 rejmsg.append("%s: %s in Files field not recognised as source."
427 # Populate 'has' dictionary by resolving keys in lookup table
429 for regex, keys in ftype_lookup:
430 if re.match(regex, m.group(3)):
436 # File does not match anything in lookup table; reject
# NOTE(review): calls reject() here but appends to rejmsg elsewhere —
# no reject() is visible in this file; verify this name resolves.
438 reject("%s: unexpected source file '%s'" % (dsc_filename, f))
440 # Check for multiple files
441 for file_type in ('orig_tar', 'native_tar', 'debian_tar', 'debian_diff'):
442 if has[file_type] > 1:
443 rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))
445 # Source format specific tests
447 format = get_format_from_string(dsc['format'])
449 '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)
452 except UnknownFormatError:
453 # Not an error here for now
458 ################################################################################
460 def check_hash_fields(what, manifest):
462 check_hash_fields ensures that there are no checksum fields in the
463 given dict that we do not know about.
# known_hashes is the module-level list of (name, func, version) tuples;
# only its names are needed here.
467 hashes = map(lambda x: x[0], known_hashes)
468 for field in manifest:
469 if field.startswith("checksums-"):
470 hashname = field.split("-",1)[1]
471 if hashname not in hashes:
472 rejmsg.append("Unsupported checksum field for %s "\
473 "in %s" % (hashname, what))
476 ################################################################################
# Ensure the .changes carries (or gets) the given hash: for new-enough
# Format versions the checksums are imported from the changes themselves;
# otherwise they are computed from disk.  The dispatch assignment for
# 'func' is elided from this view.
478 def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
479 if format >= version:
480 # The version should contain the specified hash.
483 # Import hashes from the changes
484 rejmsg = parse_checksums(".changes", files, changes, hashname)
488 # We need to calculate the hash because it can't possibly
491 return func(".changes", files, hashname, hashfunc)
493 # We could add the orig which might be in the pool to the files dict to
494 # access the checksums easily.
496 def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
498 ensure_dsc_hashes' task is to ensure that each and every *present* hash
499 in the dsc is correct, i.e. identical to the changes file and if necessary
500 the pool. The latter task is delegated to check_hash.
# A missing Checksums-<hash> field is not an error for a .dsc — only
# hashes that are present get verified.
504 if not dsc.has_key('Checksums-%s' % (hashname,)):
506 # Import hashes from the dsc
507 parse_checksums(".dsc", dsc_files, dsc, hashname)
509 rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
512 ################################################################################
# Parse a Checksums-<hash> field out of *manifest* (a parsed .changes/.dsc)
# and copy each file's checksum into the files dict; afterwards verify
# every file entry got one.  Mutates *files* in place.
514 def parse_checksums(where, files, manifest, hashname):
516 field = 'checksums-%s' % hashname
517 if not field in manifest:
519 for line in manifest[field].split('\n'):
522 clist = line.strip().split(' ')
# Each checksum line is "<checksum> <size> <filename>".
524 checksum, size, checkfile = clist
526 rejmsg.append("Cannot parse checksum line [%s]" % (line))
528 if not files.has_key(checkfile):
529 # TODO: check for the file's entry in the original files dict, not
530 # the one modified by (auto)byhand and other weird stuff
531 # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
532 # (file, hashname, where))
# Both sizes are strings at this point, so this is a string comparison.
534 if not files[checkfile]["size"] == size:
535 rejmsg.append("%s: size differs for files and checksums-%s entry "\
536 "in %s" % (checkfile, hashname, where))
538 files[checkfile][hash_key(hashname)] = checksum
539 for f in files.keys():
540 if not files[f].has_key(hash_key(hashname)):
# NOTE(review): this message interpolates 'checkfile' (the last line of
# the previous loop) rather than 'f', the file actually missing an
# entry — looks like a bug worth confirming upstream.
541 rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
545 ################################################################################
547 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
549 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
552 # Make sure we have a Files: field to parse...
553 if not changes.has_key(field):
554 raise NoFilesFieldError
556 # Validate .changes Format: field
558 validate_changes_format(parse_format(changes['format']), field)
560 includes_section = (not is_a_dsc) and field == "files"
562 # Parse each entry/line:
563 for i in changes[field].split('\n'):
567 section = priority = ""
570 (md5, size, section, priority, name) = s
572 (md5, size, name) = s
574 raise ParseChangesError, i
581 (section, component) = extract_component_from_section(section)
583 files[name] = dict(size=size, section=section,
584 priority=priority, component=component)
585 files[name][hashname] = md5
589 ################################################################################
591 def send_mail (message, filename=""):
592 """sendmail wrapper, takes _either_ a message string or a file as arguments"""
594 # Check whether we're supposed to be sending mail
595 if Cnf.has_key("Dinstall::Options::No-Mail") and Cnf["Dinstall::Options::No-Mail"]:
598 # If we've been passed a string dump it into a temporary file
600 (fd, filename) = tempfile.mkstemp()
601 os.write (fd, message)
604 if Cnf.has_key("Dinstall::MailWhiteList") and \
605 Cnf["Dinstall::MailWhiteList"] != "":
606 message_in = open_file(filename)
607 message_raw = modemail.message_from_file(message_in)
611 whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
613 for line in whitelist_in:
614 if not re_whitespace_comment.match(line):
615 if re_re_mark.match(line):
616 whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
618 whitelist.append(re.compile(re.escape(line.strip())))
623 fields = ["To", "Bcc", "Cc"]
626 value = message_raw.get(field, None)
629 for item in value.split(","):
630 (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
636 if not mail_whitelisted:
637 print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])
641 # Doesn't have any mail in whitelist so remove the header
643 del message_raw[field]
645 message_raw.replace_header(field, ', '.join(match))
647 # Change message fields in order if we don't have a To header
648 if not message_raw.has_key("To"):
651 if message_raw.has_key(field):
652 message_raw[fields[-1]] = message_raw[field]
653 del message_raw[field]
656 # Clean up any temporary files
657 # and return, as we removed all recipients.
659 os.unlink (filename);
662 fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
663 os.write (fd, message_raw.as_string(True));
667 (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
669 raise SendmailFailedError, output
671 # Clean up any temporary files
675 ################################################################################
677 def poolify (source, component):
680 if source[:3] == "lib":
681 return component + source[:4] + '/' + source + '/'
683 return component + source[:1] + '/' + source + '/'
685 ################################################################################
# Move *src* to *dest* (file or directory), creating the destination
# directory (mode 2775) if needed.  Refuses to overwrite unless
# 'overwrite' is set.  The unlink-of-src line is elided from this view.
687 def move (src, dest, overwrite = 0, perms = 0664):
688 if os.path.exists(dest) and os.path.isdir(dest):
691 dest_dir = os.path.dirname(dest)
692 if not os.path.exists(dest_dir):
# Temporarily clear the umask so makedirs gets the exact mode 02775.
693 umask = os.umask(00000)
694 os.makedirs(dest_dir, 02775)
696 #print "Moving %s to %s..." % (src, dest)
697 if os.path.exists(dest) and os.path.isdir(dest):
698 dest += '/' + os.path.basename(src)
699 # Don't overwrite unless forced to
700 if os.path.exists(dest):
702 fubar("Can't move %s to %s - file already exists." % (src, dest))
704 if not os.access(dest, os.W_OK):
705 fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
# copy2 preserves mtime/permissions; mode is then forced to 'perms'.
706 shutil.copy2(src, dest)
707 os.chmod(dest, perms)
# Copy *src* to *dest*; same rules as move() above, but raises the
# project exceptions instead of calling fubar() and leaves src in place.
710 def copy (src, dest, overwrite = 0, perms = 0664):
711 if os.path.exists(dest) and os.path.isdir(dest):
714 dest_dir = os.path.dirname(dest)
715 if not os.path.exists(dest_dir):
# Temporarily clear the umask so makedirs gets the exact mode 02775.
716 umask = os.umask(00000)
717 os.makedirs(dest_dir, 02775)
719 #print "Copying %s to %s..." % (src, dest)
720 if os.path.exists(dest) and os.path.isdir(dest):
721 dest += '/' + os.path.basename(src)
722 # Don't overwrite unless forced to
723 if os.path.exists(dest):
725 raise FileExistsError
727 if not os.access(dest, os.W_OK):
728 raise CantOverwriteError
729 shutil.copy2(src, dest)
730 os.chmod(dest, perms)
732 ################################################################################
735 res = socket.getfqdn()
736 database_hostname = Cnf.get("Config::" + res + "::DatabaseHostname")
737 if database_hostname:
738 return database_hostname
742 def which_conf_file ():
743 if os.getenv('DAK_CONFIG'):
744 return os.getenv('DAK_CONFIG')
746 res = socket.getfqdn()
747 # In case we allow local config files per user, try if one exists
748 if Cnf.FindB("Config::" + res + "::AllowLocalConfig"):
749 homedir = os.getenv("HOME")
750 confpath = os.path.join(homedir, "/etc/dak.conf")
751 if os.path.exists(confpath):
752 apt_pkg.ReadConfigFileISC(Cnf,default_config)
754 # We are still in here, so there is no local config file or we do
755 # not allow local files. Do the normal stuff.
756 if Cnf.get("Config::" + res + "::DakConfig"):
757 return Cnf["Config::" + res + "::DakConfig"]
759 return default_config
761 def which_apt_conf_file ():
762 res = socket.getfqdn()
763 # In case we allow local config files per user, try if one exists
764 if Cnf.FindB("Config::" + res + "::AllowLocalConfig"):
765 homedir = os.getenv("HOME")
766 confpath = os.path.join(homedir, "/etc/dak.conf")
767 if os.path.exists(confpath):
768 apt_pkg.ReadConfigFileISC(Cnf,default_config)
770 if Cnf.get("Config::" + res + "::AptConfig"):
771 return Cnf["Config::" + res + "::AptConfig"]
773 return default_apt_config
775 def which_alias_file():
776 hostname = socket.getfqdn()
777 aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
778 if os.path.exists(aliasfn):
783 ################################################################################
785 def TemplateSubst(subst_map, filename):
786 """ Perform a substition of template """
787 templatefile = open_file(filename)
788 template = templatefile.read()
789 for k, v in subst_map.iteritems():
790 template = template.replace(k, str(v))
794 ################################################################################
def fubar(msg, exit_code=1):
    """Print a fatal error message to stderr and terminate the process.

    @type msg: string
    @param msg: error text (printed prefixed with "E: ")
    @type exit_code: int
    @param exit_code: process exit status (default 1)
    """
    sys.stderr.write("E: %s\n" % (msg))
    # The visible code never used exit_code; a fatal-error helper must
    # actually terminate, so exit with the requested status.
    sys.exit(exit_code)
801 sys.stderr.write("W: %s\n" % (msg))
803 ################################################################################
805 # Returns the user name with a laughable attempt at rfc822 conformancy
806 # (read: removing stray periods).
808 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
811 return pwd.getpwuid(os.getuid())[0]
813 ################################################################################
823 return ("%d%s" % (c, t))
825 ################################################################################
827 def cc_fix_changes (changes):
828 o = changes.get("architecture", "")
830 del changes["architecture"]
831 changes["architecture"] = {}
833 changes["architecture"][j] = 1
835 def changes_compare (a, b):
836 """ Sort by source name, source version, 'have source', and then by filename """
838 a_changes = parse_changes(a)
843 b_changes = parse_changes(b)
847 cc_fix_changes (a_changes)
848 cc_fix_changes (b_changes)
850 # Sort by source name
851 a_source = a_changes.get("source")
852 b_source = b_changes.get("source")
853 q = cmp (a_source, b_source)
857 # Sort by source version
858 a_version = a_changes.get("version", "0")
859 b_version = b_changes.get("version", "0")
860 q = apt_pkg.VersionCompare(a_version, b_version)
864 # Sort by 'have source'
865 a_has_source = a_changes["architecture"].get("source")
866 b_has_source = b_changes["architecture"].get("source")
867 if a_has_source and not b_has_source:
869 elif b_has_source and not a_has_source:
872 # Fall back to sort by filename
875 ################################################################################
877 def find_next_free (dest, too_many=100):
880 while os.path.exists(dest) and extra < too_many:
881 dest = orig_dest + '.' + repr(extra)
883 if extra >= too_many:
884 raise NoFreeFilenameError
887 ################################################################################
def result_join (original, sep = '\t'):
    """Join a result row with *sep*, rendering None entries as "".

    @type original: sequence
    @param original: values to join (may contain None)
    @type sep: string
    @param sep: separator string (tab by default)

    @rtype: string
    @return: the joined string
    """
    # Single linear join instead of index loop + append; 'is None' rather
    # than '== None' for the identity test.
    return sep.join(["" if i is None else i for i in original])
898 ################################################################################
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    """Prefix each line of *str* with *prefix* and return the result.

    Each line is stripped of surrounding whitespace first; blank lines
    are dropped unless include_blank_lines is true.  The result carries
    no trailing newline.

    @type str: string
    @param str: input text (parameter name kept for API compatibility)
    @type prefix: string
    @param prefix: text prepended to every kept line
    @type include_blank_lines: int
    @param include_blank_lines: keep blank lines when true
    """
    # Build-and-join instead of repeated string += (linear, not quadratic);
    # joining also makes the "strip trailing newline" step unnecessary.
    stripped = [l.strip() for l in str.split('\n')]
    return "\n".join([prefix + l for l in stripped if l or include_blank_lines])
913 def validate_changes_file_arg(filename, require_changes=1):
915 'filename' is either a .changes or .dak file. If 'filename' is a
916 .dak file, it's changed to be the corresponding .changes file. The
917 function then checks if the .changes file a) exists and b) is
918 readable and returns the .changes filename if so. If there's a
919 problem, the next action depends on the option 'require_changes'
922 - If 'require_changes' == -1, errors are ignored and the .changes
923 filename is returned.
924 - If 'require_changes' == 0, a warning is given and 'None' is returned.
925 - If 'require_changes' == 1, a fatal error is raised.
930 orig_filename = filename
931 if filename.endswith(".dak"):
932 filename = filename[:-4]+".changes"
934 if not filename.endswith(".changes"):
935 error = "invalid file type; not a changes file"
937 if not os.access(filename,os.R_OK):
938 if os.path.exists(filename):
939 error = "permission denied"
941 error = "file not found"
944 if require_changes == 1:
945 fubar("%s: %s." % (orig_filename, error))
946 elif require_changes == 0:
947 warn("Skipping %s - %s" % (orig_filename, error))
949 else: # We only care about the .dak file
954 ################################################################################
957 return (arch != "source" and arch != "all")
959 ################################################################################
def join_with_commas_and(list):
    """Render a list as English prose: [] -> "nothing", ["a"] -> "a",
    ["a","b","c"] -> "a, b and c"."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    head, last = list[:-1], list[-1]
    return "%s and %s" % (", ".join(head), last)
966 ################################################################################
971 (pkg, version, constraint) = atom
973 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
976 pp_deps.append(pp_dep)
977 return " |".join(pp_deps)
979 ################################################################################
984 ################################################################################
986 def parse_args(Options):
987 """ Handle -a, -c and -s arguments; returns them as SQL constraints """
988 # XXX: This should go away and everything which calls it be converted
989 # to use SQLA properly. For now, we'll just fix it not to use
990 # the old Pg interface though
991 session = DBConn().session()
995 for suitename in split_args(Options["Suite"]):
996 suite = get_suite(suitename, session=session)
997 if suite.suite_id is None:
998 warn("suite '%s' not recognised." % (suite.suite_name))
1000 suite_ids_list.append(suite.suite_id)
1002 con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
1004 fubar("No valid suite given.")
1009 if Options["Component"]:
1010 component_ids_list = []
1011 for componentname in split_args(Options["Component"]):
1012 component = get_component(componentname, session=session)
1013 if component is None:
1014 warn("component '%s' not recognised." % (componentname))
1016 component_ids_list.append(component.component_id)
1017 if component_ids_list:
1018 con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
1020 fubar("No valid component given.")
1024 # Process architecture
1025 con_architectures = ""
1027 if Options["Architecture"]:
1029 for archname in split_args(Options["Architecture"]):
1030 if archname == "source":
1033 arch = get_architecture(archname, session=session)
1035 warn("architecture '%s' not recognised." % (archname))
1037 arch_ids_list.append(arch.arch_id)
1039 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
1041 if not check_source:
1042 fubar("No valid architecture given.")
1046 return (con_suites, con_architectures, con_components, check_source)
1048 ################################################################################
1050 # Inspired(tm) by Bryn Keller's print_exc_plus (See
1051 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
1054 tb = sys.exc_info()[2]
1061 frame = frame.f_back
1063 traceback.print_exc()
1065 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
1066 frame.f_code.co_filename,
1068 for key, value in frame.f_locals.items():
1069 print "\t%20s = " % key,
1073 print "<unable to print>"
1075 ################################################################################
1077 def try_with_debug(function):
1085 ################################################################################
1087 def arch_compare_sw (a, b):
1089 Function for use in sorting lists of architectures.
1091 Sorts normally except that 'source' dominates all others.
1094 if a == "source" and b == "source":
1103 ################################################################################
def split_args (s, dwim=1):
    """
    Split command line arguments which can be separated by either commas
    or whitespace. If dwim is set, it will complain about string ending
    in comma since this usually means someone did 'dak ls -a i386, m68k
    foo' or something and the inevitable confusion resulting from 'm68k'
    being treated as an argument is undesirable.

    @type s: string
    @param s: the argument string to split
    @type dwim: int
    @param dwim: complain (fatally, via fubar) about a trailing comma

    @rtype: list of strings
    @return: the split arguments
    """
    if s.find(",") == -1:
        # No comma anywhere: plain whitespace-separated arguments.
        return s.split()
    else:
        if s[-1:] == "," and dwim:
            fubar("split_args: found trailing comma, spurious space maybe?")
        # Note: components are NOT stripped, matching historic behaviour.
        return s.split(",")
1121 ################################################################################
1123 def gpgv_get_status_output(cmd, status_read, status_write):
1125 Our very own version of commands.getouputstatus(), hacked to support
# Fork/exec plumbing: run cmd under /bin/sh with dedicated pipes for
# stdin, stdout and stderr, while leaving gpgv's --status-fd
# (status_write) open in the child.  Several lines (fork, dup2 setup in
# the child, read-loop details) are elided from this view.
1129 cmd = ['/bin/sh', '-c', cmd]
1130 p2cread, p2cwrite = os.pipe()
1131 c2pread, c2pwrite = os.pipe()
1132 errout, errin = os.pipe()
# Child: close every fd except the status fd before exec'ing.
1142 for i in range(3, 256):
1143 if i != status_write:
1149 os.execvp(cmd[0], cmd)
# Parent: repoint local fds at the child's output ends.
1155 os.dup2(c2pread, c2pwrite)
1156 os.dup2(errout, errin)
1158 output = status = ""
# Multiplex reads over stdout/stderr and the status fd until all close.
1160 i, o, e = select.select([c2pwrite, errin, status_read], [], [])
1163 r = os.read(fd, 8196)
1165 more_data.append(fd)
1166 if fd == c2pwrite or fd == errin:
1168 elif fd == status_read:
1171 fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
1173 pid, exit_status = os.waitpid(pid, 0)
1175 os.close(status_write)
1176 os.close(status_read)
1186 return output, status, exit_status
1188 ################################################################################
# Parse gpgv's --status-fd text into a dict of keyword -> argument list,
# accumulating any format problems in 'internal_error' (empty string when
# the output was well-formed).
1190 def process_gpgv_output(status):
1191 # Process the status-fd output
1194 for line in status.split('\n'):
1198 split = line.split()
1200 internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
# Every valid status line looks like "[GNUPG:] KEYWORD args...".
1202 (gnupg, keyword) = split[:2]
1203 if gnupg != "[GNUPG:]":
1204 internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
# Some tokens legitimately repeat; all others must appear at most once.
1207 if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
1208 internal_error += "found duplicate status token ('%s').\n" % (keyword)
1211 keywords[keyword] = args
1213 return (keywords, internal_error)
1215 ################################################################################
1217 def retrieve_key (filename, keyserver=None, keyring=None):
1219 Retrieve the key that signed 'filename' from 'keyserver' and
1220 add it to 'keyring'. Returns nothing on success, or an error message
1224 # Defaults for keyserver and keyring
1226 keyserver = Cnf["Dinstall::KeyServer"]
1228 keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]
1230 # Ensure the filename contains no shell meta-characters or other badness
1231 if not re_taint_free.match(filename):
1232 return "%s: tainted filename" % (filename)
1234 # Invoke gpgv on the file
1235 status_read, status_write = os.pipe()
1236 cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
1237 (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
1239 # Process the status-fd output
1240 (keywords, internal_error) = process_gpgv_output(status)
1242 return internal_error
1244 if not keywords.has_key("NO_PUBKEY"):
1245 return "didn't find expected NO_PUBKEY in gpgv status-fd output"
1247 fingerprint = keywords["NO_PUBKEY"][0]
1248 # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
1249 # it'll try to create a lockfile in /dev. A better solution might
1250 # be a tempfile or something.
1251 cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
1252 % (Cnf["Dinstall::SigningKeyring"])
1253 cmd += " --keyring %s --keyserver %s --recv-key %s" \
1254 % (keyring, keyserver, fingerprint)
1255 (result, output) = commands.getstatusoutput(cmd)
1257 return "'%s' failed with exit code %s" % (cmd, result)
1261 ################################################################################
def gpg_keyring_args(keyrings=None):
    """Build the gpg command line options selecting the given keyrings.

    @type keyrings: list of strings (or None)
    @param keyrings: keyring paths; when empty/None, falls back to the
        configured Dinstall::GPGKeyring list

    @rtype: string
    @return: space-joined "--keyring <path>" options
    """
    if not keyrings:
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    return " ".join(["--keyring %s" % x for x in keyrings])
1269 ################################################################################
def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None):
    """
    Check the signature of a file and return the fingerprint if the
    signature is valid, or 'None' if it's not.  The first argument is the
    filename whose signature should be checked.  The second argument is
    optional and is the name of the file the detached signature applies
    to.  The third argument is optional and is a *list* of keyrings to
    use; when empty, the keyrings from Dinstall::GPGKeyring are used.
    'autofetch' can either be None, True or False.  If None, the default
    behaviour specified in the config (Dinstall::KeyAutoFetch) is used.

    @rtype: tuple
    @return: (fingerprint, rejects); 'fingerprint' is None when the check
             failed, in which case 'rejects' lists the reasons.
    """

    rejects = []

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(sig_filename):
        rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
        return (None, rejects)

    if data_filename and not re_taint_free.match(data_filename):
        rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
        return (None, rejects)

    if not keyrings:
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    # Autofetch the signing key if that's enabled
    if autofetch is None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    if autofetch:
        error_msg = retrieve_key(sig_filename)
        if error_msg:
            rejects.append(error_msg)
            return (None, rejects)

    # Build the command line
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    if internal_error:
        rejects.append("internal error while performing signature check on %s." % (sig_filename))
        # BUGFIX: list.append() takes exactly one argument.  The old
        # two-argument calls (left over from the reject(msg, prefix)
        # callback API) raised TypeError whenever this path was hit.
        rejects.append(internal_error)
        rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.")
        return (None, rejects)

    # Now check for obviously bad things in the processed output
    if "KEYREVOKED" in keywords:
        rejects.append("The key used to sign %s has been revoked." % (sig_filename))
    if "BADSIG" in keywords:
        rejects.append("bad signature on %s." % (sig_filename))
    if "ERRSIG" in keywords and "NO_PUBKEY" not in keywords:
        rejects.append("failed to check signature on %s." % (sig_filename))
    if "NO_PUBKEY" in keywords:
        args = keywords["NO_PUBKEY"]
        # Default so a malformed (argument-less) status line can't leave
        # 'key' unbound.
        key = "UNKNOWN"
        if len(args) >= 1:
            key = args[0]
        rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
    if "BADARMOR" in keywords:
        rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
    if "NODATA" in keywords:
        rejects.append("no signature found in %s." % (sig_filename))
    if "EXPKEYSIG" in keywords:
        args = keywords["EXPKEYSIG"]
        key = "UNKNOWN"
        if len(args) >= 1:
            key = args[0]
        rejects.append("Signature made by expired key 0x%s" % (key))
    if "KEYEXPIRED" in keywords and "GOODSIG" not in keywords:
        args = keywords["KEYEXPIRED"]
        expiredate = ""
        if len(args) >= 1:
            timestamp = args[0]
            # gpgv emits either an epoch timestamp or an ISO8601 date
            # (the latter contains a 'T').
            if timestamp.count("T") == 0:
                try:
                    expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
                except ValueError:
                    expiredate = "unknown (%s)" % (timestamp)
            else:
                expiredate = timestamp
        rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))

    if rejects:
        return (None, rejects)

    # Next check gpgv exited with a zero return code
    if exit_status:
        rejects.append("gpgv failed while checking %s." % (sig_filename))
        if status.strip():
            rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "))
        else:
            rejects.append(prefix_multi_line_string(output, " [GPG output:] "))
        return (None, rejects)

    # Sanity check the good stuff we expect
    if "VALIDSIG" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
    else:
        args = keywords["VALIDSIG"]
        if len(args) < 1:
            rejects.append("internal error while checking signature on %s." % (sig_filename))
        else:
            fingerprint = args[0]
    if "GOODSIG" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
    if "SIG_ID" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))

    # Finally ensure there's not something we don't recognise
    known_keywords = dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
                          SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
                          NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="",POLICY_URL="")

    for keyword in keywords.keys():
        if keyword not in known_keywords:
            rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))

    if rejects:
        return (None, rejects)

    return (fingerprint, [])
1403 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """
    Retrieve email addresses from gpg key uids for a given fingerprint.

    Results are memoised in the module-level key_uid_email_cache, keyed
    by fingerprint.

    @rtype: set
    @return: set of email addresses found in the key's uid lines
    """
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses is not None:
        # Cache hit -- don't shell out to gpg again.
        return addresses
    addresses = set()
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
           % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    if result == 0:
        for l in output.split('\n'):
            # re_gpg_uid extracts the email address from a 'uid' line.
            m = re_gpg_uid.match(l)
            if m:
                addresses.add(m.group(1))
    # Cache even an empty result so a missing key isn't looked up twice.
    key_uid_email_cache[fingerprint] = addresses
    return addresses
1422 ################################################################################
1424 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """
    Greedily word-wrap 'paragraph' so that no line exceeds 'max_length'
    characters; a word longer than 'max_length' is placed on a line of
    its own.  Every line after the first is prefixed with 'prefix'.

    @rtype: string
    @return: the wrapped text
    """
    s = ""
    line = ""
    for word in paragraph.split():
        if len(word) > max_length:
            # BUGFIX: flush any pending line and RESET it; previously the
            # pending content was emitted here and then emitted a second
            # time later because 'line' was never cleared.
            if line:
                s += line + '\n' + prefix
                line = ""
            # The oversized word goes on a line of its own.
            s += word + '\n' + prefix
        elif not line:
            # Starting a fresh line avoids the old spurious leading space.
            line = word
        elif len(line) + len(word) + 1 > max_length:
            line += ""  # no-op for clarity: word doesn't fit
            s += line + '\n' + prefix
            line = word
        else:
            line += ' ' + word
    return s + line
1455 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
    Returns fixed 'src'
    """
    # Strip the archive root off both paths (first occurrence only).
    rel_src = src.replace(root, '', 1)
    rel_dest_dir = os.path.dirname(dest.replace(root, '', 1))
    # One '../' per directory component between the link location and
    # the root.
    hops = len(rel_dest_dir.split('/'))
    return ('../' * hops) + rel_src
1468 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Return a secure and unique filename by pre-creating it.
    If 'directory' is non-null, it will be the directory the file is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pair (fd, name).
    """
    # Note mkstemp's argument order is (suffix, prefix, dir).
    (fd, pathname) = tempfile.mkstemp(suffix, prefix, directory)
    return (fd, pathname)
1482 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix=""):
    """
    Return a secure and unique directory by pre-creating it.
    If 'parent' is non-null, it will be the directory the directory is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pathname to the new directory
    """
    # mkdtemp's argument order is (suffix, prefix, dir); the directory
    # is created readable only by the calling user.
    new_dir = tempfile.mkdtemp(suffix, prefix, parent)
    return new_dir
1496 ################################################################################
def is_email_alias(email):
    """ Checks if the user part of the email is listed in the alias file. """
    global alias_cache
    if alias_cache is None:
        aliasfn = which_alias_file()
        alias_cache = set()
        if aliasfn:
            # Each alias line looks like 'name: target'; only the alias
            # name matters here.  Close the file explicitly -- the old
            # code leaked the handle.
            f = open(aliasfn)
            try:
                for l in f:
                    alias_cache.add(l.split(':')[0])
            finally:
                f.close()
    uid = email.split('@')[0]
    return uid in alias_cache
1510 ################################################################################
def get_changes_files(from_dir):
    """
    Takes a directory and lists all .changes files in it (as well as chdir'ing
    to the directory; this is due to broken behaviour on the part of p-u/p-a
    when you're not in the right place)

    Returns a list of filenames
    """
    try:
        # Much of the rest of p-u/p-a depends on being in the right place
        os.chdir(from_dir)
        changes_files = [entry for entry in os.listdir(from_dir)
                         if entry.endswith('.changes')]
    except OSError as e:
        fubar("Failed to read list from directory %s (%s)" % (from_dir, e))

    return changes_files
1529 ################################################################################
# Initialise dak's global configuration object at import time.
# NOTE(review): setting DAK_TEST in the environment appears to skip
# reading the on-disk configuration (so the test suite can import this
# module on a machine without a deployed /etc/dak) -- confirm the
# intended nesting of the which_conf_file() read against callers.
Cnf = apt_pkg.newConfiguration()
if not os.getenv("DAK_TEST"):
    apt_pkg.ReadConfigFileISC(Cnf,default_config)

    # Layer the host-specific config on top of the defaults when a
    # different config file is selected.
    if which_conf_file() != default_config:
        apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1540 ################################################################################
def parse_wnpp_bug_file(file = "/srv/ftp-master.debian.org/scripts/masterfiles/wnpp_rm"):
    """
    Parses the wnpp bug list available at http://qa.debian.org/data/bts/wnpp_rm
    Well, actually it parsed a local copy, but let's document the source
    anyway.

    @type file: string
    @param file: path to the wnpp_rm file

    @rtype: dict
    @return: a dict associating source package name with a list of open wnpp
             bug numbers (Yes, there might be more than one)
    """
    try:
        # Read everything up front so the error handling stays in one
        # place; close the handle explicitly (the old code leaked it).
        f = open(file)
        try:
            lines = f.readlines()
        finally:
            f.close()
    except IOError:
        print("Warning: Couldn't open %s; don't know about WNPP bugs, so won't close any." % file)
        lines = []

    wnpp = {}
    for line in lines:
        # Lines look like 'srcpackage: BUG1 | BUG2 | ...'; anything
        # without ': ' is ignored.
        splited_line = line.split(": ", 1)
        if len(splited_line) > 1:
            wnpp[splited_line[0]] = splited_line[1].split("|")

    # Reduce each entry to just the bug numbers.
    for source in wnpp.keys():
        bugs = []
        for wnpp_bug in wnpp[source]:
            # BUGFIX: guard against entries without any digits; the old
            # code called .group() on a possible None and crashed.
            match = re.search(r"(\d+)", wnpp_bug)
            if match:
                bugs.append(match.group())
        wnpp[source] = bugs

    return wnpp
1575 ################################################################################
1577 def get_packages_from_ftp(root, suite, component, architecture):
1579 Returns an object containing apt_pkg-parseable data collected by
1580 aggregating Packages.gz files gathered for each architecture.
1583 @param root: path to ftp archive root directory
1586 @param suite: suite to extract files from
1588 @type component: string
1589 @param component: component to extract files from
1591 @type architecture: string
1592 @param architecture: architecture to extract files from
1595 @return: apt_pkg class containing package data
1598 filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (root, suite, component, architecture)
1599 (fd, temp_file) = temp_filename()
1600 (result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_file))
1602 fubar("Gunzip invocation failed!\n%s\n" % (output), result)
1603 filename = "%s/dists/%s/%s/debian-installer/binary-%s/Packages.gz" % (root, suite, component, architecture)
1604 if os.path.exists(filename):
1605 (result, output) = commands.getstatusoutput("gunzip -c %s >> %s" % (filename, temp_file))
1607 fubar("Gunzip invocation failed!\n%s\n" % (output), result)
1608 packages = open_file(temp_file)
1609 Packages = apt_pkg.ParseTagFile(packages)
1610 os.unlink(temp_file)