1 # Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
3 # This program is free software; you can redistribute it and/or modify
4 # it under the terms of the GNU General Public License as published by
5 # the Free Software Foundation; either version 2 of the License, or
6 # (at your option) any later version.
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
13 # You should have received a copy of the GNU General Public License along
14 # with this program; if not, write to the Free Software Foundation, Inc.,
15 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
17 """module to manipulate the archive
19 This module provides classes to manipulate the archive.
22 from daklib.dbconn import *
23 import daklib.checks as checks
24 from daklib.config import Config
25 import daklib.upload as upload
26 import daklib.utils as utils
27 from daklib.fstransactions import FilesystemTransaction
28 from daklib.regexes import re_changelog_versions, re_bin_only_nmu
29 import daklib.daksubprocess
32 from datetime import datetime
35 from sqlalchemy.orm.exc import NoResultFound
36 from sqlalchemy.orm import object_session
class ArchiveException(Exception):
    """Base class for errors raised while manipulating the archive."""
    pass
class HashMismatchException(ArchiveException):
    """Raised when a file's size or checksums do not match the pool copy."""
    pass
class ArchiveTransaction(object):
    """manipulate the archive in a transaction
    """

    def __init__(self):
        # Filesystem changes are staged in a FilesystemTransaction so they
        # can be committed or rolled back together with the DB session.
        self.fs = FilesystemTransaction()
        self.session = DBConn().session()
54 def get_file(self, hashed_file, source_name, check_hashes=True):
55 """Look for file C{hashed_file} in database
57 @type hashed_file: L{daklib.upload.HashedFile}
58 @param hashed_file: file to look for in the database
60 @type source_name: str
61 @param source_name: source package name
63 @type check_hashes: bool
64 @param check_hashes: check size and hashes match
66 @raise KeyError: file was not found in the database
67 @raise HashMismatchException: hash mismatch
69 @rtype: L{daklib.dbconn.PoolFile}
70 @return: database entry for the file
72 poolname = os.path.join(utils.poolify(source_name), hashed_file.filename)
74 poolfile = self.session.query(PoolFile).filter_by(filename=poolname).one()
75 if check_hashes and (poolfile.filesize != hashed_file.size
76 or poolfile.md5sum != hashed_file.md5sum
77 or poolfile.sha1sum != hashed_file.sha1sum
78 or poolfile.sha256sum != hashed_file.sha256sum):
79 raise HashMismatchException('{0}: Does not match file already existing in the pool.'.format(hashed_file.filename))
82 raise KeyError('{0} not found in database.'.format(poolname))
84 def _install_file(self, directory, hashed_file, archive, component, source_name):
87 Will not give an error when the file is already present.
89 @rtype: L{daklib.dbconn.PoolFile}
90 @return: database object for the new file
92 session = self.session
94 poolname = os.path.join(utils.poolify(source_name), hashed_file.filename)
96 poolfile = self.get_file(hashed_file, source_name)
98 poolfile = PoolFile(filename=poolname, filesize=hashed_file.size)
99 poolfile.md5sum = hashed_file.md5sum
100 poolfile.sha1sum = hashed_file.sha1sum
101 poolfile.sha256sum = hashed_file.sha256sum
102 session.add(poolfile)
106 session.query(ArchiveFile).filter_by(archive=archive, component=component, file=poolfile).one()
107 except NoResultFound:
108 archive_file = ArchiveFile(archive, component, poolfile)
109 session.add(archive_file)
112 path = os.path.join(archive.path, 'pool', component.component_name, poolname)
113 hashed_file_path = os.path.join(directory, hashed_file.input_filename)
114 self.fs.copy(hashed_file_path, path, link=False, mode=archive.mode)
118 def install_binary(self, directory, binary, suite, component, allow_tainted=False, fingerprint=None, source_suites=None, extra_source_archives=None):
119 """Install a binary package
122 @param directory: directory the binary package is located in
124 @type binary: L{daklib.upload.Binary}
125 @param binary: binary package to install
127 @type suite: L{daklib.dbconn.Suite}
128 @param suite: target suite
130 @type component: L{daklib.dbconn.Component}
131 @param component: target component
133 @type allow_tainted: bool
134 @param allow_tainted: allow to copy additional files from tainted archives
136 @type fingerprint: L{daklib.dbconn.Fingerprint}
137 @param fingerprint: optional fingerprint
139 @type source_suites: SQLAlchemy subquery for C{daklib.dbconn.Suite} or C{True}
140 @param source_suites: suites to copy the source from if they are not
141 in C{suite} or C{True} to allow copying from any
144 @type extra_source_archives: list of L{daklib.dbconn.Archive}
145 @param extra_source_archives: extra archives to copy Built-Using sources from
147 @rtype: L{daklib.dbconn.DBBinary}
148 @return: databse object for the new package
150 session = self.session
151 control = binary.control
152 maintainer = get_or_set_maintainer(control['Maintainer'], session)
153 architecture = get_architecture(control['Architecture'], session)
155 (source_name, source_version) = binary.source
156 source_query = session.query(DBSource).filter_by(source=source_name, version=source_version)
157 source = source_query.filter(DBSource.suites.contains(suite)).first()
159 if source_suites != True:
160 source_query = source_query.join(DBSource.suites) \
161 .filter(Suite.suite_id == source_suites.c.id)
162 source = source_query.first()
164 raise ArchiveException('{0}: trying to install to {1}, but could not find source'.format(binary.hashed_file.filename, suite.suite_name))
165 self.copy_source(source, suite, component)
167 db_file = self._install_file(directory, binary.hashed_file, suite.archive, component, source_name)
170 package=control['Package'],
171 version=control['Version'],
172 architecture=architecture,
176 maintainer=maintainer,
178 binarytype=binary.type,
180 # Other attributes that are ignored for purposes of equality with
183 fingerprint=fingerprint,
187 db_binary = session.query(DBBinary).filter_by(**unique).one()
188 for key, value in rest.iteritems():
189 if getattr(db_binary, key) != value:
190 raise ArchiveException('{0}: Does not match binary in database.'.format(binary.hashed_file.filename))
191 except NoResultFound:
192 db_binary = DBBinary(**unique)
193 for key, value in rest.iteritems():
194 setattr(db_binary, key, value)
195 for key, value in rest2.iteritems():
196 setattr(db_binary, key, value)
197 session.add(db_binary)
199 import_metadata_into_db(db_binary, session)
201 self._add_built_using(db_binary, binary.hashed_file.filename, control, suite, extra_archives=extra_source_archives)
203 if suite not in db_binary.suites:
204 db_binary.suites.append(suite)
210 def _ensure_extra_source_exists(self, filename, source, archive, extra_archives=None):
211 """ensure source exists in the given archive
213 This is intended to be used to check that Built-Using sources exist.
216 @param filename: filename to use in error messages
218 @type source: L{daklib.dbconn.DBSource}
219 @param source: source to look for
221 @type archive: L{daklib.dbconn.Archive}
222 @param archive: archive to look in
224 @type extra_archives: list of L{daklib.dbconn.Archive}
225 @param extra_archives: list of archives to copy the source package from
226 if it is not yet present in C{archive}
228 session = self.session
229 db_file = session.query(ArchiveFile).filter_by(file=source.poolfile, archive=archive).first()
230 if db_file is not None:
233 # Try to copy file from one extra archive
234 if extra_archives is None:
236 db_file = session.query(ArchiveFile).filter_by(file=source.poolfile).filter(ArchiveFile.archive_id.in_([ a.archive_id for a in extra_archives])).first()
238 raise ArchiveException('{0}: Built-Using refers to package {1} (= {2}) not in target archive {3}.'.format(filename, source.source, source.version, archive.archive_name))
240 source_archive = db_file.archive
241 for dsc_file in source.srcfiles:
242 af = session.query(ArchiveFile).filter_by(file=dsc_file.poolfile, archive=source_archive, component=db_file.component).one()
243 # We were given an explicit list of archives so it is okay to copy from tainted archives.
244 self._copy_file(af.file, archive, db_file.component, allow_tainted=True)
246 def _add_built_using(self, db_binary, filename, control, suite, extra_archives=None):
247 """Add Built-Using sources to C{db_binary.extra_sources}
249 session = self.session
250 built_using = control.get('Built-Using', None)
252 if built_using is not None:
253 for dep in apt_pkg.parse_depends(built_using):
254 assert len(dep) == 1, 'Alternatives are not allowed in Built-Using field'
255 bu_source_name, bu_source_version, comp = dep[0]
256 assert comp == '=', 'Built-Using must contain strict dependencies'
258 bu_source = session.query(DBSource).filter_by(source=bu_source_name, version=bu_source_version).first()
259 if bu_source is None:
260 raise ArchiveException('{0}: Built-Using refers to non-existing source package {1} (= {2})'.format(filename, bu_source_name, bu_source_version))
262 self._ensure_extra_source_exists(filename, bu_source, suite.archive, extra_archives=extra_archives)
264 db_binary.extra_sources.append(bu_source)
266 def install_source_to_archive(self, directory, source, archive, component, changed_by, allow_tainted=False, fingerprint=None):
267 session = self.session
269 maintainer = get_or_set_maintainer(control['Maintainer'], session)
270 source_name = control['Source']
272 ### Add source package to database
274 # We need to install the .dsc first as the DBSource object refers to it.
275 db_file_dsc = self._install_file(directory, source._dsc_file, archive, component, source_name)
279 version=control['Version'],
282 maintainer=maintainer,
283 #install_date=datetime.now().date(),
284 poolfile=db_file_dsc,
285 dm_upload_allowed=(control.get('DM-Upload-Allowed', 'no') == 'yes'),
287 # Other attributes that are ignored for purposes of equality with
290 changedby=changed_by,
291 fingerprint=fingerprint,
296 db_source = session.query(DBSource).filter_by(**unique).one()
297 for key, value in rest.iteritems():
298 if getattr(db_source, key) != value:
299 raise ArchiveException('{0}: Does not match source in database.'.format(source._dsc_file.filename))
300 except NoResultFound:
302 db_source = DBSource(**unique)
303 for key, value in rest.iteritems():
304 setattr(db_source, key, value)
305 for key, value in rest2.iteritems():
306 setattr(db_source, key, value)
307 # XXX: set as default in postgres?
308 db_source.install_date = datetime.now().date()
309 session.add(db_source)
312 # Add .dsc file. Other files will be added later.
313 db_dsc_file = DSCFile()
314 db_dsc_file.source = db_source
315 db_dsc_file.poolfile = db_file_dsc
316 session.add(db_dsc_file)
320 for f in db_source.srcfiles:
321 self._copy_file(f.poolfile, archive, component, allow_tainted=allow_tainted)
324 ### Now add remaining files and copy them to the archive.
326 for hashed_file in source.files.itervalues():
327 hashed_file_path = os.path.join(directory, hashed_file.filename)
328 if os.path.exists(hashed_file_path):
329 db_file = self._install_file(directory, hashed_file, archive, component, source_name)
332 db_file = self.get_file(hashed_file, source_name)
333 self._copy_file(db_file, archive, component, allow_tainted=allow_tainted)
335 db_dsc_file = DSCFile()
336 db_dsc_file.source = db_source
337 db_dsc_file.poolfile = db_file
338 session.add(db_dsc_file)
342 # Importing is safe as we only arrive here when we did not find the source already installed earlier.
343 import_metadata_into_db(db_source, session)
345 # Uploaders are the maintainer and co-maintainers from the Uploaders field
346 db_source.uploaders.append(maintainer)
347 if 'Uploaders' in control:
348 from daklib.textutils import split_uploaders
349 for u in split_uploaders(control['Uploaders']):
350 db_source.uploaders.append(get_or_set_maintainer(u, session))
355 def install_source(self, directory, source, suite, component, changed_by, allow_tainted=False, fingerprint=None):
356 """Install a source package
359 @param directory: directory the source package is located in
361 @type source: L{daklib.upload.Source}
362 @param source: source package to install
364 @type suite: L{daklib.dbconn.Suite}
365 @param suite: target suite
367 @type component: L{daklib.dbconn.Component}
368 @param component: target component
370 @type changed_by: L{daklib.dbconn.Maintainer}
371 @param changed_by: person who prepared this version of the package
373 @type allow_tainted: bool
374 @param allow_tainted: allow to copy additional files from tainted archives
376 @type fingerprint: L{daklib.dbconn.Fingerprint}
377 @param fingerprint: optional fingerprint
379 @rtype: L{daklib.dbconn.DBSource}
380 @return: database object for the new source
382 db_source = self.install_source_to_archive(directory, source, suite.archive, component, changed_by, allow_tainted, fingerprint)
384 if suite in db_source.suites:
386 db_source.suites.append(suite)
391 def _copy_file(self, db_file, archive, component, allow_tainted=False):
392 """Copy a file to the given archive and component
394 @type db_file: L{daklib.dbconn.PoolFile}
395 @param db_file: file to copy
397 @type archive: L{daklib.dbconn.Archive}
398 @param archive: target archive
400 @type component: L{daklib.dbconn.Archive}
401 @param component: target component
403 @type allow_tainted: bool
404 @param allow_tainted: allow to copy from tainted archives (such as NEW)
406 session = self.session
408 if session.query(ArchiveFile).filter_by(archive=archive, component=component, file=db_file).first() is None:
409 query = session.query(ArchiveFile).filter_by(file=db_file)
410 if not allow_tainted:
411 query = query.join(Archive).filter(Archive.tainted == False)
413 source_af = query.first()
414 if source_af is None:
415 raise ArchiveException('cp: Could not find {0} in any archive.'.format(db_file.filename))
416 target_af = ArchiveFile(archive, component, db_file)
417 session.add(target_af)
419 self.fs.copy(source_af.path, target_af.path, link=False, mode=archive.mode)
421 def copy_binary(self, db_binary, suite, component, allow_tainted=False, extra_archives=None):
422 """Copy a binary package to the given suite and component
424 @type db_binary: L{daklib.dbconn.DBBinary}
425 @param db_binary: binary to copy
427 @type suite: L{daklib.dbconn.Suite}
428 @param suite: target suite
430 @type component: L{daklib.dbconn.Component}
431 @param component: target component
433 @type allow_tainted: bool
434 @param allow_tainted: allow to copy from tainted archives (such as NEW)
436 @type extra_archives: list of L{daklib.dbconn.Archive}
437 @param extra_archives: extra archives to copy Built-Using sources from
439 session = self.session
440 archive = suite.archive
444 filename = db_binary.poolfile.filename
446 # make sure source is present in target archive
447 db_source = db_binary.source
448 if session.query(ArchiveFile).filter_by(archive=archive, file=db_source.poolfile).first() is None:
449 raise ArchiveException('{0}: cannot copy to {1}: source is not present in target archive'.format(filename, suite.suite_name))
451 # make sure built-using packages are present in target archive
452 for db_source in db_binary.extra_sources:
453 self._ensure_extra_source_exists(filename, db_source, archive, extra_archives=extra_archives)
456 db_file = db_binary.poolfile
457 self._copy_file(db_file, suite.archive, component, allow_tainted=allow_tainted)
458 if suite not in db_binary.suites:
459 db_binary.suites.append(suite)
462 def copy_source(self, db_source, suite, component, allow_tainted=False):
463 """Copy a source package to the given suite and component
465 @type db_source: L{daklib.dbconn.DBSource}
466 @param db_source: source to copy
468 @type suite: L{daklib.dbconn.Suite}
469 @param suite: target suite
471 @type component: L{daklib.dbconn.Component}
472 @param component: target component
474 @type allow_tainted: bool
475 @param allow_tainted: allow to copy from tainted archives (such as NEW)
477 archive = suite.archive
480 for db_dsc_file in db_source.srcfiles:
481 self._copy_file(db_dsc_file.poolfile, archive, component, allow_tainted=allow_tainted)
482 if suite not in db_source.suites:
483 db_source.suites.append(suite)
486 def remove_file(self, db_file, archive, component):
487 """Remove a file from a given archive and component
489 @type db_file: L{daklib.dbconn.PoolFile}
490 @param db_file: file to remove
492 @type archive: L{daklib.dbconn.Archive}
493 @param archive: archive to remove the file from
495 @type component: L{daklib.dbconn.Component}
496 @param component: component to remove the file from
498 af = self.session.query(ArchiveFile).filter_by(file=db_file, archive=archive, component=component)
499 self.fs.unlink(af.path)
500 self.session.delete(af)
502 def remove_binary(self, binary, suite):
503 """Remove a binary from a given suite and component
505 @type binary: L{daklib.dbconn.DBBinary}
506 @param binary: binary to remove
508 @type suite: L{daklib.dbconn.Suite}
509 @param suite: suite to remove the package from
511 binary.suites.remove(suite)
514 def remove_source(self, source, suite):
515 """Remove a source from a given suite and component
517 @type source: L{daklib.dbconn.DBSource}
518 @param source: source to remove
520 @type suite: L{daklib.dbconn.Suite}
521 @param suite: suite to remove the package from
523 @raise ArchiveException: source package is still referenced by other
524 binaries in the suite
526 session = self.session
528 query = session.query(DBBinary).filter_by(source=source) \
529 .filter(DBBinary.suites.contains(suite))
530 if query.first() is not None:
531 raise ArchiveException('src:{0} is still used by binaries in suite {1}'.format(source.source, suite.suite_name))
533 source.suites.remove(suite)
539 self.session.commit()
542 self.session.rollback()
546 """rollback changes"""
547 self.session.rollback()
556 def __exit__(self, type, value, traceback):
def source_component_from_package_list(package_list, suite):
    """Get component for a source package

    This function will look at the Package-List field to determine the
    component the source package belongs to. This is the first component
    the source package provides binaries for (first with respect to the
    ordering of components).

    If the source package has no Package-List field, None is returned.

    @type  package_list: L{daklib.packagelist.PackageList}
    @param package_list: package list of the source to get the override for

    @type  suite: L{daklib.dbconn.Suite}
    @param suite: suite to consider for binaries produced

    @rtype:  L{daklib.dbconn.Component} or C{None}
    @return: component for the given source or C{None}
    """
    if package_list.fallback:
        return None
    session = object_session(suite)
    packages = package_list.packages_for_suite(suite)
    components = set(p.component for p in packages)
    # Components are ordered (main before contrib, ...); pick the first one
    # that any produced binary belongs to.
    query = session.query(Component).order_by(Component.ordering) \
            .filter(Component.component_name.in_(components))
    return query.first()
class ArchiveUpload(object):
    """handle an upload

    This class can be used in a with-statement::

       with ArchiveUpload(...) as upload:
          ...

    Doing so will automatically run any required cleanup and also rollback the
    transaction if it was not committed.
    """
602 def __init__(self, directory, changes, keyrings):
603 self.transaction = ArchiveTransaction()
604 """transaction used to handle the upload
605 @type: L{daklib.archive.ArchiveTransaction}
608 self.session = self.transaction.session
609 """database session"""
611 self.original_directory = directory
612 self.original_changes = changes
616 @type: L{daklib.upload.Changes}
619 self.directory = None
620 """directory with temporary copy of files. set by C{prepare}
624 self.keyrings = keyrings
626 self.fingerprint = self.session.query(Fingerprint).filter_by(fingerprint=changes.primary_fingerprint).one()
627 """fingerprint of the key used to sign the upload
628 @type: L{daklib.dbconn.Fingerprint}
631 self.reject_reasons = []
632 """reasons why the upload cannot by accepted
642 self.final_suites = None
645 """upload is NEW. set by C{check}
649 self._checked = False
650 """checks passes. set by C{check}
654 self._new_queue = self.session.query(PolicyQueue).filter_by(queue_name='new').one()
655 self._new = self._new_queue.suite
657 def warn(self, message):
658 """add a warning message
660 Adds a warning message that can later be seen in C{self.warnings}
662 @type message: string
663 @param message: warning message
665 self.warnings.append(message)
668 """prepare upload for further processing
670 This copies the files involved to a temporary directory. If you use
671 this method directly, you have to remove the directory given by the
672 C{directory} attribute later on your own.
674 Instead of using the method directly, you can also use a with-statement::
676 with ArchiveUpload(...) as upload:
679 This will automatically handle any required cleanup.
681 assert self.directory is None
682 assert self.original_changes.valid_signature
685 session = self.transaction.session
687 group = cnf.get('Dinstall::UnprivGroup') or None
688 self.directory = utils.temp_dirname(parent=cnf.get('Dir::TempPath'),
689 mode=0o2750, group=group)
690 with FilesystemTransaction() as fs:
691 src = os.path.join(self.original_directory, self.original_changes.filename)
692 dst = os.path.join(self.directory, self.original_changes.filename)
693 fs.copy(src, dst, mode=0o640)
695 self.changes = upload.Changes(self.directory, self.original_changes.filename, self.keyrings)
697 for f in self.changes.files.itervalues():
698 src = os.path.join(self.original_directory, f.filename)
699 dst = os.path.join(self.directory, f.filename)
700 if not os.path.exists(src):
702 fs.copy(src, dst, mode=0o640)
706 source = self.changes.source
708 # Do not raise an exception here if the .dsc is invalid.
711 if source is not None:
712 for f in source.files.itervalues():
713 src = os.path.join(self.original_directory, f.filename)
714 dst = os.path.join(self.directory, f.filename)
715 if not os.path.exists(dst):
717 db_file = self.transaction.get_file(f, source.dsc['Source'], check_hashes=False)
718 db_archive_file = session.query(ArchiveFile).filter_by(file=db_file).first()
719 fs.copy(db_archive_file.path, dst, mode=0o640)
721 # Ignore if get_file could not find it. Upload will
722 # probably be rejected later.
725 def unpacked_source(self):
726 """Path to unpacked source
728 Get path to the unpacked source. This method does unpack the source
729 into a temporary directory under C{self.directory} if it has not
730 been done so already.
732 @rtype: str or C{None}
733 @return: string giving the path to the unpacked source directory
734 or C{None} if no source was included in the upload.
736 assert self.directory is not None
738 source = self.changes.source
741 dsc_path = os.path.join(self.directory, source._dsc_file.filename)
743 sourcedir = os.path.join(self.directory, 'source')
744 if not os.path.exists(sourcedir):
745 devnull = open('/dev/null', 'w')
746 daklib.daksubprocess.check_call(["dpkg-source", "--no-copy", "--no-check", "-x", dsc_path, sourcedir], shell=False, stdout=devnull)
747 if not os.path.isdir(sourcedir):
748 raise Exception("{0} is not a directory after extracting source package".format(sourcedir))
751 def _map_suite(self, suite_name):
752 for rule in Config().value_list("SuiteMappings"):
753 fields = rule.split()
755 if rtype == "map" or rtype == "silent-map":
756 (src, dst) = fields[1:3]
757 if src == suite_name:
759 if rtype != "silent-map":
760 self.warnings.append('Mapping {0} to {1}.'.format(src, dst))
761 elif rtype == "ignore":
763 if suite_name == ignored:
764 self.warnings.append('Ignoring target suite {0}.'.format(ignored))
766 elif rtype == "reject":
768 if suite_name == rejected:
769 raise checks.Reject('Uploads to {0} are not accepted.'.format(rejected))
770 ## XXX: propup-version and map-unreleased not yet implemented
773 def _mapped_suites(self):
774 """Get target suites after mappings
776 @rtype: list of L{daklib.dbconn.Suite}
777 @return: list giving the mapped target suites of this upload
779 session = self.session
782 for dist in self.changes.distributions:
783 suite_name = self._map_suite(dist)
784 if suite_name is not None:
785 suite_names.append(suite_name)
787 suites = session.query(Suite).filter(Suite.suite_name.in_(suite_names))
790 def _check_new_binary_overrides(self, suite):
793 binaries = self.changes.binaries
794 source = self.changes.source
795 if source is not None and not source.package_list.fallback:
796 packages = source.package_list.packages_for_suite(suite)
797 binaries = [ entry for entry in packages ]
800 override = self._binary_override(suite, b)
802 self.warnings.append('binary:{0} is NEW.'.format(b.name))
807 def _check_new(self, suite):
808 """Check if upload is NEW
810 An upload is NEW if it has binary or source packages that do not have
811 an override in C{suite} OR if it references files ONLY in a tainted
812 archive (eg. when it references files in NEW).
815 @return: C{True} if the upload is NEW, C{False} otherwise
817 session = self.session
820 # Check for missing overrides
821 if self._check_new_binary_overrides(suite):
823 if self.changes.source is not None:
824 override = self._source_override(suite, self.changes.source)
826 self.warnings.append('source:{0} is NEW.'.format(self.changes.source.dsc['Source']))
829 # Check if we reference a file only in a tainted archive
830 files = self.changes.files.values()
831 if self.changes.source is not None:
832 files.extend(self.changes.source.files.values())
834 query = session.query(ArchiveFile).join(PoolFile).filter(PoolFile.sha1sum == f.sha1sum)
835 query_untainted = query.join(Archive).filter(Archive.tainted == False)
837 in_archive = (query.first() is not None)
838 in_untainted_archive = (query_untainted.first() is not None)
840 if in_archive and not in_untainted_archive:
841 self.warnings.append('{0} is only available in NEW.'.format(f.filename))
846 def _final_suites(self):
847 session = self.session
849 mapped_suites = self._mapped_suites()
852 for suite in mapped_suites:
853 overridesuite = suite
854 if suite.overridesuite is not None:
855 overridesuite = session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
856 if self._check_new(overridesuite):
858 final_suites.add(suite)
862 def _binary_override(self, suite, binary):
863 """Get override entry for a binary
865 @type suite: L{daklib.dbconn.Suite}
866 @param suite: suite to get override for
868 @type binary: L{daklib.upload.Binary} or L{daklib.packagelist.PackageListEntry}
869 @param binary: binary to get override for
871 @rtype: L{daklib.dbconn.Override} or C{None}
872 @return: override for the given binary or C{None}
874 if suite.overridesuite is not None:
875 suite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
877 mapped_component = get_mapped_component(binary.component)
878 if mapped_component is None:
881 query = self.session.query(Override).filter_by(suite=suite, package=binary.name) \
882 .join(Component).filter(Component.component_name == mapped_component.component_name) \
883 .join(OverrideType).filter(OverrideType.overridetype == binary.type)
887 except NoResultFound:
890 def _source_override(self, suite, source):
891 """Get override entry for a source
893 @type suite: L{daklib.dbconn.Suite}
894 @param suite: suite to get override for
896 @type source: L{daklib.upload.Source}
897 @param source: source to get override for
899 @rtype: L{daklib.dbconn.Override} or C{None}
900 @return: override for the given source or C{None}
902 if suite.overridesuite is not None:
903 suite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
905 query = self.session.query(Override).filter_by(suite=suite, package=source.dsc['Source']) \
906 .join(OverrideType).filter(OverrideType.overridetype == 'dsc')
908 component = source_component_from_package_list(source.package_list, suite)
909 if component is not None:
910 query = query.filter(Override.component == component)
914 except NoResultFound:
917 def _binary_component(self, suite, binary, only_overrides=True):
918 """get component for a binary
920 By default this will only look at overrides to get the right component;
921 if C{only_overrides} is C{False} this method will also look at the
924 @type suite: L{daklib.dbconn.Suite}
926 @type binary: L{daklib.upload.Binary}
928 @type only_overrides: bool
929 @param only_overrides: only use overrides to get the right component
931 @rtype: L{daklib.dbconn.Component} or C{None}
933 override = self._binary_override(suite, binary)
934 if override is not None:
935 return override.component
938 return get_mapped_component(binary.component, self.session)
940 def check(self, force=False):
941 """run checks against the upload
944 @param force: ignore failing forcable checks
947 @return: C{True} if all checks passed, C{False} otherwise
949 # XXX: needs to be better structured.
950 assert self.changes.valid_signature
953 # Validate signatures and hashes before we do any real work:
955 checks.SignatureAndHashesCheck,
956 checks.SignatureTimestampCheck,
958 checks.ExternalHashesCheck,
961 checks.BinaryTimestampCheck,
962 checks.SingleDistributionCheck,
966 final_suites = self._final_suites()
967 if len(final_suites) == 0:
968 self.reject_reasons.append('No target suite found. Please check your target distribution and that you uploaded to the right archive.')
971 self.final_suites = final_suites
974 checks.TransitionCheck,
976 checks.NoSourceOnlyCheck,
983 checks.SourceFormatCheck,
984 checks.SuiteArchitectureCheck,
987 for suite in final_suites:
988 chk().per_suite_check(self, suite)
990 if len(self.reject_reasons) != 0:
995 except checks.Reject as e:
996 self.reject_reasons.append(unicode(e))
997 except Exception as e:
998 self.reject_reasons.append("Processing raised an exception: {0}.\n{1}".format(e, traceback.format_exc()))
def _install_to_suite(self, suite, source_component_func, binary_component_func, source_suites=None, extra_source_archives=None):
    """Install upload to the given suite

    @type  suite: L{daklib.dbconn.Suite}
    @param suite: suite to install the package into. This is the real suite,
                  ie. after any redirection to NEW or a policy queue

    @param source_component_func: function to get the L{daklib.dbconn.Component}
                                  for a L{daklib.upload.Source} object

    @param binary_component_func: function to get the L{daklib.dbconn.Component}
                                  for a L{daklib.upload.Binary} object

    @param source_suites: see L{daklib.archive.ArchiveTransaction.install_binary}

    @param extra_source_archives: see L{daklib.archive.ArchiveTransaction.install_binary}

    @return: tuple with two elements. The first is a L{daklib.dbconn.DBSource}
             object for the install source or C{None} if no source was
             included. The second is a list of L{daklib.dbconn.DBBinary}
             objects for the installed binary packages.
    """
    # XXX: move this function to ArchiveTransaction?

    control = self.changes.changes
    # 'Changed-By' is optional in a .changes file; fall back to Maintainer.
    changed_by = get_or_set_maintainer(control.get('Changed-By', control['Maintainer']), self.session)

    if source_suites is None:
        # Default source lookup: any suite this suite "Enhances" according
        # to the version checks configured in the database.
        source_suites = self.session.query(Suite).join((VersionCheck, VersionCheck.reference_id == Suite.suite_id)).filter(VersionCheck.check == 'Enhances').filter(VersionCheck.suite == suite).subquery()

    source = self.changes.source
    if source is not None:
        component = source_component_func(source)
        db_source = self.transaction.install_source(self.directory, source, suite, component, changed_by, fingerprint=self.fingerprint)
    # NOTE(review): lines elided from this excerpt presumably set db_source
    # to None for binary-only uploads and initialise db_binaries to an empty
    # list — confirm against the full file.

    for binary in self.changes.binaries:
        component = binary_component_func(binary)
        db_binary = self.transaction.install_binary(self.directory, binary, suite, component, fingerprint=self.fingerprint, source_suites=source_suites, extra_source_archives=extra_source_archives)
        db_binaries.append(db_binary)

    # Some suites keep a copy of the .changes file under dists/.
    if suite.copychanges:
        src = os.path.join(self.directory, self.changes.filename)
        dst = os.path.join(suite.archive.path, 'dists', suite.suite_name, self.changes.filename)
        self.transaction.fs.copy(src, dst, mode=suite.archive.mode)

    return (db_source, db_binaries)
def _install_changes(self):
    """record the upload's .changes file in the database

    Stores the changelog text (for sourceful uploads and binNMUs) and a
    C{DBChange} row built from the .changes control fields.

    @raise ArchiveException: a changes file with the same name is already
                             known (unique-constraint violation on flush)
    """
    assert self.changes.valid_signature
    control = self.changes.changes
    session = self.transaction.session

    # NOTE(review): the initialisation of changelog_id (presumably None) is
    # elided from this excerpt — confirm against the full file.
    # Only add changelog for sourceful uploads and binNMUs
    if 'source' in self.changes.architectures or re_bin_only_nmu.search(control['Version']):
        query = 'INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id'
        changelog_id = session.execute(query, {'changelog': control['Changes']}).scalar()
        assert changelog_id is not None

    db_changes = DBChange()
    db_changes.changesname = self.changes.filename
    db_changes.source = control['Source']
    # 'Binary' is absent for source-only uploads.
    db_changes.binaries = control.get('Binary', None)
    db_changes.architecture = control['Architecture']
    db_changes.version = control['Version']
    db_changes.distribution = control['Distribution']
    db_changes.urgency = control['Urgency']
    db_changes.maintainer = control['Maintainer']
    db_changes.changedby = control.get('Changed-By', control['Maintainer'])
    db_changes.date = control['Date']
    db_changes.fingerprint = self.fingerprint.fingerprint
    db_changes.changelog_id = changelog_id
    db_changes.closes = self.changes.closed_bugs

    # NOTE(review): the enclosing `try:` line is elided from this excerpt;
    # the add/flush pair below is guarded by it in the full file.
    self.transaction.session.add(db_changes)
    self.transaction.session.flush()
    # A duplicate changesname means this upload was already processed.
    except sqlalchemy.exc.IntegrityError:
        raise ArchiveException('{0} is already known.'.format(self.changes.filename))
def _install_policy(self, policy_queue, target_suite, db_changes, db_source, db_binaries):
    """link the upload's database records to a policy queue

    Creates a L{daklib.dbconn.PolicyQueueUpload} row tying the upload's
    C{DBChange}/source/binaries to C{policy_queue} and C{target_suite},
    then copies the .changes file into the queue directory.

    @return: the new L{daklib.dbconn.PolicyQueueUpload} row

    Fix: the block previously fell off the end and returned C{None},
    although the caller in C{install_to_new} binds the result
    (C{policy_upload}) and hands it to C{_install_byhand}; it now returns
    the created row.
    """
    session = self.transaction.session

    upload = PolicyQueueUpload()
    upload.policy_queue = policy_queue
    upload.target_suite = target_suite
    upload.changes = db_changes
    upload.source = db_source
    upload.binaries = db_binaries
    session.add(upload)
    # Flush so the row gets its primary key and constraint errors surface now.
    session.flush()

    # Keep a copy of the .changes file next to the queued upload.
    dst = os.path.join(policy_queue.path, self.changes.filename)
    self.transaction.fs.copy(self.changes.path, dst, mode=policy_queue.change_perms)

    return upload
def try_autobyhand(self):
    """Try to handle byhand packages automatically.

    @rtype: list of L{daklib.upload.HashedFile}
    @return: list of remaining byhand files
    """
    assert len(self.reject_reasons) == 0
    assert self.changes.valid_signature
    assert self.final_suites is not None
    assert self._checked

    byhand = self.changes.byhand_files
    if len(byhand) == 0:
        # NOTE(review): the early-exit body of this guard is elided from
        # this excerpt.

    suites = list(self.final_suites)
    assert len(suites) == 1, "BYHAND uploads must be to a single suite"

    control = self.changes.changes
    automatic_byhand_packages = cnf.subtree("AutomaticByHandPackages")

    # NOTE(review): the loop header iterating over the byhand files
    # (binding `f`), the initialisation of `remaining`, and several guard
    # lines are elided from this excerpt.
    # Expected filename layout: <package>_<version>_<arch>.<ext>
    if '_' in f.filename:
        parts = f.filename.split('_', 2)
        # NOTE(review): guard (presumably on len(parts)) elided here.
        print "W: unexpected byhand filename {0}. No automatic processing.".format(f.filename)
        package, version, archext = parts
        arch, ext = archext.split('.', 1)
    # NOTE(review): the alternative branch for filenames without '_' follows;
    # its guarding lines are elided from this excerpt.
    parts = f.filename.split('.')
    print "W: unexpected byhand filename {0}. No automatic processing.".format(f.filename)

    rule = automatic_byhand_packages.subtree(package)

    # Only process automatically when source, section and (optionally)
    # extension match the configured rule.
    if rule['Source'] != self.changes.source_name \
       or rule['Section'] != f.section \
       or ('Extension' in rule and rule['Extension'] != ext):

    # Run the configured handler script on the byhand file.
    script = rule['Script']
    retcode = daklib.daksubprocess.call([script, os.path.join(self.directory, f.filename), control['Version'], arch, os.path.join(self.directory, self.changes.filename)], shell=False)
    print "W: error processing {0}.".format(f.filename)

    # NOTE(review): returns a bool here although the docstring above claims
    # a list of remaining files — confirm intended contract in the full file.
    return len(remaining) == 0
def _install_byhand(self, policy_queue_upload, hashed_file):
    """install a single BYHAND file into a policy queue

    Registers the file as a L{daklib.dbconn.PolicyQueueByhandFile} attached
    to C{policy_queue_upload} and copies it into the queue directory.

    @type policy_queue_upload: L{daklib.dbconn.PolicyQueueUpload}

    @type hashed_file: L{daklib.upload.HashedFile}
    """
    queue = policy_queue_upload.policy_queue

    # Database record linking the byhand file to the queued upload.
    record = PolicyQueueByhandFile()
    record.upload = policy_queue_upload
    record.filename = hashed_file.filename
    self.transaction.session.add(record)

    # Copy the file itself into the policy queue's directory.
    self.transaction.fs.copy(
        os.path.join(self.directory, hashed_file.filename),
        os.path.join(queue.path, hashed_file.filename),
        mode=queue.change_perms)
def _do_bts_versiontracking(self):
    """write version-tracking files for the bug tracking system

    Writes C{<base>.versions} (changelog version lines from the unpacked
    source) and C{<base>.debinfo} (binary -> source mapping) into
    C{Dir::BTSVersionTrack}.
    """
    fs = self.transaction.fs
    # NOTE(review): the binding of `cnf` (presumably Config()) is elided
    # from this excerpt.
    btsdir = cnf.get('Dir::BTSVersionTrack')
    if btsdir is None or btsdir == '':
        # NOTE(review): early-return body of this guard elided from this
        # excerpt.

    # Strip the trailing ".changes" (8 characters) to get the base name.
    base = os.path.join(btsdir, self.changes.filename[:-8])

    # Version history from debian/changelog of the unpacked source.
    sourcedir = self.unpacked_source()
    if sourcedir is not None:
        fh = open(os.path.join(sourcedir, 'debian', 'changelog'), 'r')
        versions = fs.create("{0}.versions".format(base), mode=0o644)
        for line in fh.readlines():
            if re_changelog_versions.match(line):
                versions.write(line)
        # NOTE(review): the closing of fh/versions is elided from this
        # excerpt — confirm the handles are released in the full file.

    # binary -> source mapping
    debinfo = fs.create("{0}.debinfo".format(base), mode=0o644)
    for binary in self.changes.binaries:
        control = binary.control
        source_package, source_version = binary.source
        line = " ".join([control['Package'], control['Version'], control['Architecture'], source_package, source_version])
        print >>debinfo, line
def _policy_queue(self, suite):
    """return the policy queue configured for C{suite}, if any

    Falls through (implicitly yielding C{None}) when no policy queue is
    attached to the suite.
    """
    queue = suite.policy_queue
    if queue is None:
        return None
    return queue
# NOTE(review): the `def install(self):` line and the opening of its
# docstring are elided from this excerpt; the lines below are the tail of
# that docstring followed by the method body.
Install upload to a suite or policy queue. This method does B{not}
handle uploads to NEW.

You need to have called the C{check} method before calling this method.
    assert len(self.reject_reasons) == 0
    assert self.changes.valid_signature
    assert self.final_suites is not None
    assert self._checked

    db_changes = self._install_changes()

    for suite in self.final_suites:
        # Overrides may live in a different suite (suite.overridesuite).
        overridesuite = suite
        if suite.overridesuite is not None:
            overridesuite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()

        policy_queue = self._policy_queue(suite)

        # If a policy queue applies, the packages actually land in the
        # queue's suite instead of the target suite.
        redirected_suite = suite
        if policy_queue is not None:
            redirected_suite = policy_queue.suite

        # source can be in the suite we install to or any suite we enhance
        source_suite_ids = set([suite.suite_id, redirected_suite.suite_id])
        for enhanced_suite_id, in self.session.query(VersionCheck.reference_id) \
                .filter(VersionCheck.suite_id.in_(source_suite_ids)) \
                .filter(VersionCheck.check == 'Enhances'):
            source_suite_ids.add(enhanced_suite_id)

        source_suites = self.session.query(Suite).filter(Suite.suite_id.in_(source_suite_ids)).subquery()

        # Component lookups are based on the override suite.
        source_component_func = lambda source: self._source_override(overridesuite, source).component
        binary_component_func = lambda binary: self._binary_component(overridesuite, binary)

        (db_source, db_binaries) = self._install_to_suite(redirected_suite, source_component_func, binary_component_func, source_suites=source_suites, extra_source_archives=[suite.archive])

        if policy_queue is not None:
            self._install_policy(policy_queue, suite, db_changes, db_source, db_binaries)

        # copy to build queues
        if policy_queue is None or policy_queue.send_to_build_queues:
            for build_queue in suite.copy_queues:
                self._install_to_suite(build_queue.suite, source_component_func, binary_component_func, source_suites=source_suites, extra_source_archives=[suite.archive])

    self._do_bts_versiontracking()
def install_to_new(self):
    """install upload to NEW

    Install upload to NEW. This method does B{not} handle regular uploads
    to suites or policy queues.

    You need to have called the C{check} method before calling this method.
    """
    # Uploads to NEW are special as we don't have overrides.
    assert len(self.reject_reasons) == 0
    assert self.changes.valid_signature
    assert self.final_suites is not None

    source = self.changes.source
    binaries = self.changes.binaries
    byhand = self.changes.byhand_files

    # we need a suite to guess components
    suites = list(self.final_suites)
    assert len(suites) == 1, "NEW uploads must be to a single suite"
    # NOTE(review): the binding of `suite` (presumably suites[0]) is elided
    # from this excerpt.

    # decide which NEW queue to use
    if suite.new_queue is None:
        new_queue = self.transaction.session.query(PolicyQueue).filter_by(queue_name='new').one()
        # NOTE(review): the `else:` line between these two assignments is
        # elided from this excerpt.
        new_queue = suite.new_queue
    # NOTE(review): the guard selecting the BYHAND queue when byhand files
    # are present is elided from this excerpt.
    # There is only one global BYHAND queue
    new_queue = self.transaction.session.query(PolicyQueue).filter_by(queue_name='byhand').one()
    new_suite = new_queue.suite

    def binary_component_func(binary):
        # only_overrides=False: NEW has no overrides yet, so guess instead.
        return self._binary_component(suite, binary, only_overrides=False)

    # guess source component
    # XXX: should be moved into an extra method
    binary_component_names = set()
    for binary in binaries:
        component = binary_component_func(binary)
        binary_component_names.add(component.component_name)
    source_component_name = None
    for c in self.session.query(Component).order_by(Component.component_id):
        guess = c.component_name
        if guess in binary_component_names:
            source_component_name = guess
            # NOTE(review): a loop-exit (presumably `break`) is elided from
            # this excerpt.
    if source_component_name is None:
        source_component = self.session.query(Component).order_by(Component.component_id).first()
        # NOTE(review): the `else:` line between these two assignments is
        # elided from this excerpt.
        source_component = self.session.query(Component).filter_by(component_name=source_component_name).one()
    source_component_func = lambda source: source_component

    db_changes = self._install_changes()
    (db_source, db_binaries) = self._install_to_suite(new_suite, source_component_func, binary_component_func, source_suites=True, extra_source_archives=[suite.archive])
    policy_upload = self._install_policy(new_queue, suite, db_changes, db_source, db_binaries)

    # NOTE(review): the loop header over the byhand files (binding `f`) is
    # elided from this excerpt.
    self._install_byhand(policy_upload, f)

    self._do_bts_versiontracking()
# NOTE(review): the `def commit(self):` line is elided from this excerpt.
"""commit changes

Delegates to the underlying ArchiveTransaction; without an explicit
commit, __exit__ rolls everything back.
"""
self.transaction.commit()
# NOTE(review): the `def rollback(self):` line is elided from this excerpt.
"""rollback changes

Delegates to the underlying ArchiveTransaction.
"""
self.transaction.rollback()
def __enter__(self):
    # NOTE(review): the method body (presumably setup work ending in
    # `return self`) is elided from this excerpt.
def __exit__(self, exc_type, value, traceback):
    """context-manager exit: clean up and roll back

    Removes the upload's temporary directory (if one is set) and rolls the
    transaction back; only work explicitly committed via C{commit} before
    leaving the C{with} block survives.  Exceptions propagate (no return
    value, i.e. C{None}).

    Fix: the first parameter was named C{type}, shadowing the builtin; it
    is renamed to C{exc_type}.  The interpreter passes these arguments
    positionally, so the rename is backward-compatible.
    """
    if self.directory is not None:
        shutil.rmtree(self.directory)
        self.directory = None
    # Unconditional rollback is safe: a committed transaction has nothing
    # left to roll back.
    self.transaction.rollback()