1 # Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
3 # This program is free software; you can redistribute it and/or modify
4 # it under the terms of the GNU General Public License as published by
5 # the Free Software Foundation; either version 2 of the License, or
6 # (at your option) any later version.
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
13 # You should have received a copy of the GNU General Public License along
14 # with this program; if not, write to the Free Software Foundation, Inc.,
15 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""module to manipulate the archive

This module provides classes to manipulate the archive.
"""
22 from daklib.dbconn import *
23 import daklib.checks as checks
24 from daklib.config import Config
25 import daklib.upload as upload
26 import daklib.utils as utils
27 from daklib.fstransactions import FilesystemTransaction
28 from daklib.regexes import re_changelog_versions, re_bin_only_nmu
29 import daklib.daksubprocess
32 from datetime import datetime
35 from sqlalchemy.orm.exc import NoResultFound
36 from sqlalchemy.orm import object_session
class ArchiveException(Exception):
    """base class for exceptions raised while manipulating the archive"""
class HashMismatchException(ArchiveException):
    """raised when a file's size or checksums do not match the values already recorded in the pool"""
47 class ArchiveTransaction(object):
48 """manipulate the archive in a transaction
51 self.fs = FilesystemTransaction()
52 self.session = DBConn().session()
54 def get_file(self, hashed_file, source_name, check_hashes=True):
55 """Look for file C{hashed_file} in database
57 @type hashed_file: L{daklib.upload.HashedFile}
58 @param hashed_file: file to look for in the database
60 @type source_name: str
61 @param source_name: source package name
63 @type check_hashes: bool
64 @param check_hashes: check size and hashes match
66 @raise KeyError: file was not found in the database
67 @raise HashMismatchException: hash mismatch
69 @rtype: L{daklib.dbconn.PoolFile}
70 @return: database entry for the file
72 poolname = os.path.join(utils.poolify(source_name), hashed_file.filename)
74 poolfile = self.session.query(PoolFile).filter_by(filename=poolname).one()
75 if check_hashes and (poolfile.filesize != hashed_file.size
76 or poolfile.md5sum != hashed_file.md5sum
77 or poolfile.sha1sum != hashed_file.sha1sum
78 or poolfile.sha256sum != hashed_file.sha256sum):
79 raise HashMismatchException('{0}: Does not match file already existing in the pool.'.format(hashed_file.filename))
82 raise KeyError('{0} not found in database.'.format(poolname))
    def _install_file(self, directory, hashed_file, archive, component, source_name):
        """Install a file into the pool of C{archive}.

        Will not give an error when the file is already present.

        @rtype: L{daklib.dbconn.PoolFile}
        @return: database object for the new file
        """
        session = self.session

        poolname = os.path.join(utils.poolify(source_name), hashed_file.filename)
        # NOTE(review): the try/except scaffolding around the lookup below is
        # not visible in this copy of the file; as shown the fresh PoolFile
        # unconditionally replaces the looked-up one — verify against the
        # original source.
        poolfile = self.get_file(hashed_file, source_name)
        poolfile = PoolFile(filename=poolname, filesize=hashed_file.size)
        poolfile.md5sum = hashed_file.md5sum
        poolfile.sha1sum = hashed_file.sha1sum
        poolfile.sha256sum = hashed_file.sha256sum
        session.add(poolfile)

        # Record the file's presence in (archive, component) unless it is
        # already known there.
        session.query(ArchiveFile).filter_by(archive=archive, component=component, file=poolfile).one()
        except NoResultFound:
            archive_file = ArchiveFile(archive, component, poolfile)
            session.add(archive_file)

            # Copy the file into the archive's on-disk pool hierarchy.
            path = os.path.join(archive.path, 'pool', component.component_name, poolname)
            hashed_file_path = os.path.join(directory, hashed_file.filename)
            self.fs.copy(hashed_file_path, path, link=False, mode=archive.mode)
    def install_binary(self, directory, binary, suite, component, allow_tainted=False, fingerprint=None, source_suites=None, extra_source_archives=None):
        """Install a binary package

        @param directory: directory the binary package is located in

        @type  binary: L{daklib.upload.Binary}
        @param binary: binary package to install

        @type  suite: L{daklib.dbconn.Suite}
        @param suite: target suite

        @type  component: L{daklib.dbconn.Component}
        @param component: target component

        @type  allow_tainted: bool
        @param allow_tainted: allow to copy additional files from tainted archives

        @type  fingerprint: L{daklib.dbconn.Fingerprint}
        @param fingerprint: optional fingerprint

        @type  source_suites: SQLAlchemy subquery for C{daklib.dbconn.Suite} or C{True}
        @param source_suites: suites to copy the source from if they are not
                              in C{suite} or C{True} to allow copying from any
                              suite

        @type  extra_source_archives: list of L{daklib.dbconn.Archive}
        @param extra_source_archives: extra archives to copy Built-Using sources from

        @rtype:  L{daklib.dbconn.DBBinary}
        @return: database object for the new package
        """
        session = self.session
        control = binary.control
        maintainer = get_or_set_maintainer(control['Maintainer'], session)
        architecture = get_architecture(control['Architecture'], session)

        # Find the matching source: prefer one already in the target suite ...
        (source_name, source_version) = binary.source
        source_query = session.query(DBSource).filter_by(source=source_name, version=source_version)
        source = source_query.filter(DBSource.suites.contains(suite)).first()
        # ... otherwise restrict to the allowed source suites (C{True} means
        # "copy from any suite").
        if source_suites != True:
            source_query = source_query.join(DBSource.suites) \
                .filter(Suite.suite_id == source_suites.c.id)
        source = source_query.first()
        # NOTE(review): the conditional guarding this exception and the call
        # below is not visible in this copy of the file; as shown the raise
        # is unconditional — verify against the original source.
        raise ArchiveException('{0}: trying to install to {1}, but could not find source'.format(binary.hashed_file.filename, suite.suite_name))
        self.copy_source(source, suite, component)

        db_file = self._install_file(directory, binary.hashed_file, suite.archive, component, source_name)

        # NOTE(review): the dict constructors these keyword arguments belong
        # to (identifying attributes vs. the rest) are not visible in this
        # copy of the file.
        package=control['Package'],
        version=control['Version'],
        architecture=architecture,
        maintainer=maintainer,
        binarytype=binary.type,
        # Other attributes that are ignored for purposes of equality with
        # an existing binary
        fingerprint=fingerprint,

        # Reuse an existing, identical DBBinary or create a new one.
        db_binary = session.query(DBBinary).filter_by(**unique).one()
        for key, value in rest.iteritems():
            if getattr(db_binary, key) != value:
                raise ArchiveException('{0}: Does not match binary in database.'.format(binary.hashed_file.filename))
        except NoResultFound:
            db_binary = DBBinary(**unique)
            for key, value in rest.iteritems():
                setattr(db_binary, key, value)
            for key, value in rest2.iteritems():
                setattr(db_binary, key, value)
            session.add(db_binary)
            import_metadata_into_db(db_binary, session)

        # Make sure all Built-Using sources are present in the target archive.
        self._add_built_using(db_binary, binary.hashed_file.filename, control, suite, extra_archives=extra_source_archives)

        if suite not in db_binary.suites:
            db_binary.suites.append(suite)
    def _ensure_extra_source_exists(self, filename, source, archive, extra_archives=None):
        """ensure source exists in the given archive

        This is intended to be used to check that Built-Using sources exist.

        @type  filename: str
        @param filename: filename to use in error messages

        @type  source: L{daklib.dbconn.DBSource}
        @param source: source to look for

        @type  archive: L{daklib.dbconn.Archive}
        @param archive: archive to look in

        @type  extra_archives: list of L{daklib.dbconn.Archive}
        @param extra_archives: list of archives to copy the source package from
                               if it is not yet present in C{archive}
        """
        session = self.session
        db_file = session.query(ArchiveFile).filter_by(file=source.poolfile, archive=archive).first()
        if db_file is not None:
            # Already present in the target archive.
            # NOTE(review): the early return belonging to this branch is not
            # visible in this copy of the file.

        # Try to copy file from one extra archive
        if extra_archives is None:
            # NOTE(review): the default for C{extra_archives} and the check
            # guarding the exception below are not visible in this copy.
            db_file = session.query(ArchiveFile).filter_by(file=source.poolfile).filter(ArchiveFile.archive_id.in_([ a.archive_id for a in extra_archives])).first()
        raise ArchiveException('{0}: Built-Using refers to package {1} (= {2}) not in target archive {3}.'.format(filename, source.source, source.version, archive.archive_name))

        # Copy every file of the source package from the archive we found it in.
        source_archive = db_file.archive
        for dsc_file in source.srcfiles:
            af = session.query(ArchiveFile).filter_by(file=dsc_file.poolfile, archive=source_archive, component=db_file.component).one()
            # We were given an explicit list of archives so it is okay to copy from tainted archives.
            self._copy_file(af.file, archive, db_file.component, allow_tainted=True)
    def _add_built_using(self, db_binary, filename, control, suite, extra_archives=None):
        """Add Built-Using sources to C{db_binary.extra_sources}
        """
        session = self.session
        built_using = control.get('Built-Using', None)

        if built_using is not None:
            for dep in apt_pkg.parse_depends(built_using):
                assert len(dep) == 1, 'Alternatives are not allowed in Built-Using field'
                bu_source_name, bu_source_version, comp = dep[0]
                assert comp == '=', 'Built-Using must contain strict dependencies'

                # Built-Using must refer to a source package we know about ...
                bu_source = session.query(DBSource).filter_by(source=bu_source_name, version=bu_source_version).first()
                if bu_source is None:
                    raise ArchiveException('{0}: Built-Using refers to non-existing source package {1} (= {2})'.format(filename, bu_source_name, bu_source_version))

                # ... and that source must be present (or copied) into the
                # target archive before we record the reference.
                self._ensure_extra_source_exists(filename, bu_source, suite.archive, extra_archives=extra_archives)

                db_binary.extra_sources.append(bu_source)
    def install_source(self, directory, source, suite, component, changed_by, allow_tainted=False, fingerprint=None):
        """Install a source package

        @param directory: directory the source package is located in

        @type  source: L{daklib.upload.Source}
        @param source: source package to install

        @type  suite: L{daklib.dbconn.Suite}
        @param suite: target suite

        @type  component: L{daklib.dbconn.Component}
        @param component: target component

        @type  changed_by: L{daklib.dbconn.Maintainer}
        @param changed_by: person who prepared this version of the package

        @type  allow_tainted: bool
        @param allow_tainted: allow to copy additional files from tainted archives

        @type  fingerprint: L{daklib.dbconn.Fingerprint}
        @param fingerprint: optional fingerprint

        @rtype:  L{daklib.dbconn.DBSource}
        @return: database object for the new source
        """
        session = self.session
        archive = suite.archive
        # NOTE(review): the assignment of C{control} (the parsed .dsc control
        # data) is not visible in this copy of the file.
        maintainer = get_or_set_maintainer(control['Maintainer'], session)
        source_name = control['Source']

        ### Add source package to database

        # We need to install the .dsc first as the DBSource object refers to it.
        db_file_dsc = self._install_file(directory, source._dsc_file, archive, component, source_name)

        # NOTE(review): the dict constructors these keyword arguments belong
        # to are not visible in this copy of the file.
        version=control['Version'],
        maintainer=maintainer,
        #install_date=datetime.now().date(),
        poolfile=db_file_dsc,
        dm_upload_allowed=(control.get('DM-Upload-Allowed', 'no') == 'yes'),
        # Other attributes that are ignored for purposes of equality with
        # an existing source
        changedby=changed_by,
        fingerprint=fingerprint,

        # Reuse an existing, identical DBSource or create a new one.
        db_source = session.query(DBSource).filter_by(**unique).one()
        for key, value in rest.iteritems():
            if getattr(db_source, key) != value:
                raise ArchiveException('{0}: Does not match source in database.'.format(source._dsc_file.filename))
        except NoResultFound:
            db_source = DBSource(**unique)
            for key, value in rest.iteritems():
                setattr(db_source, key, value)
            for key, value in rest2.iteritems():
                setattr(db_source, key, value)
            # XXX: set as default in postgres?
            db_source.install_date = datetime.now().date()
            session.add(db_source)

            # Add .dsc file. Other files will be added later.
            db_dsc_file = DSCFile()
            db_dsc_file.source = db_source
            db_dsc_file.poolfile = db_file_dsc
            session.add(db_dsc_file)

        if suite in db_source.suites:
            # Already in the target suite.
            # NOTE(review): the body of this branch is not visible in this
            # copy of the file.
        db_source.suites.append(suite)

        # Make sure all source files are present in the target archive.
        for f in db_source.srcfiles:
            self._copy_file(f.poolfile, archive, component, allow_tainted=allow_tainted)

        ### Now add remaining files and copy them to the archive.

        for hashed_file in source.files.itervalues():
            hashed_file_path = os.path.join(directory, hashed_file.filename)
            if os.path.exists(hashed_file_path):
                db_file = self._install_file(directory, hashed_file, archive, component, source_name)
            # NOTE(review): the else-branch scaffolding around the lookup
            # below is not visible in this copy of the file.
            db_file = self.get_file(hashed_file, source_name)
            self._copy_file(db_file, archive, component, allow_tainted=allow_tainted)

            db_dsc_file = DSCFile()
            db_dsc_file.source = db_source
            db_dsc_file.poolfile = db_file
            session.add(db_dsc_file)

        # Importing is safe as we only arrive here when we did not find the source already installed earlier.
        import_metadata_into_db(db_source, session)

        # Uploaders are the maintainer and co-maintainers from the Uploaders field
        db_source.uploaders.append(maintainer)
        if 'Uploaders' in control:
            from daklib.textutils import split_uploaders
            for u in split_uploaders(control['Uploaders']):
                db_source.uploaders.append(get_or_set_maintainer(u, session))
    def _copy_file(self, db_file, archive, component, allow_tainted=False):
        """Copy a file to the given archive and component

        @type  db_file: L{daklib.dbconn.PoolFile}
        @param db_file: file to copy

        @type  archive: L{daklib.dbconn.Archive}
        @param archive: target archive

        @type  component: L{daklib.dbconn.Component}
        @param component: target component

        @type  allow_tainted: bool
        @param allow_tainted: allow to copy from tainted archives (such as NEW)
        """
        session = self.session

        if session.query(ArchiveFile).filter_by(archive=archive, component=component, file=db_file).first() is None:
            # Not yet present in (archive, component): find a copy in some
            # other archive to copy it from, excluding tainted archives
            # unless explicitly allowed.
            query = session.query(ArchiveFile).filter_by(file=db_file)
            if not allow_tainted:
                query = query.join(Archive).filter(Archive.tainted == False)

            source_af = query.first()
            if source_af is None:
                raise ArchiveException('cp: Could not find {0} in any archive.'.format(db_file.filename))
            target_af = ArchiveFile(archive, component, db_file)
            session.add(target_af)
            self.fs.copy(source_af.path, target_af.path, link=False, mode=archive.mode)
    def copy_binary(self, db_binary, suite, component, allow_tainted=False, extra_archives=None):
        """Copy a binary package to the given suite and component

        @type  db_binary: L{daklib.dbconn.DBBinary}
        @param db_binary: binary to copy

        @type  suite: L{daklib.dbconn.Suite}
        @param suite: target suite

        @type  component: L{daklib.dbconn.Component}
        @param component: target component

        @type  allow_tainted: bool
        @param allow_tainted: allow to copy from tainted archives (such as NEW)

        @type  extra_archives: list of L{daklib.dbconn.Archive}
        @param extra_archives: extra archives to copy Built-Using sources from
        """
        session = self.session
        archive = suite.archive

        filename = db_binary.poolfile.filename

        # make sure source is present in target archive
        db_source = db_binary.source
        if session.query(ArchiveFile).filter_by(archive=archive, file=db_source.poolfile).first() is None:
            raise ArchiveException('{0}: cannot copy to {1}: source is not present in target archive'.format(filename, suite.suite_name))

        # make sure built-using packages are present in target archive
        for db_source in db_binary.extra_sources:
            self._ensure_extra_source_exists(filename, db_source, archive, extra_archives=extra_archives)

        # Copy the binary itself and register it with the suite.
        db_file = db_binary.poolfile
        self._copy_file(db_file, suite.archive, component, allow_tainted=allow_tainted)
        if suite not in db_binary.suites:
            db_binary.suites.append(suite)
    def copy_source(self, db_source, suite, component, allow_tainted=False):
        """Copy a source package to the given suite and component

        @type  db_source: L{daklib.dbconn.DBSource}
        @param db_source: source to copy

        @type  suite: L{daklib.dbconn.Suite}
        @param suite: target suite

        @type  component: L{daklib.dbconn.Component}
        @param component: target component

        @type  allow_tainted: bool
        @param allow_tainted: allow to copy from tainted archives (such as NEW)
        """
        archive = suite.archive
        # Copy every file belonging to the source package, then register the
        # source with the suite.
        for db_dsc_file in db_source.srcfiles:
            self._copy_file(db_dsc_file.poolfile, archive, component, allow_tainted=allow_tainted)
        if suite not in db_source.suites:
            db_source.suites.append(suite)
482 def remove_file(self, db_file, archive, component):
483 """Remove a file from a given archive and component
485 @type db_file: L{daklib.dbconn.PoolFile}
486 @param db_file: file to remove
488 @type archive: L{daklib.dbconn.Archive}
489 @param archive: archive to remove the file from
491 @type component: L{daklib.dbconn.Component}
492 @param component: component to remove the file from
494 af = self.session.query(ArchiveFile).filter_by(file=db_file, archive=archive, component=component)
495 self.fs.unlink(af.path)
496 self.session.delete(af)
    def remove_binary(self, binary, suite):
        """Remove a binary from a given suite and component

        @type  binary: L{daklib.dbconn.DBBinary}
        @param binary: binary to remove

        @type  suite: L{daklib.dbconn.Suite}
        @param suite: suite to remove the package from
        """
        # Only the suite association is removed; the package record and its
        # pool files stay in the database.
        binary.suites.remove(suite)
    def remove_source(self, source, suite):
        """Remove a source from a given suite and component

        @type  source: L{daklib.dbconn.DBSource}
        @param source: source to remove

        @type  suite: L{daklib.dbconn.Suite}
        @param suite: suite to remove the package from

        @raise ArchiveException: source package is still referenced by other
                                 binaries in the suite
        """
        session = self.session

        # Refuse to remove a source that still has binaries built from it in
        # the suite.
        query = session.query(DBBinary).filter_by(source=source) \
            .filter(DBBinary.suites.contains(suite))
        if query.first() is not None:
            raise ArchiveException('src:{0} is still used by binaries in suite {1}'.format(source.source, suite.suite_name))

        # Only the suite association is removed; the package record stays.
        source.suites.remove(suite)
535 self.session.commit()
538 self.session.rollback()
542 """rollback changes"""
543 self.session.rollback()
552 def __exit__(self, type, value, traceback):
def source_component_from_package_list(package_list, suite):
    """Get component for a source package

    This function will look at the Package-List field to determine the
    component the source package belongs to. This is the first component
    the source package provides binaries for (first with respect to the
    ordering of components).

    If the source package has no Package-List field, None is returned.

    @type  package_list: L{daklib.packagelist.PackageList}
    @param package_list: package list of the source to get the override for

    @type  suite: L{daklib.dbconn.Suite}
    @param suite: suite to consider for binaries produced

    @rtype:  L{daklib.dbconn.Component} or C{None}
    @return: component for the given source or C{None}
    """
    if package_list.fallback:
        # No usable Package-List field: cannot derive a component.
        return None
    session = object_session(suite)
    packages = package_list.packages_for_suite(suite)
    components = set(p.component for p in packages)
    # First component (by configured ordering) the source builds binaries for.
    query = session.query(Component).order_by(Component.ordering) \
        .filter(Component.component_name.in_(components))
    return query.first()
587 class ArchiveUpload(object):
590 This class can be used in a with-statement::
592 with ArchiveUpload(...) as upload:
595 Doing so will automatically run any required cleanup and also rollback the
596 transaction if it was not committed.
    def __init__(self, directory, changes, keyrings):
        self.transaction = ArchiveTransaction()
        """transaction used to handle the upload
        @type: L{daklib.archive.ArchiveTransaction}
        """

        self.session = self.transaction.session
        """database session"""

        # Location of the upload as handed to us and its parsed .changes.
        self.original_directory = directory
        self.original_changes = changes

        # NOTE(review): the assignment this attribute docstring belongs to
        # (presumably C{self.changes = None}) is not visible in this copy of
        # the file.
        """changes sent with the upload
        @type: L{daklib.upload.Changes}
        """

        self.directory = None
        """directory with temporary copy of files. set by C{prepare}
        """

        # Keyrings used to verify the upload's signature.
        self.keyrings = keyrings

        self.fingerprint = self.session.query(Fingerprint).filter_by(fingerprint=changes.primary_fingerprint).one()
        """fingerprint of the key used to sign the upload
        @type: L{daklib.dbconn.Fingerprint}
        """

        self.reject_reasons = []
        """reasons why the upload cannot be accepted
        """

        self.final_suites = None

        """upload is NEW. set by C{check}
        """

        self._checked = False
        """checks passed. set by C{check}
        """

        # The NEW policy queue and its associated suite, used when the
        # upload needs manual processing.
        self._new_queue = self.session.query(PolicyQueue).filter_by(queue_name='new').one()
        self._new = self._new_queue.suite
    def warn(self, message):
        """add a warning message

        Adds a warning message that can later be seen in C{self.warnings}

        @type  message: string
        @param message: warning message
        """
        self.warnings.append(message)
664 """prepare upload for further processing
666 This copies the files involved to a temporary directory. If you use
667 this method directly, you have to remove the directory given by the
668 C{directory} attribute later on your own.
670 Instead of using the method directly, you can also use a with-statement::
672 with ArchiveUpload(...) as upload:
675 This will automatically handle any required cleanup.
677 assert self.directory is None
678 assert self.original_changes.valid_signature
681 session = self.transaction.session
683 group = cnf.get('Dinstall::UnprivGroup') or None
684 self.directory = utils.temp_dirname(parent=cnf.get('Dir::TempPath'),
685 mode=0o2750, group=group)
686 with FilesystemTransaction() as fs:
687 src = os.path.join(self.original_directory, self.original_changes.filename)
688 dst = os.path.join(self.directory, self.original_changes.filename)
689 fs.copy(src, dst, mode=0o640)
691 self.changes = upload.Changes(self.directory, self.original_changes.filename, self.keyrings)
693 for f in self.changes.files.itervalues():
694 src = os.path.join(self.original_directory, f.filename)
695 dst = os.path.join(self.directory, f.filename)
696 if not os.path.exists(src):
698 fs.copy(src, dst, mode=0o640)
702 source = self.changes.source
704 # Do not raise an exception here if the .dsc is invalid.
707 if source is not None:
708 for f in source.files.itervalues():
709 src = os.path.join(self.original_directory, f.filename)
710 dst = os.path.join(self.directory, f.filename)
711 if not os.path.exists(dst):
713 db_file = self.transaction.get_file(f, source.dsc['Source'], check_hashes=False)
714 db_archive_file = session.query(ArchiveFile).filter_by(file=db_file).first()
715 fs.copy(db_archive_file.path, dst, mode=0o640)
717 # Ignore if get_file could not find it. Upload will
718 # probably be rejected later.
    def unpacked_source(self):
        """Path to unpacked source

        Get path to the unpacked source. This method does unpack the source
        into a temporary directory under C{self.directory} if it has not
        been done so already.

        @rtype:  str or C{None}
        @return: string giving the path to the unpacked source directory
                 or C{None} if no source was included in the upload.
        """
        assert self.directory is not None

        source = self.changes.source
        # NOTE(review): the early return for C{source is None} is not visible
        # in this copy of the file.
        dsc_path = os.path.join(self.directory, source._dsc_file.filename)

        sourcedir = os.path.join(self.directory, 'source')
        if not os.path.exists(sourcedir):
            # NOTE(review): devnull is never closed — consider a with-block.
            devnull = open('/dev/null', 'w')
            daklib.daksubprocess.check_call(["dpkg-source", "--no-copy", "--no-check", "-x", dsc_path, sourcedir], shell=False, stdout=devnull)
        if not os.path.isdir(sourcedir):
            raise Exception("{0} is not a directory after extracting source package".format(sourcedir))
    def _map_suite(self, suite_name):
        """Apply the configured C{SuiteMappings} rules to C{suite_name}."""
        for rule in Config().value_list("SuiteMappings"):
            fields = rule.split()
            # NOTE(review): the assignments of C{rtype}, the mapped
            # C{suite_name}, C{ignored} and C{rejected} are not visible in
            # this copy of the file.
            if rtype == "map" or rtype == "silent-map":
                (src, dst) = fields[1:3]
                if src == suite_name:
                    if rtype != "silent-map":
                        self.warnings.append('Mapping {0} to {1}.'.format(src, dst))
            elif rtype == "ignore":
                if suite_name == ignored:
                    self.warnings.append('Ignoring target suite {0}.'.format(ignored))
            elif rtype == "reject":
                if suite_name == rejected:
                    raise checks.Reject('Uploads to {0} are not accepted.'.format(rejected))
            ## XXX: propup-version and map-unreleased not yet implemented
    def _mapped_suites(self):
        """Get target suites after mappings

        @rtype:  list of L{daklib.dbconn.Suite}
        @return: list giving the mapped target suites of this upload
        """
        session = self.session

        # Map each distribution from the .changes file; mappings may drop
        # suites, so only surviving names are collected.
        # NOTE(review): the initialisation of C{suite_names} and the final
        # return are not visible in this copy of the file.
        for dist in self.changes.distributions:
            suite_name = self._map_suite(dist)
            if suite_name is not None:
                suite_names.append(suite_name)

        suites = session.query(Suite).filter(Suite.suite_name.in_(suite_names))
    def _check_new_binary_overrides(self, suite):
        """Check whether a binary of this upload lacks an override in C{suite}."""
        binaries = self.changes.binaries
        source = self.changes.source
        # For sourceful uploads with a usable Package-List field, check the
        # declared packages instead of the binaries actually uploaded.
        if source is not None and not source.package_list.fallback:
            packages = source.package_list.packages_for_suite(suite)
            binaries = [ entry for entry in packages ]
        # NOTE(review): the loop header over C{binaries} and the surrounding
        # new-flag bookkeeping are not visible in this copy of the file.
        override = self._binary_override(suite, b)
        self.warnings.append('binary:{0} is NEW.'.format(b.name))
    def _check_new(self, suite):
        """Check if upload is NEW

        An upload is NEW if it has binary or source packages that do not have
        an override in C{suite} OR if it references files ONLY in a tainted
        archive (eg. when it references files in NEW).

        @rtype:  bool
        @return: C{True} if the upload is NEW, C{False} otherwise
        """
        session = self.session

        # Check for missing overrides
        # NOTE(review): the bodies of the branches below (early returns) are
        # not visible in this copy of the file.
        if self._check_new_binary_overrides(suite):
        if self.changes.source is not None:
            override = self._source_override(suite, self.changes.source)
            self.warnings.append('source:{0} is NEW.'.format(self.changes.source.dsc['Source']))

        # Check if we reference a file only in a tainted archive
        files = self.changes.files.values()
        if self.changes.source is not None:
            files.extend(self.changes.source.files.values())
        # NOTE(review): the loop header over C{files} is not visible in this
        # copy of the file.
        query = session.query(ArchiveFile).join(PoolFile).filter(PoolFile.sha1sum == f.sha1sum)
        query_untainted = query.join(Archive).filter(Archive.tainted == False)

        in_archive = (query.first() is not None)
        in_untainted_archive = (query_untainted.first() is not None)

        # Known only in a tainted archive (e.g. NEW) => upload is NEW.
        if in_archive and not in_untainted_archive:
            self.warnings.append('{0} is only available in NEW.'.format(f.filename))
    def _final_suites(self):
        """Determine the final set of target suites, redirecting NEW uploads."""
        session = self.session

        mapped_suites = self._mapped_suites()

        # NOTE(review): the initialisation/return of C{final_suites} and the
        # body of the _check_new branch are not visible in this copy of the
        # file.
        for suite in mapped_suites:
            # Overrides may live in a different suite (overridesuite).
            overridesuite = suite
            if suite.overridesuite is not None:
                overridesuite = session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
            if self._check_new(overridesuite):
            final_suites.add(suite)
    def _binary_override(self, suite, binary):
        """Get override entry for a binary

        @type  suite: L{daklib.dbconn.Suite}
        @param suite: suite to get override for

        @type  binary: L{daklib.upload.Binary} or L{daklib.packagelist.PackageListEntry}
        @param binary: binary to get override for

        @rtype:  L{daklib.dbconn.Override} or C{None}
        @return: override for the given binary or C{None}
        """
        # Overrides live in the override suite when one is configured.
        if suite.overridesuite is not None:
            suite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()

        mapped_component = get_mapped_component(binary.component)
        if mapped_component is None:
            # NOTE(review): the body of this branch (presumably an early
            # return) is not visible in this copy of the file.

        query = self.session.query(Override).filter_by(suite=suite, package=binary.name) \
            .join(Component).filter(Component.component_name == mapped_component.component_name) \
            .join(OverrideType).filter(OverrideType.overridetype == binary.type)

        # NOTE(review): the try/return around the query evaluation is not
        # visible in this copy of the file.
        except NoResultFound:
    def _source_override(self, suite, source):
        """Get override entry for a source

        @type  suite: L{daklib.dbconn.Suite}
        @param suite: suite to get override for

        @type  source: L{daklib.upload.Source}
        @param source: source to get override for

        @rtype:  L{daklib.dbconn.Override} or C{None}
        @return: override for the given source or C{None}
        """
        # Overrides live in the override suite when one is configured.
        if suite.overridesuite is not None:
            suite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()

        query = self.session.query(Override).filter_by(suite=suite, package=source.dsc['Source']) \
            .join(OverrideType).filter(OverrideType.overridetype == 'dsc')

        # Restrict to the component implied by the Package-List field, if any.
        component = source_component_from_package_list(source.package_list, suite)
        if component is not None:
            query = query.filter(Override.component == component)

        # NOTE(review): the try/return around the query evaluation is not
        # visible in this copy of the file.
        except NoResultFound:
    def _binary_component(self, suite, binary, only_overrides=True):
        """get component for a binary

        By default this will only look at overrides to get the right component;
        if C{only_overrides} is C{False} this method will also look at the
        component given in the package itself.

        @type  suite: L{daklib.dbconn.Suite}

        @type  binary: L{daklib.upload.Binary}

        @type  only_overrides: bool
        @param only_overrides: only use overrides to get the right component

        @rtype: L{daklib.dbconn.Component} or C{None}
        """
        override = self._binary_override(suite, binary)
        if override is not None:
            return override.component
        # NOTE(review): the early C{only_overrides} return is not visible in
        # this copy of the file; fall back to the package's own component.
        return get_mapped_component(binary.component, self.session)
    def check(self, force=False):
        """run checks against the upload

        @type  force: bool
        @param force: ignore failing forcable checks

        @rtype:  bool
        @return: C{True} if all checks passed, C{False} otherwise
        """
        # XXX: needs to be better structured.
        assert self.changes.valid_signature

        # NOTE(review): the try/list/loop scaffolding around the check-class
        # names below is not visible in this copy of the file.
        # Validate signatures and hashes before we do any real work:
        checks.SignatureAndHashesCheck,
        checks.SignatureTimestampCheck,
        checks.ExternalHashesCheck,
        checks.BinaryTimestampCheck,
        checks.SingleDistributionCheck,

        # Determine the target suites; reject when none survive mapping.
        final_suites = self._final_suites()
        if len(final_suites) == 0:
            self.reject_reasons.append('No target suite found. Please check your target distribution and that you uploaded to the right archive.')

        self.final_suites = final_suites

        checks.TransitionCheck,
        checks.NoSourceOnlyCheck,
        checks.SourceFormatCheck,
        checks.SuiteArchitectureCheck,

        # Per-suite checks run once for every final target suite.
        for suite in final_suites:
            chk().per_suite_check(self, suite)

        if len(self.reject_reasons) != 0:
        except checks.Reject as e:
            self.reject_reasons.append(unicode(e))
        except Exception as e:
            self.reject_reasons.append("Processing raised an exception: {0}.\n{1}".format(e, traceback.format_exc()))
    def _install_to_suite(self, suite, source_component_func, binary_component_func, source_suites=None, extra_source_archives=None):
        """Install upload to the given suite

        @type  suite: L{daklib.dbconn.Suite}
        @param suite: suite to install the package into. This is the real suite,
                      ie. after any redirection to NEW or a policy queue

        @param source_component_func: function to get the L{daklib.dbconn.Component}
                                      for a L{daklib.upload.Source} object

        @param binary_component_func: function to get the L{daklib.dbconn.Component}
                                      for a L{daklib.upload.Binary} object

        @param source_suites: see L{daklib.archive.ArchiveTransaction.install_binary}

        @param extra_source_archives: see L{daklib.archive.ArchiveTransaction.install_binary}

        @return: tuple with two elements. The first is a L{daklib.dbconn.DBSource}
                 object for the install source or C{None} if no source was
                 included. The second is a list of L{daklib.dbconn.DBBinary}
                 objects for the installed binary packages.
        """
        # XXX: move this function to ArchiveTransaction?

        control = self.changes.changes
        changed_by = get_or_set_maintainer(control.get('Changed-By', control['Maintainer']), self.session)

        if source_suites is None:
            # Default source-suite set: suites related to the target suite
            # via an 'Enhances' version check.
            source_suites = self.session.query(Suite).join((VersionCheck, VersionCheck.reference_id == Suite.suite_id)).filter(VersionCheck.check == 'Enhances').filter(VersionCheck.suite == suite).subquery()

        source = self.changes.source
        if source is not None:
            component = source_component_func(source)
            db_source = self.transaction.install_source(self.directory, source, suite, component, changed_by, fingerprint=self.fingerprint)
        # NOTE(review): the else-branch setting C{db_source} and the
        # initialisation of C{db_binaries} are not visible in this copy of
        # the file.
        for binary in self.changes.binaries:
            component = binary_component_func(binary)
            db_binary = self.transaction.install_binary(self.directory, binary, suite, component, fingerprint=self.fingerprint, source_suites=source_suites, extra_source_archives=extra_source_archives)
            db_binaries.append(db_binary)

        # Keep a copy of the .changes in the suite if configured to do so.
        if suite.copychanges:
            src = os.path.join(self.directory, self.changes.filename)
            dst = os.path.join(suite.archive.path, 'dists', suite.suite_name, self.changes.filename)
            self.transaction.fs.copy(src, dst, mode=suite.archive.mode)

        return (db_source, db_binaries)
    def _install_changes(self):
        """Create and record the database entry for the .changes file.

        For sourceful uploads and binary-only NMUs the changelog text is
        inserted into C{changelogs_text} first; the resulting id is stored
        on the new L{daklib.dbconn.DBChange} row together with the fields
        taken from the .changes control paragraph.

        @raise ArchiveException: a .changes file with the same name is
                                 already known to the database
        """
        assert self.changes.valid_signature
        control = self.changes.changes
        session = self.transaction.session

        # Only add changelog for sourceful uploads and binNMUs
        if 'source' in self.changes.architectures or re_bin_only_nmu.search(control['Version']):
            query = 'INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id'
            changelog_id = session.execute(query, {'changelog': control['Changes']}).scalar()
            assert changelog_id is not None

        db_changes = DBChange()
        db_changes.changesname = self.changes.filename
        db_changes.source = control['Source']
        # 'Binary' is optional (absent for source-only uploads)
        db_changes.binaries = control.get('Binary', None)
        db_changes.architecture = control['Architecture']
        db_changes.version = control['Version']
        db_changes.distribution = control['Distribution']
        db_changes.urgency = control['Urgency']
        db_changes.maintainer = control['Maintainer']
        # fall back to Maintainer when no explicit Changed-By is given
        db_changes.changedby = control.get('Changed-By', control['Maintainer'])
        db_changes.date = control['Date']
        db_changes.fingerprint = self.fingerprint.fingerprint
        db_changes.changelog_id = changelog_id
        db_changes.closes = self.changes.closed_bugs

            self.transaction.session.add(db_changes)
            self.transaction.session.flush()
        except sqlalchemy.exc.IntegrityError:
            # the unique constraint on the changes name fired: duplicate upload
            raise ArchiveException('{0} is already known.'.format(self.changes.filename))
    def _install_policy(self, policy_queue, target_suite, db_changes, db_source, db_binaries):
        """Record an upload in a policy queue and copy its .changes file there.

        @type  policy_queue: L{daklib.dbconn.PolicyQueue}
        @param policy_queue: policy queue that holds the upload

        @type  target_suite: L{daklib.dbconn.Suite}
        @param target_suite: suite the upload is ultimately targeted at

        @type  db_changes: L{daklib.dbconn.DBChange}
        @param db_changes: database record of the .changes file

        @type  db_source: L{daklib.dbconn.DBSource}
        @param db_source: database record of the included source,
                          or C{None} for binary-only uploads

        @type  db_binaries: list of L{daklib.dbconn.DBBinary}
        @param db_binaries: database records of the included binaries
        """
        u = PolicyQueueUpload()
        u.policy_queue = policy_queue
        u.target_suite = target_suite
        u.changes = db_changes
        u.source = db_source
        u.binaries = db_binaries
        self.transaction.session.add(u)
        self.transaction.session.flush()

        # keep a copy of the .changes file in the policy queue directory
        dst = os.path.join(policy_queue.path, self.changes.filename)
        self.transaction.fs.copy(self.changes.path, dst, mode=policy_queue.change_perms)
    def try_autobyhand(self):
        """Try to process byhand files automatically

        Try to handle byhand packages automatically by running the script
        configured in the AutomaticByHandPackages subtree for each byhand
        file whose name, source and section match a configured rule.

        @rtype:  bool
        @return: C{True} if no byhand files are left for manual processing
        """
        assert len(self.reject_reasons) == 0
        assert self.changes.valid_signature
        assert self.final_suites is not None
        assert self._checked

        byhand = self.changes.byhand_files
        if len(byhand) == 0:

        suites = list(self.final_suites)
        assert len(suites) == 1, "BYHAND uploads must be to a single suite"

        control = self.changes.changes
        automatic_byhand_packages = cnf.subtree("AutomaticByHandPackages")

            # filename of the form <package>_<version>_<archext>
            if '_' in f.filename:
                parts = f.filename.split('_', 2)
                    print "W: unexpected byhand filename {0}. No automatic processing.".format(f.filename)
                package, version, archext = parts
                # archext is "<arch>.<ext>", e.g. "amd64.tar.gz"
                arch, ext = archext.split('.', 1)
                # no underscore: split on dots instead
                parts = f.filename.split('.')
                    print "W: unexpected byhand filename {0}. No automatic processing.".format(f.filename)

                rule = automatic_byhand_packages.subtree(package)

            # rule must match the upload's source, the file's section and,
            # when an Extension is configured, the file's extension
            if rule['Source'] != self.changes.source_name \
               or rule['Section'] != f.section \
               or ('Extension' in rule and rule['Extension'] != ext):

            # run the configured handler script; shell=False: argv list, no shell
            script = rule['Script']
            retcode = daklib.daksubprocess.call([script, os.path.join(self.directory, f.filename), control['Version'], arch, os.path.join(self.directory, self.changes.filename)], shell=False)
                print "W: error processing {0}.".format(f.filename)

        return len(remaining) == 0
    def _install_byhand(self, policy_queue_upload, hashed_file):
        """install byhand file

        Records the byhand file in the database and copies it into the
        policy queue's directory.

        @type  policy_queue_upload: L{daklib.dbconn.PolicyQueueUpload}
        @param policy_queue_upload: upload the byhand file belongs to

        @type  hashed_file: L{daklib.upload.HashedFile}
        @param hashed_file: byhand file to install
        """
        fs = self.transaction.fs
        session = self.transaction.session
        policy_queue = policy_queue_upload.policy_queue

        byhand_file = PolicyQueueByhandFile()
        byhand_file.upload = policy_queue_upload
        byhand_file.filename = hashed_file.filename
        session.add(byhand_file)

        # copy the file itself into the policy queue directory
        src = os.path.join(self.directory, hashed_file.filename)
        dst = os.path.join(policy_queue.path, hashed_file.filename)
        fs.copy(src, dst, mode=policy_queue.change_perms)
    def _do_bts_versiontracking(self):
        """Export version information for BTS version tracking.

        Writes two files below Dir::BTSVersionTrack (when configured):
        C{<changes>.versions} with the version lines from the source
        package's debian/changelog, and C{<changes>.debinfo} with a
        binary-to-source mapping for every uploaded binary.
        """
        fs = self.transaction.fs

        btsdir = cnf.get('Dir::BTSVersionTrack')
        if btsdir is None or btsdir == '':

        # strip the trailing ".changes" (8 characters) from the filename
        base = os.path.join(btsdir, self.changes.filename[:-8])

        # version history from the unpacked source's debian/changelog
        sourcedir = self.unpacked_source()
        if sourcedir is not None:
            fh = open(os.path.join(sourcedir, 'debian', 'changelog'), 'r')
            versions = fs.create("{0}.versions".format(base), mode=0o644)
            for line in fh.readlines():
                if re_changelog_versions.match(line):
                    versions.write(line)

        # binary -> source mapping
        debinfo = fs.create("{0}.debinfo".format(base), mode=0o644)
        for binary in self.changes.binaries:
            control = binary.control
            source_package, source_version = binary.source
            line = " ".join([control['Package'], control['Version'], control['Architecture'], source_package, source_version])
            print >>debinfo, line
1219 def _policy_queue(self, suite):
1220 if suite.policy_queue is not None:
1221 return suite.policy_queue
        Install upload to a suite or policy queue. This method does B{not}
        handle uploads to NEW.

        You need to have called the C{check} method before calling this method.
        assert len(self.reject_reasons) == 0
        assert self.changes.valid_signature
        assert self.final_suites is not None
        assert self._checked

        db_changes = self._install_changes()

        for suite in self.final_suites:
            # overrides may live in a different suite (Suite.overridesuite)
            overridesuite = suite
            if suite.overridesuite is not None:
                overridesuite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()

            policy_queue = self._policy_queue(suite)

            # uploads going through a policy queue are installed into the
            # queue's suite first, not directly into the target suite
            redirected_suite = suite
            if policy_queue is not None:
                redirected_suite = policy_queue.suite

            # source can be in the suite we install to or any suite we enhance
            source_suite_ids = set([suite.suite_id, redirected_suite.suite_id])
            for enhanced_suite_id, in self.session.query(VersionCheck.reference_id) \
                    .filter(VersionCheck.suite_id.in_(source_suite_ids)) \
                    .filter(VersionCheck.check == 'Enhances'):
                source_suite_ids.add(enhanced_suite_id)

            source_suites = self.session.query(Suite).filter(Suite.suite_id.in_(source_suite_ids)).subquery()

            # components come from the override suite's override entries
            source_component_func = lambda source: self._source_override(overridesuite, source).component
            binary_component_func = lambda binary: self._binary_component(overridesuite, binary)

            (db_source, db_binaries) = self._install_to_suite(redirected_suite, source_component_func, binary_component_func, source_suites=source_suites, extra_source_archives=[suite.archive])

            if policy_queue is not None:
                self._install_policy(policy_queue, suite, db_changes, db_source, db_binaries)

            # copy to build queues
            if policy_queue is None or policy_queue.send_to_build_queues:
                for build_queue in suite.copy_queues:
                    self._install_to_suite(build_queue.suite, source_component_func, binary_component_func, source_suites=source_suites, extra_source_archives=[suite.archive])

        self._do_bts_versiontracking()
    def install_to_new(self):
        """install upload to NEW

        Install upload to NEW. This method does B{not} handle regular uploads
        to suites or policy queues.

        You need to have called the C{check} method before calling this method.
        # Uploads to NEW are special as we don't have overrides.
        assert len(self.reject_reasons) == 0
        assert self.changes.valid_signature
        assert self.final_suites is not None

        source = self.changes.source
        binaries = self.changes.binaries
        byhand = self.changes.byhand_files

        # we need a suite to guess components
        suites = list(self.final_suites)
        assert len(suites) == 1, "NEW uploads must be to a single suite"

        # decide which NEW queue to use
        if suite.new_queue is None:
            # no suite-specific queue configured: fall back to the global 'new' queue
            new_queue = self.transaction.session.query(PolicyQueue).filter_by(queue_name='new').one()
            new_queue = suite.new_queue
            # There is only one global BYHAND queue
            new_queue = self.transaction.session.query(PolicyQueue).filter_by(queue_name='byhand').one()
        new_suite = new_queue.suite

        def binary_component_func(binary):
            # NOTE(review): only_overrides=False presumably lets the component
            # be guessed when no override exists yet — confirm in _binary_component
            return self._binary_component(suite, binary, only_overrides=False)

        # guess source component
        # XXX: should be moved into an extra method
        binary_component_names = set()
        for binary in binaries:
            component = binary_component_func(binary)
            binary_component_names.add(component.component_name)
        source_component_name = None
        # pick the first component (by id) that one of the binaries uses
        for c in self.session.query(Component).order_by(Component.component_id):
            guess = c.component_name
            if guess in binary_component_names:
                source_component_name = guess
        if source_component_name is None:
            # no binaries (or no match): fall back to the first known component
            source_component = self.session.query(Component).order_by(Component.component_id).first()
            source_component = self.session.query(Component).filter_by(component_name=source_component_name).one()
        source_component_func = lambda source: source_component

        db_changes = self._install_changes()
        (db_source, db_binaries) = self._install_to_suite(new_suite, source_component_func, binary_component_func, source_suites=True, extra_source_archives=[suite.archive])
        policy_upload = self._install_policy(new_queue, suite, db_changes, db_source, db_binaries)

            self._install_byhand(policy_upload, f)

        self._do_bts_versiontracking()
1339 """commit changes"""
1340 self.transaction.commit()
1343 """rollback changes"""
1344 self.transaction.rollback()
    def __enter__(self):
        """context manager entry; paired with L{__exit__} which cleans up"""
    def __exit__(self, type, value, traceback):
        """context manager exit

        Removes C{self.directory} (presumably the temporary directory the
        upload was extracted to — confirm against __enter__/prepare) and
        rolls back any transaction that was not committed.
        """
        if self.directory is not None:
            shutil.rmtree(self.directory)
            self.directory = None
        self.transaction.rollback()