1 # Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
3 # This program is free software; you can redistribute it and/or modify
4 # it under the terms of the GNU General Public License as published by
5 # the Free Software Foundation; either version 2 of the License, or
6 # (at your option) any later version.
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
13 # You should have received a copy of the GNU General Public License along
14 # with this program; if not, write to the Free Software Foundation, Inc.,
15 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""module to manipulate the archive

This module provides classes to manipulate the archive.
"""
import os
import traceback
from datetime import datetime

import sqlalchemy.exc
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.orm import object_session

from daklib.dbconn import *
import daklib.checks as checks
from daklib.config import Config
import daklib.upload as upload
import daklib.utils as utils
from daklib.fstransactions import FilesystemTransaction
from daklib.regexes import re_changelog_versions, re_bin_only_nmu
import daklib.daksubprocess
class ArchiveException(Exception):
    """base exception for archive manipulation errors"""
    pass
class HashMismatchException(ArchiveException):
    """raised when a file's size or checksums differ from the pool copy"""
    pass
class ArchiveTransaction(object):
    """manipulate the archive in a transaction

    Changes to the database and the filesystem are staged together and are
    only made permanent by C{commit}; on error both are rolled back.
    """

    def __init__(self):
        # filesystem operations staged alongside the DB transaction
        self.fs = FilesystemTransaction()
        # database session used for all queries in this transaction
        self.session = DBConn().session()
def get_file(self, hashed_file, source_name, check_hashes=True):
    """Look for file C{hashed_file} in database

    @type  hashed_file: L{daklib.upload.HashedFile}
    @param hashed_file: file to look for in the database

    @type  source_name: str
    @param source_name: source package name

    @type  check_hashes: bool
    @param check_hashes: check size and hashes match

    @raise KeyError: file was not found in the database
    @raise HashMismatchException: hash mismatch

    @rtype:  L{daklib.dbconn.PoolFile}
    @return: database entry for the file
    """
    poolname = os.path.join(utils.poolify(source_name), hashed_file.filename)
    try:
        poolfile = self.session.query(PoolFile).filter_by(filename=poolname).one()
        if check_hashes and (poolfile.filesize != hashed_file.size
                             or poolfile.md5sum != hashed_file.md5sum
                             or poolfile.sha1sum != hashed_file.sha1sum
                             or poolfile.sha256sum != hashed_file.sha256sum):
            raise HashMismatchException('{0}: Does not match file already existing in the pool.'.format(hashed_file.filename))
        return poolfile
    except NoResultFound:
        raise KeyError('{0} not found in database.'.format(poolname))
def _install_file(self, directory, hashed_file, archive, component, source_name):
    """Install a file

    Will not give an error when the file is already present.

    @rtype:  L{daklib.dbconn.PoolFile}
    @return: database object for the new file
    """
    session = self.session

    poolname = os.path.join(utils.poolify(source_name), hashed_file.filename)
    try:
        poolfile = self.get_file(hashed_file, source_name)
    except KeyError:
        # not known yet: create the pool entry from the upload's hashes
        poolfile = PoolFile(filename=poolname, filesize=hashed_file.size)
        poolfile.md5sum = hashed_file.md5sum
        poolfile.sha1sum = hashed_file.sha1sum
        poolfile.sha256sum = hashed_file.sha256sum
        session.add(poolfile)
        session.flush()

    try:
        session.query(ArchiveFile).filter_by(archive=archive, component=component, file=poolfile).one()
    except NoResultFound:
        # file is not in this archive/component yet: register and copy it in
        archive_file = ArchiveFile(archive, component, poolfile)
        session.add(archive_file)
        session.flush()

        path = os.path.join(archive.path, 'pool', component.component_name, poolname)
        hashed_file_path = os.path.join(directory, hashed_file.input_filename)
        self.fs.copy(hashed_file_path, path, link=False, mode=archive.mode)

    return poolfile
def install_binary(self, directory, binary, suite, component, allow_tainted=False, fingerprint=None, source_suites=None, extra_source_archives=None):
    """Install a binary package

    @type  directory: str
    @param directory: directory the binary package is located in

    @type  binary: L{daklib.upload.Binary}
    @param binary: binary package to install

    @type  suite: L{daklib.dbconn.Suite}
    @param suite: target suite

    @type  component: L{daklib.dbconn.Component}
    @param component: target component

    @type  allow_tainted: bool
    @param allow_tainted: allow to copy additional files from tainted archives

    @type  fingerprint: L{daklib.dbconn.Fingerprint}
    @param fingerprint: optional fingerprint

    @type  source_suites: SQLAlchemy subquery for C{daklib.dbconn.Suite} or C{True}
    @param source_suites: suites to copy the source from if they are not
                          in C{suite} or C{True} to allow copying from any
                          suite

    @type  extra_source_archives: list of L{daklib.dbconn.Archive}
    @param extra_source_archives: extra archives to copy Built-Using sources from

    @rtype:  L{daklib.dbconn.DBBinary}
    @return: database object for the new package
    """
    session = self.session
    control = binary.control
    maintainer = get_or_set_maintainer(control['Maintainer'], session)
    architecture = get_architecture(control['Architecture'], session)

    (source_name, source_version) = binary.source
    source_query = session.query(DBSource).filter_by(source=source_name, version=source_version)
    source = source_query.filter(DBSource.suites.contains(suite)).first()
    if source is None:
        # source is not in the target suite: try to copy it from one of
        # the allowed source suites
        if source_suites != True:
            source_query = source_query.join(DBSource.suites) \
                .filter(Suite.suite_id == source_suites.c.id)
        source = source_query.first()
        if source is None:
            raise ArchiveException('{0}: trying to install to {1}, but could not find source'.format(binary.hashed_file.filename, suite.suite_name))
        self.copy_source(source, suite, component)

    db_file = self._install_file(directory, binary.hashed_file, suite.archive, component, source_name)

    unique = dict(
        package=control['Package'],
        version=control['Version'],
        architecture=architecture,
        )
    rest = dict(
        source=source,
        maintainer=maintainer,
        poolfile=db_file,
        binarytype=binary.type,
        )
    # Other attributes that are ignored for purposes of equality with
    # an existing binary
    rest2 = dict(
        fingerprint=fingerprint,
        )

    try:
        db_binary = session.query(DBBinary).filter_by(**unique).one()
        for key, value in rest.iteritems():
            if getattr(db_binary, key) != value:
                raise ArchiveException('{0}: Does not match binary in database.'.format(binary.hashed_file.filename))
    except NoResultFound:
        db_binary = DBBinary(**unique)
        for key, value in rest.iteritems():
            setattr(db_binary, key, value)
        for key, value in rest2.iteritems():
            setattr(db_binary, key, value)
        session.add(db_binary)
        session.flush()
        import_metadata_into_db(db_binary, session)

        self._add_built_using(db_binary, binary.hashed_file.filename, control, suite, extra_archives=extra_source_archives)

    if suite not in db_binary.suites:
        db_binary.suites.append(suite)

    session.flush()

    return db_binary
def _ensure_extra_source_exists(self, filename, source, archive, extra_archives=None):
    """ensure source exists in the given archive

    This is intended to be used to check that Built-Using sources exist.

    @type  filename: str
    @param filename: filename to use in error messages

    @type  source: L{daklib.dbconn.DBSource}
    @param source: source to look for

    @type  archive: L{daklib.dbconn.Archive}
    @param archive: archive to look in

    @type  extra_archives: list of L{daklib.dbconn.Archive}
    @param extra_archives: list of archives to copy the source package from
                           if it is not yet present in C{archive}
    """
    session = self.session
    db_file = session.query(ArchiveFile).filter_by(file=source.poolfile, archive=archive).first()
    if db_file is not None:
        # already present, nothing to do
        return True

    # Try to copy file from one extra archive
    if extra_archives is None:
        extra_archives = []
    db_file = session.query(ArchiveFile).filter_by(file=source.poolfile).filter(ArchiveFile.archive_id.in_([ a.archive_id for a in extra_archives])).first()
    if db_file is None:
        raise ArchiveException('{0}: Built-Using refers to package {1} (= {2}) not in target archive {3}.'.format(filename, source.source, source.version, archive.archive_name))

    source_archive = db_file.archive
    for dsc_file in source.srcfiles:
        af = session.query(ArchiveFile).filter_by(file=dsc_file.poolfile, archive=source_archive, component=db_file.component).one()
        # We were given an explicit list of archives so it is okay to copy from tainted archives.
        self._copy_file(af.file, archive, db_file.component, allow_tainted=True)
def _add_built_using(self, db_binary, filename, control, suite, extra_archives=None):
    """Add Built-Using sources to C{db_binary.extra_sources}

    @raise ArchiveException: a Built-Using source does not exist or is not
                             available in the target archive
    """
    session = self.session

    for bu_source_name, bu_source_version in daklib.utils.parse_built_using(control):
        bu_source = session.query(DBSource).filter_by(source=bu_source_name, version=bu_source_version).first()
        if bu_source is None:
            raise ArchiveException('{0}: Built-Using refers to non-existing source package {1} (= {2})'.format(filename, bu_source_name, bu_source_version))

        self._ensure_extra_source_exists(filename, bu_source, suite.archive, extra_archives=extra_archives)

        db_binary.extra_sources.append(bu_source)
def install_source_to_archive(self, directory, source, archive, component, changed_by, allow_tainted=False, fingerprint=None):
    """Install source package to the given archive

    @rtype:  L{daklib.dbconn.DBSource}
    @return: database object for the new source package
    """
    session = self.session
    control = source.dsc
    maintainer = get_or_set_maintainer(control['Maintainer'], session)
    source_name = control['Source']

    ### Add source package to database

    # We need to install the .dsc first as the DBSource object refers to it.
    db_file_dsc = self._install_file(directory, source._dsc_file, archive, component, source_name)

    unique = dict(
        source=source_name,
        version=control['Version'],
        )
    rest = dict(
        maintainer=maintainer,
        #install_date=datetime.now().date(),
        poolfile=db_file_dsc,
        dm_upload_allowed=(control.get('DM-Upload-Allowed', 'no') == 'yes'),
        )
    # Other attributes that are ignored for purposes of equality with
    # an existing source
    rest2 = dict(
        changedby=changed_by,
        fingerprint=fingerprint,
        )

    created = False
    try:
        db_source = session.query(DBSource).filter_by(**unique).one()
        for key, value in rest.iteritems():
            if getattr(db_source, key) != value:
                raise ArchiveException('{0}: Does not match source in database.'.format(source._dsc_file.filename))
    except NoResultFound:
        created = True
        db_source = DBSource(**unique)
        for key, value in rest.iteritems():
            setattr(db_source, key, value)
        for key, value in rest2.iteritems():
            setattr(db_source, key, value)
        # XXX: set as default in postgres?
        db_source.install_date = datetime.now().date()
        session.add(db_source)
        session.flush()

        # Add .dsc file. Other files will be added later.
        db_dsc_file = DSCFile()
        db_dsc_file.source = db_source
        db_dsc_file.poolfile = db_file_dsc
        session.add(db_dsc_file)
        session.flush()

    if not created:
        # source already known: just make sure its files are in the archive
        for f in db_source.srcfiles:
            self._copy_file(f.poolfile, archive, component, allow_tainted=allow_tainted)
        return db_source

    ### Now add remaining files and copy them to the archive.

    for hashed_file in source.files.itervalues():
        hashed_file_path = os.path.join(directory, hashed_file.input_filename)
        if os.path.exists(hashed_file_path):
            db_file = self._install_file(directory, hashed_file, archive, component, source_name)
        else:
            # file not part of the upload: it must already be in the pool
            db_file = self.get_file(hashed_file, source_name)
            self._copy_file(db_file, archive, component, allow_tainted=allow_tainted)

        db_dsc_file = DSCFile()
        db_dsc_file.source = db_source
        db_dsc_file.poolfile = db_file
        session.add(db_dsc_file)

    session.flush()

    # Importing is safe as we only arrive here when we did not find the source already installed earlier.
    import_metadata_into_db(db_source, session)

    # Uploaders are the maintainer and co-maintainers from the Uploaders field
    db_source.uploaders.append(maintainer)
    if 'Uploaders' in control:
        from daklib.textutils import split_uploaders
        for u in split_uploaders(control['Uploaders']):
            db_source.uploaders.append(get_or_set_maintainer(u, session))

    session.flush()

    return db_source
def install_source(self, directory, source, suite, component, changed_by, allow_tainted=False, fingerprint=None):
    """Install a source package

    @type  directory: str
    @param directory: directory the source package is located in

    @type  source: L{daklib.upload.Source}
    @param source: source package to install

    @type  suite: L{daklib.dbconn.Suite}
    @param suite: target suite

    @type  component: L{daklib.dbconn.Component}
    @param component: target component

    @type  changed_by: L{daklib.dbconn.Maintainer}
    @param changed_by: person who prepared this version of the package

    @type  allow_tainted: bool
    @param allow_tainted: allow to copy additional files from tainted archives

    @type  fingerprint: L{daklib.dbconn.Fingerprint}
    @param fingerprint: optional fingerprint

    @rtype:  L{daklib.dbconn.DBSource}
    @return: database object for the new source
    """
    db_source = self.install_source_to_archive(directory, source, suite.archive, component, changed_by, allow_tainted, fingerprint)

    if suite in db_source.suites:
        # nothing left to do when the source is already in the suite
        return db_source

    db_source.suites.append(suite)
    self.session.flush()

    return db_source
def _copy_file(self, db_file, archive, component, allow_tainted=False):
    """Copy a file to the given archive and component

    @type  db_file: L{daklib.dbconn.PoolFile}
    @param db_file: file to copy

    @type  archive: L{daklib.dbconn.Archive}
    @param archive: target archive

    @type  component: L{daklib.dbconn.Component}
    @param component: target component

    @type  allow_tainted: bool
    @param allow_tainted: allow to copy from tainted archives (such as NEW)
    """
    session = self.session

    if session.query(ArchiveFile).filter_by(archive=archive, component=component, file=db_file).first() is None:
        query = session.query(ArchiveFile).filter_by(file=db_file)
        if not allow_tainted:
            query = query.join(Archive).filter(Archive.tainted == False)

        source_af = query.first()
        if source_af is None:
            raise ArchiveException('cp: Could not find {0} in any archive.'.format(db_file.filename))
        target_af = ArchiveFile(archive, component, db_file)
        session.add(target_af)
        session.flush()
        self.fs.copy(source_af.path, target_af.path, link=False, mode=archive.mode)
def copy_binary(self, db_binary, suite, component, allow_tainted=False, extra_archives=None):
    """Copy a binary package to the given suite and component

    @type  db_binary: L{daklib.dbconn.DBBinary}
    @param db_binary: binary to copy

    @type  suite: L{daklib.dbconn.Suite}
    @param suite: target suite

    @type  component: L{daklib.dbconn.Component}
    @param component: target component

    @type  allow_tainted: bool
    @param allow_tainted: allow to copy from tainted archives (such as NEW)

    @type  extra_archives: list of L{daklib.dbconn.Archive}
    @param extra_archives: extra archives to copy Built-Using sources from
    """
    session = self.session
    archive = suite.archive
    if archive.tainted:
        # copying into a tainted archive may source from anywhere
        allow_tainted = True

    filename = db_binary.poolfile.filename

    # make sure source is present in target archive
    db_source = db_binary.source
    if session.query(ArchiveFile).filter_by(archive=archive, file=db_source.poolfile).first() is None:
        raise ArchiveException('{0}: cannot copy to {1}: source is not present in target archive'.format(filename, suite.suite_name))

    # make sure built-using packages are present in target archive
    for db_source in db_binary.extra_sources:
        self._ensure_extra_source_exists(filename, db_source, archive, extra_archives=extra_archives)

    # copy the binary itself
    db_file = db_binary.poolfile
    self._copy_file(db_file, suite.archive, component, allow_tainted=allow_tainted)
    if suite not in db_binary.suites:
        db_binary.suites.append(suite)

    self.session.flush()
def copy_source(self, db_source, suite, component, allow_tainted=False):
    """Copy a source package to the given suite and component

    @type  db_source: L{daklib.dbconn.DBSource}
    @param db_source: source to copy

    @type  suite: L{daklib.dbconn.Suite}
    @param suite: target suite

    @type  component: L{daklib.dbconn.Component}
    @param component: target component

    @type  allow_tainted: bool
    @param allow_tainted: allow to copy from tainted archives (such as NEW)
    """
    archive = suite.archive
    if archive.tainted:
        # copying into a tainted archive may source from anywhere
        allow_tainted = True

    for db_dsc_file in db_source.srcfiles:
        self._copy_file(db_dsc_file.poolfile, archive, component, allow_tainted=allow_tainted)
    if suite not in db_source.suites:
        db_source.suites.append(suite)

    self.session.flush()
def remove_file(self, db_file, archive, component):
    """Remove a file from a given archive and component

    @type  db_file: L{daklib.dbconn.PoolFile}
    @param db_file: file to remove

    @type  archive: L{daklib.dbconn.Archive}
    @param archive: archive to remove the file from

    @type  component: L{daklib.dbconn.Component}
    @param component: component to remove the file from
    """
    # BUG FIX: the query was never executed -- filter_by() returns a Query,
    # which has no usable `.path` and cannot be passed to session.delete().
    # Resolve it to the single matching ArchiveFile row with .one().
    af = self.session.query(ArchiveFile).filter_by(file=db_file, archive=archive, component=component).one()
    self.fs.unlink(af.path)
    self.session.delete(af)
def remove_binary(self, binary, suite):
    """Remove a binary from a given suite and component

    @type  binary: L{daklib.dbconn.DBBinary}
    @param binary: binary to remove

    @type  suite: L{daklib.dbconn.Suite}
    @param suite: suite to remove the package from
    """
    # Dropping the suite from the binary's suite list detaches the
    # association row via the ORM relationship.
    binary.suites.remove(suite)
def remove_source(self, source, suite):
    """Remove a source from a given suite and component

    @type  source: L{daklib.dbconn.DBSource}
    @param source: source to remove

    @type  suite: L{daklib.dbconn.Suite}
    @param suite: suite to remove the package from

    @raise ArchiveException: source package is still referenced by other
                             binaries in the suite
    """
    session = self.session

    query = session.query(DBBinary).filter_by(source=source) \
        .filter(DBBinary.suites.contains(suite))
    if query.first() is not None:
        raise ArchiveException('src:{0} is still used by binaries in suite {1}'.format(source.source, suite.suite_name))

    source.suites.remove(suite)
    session.flush()
533 self.session.commit()
536 self.session.rollback()
540 """rollback changes"""
541 self.session.rollback()
550 def __exit__(self, type, value, traceback):
def source_component_from_package_list(package_list, suite):
    """Get component for a source package

    This function will look at the Package-List field to determine the
    component the source package belongs to. This is the first component
    the source package provides binaries for (first with respect to the
    ordering of components).

    If the source package has no Package-List field, None is returned.

    @type  package_list: L{daklib.packagelist.PackageList}
    @param package_list: package list of the source to get the override for

    @type  suite: L{daklib.dbconn.Suite}
    @param suite: suite to consider for binaries produced

    @rtype:  L{daklib.dbconn.Component} or C{None}
    @return: component for the given source or C{None}
    """
    if package_list.fallback:
        # no real Package-List field: cannot derive a component
        return None
    session = object_session(suite)
    packages = package_list.packages_for_suite(suite)
    components = set(p.component for p in packages)
    query = session.query(Component).order_by(Component.ordering) \
        .filter(Component.component_name.in_(components))
    return query.first()
class ArchiveUpload(object):
    """handle an upload

    This class can be used in a with-statement::

       with ArchiveUpload(...) as upload:
          ...

    Doing so will automatically run any required cleanup and also rollback the
    transaction if it was not committed.
    """

    def __init__(self, directory, changes, keyrings):
        # transaction used to handle the upload
        # (L{daklib.archive.ArchiveTransaction})
        self.transaction = ArchiveTransaction()
        # database session
        self.session = self.transaction.session

        self.original_directory = directory
        self.original_changes = changes

        # upload to process (L{daklib.upload.Changes}); set by C{prepare}
        self.changes = None
        # directory with temporary copy of files; set by C{prepare}
        self.directory = None

        self.keyrings = keyrings

        # fingerprint of the key used to sign the upload
        # (L{daklib.dbconn.Fingerprint})
        self.fingerprint = self.session.query(Fingerprint).filter_by(fingerprint=changes.primary_fingerprint).one()

        # reasons why the upload cannot be accepted (list of str)
        self.reject_reasons = []
        # warnings collected while processing (list of str)
        self.warnings = []

        # final target suites; set by C{check}
        self.final_suites = None
        # upload is NEW; set by C{check}
        self.new = False

        # checks passed; set by C{check}
        self._checked = False

        self._new_queue = self.session.query(PolicyQueue).filter_by(queue_name='new').one()
        self._new = self._new_queue.suite
def warn(self, message):
    """add a warning message

    Adds a warning message that can later be seen in C{self.warnings}

    @type  message: string
    @param message: warning message
    """
    self.warnings.append(message)
def prepare(self):
    """prepare upload for further processing

    This copies the files involved to a temporary directory. If you use
    this method directly, you have to remove the directory given by the
    C{directory} attribute later on your own.

    Instead of using the method directly, you can also use a with-statement::

       with ArchiveUpload(...) as upload:
          ...

    This will automatically handle any required cleanup.
    """
    assert self.directory is None
    assert self.original_changes.valid_signature

    cnf = Config()
    session = self.transaction.session

    group = cnf.get('Dinstall::UnprivGroup') or None
    self.directory = utils.temp_dirname(parent=cnf.get('Dir::TempPath'),
                                        mode=0o2750, group=group)
    with FilesystemTransaction() as fs:
        src = os.path.join(self.original_directory, self.original_changes.filename)
        dst = os.path.join(self.directory, self.original_changes.filename)
        fs.copy(src, dst, mode=0o640)

        self.changes = upload.Changes(self.directory, self.original_changes.filename, self.keyrings)

        for f in self.changes.files.itervalues():
            src = os.path.join(self.original_directory, f.filename)
            dst = os.path.join(self.directory, f.filename)
            if not os.path.exists(src):
                continue
            fs.copy(src, dst, mode=0o640)

        source = None
        try:
            source = self.changes.source
        except Exception:
            # Do not raise an exception here if the .dsc is invalid.
            pass

        if source is not None:
            for f in source.files.itervalues():
                src = os.path.join(self.original_directory, f.filename)
                dst = os.path.join(self.directory, f.filename)
                if not os.path.exists(dst):
                    try:
                        # file was not uploaded: fetch the pool copy instead
                        db_file = self.transaction.get_file(f, source.dsc['Source'], check_hashes=False)
                        db_archive_file = session.query(ArchiveFile).filter_by(file=db_file).first()
                        fs.copy(db_archive_file.path, dst, mode=0o640)
                    except KeyError:
                        # Ignore if get_file could not find it. Upload will
                        # probably be rejected later.
                        pass
def unpacked_source(self):
    """Path to unpacked source

    Get path to the unpacked source. This method does unpack the source
    into a temporary directory under C{self.directory} if it has not
    been done so already.

    @rtype:  str or C{None}
    @return: string giving the path to the unpacked source directory
             or C{None} if no source was included in the upload.
    """
    assert self.directory is not None

    source = self.changes.source
    if source is None:
        return None

    dsc_path = os.path.join(self.directory, source._dsc_file.filename)

    sourcedir = os.path.join(self.directory, 'source')
    if not os.path.exists(sourcedir):
        # FIX: close the devnull handle instead of leaking it
        with open(os.devnull, 'w') as devnull:
            daklib.daksubprocess.check_call(["dpkg-source", "--no-copy", "--no-check", "-x", dsc_path, sourcedir], shell=False, stdout=devnull)
    if not os.path.isdir(sourcedir):
        raise Exception("{0} is not a directory after extracting source package".format(sourcedir))

    return sourcedir
def _map_suite(self, suite_name):
    """apply SuiteMappings configuration to a target suite name

    @rtype:  str or C{None}
    @return: mapped suite name, or C{None} when the target is ignored

    @raise checks.Reject: uploads to the suite are rejected by configuration
    """
    for rule in Config().value_list("SuiteMappings"):
        fields = rule.split()
        rtype = fields[0]
        if rtype == "map" or rtype == "silent-map":
            (src, dst) = fields[1:3]
            if src == suite_name:
                suite_name = dst
                if rtype != "silent-map":
                    self.warnings.append('Mapping {0} to {1}.'.format(src, dst))
        elif rtype == "ignore":
            ignored = fields[1]
            if suite_name == ignored:
                self.warnings.append('Ignoring target suite {0}.'.format(ignored))
                suite_name = None
        elif rtype == "reject":
            rejected = fields[1]
            if suite_name == rejected:
                raise checks.Reject('Uploads to {0} are not accepted.'.format(rejected))
        ## XXX: propup-version and map-unreleased not yet implemented
    return suite_name
def _mapped_suites(self):
    """Get target suites after mappings

    @rtype:  list of L{daklib.dbconn.Suite}
    @return: list giving the mapped target suites of this upload
    """
    session = self.session

    suite_names = []
    for dist in self.changes.distributions:
        suite_name = self._map_suite(dist)
        if suite_name is not None:
            suite_names.append(suite_name)

    suites = session.query(Suite).filter(Suite.suite_name.in_(suite_names))
    return suites
784 def _check_new_binary_overrides(self, suite):
787 binaries = self.changes.binaries
788 source = self.changes.source
789 if source is not None and not source.package_list.fallback:
790 packages = source.package_list.packages_for_suite(suite)
791 binaries = [ entry for entry in packages ]
794 override = self._binary_override(suite, b)
796 self.warnings.append('binary:{0} is NEW.'.format(b.name))
def _check_new(self, suite):
    """Check if upload is NEW

    An upload is NEW if it has binary or source packages that do not have
    an override in C{suite} OR if it references files ONLY in a tainted
    archive (eg. when it references files in NEW).

    @rtype:  bool
    @return: C{True} if the upload is NEW, C{False} otherwise
    """
    session = self.session
    new = False

    # Check for missing overrides
    if self._check_new_binary_overrides(suite):
        new = True
    if self.changes.source is not None:
        override = self._source_override(suite, self.changes.source)
        if override is None:
            self.warnings.append('source:{0} is NEW.'.format(self.changes.source.dsc['Source']))
            new = True

    # Check if we reference a file only in a tainted archive
    files = self.changes.files.values()
    if self.changes.source is not None:
        files.extend(self.changes.source.files.values())
    for f in files:
        query = session.query(ArchiveFile).join(PoolFile).filter(PoolFile.sha1sum == f.sha1sum)
        query_untainted = query.join(Archive).filter(Archive.tainted == False)

        in_archive = (query.first() is not None)
        in_untainted_archive = (query_untainted.first() is not None)

        if in_archive and not in_untainted_archive:
            self.warnings.append('{0} is only available in NEW.'.format(f.filename))
            new = True

    return new
def _final_suites(self):
    """determine the final target suites and whether the upload is NEW

    @rtype:  set of L{daklib.dbconn.Suite}
    @return: target suites after suite mappings
    """
    session = self.session

    mapped_suites = self._mapped_suites()
    final_suites = set()

    for suite in mapped_suites:
        # overrides may live in a different (override) suite
        overridesuite = suite
        if suite.overridesuite is not None:
            overridesuite = session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
        if self._check_new(overridesuite):
            self.new = True
        final_suites.add(suite)

    return final_suites
def _binary_override(self, suite, binary):
    """Get override entry for a binary

    @type  suite: L{daklib.dbconn.Suite}
    @param suite: suite to get override for

    @type  binary: L{daklib.upload.Binary} or L{daklib.packagelist.PackageListEntry}
    @param binary: binary to get override for

    @rtype:  L{daklib.dbconn.Override} or C{None}
    @return: override for the given binary or C{None}
    """
    if suite.overridesuite is not None:
        suite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()

    mapped_component = get_mapped_component(binary.component)
    if mapped_component is None:
        return None

    query = self.session.query(Override).filter_by(suite=suite, package=binary.name) \
        .join(Component).filter(Component.component_name == mapped_component.component_name) \
        .join(OverrideType).filter(OverrideType.overridetype == binary.type)

    try:
        return query.one()
    except NoResultFound:
        return None
def _source_override(self, suite, source):
    """Get override entry for a source

    @type  suite: L{daklib.dbconn.Suite}
    @param suite: suite to get override for

    @type  source: L{daklib.upload.Source}
    @param source: source to get override for

    @rtype:  L{daklib.dbconn.Override} or C{None}
    @return: override for the given source or C{None}
    """
    if suite.overridesuite is not None:
        suite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()

    query = self.session.query(Override).filter_by(suite=suite, package=source.dsc['Source']) \
        .join(OverrideType).filter(OverrideType.overridetype == 'dsc')

    component = source_component_from_package_list(source.package_list, suite)
    if component is not None:
        query = query.filter(Override.component == component)

    try:
        return query.one()
    except NoResultFound:
        return None
911 def _binary_component(self, suite, binary, only_overrides=True):
912 """get component for a binary
914 By default this will only look at overrides to get the right component;
915 if C{only_overrides} is C{False} this method will also look at the
918 @type suite: L{daklib.dbconn.Suite}
920 @type binary: L{daklib.upload.Binary}
922 @type only_overrides: bool
923 @param only_overrides: only use overrides to get the right component
925 @rtype: L{daklib.dbconn.Component} or C{None}
927 override = self._binary_override(suite, binary)
928 if override is not None:
929 return override.component
932 return get_mapped_component(binary.component, self.session)
def check(self, force=False):
    """run checks against the upload

    @type  force: bool
    @param force: ignore failing forcable checks

    @rtype:  bool
    @return: C{True} if all checks passed, C{False} otherwise
    """
    # XXX: needs to be better structured.
    assert self.changes.valid_signature

    try:
        # Validate signatures and hashes before we do any real work:
        # NOTE(review): some entries of the check tuples were lost in the
        # mangled source; list reconstructed from upstream dak -- confirm.
        for chk in (
                checks.SignatureAndHashesCheck,
                checks.SignatureTimestampCheck,
                checks.ChangesCheck,
                checks.ExternalHashesCheck,
                checks.SourceCheck,
                checks.BinaryCheck,
                checks.BinaryTimestampCheck,
                checks.SingleDistributionCheck,
                ):
            chk().check(self)

        final_suites = self._final_suites()
        if len(final_suites) == 0:
            self.reject_reasons.append('No target suite found. Please check your target distribution and that you uploaded to the right archive.')
            return False

        self.final_suites = final_suites

        for chk in (
                checks.TransitionCheck,
                checks.ACLCheck,
                checks.NoSourceOnlyCheck,
                checks.LintianCheck,
                ):
            chk().check(self)

        for chk in (
                checks.SourceFormatCheck,
                checks.SuiteArchitectureCheck,
                checks.VersionCheck,
                ):
            for suite in final_suites:
                chk().per_suite_check(self, suite)

        if len(self.reject_reasons) != 0:
            return False

        self._checked = True
        return True
    except checks.Reject as e:
        self.reject_reasons.append(unicode(e))
    except Exception as e:
        self.reject_reasons.append("Processing raised an exception: {0}.\n{1}".format(e, traceback.format_exc()))
    return False
def _install_to_suite(self, suite, source_component_func, binary_component_func, source_suites=None, extra_source_archives=None):
    """Install upload to the given suite

    @type  suite: L{daklib.dbconn.Suite}
    @param suite: suite to install the package into. This is the real suite,
                  ie. after any redirection to NEW or a policy queue

    @param source_component_func: function to get the L{daklib.dbconn.Component}
                                  for a L{daklib.upload.Source} object

    @param binary_component_func: function to get the L{daklib.dbconn.Component}
                                  for a L{daklib.upload.Binary} object

    @param source_suites: see L{daklib.archive.ArchiveTransaction.install_binary}

    @param extra_source_archives: see L{daklib.archive.ArchiveTransaction.install_binary}

    @return: tuple with two elements. The first is a L{daklib.dbconn.DBSource}
             object for the install source or C{None} if no source was
             included. The second is a list of L{daklib.dbconn.DBBinary}
             objects for the installed binary packages.
    """
    # XXX: move this function to ArchiveTransaction?
    control = self.changes.changes
    changed_by = get_or_set_maintainer(control.get('Changed-By', control['Maintainer']), self.session)

    if source_suites is None:
        # by default, sources may be pulled from suites this suite "Enhances"
        source_suites = self.session.query(Suite).join((VersionCheck, VersionCheck.reference_id == Suite.suite_id)).filter(VersionCheck.check == 'Enhances').filter(VersionCheck.suite == suite).subquery()

    db_source = None
    source = self.changes.source
    if source is not None:
        component = source_component_func(source)
        db_source = self.transaction.install_source(
            self.directory,
            source,
            suite,
            component=component,
            changed_by=changed_by,
            fingerprint=self.fingerprint
            )

    db_binaries = []
    for binary in self.changes.binaries:
        # debug packages are diverted to the suite's debug suite
        copy_to_suite = suite
        if utils.is_in_debug_section(binary.control) and suite.debug_suite is not None:
            copy_to_suite = suite.debug_suite

        component = binary_component_func(binary)
        db_binary = self.transaction.install_binary(
            self.directory,
            binary,
            copy_to_suite,
            component=component,
            fingerprint=self.fingerprint,
            source_suites=source_suites,
            extra_source_archives=extra_source_archives
            )
        db_binaries.append(db_binary)

    if suite.copychanges:
        src = os.path.join(self.directory, self.changes.filename)
        dst = os.path.join(suite.archive.path, 'dists', suite.suite_name, self.changes.filename)
        self.transaction.fs.copy(src, dst, mode=suite.archive.mode)

    return (db_source, db_binaries)
def _install_changes(self):
    """Insert a record for the .changes file into the database.

    For sourceful uploads and binary-only NMUs the changelog text is
    stored in ``changelogs_text`` first; the resulting id is referenced
    from the new L{daklib.dbconn.DBChange} row.

    @rtype:  L{daklib.dbconn.DBChange}
    @return: database object for the installed .changes file

    @raise ArchiveException: a .changes file with the same name is
                             already known
    """
    assert self.changes.valid_signature
    control = self.changes.changes
    session = self.transaction.session

    changelog_id = None
    # Only add changelog for sourceful uploads and binNMUs
    if 'source' in self.changes.architectures or re_bin_only_nmu.search(control['Version']):
        query = 'INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id'
        changelog_id = session.execute(query, {'changelog': control['Changes']}).scalar()
        assert changelog_id is not None

    db_changes = DBChange()
    db_changes.changesname = self.changes.filename
    db_changes.source = control['Source']
    db_changes.binaries = control.get('Binary', None)
    db_changes.architecture = control['Architecture']
    db_changes.version = control['Version']
    db_changes.distribution = control['Distribution']
    db_changes.urgency = control['Urgency']
    db_changes.maintainer = control['Maintainer']
    db_changes.changedby = control.get('Changed-By', control['Maintainer'])
    db_changes.date = control['Date']
    db_changes.fingerprint = self.fingerprint.fingerprint
    db_changes.changelog_id = changelog_id
    db_changes.closes = self.changes.closed_bugs

    # A duplicate changesname violates a unique constraint; surface it
    # as an ArchiveException instead of a raw IntegrityError.
    try:
        self.transaction.session.add(db_changes)
        self.transaction.session.flush()
    except sqlalchemy.exc.IntegrityError:
        raise ArchiveException('{0} is already known.'.format(self.changes.filename))

    return db_changes
def _install_policy(self, policy_queue, target_suite, db_changes, db_source, db_binaries):
    """install upload into a policy queue

    @type  policy_queue: L{daklib.dbconn.PolicyQueue}
    @param policy_queue: policy queue to install to

    @type  target_suite: L{daklib.dbconn.Suite}
    @param target_suite: suite the upload is ultimately intended for

    @type  db_changes: L{daklib.dbconn.DBChange}
    @type  db_source: L{daklib.dbconn.DBSource} or C{None}
    @type  db_binaries: list of L{daklib.dbconn.DBBinary}

    @rtype:  L{daklib.dbconn.PolicyQueueUpload}
    @return: database record for the queued upload
    """
    u = PolicyQueueUpload()
    u.policy_queue = policy_queue
    u.target_suite = target_suite
    u.changes = db_changes
    u.source = db_source
    u.binaries = db_binaries
    self.transaction.session.add(u)
    self.transaction.session.flush()

    # Keep a copy of the .changes file in the policy queue's directory.
    dst = os.path.join(policy_queue.path, self.changes.filename)
    self.transaction.fs.copy(self.changes.path, dst, mode=policy_queue.change_perms)

    # install_to_new() stores this return value and hands it on to
    # _install_byhand(); without it the caller would receive None.
    return u
1115 def try_autobyhand(self):
# NOTE(review): this extract has the original file's line numbers fused
# into every line and several lines missing (docstring delimiters, else
# branches, the `remaining` list handling, the `for f in byhand:` loop
# header).  The text is preserved byte-for-byte rather than guessed at.
1118 Try to handle byhand packages automatically.
1120 @rtype: list of L{daklib.upload.HashedFile}
1121 @return: list of remaining byhand files
# Preconditions: check() must have run cleanly on a signed upload.
1123 assert len(self.reject_reasons) == 0
1124 assert self.changes.valid_signature
1125 assert self.final_suites is not None
1126 assert self._checked
# Nothing to do when the upload carries no BYHAND files.
1128 byhand = self.changes.byhand_files
1129 if len(byhand) == 0:
1132 suites = list(self.final_suites)
1133 assert len(suites) == 1, "BYHAND uploads must be to a single suite"
# Automatic-processing rules come from the AutomaticByHandPackages
# configuration subtree.  NOTE(review): `cnf` is assigned on a line
# missing from this extract (presumably `cnf = Config()`) — confirm.
1137 control = self.changes.changes
1138 automatic_byhand_packages = cnf.subtree("AutomaticByHandPackages")
# Byhand filenames are expected to look like package_version_arch.ext;
# anything unexpected is reported and left for manual processing.
1142 if '_' in f.filename:
1143 parts = f.filename.split('_', 2)
1145 print "W: unexpected byhand filename {0}. No automatic processing.".format(f.filename)
1149 package, version, archext = parts
1150 arch, ext = archext.split('.', 1)
# Fallback for filenames without underscores: split on dots instead.
1152 parts = f.filename.split('.')
1154 print "W: unexpected byhand filename {0}. No automatic processing.".format(f.filename)
# Look up the automatic-processing rule for this package; files whose
# source, section or extension do not match the rule are not processed
# automatically.
1164 rule = automatic_byhand_packages.subtree(package)
1169 if rule['Source'] != self.changes.source_name \
1170 or rule['Section'] != f.section \
1171 or ('Extension' in rule and rule['Extension'] != ext):
# Run the configured script on the byhand file (argv list, shell=False,
# so no shell interpolation of the filename).
1175 script = rule['Script']
1176 retcode = daklib.daksubprocess.call([script, os.path.join(self.directory, f.filename), control['Version'], arch, os.path.join(self.directory, self.changes.filename)], shell=False)
1178 print "W: error processing {0}.".format(f.filename)
# True when every byhand file was handled automatically.
1181 return len(remaining) == 0
def _install_byhand(self, policy_queue_upload, hashed_file):
    """install byhand file

    Records the byhand file against its parent policy-queue upload and
    copies the file into the policy queue's directory.

    @type  policy_queue_upload: L{daklib.dbconn.PolicyQueueUpload}
    @param policy_queue_upload: upload the byhand file belongs to

    @type  hashed_file: L{daklib.upload.HashedFile}
    @param hashed_file: byhand file to install
    """
    transaction = self.transaction
    queue = policy_queue_upload.policy_queue

    # Database record linking the file to the queued upload.
    record = PolicyQueueByhandFile()
    record.upload = policy_queue_upload
    record.filename = hashed_file.filename
    transaction.session.add(record)

    # Copy the file itself into the policy queue's directory.
    source_path = os.path.join(self.directory, hashed_file.filename)
    target_path = os.path.join(queue.path, hashed_file.filename)
    transaction.fs.copy(source_path, target_path, mode=queue.change_perms)
def _do_bts_versiontracking(self):
    """export version information for the bug tracking system

    If the Dir::BTSVersionTrack option is set, writes
    ``<changes>.versions`` (changelog version lines of the unpacked
    source, if any) and ``<changes>.debinfo`` (binary -> source mapping)
    into that directory.  Does nothing when the option is unset.
    """
    cnf = Config()
    fs = self.transaction.fs

    btsdir = cnf.get('Dir::BTSVersionTrack')
    if btsdir is None or btsdir == '':
        # version tracking is not configured
        return

    # Strip the trailing ".changes" (8 characters) from the filename.
    base = os.path.join(btsdir, self.changes.filename[:-8])

    # version history
    sourcedir = self.unpacked_source()
    if sourcedir is not None:
        fh = open(os.path.join(sourcedir, 'debian', 'changelog'), 'r')
        versions = fs.create("{0}.versions".format(base), mode=0o644)
        for line in fh.readlines():
            if re_changelog_versions.match(line):
                versions.write(line)
        # Close both handles so the data hits disk before dak moves on.
        fh.close()
        versions.close()

    # binary -> source mapping
    debinfo = fs.create("{0}.debinfo".format(base), mode=0o644)
    for binary in self.changes.binaries:
        control = binary.control
        source_package, source_version = binary.source
        line = " ".join([control['Package'], control['Version'], control['Architecture'], source_package, source_version])
        # write() instead of the Python-2-only `print >>debinfo, line`;
        # output is byte-identical (line plus newline).
        debinfo.write(line + "\n")
    debinfo.close()
1236 def _policy_queue(self, suite):
1237 if suite.policy_queue is not None:
1238 return suite.policy_queue
def install(self):
    """install upload

    Install upload to a suite or policy queue. This method does B{not}
    handle uploads to NEW.

    You need to have called the C{check} method before calling this method.
    """
    # NOTE(review): the `def` line and docstring delimiters were lost in
    # extraction and have been reconstructed; the body below matches the
    # visible statements.
    assert len(self.reject_reasons) == 0
    assert self.changes.valid_signature
    assert self.final_suites is not None
    assert self._checked

    db_changes = self._install_changes()

    for suite in self.final_suites:
        # Overrides may live in a different suite (suite.overridesuite).
        overridesuite = suite
        if suite.overridesuite is not None:
            overridesuite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()

        policy_queue = self._policy_queue(suite)

        # Uploads subject to a policy queue are installed into the
        # queue's suite instead of the final target suite.
        redirected_suite = suite
        if policy_queue is not None:
            redirected_suite = policy_queue.suite

        # source can be in the suite we install to or any suite we enhance
        source_suite_ids = set([suite.suite_id, redirected_suite.suite_id])
        for enhanced_suite_id, in self.session.query(VersionCheck.reference_id) \
                .filter(VersionCheck.suite_id.in_(source_suite_ids)) \
                .filter(VersionCheck.check == 'Enhances'):
            source_suite_ids.add(enhanced_suite_id)

        source_suites = self.session.query(Suite).filter(Suite.suite_id.in_(source_suite_ids)).subquery()

        source_component_func = lambda source: self._source_override(overridesuite, source).component
        binary_component_func = lambda binary: self._binary_component(overridesuite, binary)

        (db_source, db_binaries) = self._install_to_suite(redirected_suite, source_component_func, binary_component_func, source_suites=source_suites, extra_source_archives=[suite.archive])

        if policy_queue is not None:
            self._install_policy(policy_queue, suite, db_changes, db_source, db_binaries)

        # copy to build queues
        if policy_queue is None or policy_queue.send_to_build_queues:
            for build_queue in suite.copy_queues:
                self._install_to_suite(build_queue.suite, source_component_func, binary_component_func, source_suites=source_suites, extra_source_archives=[suite.archive])

    self._do_bts_versiontracking()
def install_to_new(self):
    """install upload to NEW

    Install upload to NEW. This method does B{not} handle regular uploads
    to suites or policy queues.

    You need to have called the C{check} method before calling this method.
    """
    # Uploads to NEW are special as we don't have overrides.
    assert len(self.reject_reasons) == 0
    assert self.changes.valid_signature
    assert self.final_suites is not None

    source = self.changes.source
    binaries = self.changes.binaries
    byhand = self.changes.byhand_files

    # we need a suite to guess components
    suites = list(self.final_suites)
    assert len(suites) == 1, "NEW uploads must be to a single suite"
    # NOTE(review): this binding and the else/if structure below were
    # lost in extraction (`suite`, `f` were unbound; `new_queue` and
    # `source_component` were each assigned twice unconditionally) and
    # have been reconstructed.
    suite = suites[0]

    # decide which NEW queue to use
    if suite.new_queue is None:
        new_queue = self.transaction.session.query(PolicyQueue).filter_by(queue_name='new').one()
    else:
        new_queue = suite.new_queue
    if len(byhand) > 0:
        # There is only one global BYHAND queue
        new_queue = self.transaction.session.query(PolicyQueue).filter_by(queue_name='byhand').one()
    new_suite = new_queue.suite

    def binary_component_func(binary):
        return self._binary_component(suite, binary, only_overrides=False)

    # guess source component
    # XXX: should be moved into an extra method
    binary_component_names = set()
    for binary in binaries:
        component = binary_component_func(binary)
        binary_component_names.add(component.component_name)
    source_component_name = None
    for c in self.session.query(Component).order_by(Component.component_id):
        guess = c.component_name
        if guess in binary_component_names:
            # first matching component (lowest id) wins
            source_component_name = guess
            break
    if source_component_name is None:
        source_component = self.session.query(Component).order_by(Component.component_id).first()
    else:
        source_component = self.session.query(Component).filter_by(component_name=source_component_name).one()
    source_component_func = lambda source: source_component

    db_changes = self._install_changes()
    (db_source, db_binaries) = self._install_to_suite(new_suite, source_component_func, binary_component_func, source_suites=True, extra_source_archives=[suite.archive])
    policy_upload = self._install_policy(new_queue, suite, db_changes, db_source, db_binaries)

    for f in byhand:
        self._install_byhand(policy_upload, f)

    self._do_bts_versiontracking()
def commit(self):
    """commit changes"""
    # Delegates to the ArchiveTransaction, which commits both the
    # database session and the filesystem transaction.
    self.transaction.commit()
def rollback(self):
    """rollback changes"""
    # Delegates to the ArchiveTransaction, undoing both database and
    # filesystem changes made as part of this upload.
    self.transaction.rollback()
1363 def __enter__(self):
1367 def __exit__(self, type, value, traceback):
1368 if self.directory is not None:
1369 shutil.rmtree(self.directory)
1370 self.directory = None
1372 self.transaction.rollback()