1 # Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
3 # This program is free software; you can redistribute it and/or modify
4 # it under the terms of the GNU General Public License as published by
5 # the Free Software Foundation; either version 2 of the License, or
6 # (at your option) any later version.
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
13 # You should have received a copy of the GNU General Public License along
14 # with this program; if not, write to the Free Software Foundation, Inc.,
15 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
17 """module to manipulate the archive
19 This module provides classes to manipulate the archive.
22 from daklib.dbconn import *
23 import daklib.checks as checks
24 from daklib.config import Config
25 import daklib.upload as upload
26 import daklib.utils as utils
27 from daklib.fstransactions import FilesystemTransaction
28 from daklib.regexes import re_changelog_versions, re_bin_only_nmu
29 import daklib.daksubprocess
32 from datetime import datetime
35 from sqlalchemy.orm.exc import NoResultFound
36 from sqlalchemy.orm import object_session
class ArchiveException(Exception):
    """Base class for all errors raised while manipulating the archive."""
    pass


class HashMismatchException(ArchiveException):
    """A file's size or checksums do not match the database record."""
    pass
class ArchiveTransaction(object):
    """manipulate the archive in a transaction

    Groups filesystem and database changes so they can be committed or
    rolled back together (see C{commit} and C{rollback}).
    """

    def __init__(self):
        # Filesystem changes are staged in a FilesystemTransaction so
        # they can be undone together with the database transaction.
        self.fs = FilesystemTransaction()
        self.session = DBConn().session()
54 def get_file(self, hashed_file, source_name, check_hashes=True):
55 """Look for file C{hashed_file} in database
57 @type hashed_file: L{daklib.upload.HashedFile}
58 @param hashed_file: file to look for in the database
60 @type source_name: str
61 @param source_name: source package name
63 @type check_hashes: bool
64 @param check_hashes: check size and hashes match
66 @raise KeyError: file was not found in the database
67 @raise HashMismatchException: hash mismatch
69 @rtype: L{daklib.dbconn.PoolFile}
70 @return: database entry for the file
72 poolname = os.path.join(utils.poolify(source_name), hashed_file.filename)
74 poolfile = self.session.query(PoolFile).filter_by(filename=poolname).one()
75 if check_hashes and (poolfile.filesize != hashed_file.size
76 or poolfile.md5sum != hashed_file.md5sum
77 or poolfile.sha1sum != hashed_file.sha1sum
78 or poolfile.sha256sum != hashed_file.sha256sum):
79 raise HashMismatchException('{0}: Does not match file already existing in the pool.'.format(hashed_file.filename))
82 raise KeyError('{0} not found in database.'.format(poolname))
84 def _install_file(self, directory, hashed_file, archive, component, source_name):
87 Will not give an error when the file is already present.
89 @rtype: L{daklib.dbconn.PoolFile}
90 @return: database object for the new file
92 session = self.session
94 poolname = os.path.join(utils.poolify(source_name), hashed_file.filename)
96 poolfile = self.get_file(hashed_file, source_name)
98 poolfile = PoolFile(filename=poolname, filesize=hashed_file.size)
99 poolfile.md5sum = hashed_file.md5sum
100 poolfile.sha1sum = hashed_file.sha1sum
101 poolfile.sha256sum = hashed_file.sha256sum
102 session.add(poolfile)
106 session.query(ArchiveFile).filter_by(archive=archive, component=component, file=poolfile).one()
107 except NoResultFound:
108 archive_file = ArchiveFile(archive, component, poolfile)
109 session.add(archive_file)
112 path = os.path.join(archive.path, 'pool', component.component_name, poolname)
113 hashed_file_path = os.path.join(directory, hashed_file.input_filename)
114 self.fs.copy(hashed_file_path, path, link=False, mode=archive.mode)
118 def install_binary(self, directory, binary, suite, component, allow_tainted=False, fingerprint=None, source_suites=None, extra_source_archives=None):
119 """Install a binary package
122 @param directory: directory the binary package is located in
124 @type binary: L{daklib.upload.Binary}
125 @param binary: binary package to install
127 @type suite: L{daklib.dbconn.Suite}
128 @param suite: target suite
130 @type component: L{daklib.dbconn.Component}
131 @param component: target component
133 @type allow_tainted: bool
134 @param allow_tainted: allow to copy additional files from tainted archives
136 @type fingerprint: L{daklib.dbconn.Fingerprint}
137 @param fingerprint: optional fingerprint
139 @type source_suites: SQLAlchemy subquery for C{daklib.dbconn.Suite} or C{True}
140 @param source_suites: suites to copy the source from if they are not
141 in C{suite} or C{True} to allow copying from any
144 @type extra_source_archives: list of L{daklib.dbconn.Archive}
145 @param extra_source_archives: extra archives to copy Built-Using sources from
147 @rtype: L{daklib.dbconn.DBBinary}
148 @return: databse object for the new package
150 session = self.session
151 control = binary.control
152 maintainer = get_or_set_maintainer(control['Maintainer'], session)
153 architecture = get_architecture(control['Architecture'], session)
155 (source_name, source_version) = binary.source
156 source_query = session.query(DBSource).filter_by(source=source_name, version=source_version)
157 source = source_query.filter(DBSource.suites.contains(suite)).first()
159 if source_suites != True:
160 source_query = source_query.join(DBSource.suites) \
161 .filter(Suite.suite_id == source_suites.c.id)
162 source = source_query.first()
164 raise ArchiveException('{0}: trying to install to {1}, but could not find source'.format(binary.hashed_file.filename, suite.suite_name))
165 self.copy_source(source, suite, component)
167 db_file = self._install_file(directory, binary.hashed_file, suite.archive, component, source_name)
170 package=control['Package'],
171 version=control['Version'],
172 architecture=architecture,
176 maintainer=maintainer,
178 binarytype=binary.type,
180 # Other attributes that are ignored for purposes of equality with
183 fingerprint=fingerprint,
187 db_binary = session.query(DBBinary).filter_by(**unique).one()
188 for key, value in rest.iteritems():
189 if getattr(db_binary, key) != value:
190 raise ArchiveException('{0}: Does not match binary in database.'.format(binary.hashed_file.filename))
191 except NoResultFound:
192 db_binary = DBBinary(**unique)
193 for key, value in rest.iteritems():
194 setattr(db_binary, key, value)
195 for key, value in rest2.iteritems():
196 setattr(db_binary, key, value)
197 session.add(db_binary)
199 import_metadata_into_db(db_binary, session)
201 self._add_built_using(db_binary, binary.hashed_file.filename, control, suite, extra_archives=extra_source_archives)
203 if suite not in db_binary.suites:
204 db_binary.suites.append(suite)
210 def _ensure_extra_source_exists(self, filename, source, archive, extra_archives=None):
211 """ensure source exists in the given archive
213 This is intended to be used to check that Built-Using sources exist.
216 @param filename: filename to use in error messages
218 @type source: L{daklib.dbconn.DBSource}
219 @param source: source to look for
221 @type archive: L{daklib.dbconn.Archive}
222 @param archive: archive to look in
224 @type extra_archives: list of L{daklib.dbconn.Archive}
225 @param extra_archives: list of archives to copy the source package from
226 if it is not yet present in C{archive}
228 session = self.session
229 db_file = session.query(ArchiveFile).filter_by(file=source.poolfile, archive=archive).first()
230 if db_file is not None:
233 # Try to copy file from one extra archive
234 if extra_archives is None:
236 db_file = session.query(ArchiveFile).filter_by(file=source.poolfile).filter(ArchiveFile.archive_id.in_([ a.archive_id for a in extra_archives])).first()
238 raise ArchiveException('{0}: Built-Using refers to package {1} (= {2}) not in target archive {3}.'.format(filename, source.source, source.version, archive.archive_name))
240 source_archive = db_file.archive
241 for dsc_file in source.srcfiles:
242 af = session.query(ArchiveFile).filter_by(file=dsc_file.poolfile, archive=source_archive, component=db_file.component).one()
243 # We were given an explicit list of archives so it is okay to copy from tainted archives.
244 self._copy_file(af.file, archive, db_file.component, allow_tainted=True)
    def _add_built_using(self, db_binary, filename, control, suite, extra_archives=None):
        """Add Built-Using sources to C{db_binary.extra_sources}

        @raise ArchiveException: a Built-Using source is unknown or not
                                 available in the target archive
        """
        session = self.session

        for bu_source_name, bu_source_version in daklib.utils.parse_built_using(control):
            bu_source = session.query(DBSource).filter_by(source=bu_source_name, version=bu_source_version).first()
            if bu_source is None:
                raise ArchiveException('{0}: Built-Using refers to non-existing source package {1} (= {2})'.format(filename, bu_source_name, bu_source_version))

            # Make sure the referenced source is (or becomes) available in
            # the suite's archive before recording the relation.
            self._ensure_extra_source_exists(filename, bu_source, suite.archive, extra_archives=extra_archives)

            db_binary.extra_sources.append(bu_source)
260 def install_source_to_archive(self, directory, source, archive, component, changed_by, allow_tainted=False, fingerprint=None):
261 session = self.session
263 maintainer = get_or_set_maintainer(control['Maintainer'], session)
264 source_name = control['Source']
266 ### Add source package to database
268 # We need to install the .dsc first as the DBSource object refers to it.
269 db_file_dsc = self._install_file(directory, source._dsc_file, archive, component, source_name)
273 version=control['Version'],
276 maintainer=maintainer,
277 poolfile=db_file_dsc,
278 dm_upload_allowed=(control.get('DM-Upload-Allowed', 'no') == 'yes'),
280 # Other attributes that are ignored for purposes of equality with
283 changedby=changed_by,
284 fingerprint=fingerprint,
289 db_source = session.query(DBSource).filter_by(**unique).one()
290 for key, value in rest.iteritems():
291 if getattr(db_source, key) != value:
292 raise ArchiveException('{0}: Does not match source in database.'.format(source._dsc_file.filename))
293 except NoResultFound:
295 db_source = DBSource(**unique)
296 for key, value in rest.iteritems():
297 setattr(db_source, key, value)
298 for key, value in rest2.iteritems():
299 setattr(db_source, key, value)
300 session.add(db_source)
303 # Add .dsc file. Other files will be added later.
304 db_dsc_file = DSCFile()
305 db_dsc_file.source = db_source
306 db_dsc_file.poolfile = db_file_dsc
307 session.add(db_dsc_file)
311 for f in db_source.srcfiles:
312 self._copy_file(f.poolfile, archive, component, allow_tainted=allow_tainted)
315 ### Now add remaining files and copy them to the archive.
317 for hashed_file in source.files.itervalues():
318 hashed_file_path = os.path.join(directory, hashed_file.input_filename)
319 if os.path.exists(hashed_file_path):
320 db_file = self._install_file(directory, hashed_file, archive, component, source_name)
323 db_file = self.get_file(hashed_file, source_name)
324 self._copy_file(db_file, archive, component, allow_tainted=allow_tainted)
326 db_dsc_file = DSCFile()
327 db_dsc_file.source = db_source
328 db_dsc_file.poolfile = db_file
329 session.add(db_dsc_file)
333 # Importing is safe as we only arrive here when we did not find the source already installed earlier.
334 import_metadata_into_db(db_source, session)
336 # Uploaders are the maintainer and co-maintainers from the Uploaders field
337 db_source.uploaders.append(maintainer)
338 if 'Uploaders' in control:
339 from daklib.textutils import split_uploaders
340 for u in split_uploaders(control['Uploaders']):
341 db_source.uploaders.append(get_or_set_maintainer(u, session))
346 def install_source(self, directory, source, suite, component, changed_by, allow_tainted=False, fingerprint=None):
347 """Install a source package
350 @param directory: directory the source package is located in
352 @type source: L{daklib.upload.Source}
353 @param source: source package to install
355 @type suite: L{daklib.dbconn.Suite}
356 @param suite: target suite
358 @type component: L{daklib.dbconn.Component}
359 @param component: target component
361 @type changed_by: L{daklib.dbconn.Maintainer}
362 @param changed_by: person who prepared this version of the package
364 @type allow_tainted: bool
365 @param allow_tainted: allow to copy additional files from tainted archives
367 @type fingerprint: L{daklib.dbconn.Fingerprint}
368 @param fingerprint: optional fingerprint
370 @rtype: L{daklib.dbconn.DBSource}
371 @return: database object for the new source
373 db_source = self.install_source_to_archive(directory, source, suite.archive, component, changed_by, allow_tainted, fingerprint)
375 if suite in db_source.suites:
377 db_source.suites.append(suite)
    def _copy_file(self, db_file, archive, component, allow_tainted=False):
        """Copy a file to the given archive and component

        @type  db_file: L{daklib.dbconn.PoolFile}
        @param db_file: file to copy

        @type  archive: L{daklib.dbconn.Archive}
        @param archive: target archive

        @type  component: L{daklib.dbconn.Component}
        @param component: target component

        @type  allow_tainted: bool
        @param allow_tainted: allow to copy from tainted archives (such as NEW)
        """
        session = self.session

        if session.query(ArchiveFile).filter_by(archive=archive, component=component, file=db_file).first() is None:
            # Not yet present in the target archive/component: find a copy
            # in some archive to use as the source of the copy.
            query = session.query(ArchiveFile).filter_by(file=db_file)
            if not allow_tainted:
                # `== False` (not `is False`) is required by SQLAlchemy's
                # expression language.
                query = query.join(Archive).filter(Archive.tainted == False)

            source_af = query.first()
            if source_af is None:
                raise ArchiveException('cp: Could not find {0} in any archive.'.format(db_file.filename))
            target_af = ArchiveFile(archive, component, db_file)
            session.add(target_af)

            self.fs.copy(source_af.path, target_af.path, link=False, mode=archive.mode)
412 def copy_binary(self, db_binary, suite, component, allow_tainted=False, extra_archives=None):
413 """Copy a binary package to the given suite and component
415 @type db_binary: L{daklib.dbconn.DBBinary}
416 @param db_binary: binary to copy
418 @type suite: L{daklib.dbconn.Suite}
419 @param suite: target suite
421 @type component: L{daklib.dbconn.Component}
422 @param component: target component
424 @type allow_tainted: bool
425 @param allow_tainted: allow to copy from tainted archives (such as NEW)
427 @type extra_archives: list of L{daklib.dbconn.Archive}
428 @param extra_archives: extra archives to copy Built-Using sources from
430 session = self.session
431 archive = suite.archive
435 filename = db_binary.poolfile.filename
437 # make sure source is present in target archive
438 db_source = db_binary.source
439 if session.query(ArchiveFile).filter_by(archive=archive, file=db_source.poolfile).first() is None:
440 raise ArchiveException('{0}: cannot copy to {1}: source is not present in target archive'.format(filename, suite.suite_name))
442 # make sure built-using packages are present in target archive
443 for db_source in db_binary.extra_sources:
444 self._ensure_extra_source_exists(filename, db_source, archive, extra_archives=extra_archives)
447 db_file = db_binary.poolfile
448 self._copy_file(db_file, suite.archive, component, allow_tainted=allow_tainted)
449 if suite not in db_binary.suites:
450 db_binary.suites.append(suite)
453 def copy_source(self, db_source, suite, component, allow_tainted=False):
454 """Copy a source package to the given suite and component
456 @type db_source: L{daklib.dbconn.DBSource}
457 @param db_source: source to copy
459 @type suite: L{daklib.dbconn.Suite}
460 @param suite: target suite
462 @type component: L{daklib.dbconn.Component}
463 @param component: target component
465 @type allow_tainted: bool
466 @param allow_tainted: allow to copy from tainted archives (such as NEW)
468 archive = suite.archive
471 for db_dsc_file in db_source.srcfiles:
472 self._copy_file(db_dsc_file.poolfile, archive, component, allow_tainted=allow_tainted)
473 if suite not in db_source.suites:
474 db_source.suites.append(suite)
477 def remove_file(self, db_file, archive, component):
478 """Remove a file from a given archive and component
480 @type db_file: L{daklib.dbconn.PoolFile}
481 @param db_file: file to remove
483 @type archive: L{daklib.dbconn.Archive}
484 @param archive: archive to remove the file from
486 @type component: L{daklib.dbconn.Component}
487 @param component: component to remove the file from
489 af = self.session.query(ArchiveFile).filter_by(file=db_file, archive=archive, component=component)
490 self.fs.unlink(af.path)
491 self.session.delete(af)
    def remove_binary(self, binary, suite):
        """Remove a binary from a given suite and component

        @type  binary: L{daklib.dbconn.DBBinary}
        @param binary: binary to remove

        @type  suite: L{daklib.dbconn.Suite}
        @param suite: suite to remove the package from
        """
        # Only the suite association is removed; the package and its pool
        # file stay in the database/archive.
        binary.suites.remove(suite)
    def remove_source(self, source, suite):
        """Remove a source from a given suite and component

        @type  source: L{daklib.dbconn.DBSource}
        @param source: source to remove

        @type  suite: L{daklib.dbconn.Suite}
        @param suite: suite to remove the package from

        @raise ArchiveException: source package is still referenced by other
                                 binaries in the suite
        """
        session = self.session

        # Refuse to remove a source while binaries built from it are still
        # in the suite.
        query = session.query(DBBinary).filter_by(source=source) \
            .filter(DBBinary.suites.contains(suite))
        if query.first() is not None:
            raise ArchiveException('src:{0} is still used by binaries in suite {1}'.format(source.source, suite.suite_name))

        source.suites.remove(suite)
530 self.session.commit()
533 self.session.rollback()
537 """rollback changes"""
538 self.session.rollback()
547 def __exit__(self, type, value, traceback):
def source_component_from_package_list(package_list, suite):
    """Get component for a source package

    This function will look at the Package-List field to determine the
    component the source package belongs to. This is the first component
    the source package provides binaries for (first with respect to the
    ordering of components).

    If the source package has no Package-List field, None is returned.

    @type  package_list: L{daklib.packagelist.PackageList}
    @param package_list: package list of the source to get the override for

    @type  suite: L{daklib.dbconn.Suite}
    @param suite: suite to consider for binaries produced

    @rtype:  L{daklib.dbconn.Component} or C{None}
    @return: component for the given source or C{None}
    """
    if package_list.fallback:
        # No (usable) Package-List field.
        return None

    session = object_session(suite)
    packages = package_list.packages_for_suite(suite)
    components = set(p.component for p in packages)
    # First matching component with respect to the configured ordering.
    query = session.query(Component).order_by(Component.ordering) \
        .filter(Component.component_name.in_(components))
    return query.first()
class ArchiveUpload(object):
    """handle an upload

    This class can be used in a with-statement::

       with ArchiveUpload(...) as upload:
          ...

    Doing so will automatically run any required cleanup and also rollback the
    transaction if it was not committed.
    """

    def __init__(self, directory, changes, keyrings):
        self.transaction = ArchiveTransaction()
        """transaction used to handle the upload
        @type: L{daklib.archive.ArchiveTransaction}
        """

        self.session = self.transaction.session
        """database session"""

        self.original_directory = directory
        self.original_changes = changes

        self.changes = None
        """upload to process
        @type: L{daklib.upload.Changes}
        """

        self.directory = None
        """directory with temporary copy of files. set by C{prepare}
        @type: str
        """

        self.keyrings = keyrings

        self.fingerprint = self.session.query(Fingerprint).filter_by(fingerprint=changes.primary_fingerprint).one()
        """fingerprint of the key used to sign the upload
        @type: L{daklib.dbconn.Fingerprint}
        """

        self.reject_reasons = []
        """reasons why the upload cannot by accepted
        @type: list of str
        """

        self.warnings = []
        """warnings
        @type: list of str
        """

        self.final_suites = None

        self.new = False
        """upload is NEW. set by C{check}
        @type: bool
        """

        self._checked = False
        """checks passes. set by C{check}
        @type: bool
        """

        self._new_queue = self.session.query(PolicyQueue).filter_by(queue_name='new').one()
        self._new = self._new_queue.suite
    def warn(self, message):
        """add a warning message

        Adds a warning message that can later be seen in C{self.warnings}

        @type  message: string
        @param message: warning message
        """
        self.warnings.append(message)
659 """prepare upload for further processing
661 This copies the files involved to a temporary directory. If you use
662 this method directly, you have to remove the directory given by the
663 C{directory} attribute later on your own.
665 Instead of using the method directly, you can also use a with-statement::
667 with ArchiveUpload(...) as upload:
670 This will automatically handle any required cleanup.
672 assert self.directory is None
673 assert self.original_changes.valid_signature
676 session = self.transaction.session
678 group = cnf.get('Dinstall::UnprivGroup') or None
679 self.directory = utils.temp_dirname(parent=cnf.get('Dir::TempPath'),
680 mode=0o2750, group=group)
681 with FilesystemTransaction() as fs:
682 src = os.path.join(self.original_directory, self.original_changes.filename)
683 dst = os.path.join(self.directory, self.original_changes.filename)
684 fs.copy(src, dst, mode=0o640)
686 self.changes = upload.Changes(self.directory, self.original_changes.filename, self.keyrings)
688 for f in self.changes.files.itervalues():
689 src = os.path.join(self.original_directory, f.filename)
690 dst = os.path.join(self.directory, f.filename)
691 if not os.path.exists(src):
693 fs.copy(src, dst, mode=0o640)
697 source = self.changes.source
699 # Do not raise an exception here if the .dsc is invalid.
702 if source is not None:
703 for f in source.files.itervalues():
704 src = os.path.join(self.original_directory, f.filename)
705 dst = os.path.join(self.directory, f.filename)
706 if not os.path.exists(dst):
708 db_file = self.transaction.get_file(f, source.dsc['Source'], check_hashes=False)
709 db_archive_file = session.query(ArchiveFile).filter_by(file=db_file).first()
710 fs.copy(db_archive_file.path, dst, mode=0o640)
712 # Ignore if get_file could not find it. Upload will
713 # probably be rejected later.
716 def unpacked_source(self):
717 """Path to unpacked source
719 Get path to the unpacked source. This method does unpack the source
720 into a temporary directory under C{self.directory} if it has not
721 been done so already.
723 @rtype: str or C{None}
724 @return: string giving the path to the unpacked source directory
725 or C{None} if no source was included in the upload.
727 assert self.directory is not None
729 source = self.changes.source
732 dsc_path = os.path.join(self.directory, source._dsc_file.filename)
734 sourcedir = os.path.join(self.directory, 'source')
735 if not os.path.exists(sourcedir):
736 devnull = open('/dev/null', 'w')
737 daklib.daksubprocess.check_call(["dpkg-source", "--no-copy", "--no-check", "-x", dsc_path, sourcedir], shell=False, stdout=devnull)
738 if not os.path.isdir(sourcedir):
739 raise Exception("{0} is not a directory after extracting source package".format(sourcedir))
742 def _map_suite(self, suite_name):
743 for rule in Config().value_list("SuiteMappings"):
744 fields = rule.split()
746 if rtype == "map" or rtype == "silent-map":
747 (src, dst) = fields[1:3]
748 if src == suite_name:
750 if rtype != "silent-map":
751 self.warnings.append('Mapping {0} to {1}.'.format(src, dst))
752 elif rtype == "ignore":
754 if suite_name == ignored:
755 self.warnings.append('Ignoring target suite {0}.'.format(ignored))
757 elif rtype == "reject":
759 if suite_name == rejected:
760 raise checks.Reject('Uploads to {0} are not accepted.'.format(rejected))
761 ## XXX: propup-version and map-unreleased not yet implemented
764 def _mapped_suites(self):
765 """Get target suites after mappings
767 @rtype: list of L{daklib.dbconn.Suite}
768 @return: list giving the mapped target suites of this upload
770 session = self.session
773 for dist in self.changes.distributions:
774 suite_name = self._map_suite(dist)
775 if suite_name is not None:
776 suite_names.append(suite_name)
778 suites = session.query(Suite).filter(Suite.suite_name.in_(suite_names))
781 def _check_new_binary_overrides(self, suite, overridesuite):
783 source = self.changes.source
785 if source is not None and not source.package_list.fallback:
786 packages = source.package_list.packages_for_suite(suite)
787 binaries = [ entry for entry in packages ]
789 override = self._binary_override(overridesuite, b)
791 self.warnings.append('binary:{0} is NEW.'.format(b.name))
794 binaries = self.changes.binaries
796 if utils.is_in_debug_section(b.control) and suite.debug_suite is not None:
798 override = self._binary_override(overridesuite, b)
800 self.warnings.append('binary:{0} is NEW.'.format(b.name))
805 def _check_new(self, suite, overridesuite):
806 """Check if upload is NEW
808 An upload is NEW if it has binary or source packages that do not have
809 an override in C{overridesuite} OR if it references files ONLY in a
810 tainted archive (eg. when it references files in NEW).
812 Debug packages (*-dbgsym in Section: debug) are not considered as NEW
813 if C{suite} has a seperate debug suite.
816 @return: C{True} if the upload is NEW, C{False} otherwise
818 session = self.session
821 # Check for missing overrides
822 if self._check_new_binary_overrides(suite, overridesuite):
824 if self.changes.source is not None:
825 override = self._source_override(overridesuite, self.changes.source)
827 self.warnings.append('source:{0} is NEW.'.format(self.changes.source.dsc['Source']))
830 # Check if we reference a file only in a tainted archive
831 files = self.changes.files.values()
832 if self.changes.source is not None:
833 files.extend(self.changes.source.files.values())
835 query = session.query(ArchiveFile).join(PoolFile).filter(PoolFile.sha1sum == f.sha1sum)
836 query_untainted = query.join(Archive).filter(Archive.tainted == False)
838 in_archive = (query.first() is not None)
839 in_untainted_archive = (query_untainted.first() is not None)
841 if in_archive and not in_untainted_archive:
842 self.warnings.append('{0} is only available in NEW.'.format(f.filename))
847 def _final_suites(self):
848 session = self.session
850 mapped_suites = self._mapped_suites()
853 for suite in mapped_suites:
854 overridesuite = suite
855 if suite.overridesuite is not None:
856 overridesuite = session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
857 if self._check_new(suite, overridesuite):
859 final_suites.add(suite)
863 def _binary_override(self, suite, binary):
864 """Get override entry for a binary
866 @type suite: L{daklib.dbconn.Suite}
867 @param suite: suite to get override for
869 @type binary: L{daklib.upload.Binary} or L{daklib.packagelist.PackageListEntry}
870 @param binary: binary to get override for
872 @rtype: L{daklib.dbconn.Override} or C{None}
873 @return: override for the given binary or C{None}
875 if suite.overridesuite is not None:
876 suite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
878 mapped_component = get_mapped_component(binary.component)
879 if mapped_component is None:
882 query = self.session.query(Override).filter_by(suite=suite, package=binary.name) \
883 .join(Component).filter(Component.component_name == mapped_component.component_name) \
884 .join(OverrideType).filter(OverrideType.overridetype == binary.type)
888 except NoResultFound:
891 def _source_override(self, suite, source):
892 """Get override entry for a source
894 @type suite: L{daklib.dbconn.Suite}
895 @param suite: suite to get override for
897 @type source: L{daklib.upload.Source}
898 @param source: source to get override for
900 @rtype: L{daklib.dbconn.Override} or C{None}
901 @return: override for the given source or C{None}
903 if suite.overridesuite is not None:
904 suite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
906 query = self.session.query(Override).filter_by(suite=suite, package=source.dsc['Source']) \
907 .join(OverrideType).filter(OverrideType.overridetype == 'dsc')
909 component = source_component_from_package_list(source.package_list, suite)
910 if component is not None:
911 query = query.filter(Override.component == component)
915 except NoResultFound:
918 def _binary_component(self, suite, binary, only_overrides=True):
919 """get component for a binary
921 By default this will only look at overrides to get the right component;
922 if C{only_overrides} is C{False} this method will also look at the
925 @type suite: L{daklib.dbconn.Suite}
927 @type binary: L{daklib.upload.Binary}
929 @type only_overrides: bool
930 @param only_overrides: only use overrides to get the right component
932 @rtype: L{daklib.dbconn.Component} or C{None}
934 override = self._binary_override(suite, binary)
935 if override is not None:
936 return override.component
939 return get_mapped_component(binary.component, self.session)
941 def check(self, force=False):
942 """run checks against the upload
945 @param force: ignore failing forcable checks
948 @return: C{True} if all checks passed, C{False} otherwise
950 # XXX: needs to be better structured.
951 assert self.changes.valid_signature
954 # Validate signatures and hashes before we do any real work:
956 checks.SignatureAndHashesCheck,
957 checks.SignatureTimestampCheck,
959 checks.ExternalHashesCheck,
962 checks.BinaryTimestampCheck,
963 checks.SingleDistributionCheck,
964 checks.ArchAllBinNMUCheck,
968 final_suites = self._final_suites()
969 if len(final_suites) == 0:
970 self.reject_reasons.append('No target suite found. Please check your target distribution and that you uploaded to the right archive.')
973 self.final_suites = final_suites
976 checks.TransitionCheck,
978 checks.NoSourceOnlyCheck,
986 checks.SourceFormatCheck,
987 checks.SuiteArchitectureCheck,
990 for suite in final_suites:
991 chk().per_suite_check(self, suite)
993 if len(self.reject_reasons) != 0:
998 except checks.Reject as e:
999 self.reject_reasons.append(unicode(e))
1000 except Exception as e:
1001 self.reject_reasons.append("Processing raised an exception: {0}.\n{1}".format(e, traceback.format_exc()))
    def _install_to_suite(self, suite, source_component_func, binary_component_func, source_suites=None, extra_source_archives=None):
        """Install upload to the given suite

        @type  suite: L{daklib.dbconn.Suite}
        @param suite: suite to install the package into. This is the real suite,
                      ie. after any redirection to NEW or a policy queue

        @param source_component_func: function to get the L{daklib.dbconn.Component}
                                      for a L{daklib.upload.Source} object

        @param binary_component_func: function to get the L{daklib.dbconn.Component}
                                      for a L{daklib.upload.Binary} object

        @param source_suites: see L{daklib.archive.ArchiveTransaction.install_binary}

        @param extra_source_archives: see L{daklib.archive.ArchiveTransaction.install_binary}

        @return: tuple with two elements. The first is a L{daklib.dbconn.DBSource}
                 object for the install source or C{None} if no source was
                 included. The second is a list of L{daklib.dbconn.DBBinary}
                 objects for the installed binary packages.
        """
        # XXX: move this function to ArchiveTransaction?

        control = self.changes.changes
        # Changed-By falls back to Maintainer when the field is absent.
        changed_by = get_or_set_maintainer(control.get('Changed-By', control['Maintainer']), self.session)

        if source_suites is None:
            # Default: source may be taken from any suite that the target
            # suite "Enhances" according to the version_check table.
            source_suites = self.session.query(Suite).join((VersionCheck, VersionCheck.reference_id == Suite.suite_id)).filter(VersionCheck.check == 'Enhances').filter(VersionCheck.suite == suite).subquery()

        source = self.changes.source
        if source is not None:
            component = source_component_func(source)
            db_source = self.transaction.install_source(
                fingerprint=self.fingerprint

        for binary in self.changes.binaries:
            copy_to_suite = suite
            # Packages from a debug section are diverted into the suite's
            # associated debug suite, when one is configured.
            if utils.is_in_debug_section(binary.control) and suite.debug_suite is not None:
                copy_to_suite = suite.debug_suite

            component = binary_component_func(binary)
            db_binary = self.transaction.install_binary(
                fingerprint=self.fingerprint,
                source_suites=source_suites,
                extra_source_archives=extra_source_archives
            db_binaries.append(db_binary)

        if suite.copychanges:
            # Some suites keep a copy of the .changes file in their dists/ tree.
            src = os.path.join(self.directory, self.changes.filename)
            dst = os.path.join(suite.archive.path, 'dists', suite.suite_name, self.changes.filename)
            self.transaction.fs.copy(src, dst, mode=suite.archive.mode)

        suite.update_last_changed()

        return (db_source, db_binaries)
    def _install_changes(self):
        """record the upload's .changes file in the database

        @rtype:  L{daklib.dbconn.DBChange}
        @return: database record for the .changes file

        @raise ArchiveException: the .changes file is already known
        """
        assert self.changes.valid_signature
        control = self.changes.changes
        session = self.transaction.session

        # Only add changelog for sourceful uploads and binNMUs
        if 'source' in self.changes.architectures or re_bin_only_nmu.search(control['Version']):
            query = 'INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id'
            changelog_id = session.execute(query, {'changelog': control['Changes']}).scalar()
            assert changelog_id is not None

        db_changes = DBChange()
        db_changes.changesname = self.changes.filename
        db_changes.source = control['Source']
        # 'Binary' is absent for source-only uploads, hence the .get().
        db_changes.binaries = control.get('Binary', None)
        db_changes.architecture = control['Architecture']
        db_changes.version = control['Version']
        db_changes.distribution = control['Distribution']
        db_changes.urgency = control['Urgency']
        db_changes.maintainer = control['Maintainer']
        db_changes.changedby = control.get('Changed-By', control['Maintainer'])
        db_changes.date = control['Date']
        db_changes.fingerprint = self.fingerprint.fingerprint
        db_changes.changelog_id = changelog_id
        db_changes.closes = self.changes.closed_bugs

            # A duplicate .changes name violates a unique constraint and
            # surfaces as an IntegrityError on flush.
            self.transaction.session.add(db_changes)
            self.transaction.session.flush()
        except sqlalchemy.exc.IntegrityError:
            raise ArchiveException('{0} is already known.'.format(self.changes.filename))
1111 def _install_policy(self, policy_queue, target_suite, db_changes, db_source, db_binaries):
1112 u = PolicyQueueUpload()
1113 u.policy_queue = policy_queue
1114 u.target_suite = target_suite
1115 u.changes = db_changes
1116 u.source = db_source
1117 u.binaries = db_binaries
1118 self.transaction.session.add(u)
1119 self.transaction.session.flush()
1121 dst = os.path.join(policy_queue.path, self.changes.filename)
1122 self.transaction.fs.copy(self.changes.path, dst, mode=policy_queue.change_perms)
    def try_autobyhand(self):
        """try to process BYHAND files automatically

        Try to handle byhand packages automatically.

        @rtype:  bool
        @return: C{True} when no byhand files remain to be handled manually
        """
        assert len(self.reject_reasons) == 0
        assert self.changes.valid_signature
        assert self.final_suites is not None
        assert self._checked

        byhand = self.changes.byhand_files
        if len(byhand) == 0:

        suites = list(self.final_suites)
        assert len(suites) == 1, "BYHAND uploads must be to a single suite"

        control = self.changes.changes
        # Per-package automatic byhand rules from the dak configuration.
        automatic_byhand_packages = cnf.subtree("AutomaticByHandPackages")

            if '_' in f.filename:
                # Expected shape: package_version_arch.ext
                parts = f.filename.split('_', 2)
                    print "W: unexpected byhand filename {0}. No automatic processing.".format(f.filename)

                package, version, archext = parts
                arch, ext = archext.split('.', 1)
                parts = f.filename.split('.')
                    print "W: unexpected byhand filename {0}. No automatic processing.".format(f.filename)

            rule = automatic_byhand_packages.subtree(package)

            # The rule must match the upload's source, the file's section and
            # (when configured) the file extension.
            if rule['Source'] != self.changes.source_name \
                    or rule['Section'] != f.section \
                    or ('Extension' in rule and rule['Extension'] != ext):

            script = rule['Script']
            # shell=False with an argument list: filenames are passed as
            # separate argv entries, never interpreted by a shell.
            retcode = daklib.daksubprocess.call([script, os.path.join(self.directory, f.filename), control['Version'], arch, os.path.join(self.directory, self.changes.filename), suite.suite_name], shell=False)
                print "W: error processing {0}.".format(f.filename)

        return len(remaining) == 0
    def _install_byhand(self, policy_queue_upload, hashed_file):
        """install byhand file

        @type  policy_queue_upload: L{daklib.dbconn.PolicyQueueUpload}
        @param policy_queue_upload: policy queue upload the byhand file belongs to

        @type  hashed_file: L{daklib.upload.HashedFile}
        @param hashed_file: byhand file to install
        """
        fs = self.transaction.fs
        session = self.transaction.session
        policy_queue = policy_queue_upload.policy_queue

        # Record the byhand file in the database...
        byhand_file = PolicyQueueByhandFile()
        byhand_file.upload = policy_queue_upload
        byhand_file.filename = hashed_file.filename
        session.add(byhand_file)

        # ...and copy it into the policy queue's directory.
        src = os.path.join(self.directory, hashed_file.filename)
        dst = os.path.join(policy_queue.path, hashed_file.filename)
        fs.copy(src, dst, mode=policy_queue.change_perms)
    def _do_bts_versiontracking(self):
        """write version-tracking information for the bug tracking system

        Creates C{<base>.versions} (changelog version history) and
        C{<base>.debinfo} (binary to source mapping) files in the
        C{Dir::BTSVersionTrack} directory, when that directory is configured.
        """
        fs = self.transaction.fs

        btsdir = cnf.get('Dir::BTSVersionTrack')
        if btsdir is None or btsdir == '':

        # Strip the trailing ".changes" (8 characters) from the filename.
        base = os.path.join(btsdir, self.changes.filename[:-8])

        # version history
        sourcedir = self.unpacked_source()
        if sourcedir is not None:
            fh = open(os.path.join(sourcedir, 'debian', 'changelog'), 'r')
            versions = fs.create("{0}.versions".format(base), mode=0o644)
            # Only keep changelog lines that carry a version number.
            for line in fh.readlines():
                if re_changelog_versions.match(line):
                    versions.write(line)

        # binary -> source mapping
        debinfo = fs.create("{0}.debinfo".format(base), mode=0o644)
        for binary in self.changes.binaries:
            control = binary.control
            source_package, source_version = binary.source
            line = " ".join([control['Package'], control['Version'], control['Architecture'], source_package, source_version])
            print >>debinfo, line
1247 def _policy_queue(self, suite):
1248 if suite.policy_queue is not None:
1249 return suite.policy_queue
        Install upload to a suite or policy queue. This method does B{not}
        handle uploads to NEW.

        You need to have called the C{check} method before calling this method.
        assert len(self.reject_reasons) == 0
        assert self.changes.valid_signature
        assert self.final_suites is not None
        assert self._checked

        db_changes = self._install_changes()

        for suite in self.final_suites:
            # Overrides can live in a different suite (overridesuite field),
            # e.g. stable-updates using stable's overrides.
            overridesuite = suite
            if suite.overridesuite is not None:
                overridesuite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()

            policy_queue = self._policy_queue(suite)

            # When a policy queue applies, the packages are installed into the
            # queue's suite instead of the target suite.
            redirected_suite = suite
            if policy_queue is not None:
                redirected_suite = policy_queue.suite

            # source can be in the suite we install to or any suite we enhance
            source_suite_ids = set([suite.suite_id, redirected_suite.suite_id])
            for enhanced_suite_id, in self.session.query(VersionCheck.reference_id) \
                    .filter(VersionCheck.suite_id.in_(source_suite_ids)) \
                    .filter(VersionCheck.check == 'Enhances'):
                source_suite_ids.add(enhanced_suite_id)

            source_suites = self.session.query(Suite).filter(Suite.suite_id.in_(source_suite_ids)).subquery()

            # Component lookups go via the override suite computed above.
            source_component_func = lambda source: self._source_override(overridesuite, source).component
            binary_component_func = lambda binary: self._binary_component(overridesuite, binary, only_overrides=False)

            (db_source, db_binaries) = self._install_to_suite(redirected_suite, source_component_func, binary_component_func, source_suites=source_suites, extra_source_archives=[suite.archive])

            if policy_queue is not None:
                self._install_policy(policy_queue, suite, db_changes, db_source, db_binaries)

            # copy to build queues
            if policy_queue is None or policy_queue.send_to_build_queues:
                for build_queue in suite.copy_queues:
                    self._install_to_suite(build_queue.suite, source_component_func, binary_component_func, source_suites=source_suites, extra_source_archives=[suite.archive])

        self._do_bts_versiontracking()
    def install_to_new(self):
        """install upload to NEW

        Install upload to NEW. This method does B{not} handle regular uploads
        to suites or policy queues.

        You need to have called the C{check} method before calling this method.
        """
        # Uploads to NEW are special as we don't have overrides.
        assert len(self.reject_reasons) == 0
        assert self.changes.valid_signature
        assert self.final_suites is not None

        source = self.changes.source
        binaries = self.changes.binaries
        byhand = self.changes.byhand_files

        # we need a suite to guess components
        suites = list(self.final_suites)
        assert len(suites) == 1, "NEW uploads must be to a single suite"

        # decide which NEW queue to use
        if suite.new_queue is None:
            # Fall back to the global queue named 'new'.
            new_queue = self.transaction.session.query(PolicyQueue).filter_by(queue_name='new').one()
            new_queue = suite.new_queue
        # There is only one global BYHAND queue
        new_queue = self.transaction.session.query(PolicyQueue).filter_by(queue_name='byhand').one()
        new_suite = new_queue.suite

        def binary_component_func(binary):
            return self._binary_component(suite, binary, only_overrides=False)

        # guess source component
        # XXX: should be moved into an extra method
        binary_component_names = set()
        for binary in binaries:
            component = binary_component_func(binary)
            binary_component_names.add(component.component_name)
        source_component_name = None
        # Components are scanned in id order; the first one that also occurs
        # among the binaries' components wins.
        for c in self.session.query(Component).order_by(Component.component_id):
            guess = c.component_name
            if guess in binary_component_names:
                source_component_name = guess
        if source_component_name is None:
            # No match: default to the first component by id.
            source_component = self.session.query(Component).order_by(Component.component_id).first()
            source_component = self.session.query(Component).filter_by(component_name=source_component_name).one()
        source_component_func = lambda source: source_component

        db_changes = self._install_changes()
        (db_source, db_binaries) = self._install_to_suite(new_suite, source_component_func, binary_component_func, source_suites=True, extra_source_archives=[suite.archive])
        policy_upload = self._install_policy(new_queue, suite, db_changes, db_source, db_binaries)

            self._install_byhand(policy_upload, f)

        self._do_bts_versiontracking()
        """commit changes"""
        # Delegates to the ArchiveTransaction (database + filesystem).
        self.transaction.commit()

        """rollback changes"""
        # Delegates to the ArchiveTransaction (database + filesystem).
        self.transaction.rollback()
    def __enter__(self):
        # Context-manager entry for use in a `with` statement.
    def __exit__(self, type, value, traceback):
        # Context-manager exit: remove the temporary upload directory and
        # roll back anything that was not explicitly committed.
        if self.directory is not None:
            shutil.rmtree(self.directory)
            self.directory = None
        self.transaction.rollback()