1 # Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
3 # This program is free software; you can redistribute it and/or modify
4 # it under the terms of the GNU General Public License as published by
5 # the Free Software Foundation; either version 2 of the License, or
6 # (at your option) any later version.
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
13 # You should have received a copy of the GNU General Public License along
14 # with this program; if not, write to the Free Software Foundation, Inc.,
15 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""module to manipulate the archive

This module provides classes to manipulate the archive.
"""
# standard library
import os
import traceback
from datetime import datetime

# third party
import apt_pkg
import sqlalchemy.exc
from sqlalchemy.orm import object_session
from sqlalchemy.orm.exc import NoResultFound

# project local
from daklib.dbconn import *
import daklib.checks as checks
from daklib.config import Config
import daklib.upload as upload
import daklib.utils as utils
from daklib.fstransactions import FilesystemTransaction
from daklib.regexes import re_changelog_versions, re_bin_only_nmu
import daklib.daksubprocess
41 class ArchiveException(Exception):
44 class HashMismatchException(ArchiveException):
class ArchiveTransaction(object):
    """manipulate the archive in a transaction

    Couples a database session with a filesystem transaction so that
    changes to both can be committed or rolled back together.
    """

    def __init__(self):
        # staged filesystem operations; only applied on commit()
        self.fs = FilesystemTransaction()
        # database session shared by all operations of this transaction
        self.session = DBConn().session()
54 def get_file(self, hashed_file, source_name, check_hashes=True):
55 """Look for file C{hashed_file} in database
57 @type hashed_file: L{daklib.upload.HashedFile}
58 @param hashed_file: file to look for in the database
60 @type source_name: str
61 @param source_name: source package name
63 @type check_hashes: bool
64 @param check_hashes: check size and hashes match
66 @raise KeyError: file was not found in the database
67 @raise HashMismatchException: hash mismatch
69 @rtype: L{daklib.dbconn.PoolFile}
70 @return: database entry for the file
72 poolname = os.path.join(utils.poolify(source_name), hashed_file.filename)
74 poolfile = self.session.query(PoolFile).filter_by(filename=poolname).one()
75 if check_hashes and (poolfile.filesize != hashed_file.size
76 or poolfile.md5sum != hashed_file.md5sum
77 or poolfile.sha1sum != hashed_file.sha1sum
78 or poolfile.sha256sum != hashed_file.sha256sum):
79 raise HashMismatchException('{0}: Does not match file already existing in the pool.'.format(hashed_file.filename))
82 raise KeyError('{0} not found in database.'.format(poolname))
84 def _install_file(self, directory, hashed_file, archive, component, source_name):
87 Will not give an error when the file is already present.
89 @rtype: L{daklib.dbconn.PoolFile}
90 @return: database object for the new file
92 session = self.session
94 poolname = os.path.join(utils.poolify(source_name), hashed_file.filename)
96 poolfile = self.get_file(hashed_file, source_name)
98 poolfile = PoolFile(filename=poolname, filesize=hashed_file.size)
99 poolfile.md5sum = hashed_file.md5sum
100 poolfile.sha1sum = hashed_file.sha1sum
101 poolfile.sha256sum = hashed_file.sha256sum
102 session.add(poolfile)
106 session.query(ArchiveFile).filter_by(archive=archive, component=component, file=poolfile).one()
107 except NoResultFound:
108 archive_file = ArchiveFile(archive, component, poolfile)
109 session.add(archive_file)
112 path = os.path.join(archive.path, 'pool', component.component_name, poolname)
113 hashed_file_path = os.path.join(directory, hashed_file.filename)
114 self.fs.copy(hashed_file_path, path, link=False, mode=archive.mode)
118 def install_binary(self, directory, binary, suite, component, allow_tainted=False, fingerprint=None, source_suites=None, extra_source_archives=None):
119 """Install a binary package
122 @param directory: directory the binary package is located in
124 @type binary: L{daklib.upload.Binary}
125 @param binary: binary package to install
127 @type suite: L{daklib.dbconn.Suite}
128 @param suite: target suite
130 @type component: L{daklib.dbconn.Component}
131 @param component: target component
133 @type allow_tainted: bool
134 @param allow_tainted: allow to copy additional files from tainted archives
136 @type fingerprint: L{daklib.dbconn.Fingerprint}
137 @param fingerprint: optional fingerprint
139 @type source_suites: SQLAlchemy subquery for C{daklib.dbconn.Suite} or C{True}
140 @param source_suites: suites to copy the source from if they are not
141 in C{suite} or C{True} to allow copying from any
144 @type extra_source_archives: list of L{daklib.dbconn.Archive}
145 @param extra_source_archives: extra archives to copy Built-Using sources from
147 @rtype: L{daklib.dbconn.DBBinary}
148 @return: databse object for the new package
150 session = self.session
151 control = binary.control
152 maintainer = get_or_set_maintainer(control['Maintainer'], session)
153 architecture = get_architecture(control['Architecture'], session)
155 (source_name, source_version) = binary.source
156 source_query = session.query(DBSource).filter_by(source=source_name, version=source_version)
157 source = source_query.filter(DBSource.suites.contains(suite)).first()
159 if source_suites != True:
160 source_query = source_query.join(DBSource.suites) \
161 .filter(Suite.suite_id == source_suites.c.id)
162 source = source_query.first()
164 raise ArchiveException('{0}: trying to install to {1}, but could not find source'.format(binary.hashed_file.filename, suite.suite_name))
165 self.copy_source(source, suite, component)
167 db_file = self._install_file(directory, binary.hashed_file, suite.archive, component, source_name)
170 package=control['Package'],
171 version=control['Version'],
172 architecture=architecture,
176 maintainer=maintainer,
178 binarytype=binary.type,
179 fingerprint=fingerprint,
183 db_binary = session.query(DBBinary).filter_by(**unique).one()
184 for key, value in rest.iteritems():
185 if getattr(db_binary, key) != value:
186 raise ArchiveException('{0}: Does not match binary in database.'.format(binary.hashed_file.filename))
187 except NoResultFound:
188 db_binary = DBBinary(**unique)
189 for key, value in rest.iteritems():
190 setattr(db_binary, key, value)
191 session.add(db_binary)
193 import_metadata_into_db(db_binary, session)
195 self._add_built_using(db_binary, binary.hashed_file.filename, control, suite, extra_archives=extra_source_archives)
197 if suite not in db_binary.suites:
198 db_binary.suites.append(suite)
204 def _ensure_extra_source_exists(self, filename, source, archive, extra_archives=None):
205 """ensure source exists in the given archive
207 This is intended to be used to check that Built-Using sources exist.
210 @param filename: filename to use in error messages
212 @type source: L{daklib.dbconn.DBSource}
213 @param source: source to look for
215 @type archive: L{daklib.dbconn.Archive}
216 @param archive: archive to look in
218 @type extra_archives: list of L{daklib.dbconn.Archive}
219 @param extra_archives: list of archives to copy the source package from
220 if it is not yet present in C{archive}
222 session = self.session
223 db_file = session.query(ArchiveFile).filter_by(file=source.poolfile, archive=archive).first()
224 if db_file is not None:
227 # Try to copy file from one extra archive
228 if extra_archives is None:
230 db_file = session.query(ArchiveFile).filter_by(file=source.poolfile).filter(ArchiveFile.archive_id.in_([ a.archive_id for a in extra_archives])).first()
232 raise ArchiveException('{0}: Built-Using refers to package {1} (= {2}) not in target archive {3}.'.format(filename, source.source, source.version, archive.archive_name))
234 source_archive = db_file.archive
235 for dsc_file in source.srcfiles:
236 af = session.query(ArchiveFile).filter_by(file=dsc_file.poolfile, archive=source_archive, component=db_file.component).one()
237 # We were given an explicit list of archives so it is okay to copy from tainted archives.
238 self._copy_file(af.file, archive, db_file.component, allow_tainted=True)
240 def _add_built_using(self, db_binary, filename, control, suite, extra_archives=None):
241 """Add Built-Using sources to C{db_binary.extra_sources}
243 session = self.session
244 built_using = control.get('Built-Using', None)
246 if built_using is not None:
247 for dep in apt_pkg.parse_depends(built_using):
248 assert len(dep) == 1, 'Alternatives are not allowed in Built-Using field'
249 bu_source_name, bu_source_version, comp = dep[0]
250 assert comp == '=', 'Built-Using must contain strict dependencies'
252 bu_source = session.query(DBSource).filter_by(source=bu_source_name, version=bu_source_version).first()
253 if bu_source is None:
254 raise ArchiveException('{0}: Built-Using refers to non-existing source package {1} (= {2})'.format(filename, bu_source_name, bu_source_version))
256 self._ensure_extra_source_exists(filename, bu_source, suite.archive, extra_archives=extra_archives)
258 db_binary.extra_sources.append(bu_source)
260 def install_source(self, directory, source, suite, component, changed_by, allow_tainted=False, fingerprint=None):
261 """Install a source package
264 @param directory: directory the source package is located in
266 @type source: L{daklib.upload.Source}
267 @param source: source package to install
269 @type suite: L{daklib.dbconn.Suite}
270 @param suite: target suite
272 @type component: L{daklib.dbconn.Component}
273 @param component: target component
275 @type changed_by: L{daklib.dbconn.Maintainer}
276 @param changed_by: person who prepared this version of the package
278 @type allow_tainted: bool
279 @param allow_tainted: allow to copy additional files from tainted archives
281 @type fingerprint: L{daklib.dbconn.Fingerprint}
282 @param fingerprint: optional fingerprint
284 @rtype: L{daklib.dbconn.DBSource}
285 @return: database object for the new source
287 session = self.session
288 archive = suite.archive
290 maintainer = get_or_set_maintainer(control['Maintainer'], session)
291 source_name = control['Source']
293 ### Add source package to database
295 # We need to install the .dsc first as the DBSource object refers to it.
296 db_file_dsc = self._install_file(directory, source._dsc_file, archive, component, source_name)
300 version=control['Version'],
303 maintainer=maintainer,
304 changedby=changed_by,
305 #install_date=datetime.now().date(),
306 poolfile=db_file_dsc,
307 fingerprint=fingerprint,
308 dm_upload_allowed=(control.get('DM-Upload-Allowed', 'no') == 'yes'),
313 db_source = session.query(DBSource).filter_by(**unique).one()
314 for key, value in rest.iteritems():
315 if getattr(db_source, key) != value:
316 raise ArchiveException('{0}: Does not match source in database.'.format(source._dsc_file.filename))
317 except NoResultFound:
319 db_source = DBSource(**unique)
320 for key, value in rest.iteritems():
321 setattr(db_source, key, value)
322 # XXX: set as default in postgres?
323 db_source.install_date = datetime.now().date()
324 session.add(db_source)
327 # Add .dsc file. Other files will be added later.
328 db_dsc_file = DSCFile()
329 db_dsc_file.source = db_source
330 db_dsc_file.poolfile = db_file_dsc
331 session.add(db_dsc_file)
334 if suite in db_source.suites:
337 db_source.suites.append(suite)
340 for f in db_source.srcfiles:
341 self._copy_file(f.poolfile, archive, component, allow_tainted=allow_tainted)
344 ### Now add remaining files and copy them to the archive.
346 for hashed_file in source.files.itervalues():
347 hashed_file_path = os.path.join(directory, hashed_file.filename)
348 if os.path.exists(hashed_file_path):
349 db_file = self._install_file(directory, hashed_file, archive, component, source_name)
352 db_file = self.get_file(hashed_file, source_name)
353 self._copy_file(db_file, archive, component, allow_tainted=allow_tainted)
355 db_dsc_file = DSCFile()
356 db_dsc_file.source = db_source
357 db_dsc_file.poolfile = db_file
358 session.add(db_dsc_file)
362 # Importing is safe as we only arrive here when we did not find the source already installed earlier.
363 import_metadata_into_db(db_source, session)
365 # Uploaders are the maintainer and co-maintainers from the Uploaders field
366 db_source.uploaders.append(maintainer)
367 if 'Uploaders' in control:
368 from daklib.textutils import split_uploaders
369 for u in split_uploaders(control['Uploaders']):
370 db_source.uploaders.append(get_or_set_maintainer(u, session))
375 def _copy_file(self, db_file, archive, component, allow_tainted=False):
376 """Copy a file to the given archive and component
378 @type db_file: L{daklib.dbconn.PoolFile}
379 @param db_file: file to copy
381 @type archive: L{daklib.dbconn.Archive}
382 @param archive: target archive
384 @type component: L{daklib.dbconn.Archive}
385 @param component: target component
387 @type allow_tainted: bool
388 @param allow_tainted: allow to copy from tainted archives (such as NEW)
390 session = self.session
392 if session.query(ArchiveFile).filter_by(archive=archive, component=component, file=db_file).first() is None:
393 query = session.query(ArchiveFile).filter_by(file=db_file)
394 if not allow_tainted:
395 query = query.join(Archive).filter(Archive.tainted == False)
397 source_af = query.first()
398 if source_af is None:
399 raise ArchiveException('cp: Could not find {0} in any archive.'.format(db_file.filename))
400 target_af = ArchiveFile(archive, component, db_file)
401 session.add(target_af)
403 self.fs.copy(source_af.path, target_af.path, link=False, mode=archive.mode)
405 def copy_binary(self, db_binary, suite, component, allow_tainted=False, extra_archives=None):
406 """Copy a binary package to the given suite and component
408 @type db_binary: L{daklib.dbconn.DBBinary}
409 @param db_binary: binary to copy
411 @type suite: L{daklib.dbconn.Suite}
412 @param suite: target suite
414 @type component: L{daklib.dbconn.Component}
415 @param component: target component
417 @type allow_tainted: bool
418 @param allow_tainted: allow to copy from tainted archives (such as NEW)
420 @type extra_archives: list of L{daklib.dbconn.Archive}
421 @param extra_archives: extra archives to copy Built-Using sources from
423 session = self.session
424 archive = suite.archive
428 filename = db_binary.poolfile.filename
430 # make sure source is present in target archive
431 db_source = db_binary.source
432 if session.query(ArchiveFile).filter_by(archive=archive, file=db_source.poolfile).first() is None:
433 raise ArchiveException('{0}: cannot copy to {1}: source is not present in target archive'.format(filename, suite.suite_name))
435 # make sure built-using packages are present in target archive
436 for db_source in db_binary.extra_sources:
437 self._ensure_extra_source_exists(filename, db_source, archive, extra_archives=extra_archives)
440 db_file = db_binary.poolfile
441 self._copy_file(db_file, suite.archive, component, allow_tainted=allow_tainted)
442 if suite not in db_binary.suites:
443 db_binary.suites.append(suite)
446 def copy_source(self, db_source, suite, component, allow_tainted=False):
447 """Copy a source package to the given suite and component
449 @type db_source: L{daklib.dbconn.DBSource}
450 @param db_source: source to copy
452 @type suite: L{daklib.dbconn.Suite}
453 @param suite: target suite
455 @type component: L{daklib.dbconn.Component}
456 @param component: target component
458 @type allow_tainted: bool
459 @param allow_tainted: allow to copy from tainted archives (such as NEW)
461 archive = suite.archive
464 for db_dsc_file in db_source.srcfiles:
465 self._copy_file(db_dsc_file.poolfile, archive, component, allow_tainted=allow_tainted)
466 if suite not in db_source.suites:
467 db_source.suites.append(suite)
470 def remove_file(self, db_file, archive, component):
471 """Remove a file from a given archive and component
473 @type db_file: L{daklib.dbconn.PoolFile}
474 @param db_file: file to remove
476 @type archive: L{daklib.dbconn.Archive}
477 @param archive: archive to remove the file from
479 @type component: L{daklib.dbconn.Component}
480 @param component: component to remove the file from
482 af = self.session.query(ArchiveFile).filter_by(file=db_file, archive=archive, component=component)
483 self.fs.unlink(af.path)
484 self.session.delete(af)
486 def remove_binary(self, binary, suite):
487 """Remove a binary from a given suite and component
489 @type binary: L{daklib.dbconn.DBBinary}
490 @param binary: binary to remove
492 @type suite: L{daklib.dbconn.Suite}
493 @param suite: suite to remove the package from
495 binary.suites.remove(suite)
498 def remove_source(self, source, suite):
499 """Remove a source from a given suite and component
501 @type source: L{daklib.dbconn.DBSource}
502 @param source: source to remove
504 @type suite: L{daklib.dbconn.Suite}
505 @param suite: suite to remove the package from
507 @raise ArchiveException: source package is still referenced by other
508 binaries in the suite
510 session = self.session
512 query = session.query(DBBinary).filter_by(source=source) \
513 .filter(DBBinary.suites.contains(suite))
514 if query.first() is not None:
515 raise ArchiveException('src:{0} is still used by binaries in suite {1}'.format(source.source, suite.suite_name))
517 source.suites.remove(suite)
523 self.session.commit()
526 self.session.rollback()
530 """rollback changes"""
531 self.session.rollback()
540 def __exit__(self, type, value, traceback):
def source_component_from_package_list(package_list, suite):
    """Get component for a source package

    This function will look at the Package-List field to determine the
    component the source package belongs to. This is the first component
    the source package provides binaries for (first with respect to the
    ordering of components).

    If the source package has no Package-List field, None is returned.

    @type  package_list: L{daklib.packagelist.PackageList}
    @param package_list: package list of the source to get the override for

    @type  suite: L{daklib.dbconn.Suite}
    @param suite: suite to consider for binaries produced

    @rtype:  L{daklib.dbconn.Component} or C{None}
    @return: component for the given source or C{None}
    """
    if package_list.fallback:
        # No real Package-List field was present.
        return None
    session = object_session(suite)
    packages = package_list.packages_for_suite(suite)
    components = set(p.component for p in packages)
    query = session.query(Component).order_by(Component.ordering) \
        .filter(Component.component_name.in_(components))
    return query.first()
class ArchiveUpload(object):
    """handle an upload

    This class can be used in a with-statement::

       with ArchiveUpload(...) as upload:
          ...

    Doing so will automatically run any required cleanup and also rollback the
    transaction if it was not committed.
    """

    def __init__(self, directory, changes, keyrings):
        # transaction used to handle the upload
        # (L{daklib.archive.ArchiveTransaction})
        self.transaction = ArchiveTransaction()
        # database session
        self.session = self.transaction.session

        self.original_directory = directory
        self.original_changes = changes
        # copy of the changes in our temporary directory; set by prepare()
        # (L{daklib.upload.Changes})
        self.changes = None
        # directory with temporary copy of files; set by prepare()
        self.directory = None
        self.keyrings = keyrings

        # fingerprint of the key used to sign the upload
        # (L{daklib.dbconn.Fingerprint})
        self.fingerprint = self.session.query(Fingerprint).filter_by(fingerprint=changes.primary_fingerprint).one()

        # reasons why the upload cannot be accepted
        self.reject_reasons = []
        # warnings collected while processing; see warn()
        self.warnings = []
        # final target suites; set by check()
        self.final_suites = None
        # upload is NEW; set by check()
        self.new = False

        # checks passed; set by check()
        self._checked = False

        self._new_queue = self.session.query(PolicyQueue).filter_by(queue_name='new').one()
        self._new = self._new_queue.suite
641 def warn(self, message):
642 """add a warning message
644 Adds a warning message that can later be seen in C{self.warnings}
646 @type message: string
647 @param message: warning message
649 self.warnings.append(message)
652 """prepare upload for further processing
654 This copies the files involved to a temporary directory. If you use
655 this method directly, you have to remove the directory given by the
656 C{directory} attribute later on your own.
658 Instead of using the method directly, you can also use a with-statement::
660 with ArchiveUpload(...) as upload:
663 This will automatically handle any required cleanup.
665 assert self.directory is None
666 assert self.original_changes.valid_signature
669 session = self.transaction.session
671 group = cnf.get('Dinstall::UnprivGroup') or None
672 self.directory = utils.temp_dirname(parent=cnf.get('Dir::TempPath'),
673 mode=0o2750, group=group)
674 with FilesystemTransaction() as fs:
675 src = os.path.join(self.original_directory, self.original_changes.filename)
676 dst = os.path.join(self.directory, self.original_changes.filename)
677 fs.copy(src, dst, mode=0o640)
679 self.changes = upload.Changes(self.directory, self.original_changes.filename, self.keyrings)
681 for f in self.changes.files.itervalues():
682 src = os.path.join(self.original_directory, f.filename)
683 dst = os.path.join(self.directory, f.filename)
684 if not os.path.exists(src):
686 fs.copy(src, dst, mode=0o640)
690 source = self.changes.source
692 # Do not raise an exception here if the .dsc is invalid.
695 if source is not None:
696 for f in source.files.itervalues():
697 src = os.path.join(self.original_directory, f.filename)
698 dst = os.path.join(self.directory, f.filename)
699 if not os.path.exists(dst):
701 db_file = self.transaction.get_file(f, source.dsc['Source'], check_hashes=False)
702 db_archive_file = session.query(ArchiveFile).filter_by(file=db_file).first()
703 fs.copy(db_archive_file.path, dst, mode=0o640)
705 # Ignore if get_file could not find it. Upload will
706 # probably be rejected later.
709 def unpacked_source(self):
710 """Path to unpacked source
712 Get path to the unpacked source. This method does unpack the source
713 into a temporary directory under C{self.directory} if it has not
714 been done so already.
716 @rtype: str or C{None}
717 @return: string giving the path to the unpacked source directory
718 or C{None} if no source was included in the upload.
720 assert self.directory is not None
722 source = self.changes.source
725 dsc_path = os.path.join(self.directory, source._dsc_file.filename)
727 sourcedir = os.path.join(self.directory, 'source')
728 if not os.path.exists(sourcedir):
729 devnull = open('/dev/null', 'w')
730 daklib.daksubprocess.check_call(["dpkg-source", "--no-copy", "--no-check", "-x", dsc_path, sourcedir], shell=False, stdout=devnull)
731 if not os.path.isdir(sourcedir):
732 raise Exception("{0} is not a directory after extracting source package".format(sourcedir))
735 def _map_suite(self, suite_name):
736 for rule in Config().value_list("SuiteMappings"):
737 fields = rule.split()
739 if rtype == "map" or rtype == "silent-map":
740 (src, dst) = fields[1:3]
741 if src == suite_name:
743 if rtype != "silent-map":
744 self.warnings.append('Mapping {0} to {1}.'.format(src, dst))
745 elif rtype == "ignore":
747 if suite_name == ignored:
748 self.warnings.append('Ignoring target suite {0}.'.format(ignored))
750 elif rtype == "reject":
752 if suite_name == rejected:
753 raise checks.Reject('Uploads to {0} are not accepted.'.format(rejected))
754 ## XXX: propup-version and map-unreleased not yet implemented
757 def _mapped_suites(self):
758 """Get target suites after mappings
760 @rtype: list of L{daklib.dbconn.Suite}
761 @return: list giving the mapped target suites of this upload
763 session = self.session
766 for dist in self.changes.distributions:
767 suite_name = self._map_suite(dist)
768 if suite_name is not None:
769 suite_names.append(suite_name)
771 suites = session.query(Suite).filter(Suite.suite_name.in_(suite_names))
774 def _check_new_binary_overrides(self, suite):
777 binaries = self.changes.binaries
778 source = self.changes.source
779 if source is not None and not source.package_list.fallback:
780 packages = source.package_list.packages_for_suite(suite)
781 binaries = [ entry for entry in packages ]
784 override = self._binary_override(suite, b)
786 self.warnings.append('binary:{0} is NEW.'.format(b.name))
791 def _check_new(self, suite):
792 """Check if upload is NEW
794 An upload is NEW if it has binary or source packages that do not have
795 an override in C{suite} OR if it references files ONLY in a tainted
796 archive (eg. when it references files in NEW).
799 @return: C{True} if the upload is NEW, C{False} otherwise
801 session = self.session
804 # Check for missing overrides
805 if self._check_new_binary_overrides(suite):
807 if self.changes.source is not None:
808 override = self._source_override(suite, self.changes.source)
810 self.warnings.append('source:{0} is NEW.'.format(self.changes.source.dsc['Source']))
813 # Check if we reference a file only in a tainted archive
814 files = self.changes.files.values()
815 if self.changes.source is not None:
816 files.extend(self.changes.source.files.values())
818 query = session.query(ArchiveFile).join(PoolFile).filter(PoolFile.sha1sum == f.sha1sum)
819 query_untainted = query.join(Archive).filter(Archive.tainted == False)
821 in_archive = (query.first() is not None)
822 in_untainted_archive = (query_untainted.first() is not None)
824 if in_archive and not in_untainted_archive:
825 self.warnings.append('{0} is only available in NEW.'.format(f.filename))
830 def _final_suites(self):
831 session = self.session
833 mapped_suites = self._mapped_suites()
836 for suite in mapped_suites:
837 overridesuite = suite
838 if suite.overridesuite is not None:
839 overridesuite = session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
840 if self._check_new(overridesuite):
842 final_suites.add(suite)
846 def _binary_override(self, suite, binary):
847 """Get override entry for a binary
849 @type suite: L{daklib.dbconn.Suite}
850 @param suite: suite to get override for
852 @type binary: L{daklib.upload.Binary} or L{daklib.packagelist.PackageListEntry}
853 @param binary: binary to get override for
855 @rtype: L{daklib.dbconn.Override} or C{None}
856 @return: override for the given binary or C{None}
858 if suite.overridesuite is not None:
859 suite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
861 mapped_component = get_mapped_component(binary.component)
862 if mapped_component is None:
865 query = self.session.query(Override).filter_by(suite=suite, package=binary.name) \
866 .join(Component).filter(Component.component_name == mapped_component.component_name) \
867 .join(OverrideType).filter(OverrideType.overridetype == binary.type)
871 except NoResultFound:
874 def _source_override(self, suite, source):
875 """Get override entry for a source
877 @type suite: L{daklib.dbconn.Suite}
878 @param suite: suite to get override for
880 @type source: L{daklib.upload.Source}
881 @param source: source to get override for
883 @rtype: L{daklib.dbconn.Override} or C{None}
884 @return: override for the given source or C{None}
886 if suite.overridesuite is not None:
887 suite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
889 query = self.session.query(Override).filter_by(suite=suite, package=source.dsc['Source']) \
890 .join(OverrideType).filter(OverrideType.overridetype == 'dsc')
892 component = source_component_from_package_list(source.package_list, suite)
893 if component is not None:
894 query = query.filter(Override.component == component)
898 except NoResultFound:
901 def _binary_component(self, suite, binary, only_overrides=True):
902 """get component for a binary
904 By default this will only look at overrides to get the right component;
905 if C{only_overrides} is C{False} this method will also look at the
908 @type suite: L{daklib.dbconn.Suite}
910 @type binary: L{daklib.upload.Binary}
912 @type only_overrides: bool
913 @param only_overrides: only use overrides to get the right component
915 @rtype: L{daklib.dbconn.Component} or C{None}
917 override = self._binary_override(suite, binary)
918 if override is not None:
919 return override.component
922 return get_mapped_component(binary.component, self.session)
924 def check(self, force=False):
925 """run checks against the upload
928 @param force: ignore failing forcable checks
931 @return: C{True} if all checks passed, C{False} otherwise
933 # XXX: needs to be better structured.
934 assert self.changes.valid_signature
937 # Validate signatures and hashes before we do any real work:
939 checks.SignatureAndHashesCheck,
940 checks.SignatureTimestampCheck,
942 checks.ExternalHashesCheck,
945 checks.BinaryTimestampCheck,
946 checks.SingleDistributionCheck,
950 final_suites = self._final_suites()
951 if len(final_suites) == 0:
952 self.reject_reasons.append('No target suite found. Please check your target distribution and that you uploaded to the right archive.')
955 self.final_suites = final_suites
958 checks.TransitionCheck,
960 checks.NoSourceOnlyCheck,
967 checks.SourceFormatCheck,
968 checks.SuiteArchitectureCheck,
971 for suite in final_suites:
972 chk().per_suite_check(self, suite)
974 if len(self.reject_reasons) != 0:
979 except checks.Reject as e:
980 self.reject_reasons.append(unicode(e))
981 except Exception as e:
982 self.reject_reasons.append("Processing raised an exception: {0}.\n{1}".format(e, traceback.format_exc()))
985 def _install_to_suite(self, suite, source_component_func, binary_component_func, source_suites=None, extra_source_archives=None):
986 """Install upload to the given suite
988 @type suite: L{daklib.dbconn.Suite}
989 @param suite: suite to install the package into. This is the real suite,
990 ie. after any redirection to NEW or a policy queue
992 @param source_component_func: function to get the L{daklib.dbconn.Component}
993 for a L{daklib.upload.Source} object
995 @param binary_component_func: function to get the L{daklib.dbconn.Component}
996 for a L{daklib.upload.Binary} object
998 @param source_suites: see L{daklib.archive.ArchiveTransaction.install_binary}
1000 @param extra_source_archives: see L{daklib.archive.ArchiveTransaction.install_binary}
1002 @return: tuple with two elements. The first is a L{daklib.dbconn.DBSource}
1003 object for the install source or C{None} if no source was
1004 included. The second is a list of L{daklib.dbconn.DBBinary}
1005 objects for the installed binary packages.
1007 # XXX: move this function to ArchiveTransaction?
1009 control = self.changes.changes
1010 changed_by = get_or_set_maintainer(control.get('Changed-By', control['Maintainer']), self.session)
1012 if source_suites is None:
1013 source_suites = self.session.query(Suite).join((VersionCheck, VersionCheck.reference_id == Suite.suite_id)).filter(VersionCheck.check == 'Enhances').filter(VersionCheck.suite == suite).subquery()
1015 source = self.changes.source
1016 if source is not None:
1017 component = source_component_func(source)
1018 db_source = self.transaction.install_source(self.directory, source, suite, component, changed_by, fingerprint=self.fingerprint)
1023 for binary in self.changes.binaries:
1024 component = binary_component_func(binary)
1025 db_binary = self.transaction.install_binary(self.directory, binary, suite, component, fingerprint=self.fingerprint, source_suites=source_suites, extra_source_archives=extra_source_archives)
1026 db_binaries.append(db_binary)
1028 if suite.copychanges:
1029 src = os.path.join(self.directory, self.changes.filename)
1030 dst = os.path.join(suite.archive.path, 'dists', suite.suite_name, self.changes.filename)
1031 self.transaction.fs.copy(src, dst, mode=suite.archive.mode)
1033 return (db_source, db_binaries)
1035 def _install_changes(self):
1036 assert self.changes.valid_signature
1037 control = self.changes.changes
1038 session = self.transaction.session
1042 # Only add changelog for sourceful uploads and binNMUs
1043 if 'source' in self.changes.architectures or re_bin_only_nmu.search(control['Version']):
1044 query = 'INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id'
1045 changelog_id = session.execute(query, {'changelog': control['Changes']}).scalar()
1046 assert changelog_id is not None
1048 db_changes = DBChange()
1049 db_changes.changesname = self.changes.filename
1050 db_changes.source = control['Source']
1051 db_changes.binaries = control.get('Binary', None)
1052 db_changes.architecture = control['Architecture']
1053 db_changes.version = control['Version']
1054 db_changes.distribution = control['Distribution']
1055 db_changes.urgency = control['Urgency']
1056 db_changes.maintainer = control['Maintainer']
1057 db_changes.changedby = control.get('Changed-By', control['Maintainer'])
1058 db_changes.date = control['Date']
1059 db_changes.fingerprint = self.fingerprint.fingerprint
1060 db_changes.changelog_id = changelog_id
1061 db_changes.closes = self.changes.closed_bugs
1064 self.transaction.session.add(db_changes)
1065 self.transaction.session.flush()
1066 except sqlalchemy.exc.IntegrityError:
1067 raise ArchiveException('{0} is already known.'.format(self.changes.filename))
1071 def _install_policy(self, policy_queue, target_suite, db_changes, db_source, db_binaries):
1072 u = PolicyQueueUpload()
1073 u.policy_queue = policy_queue
1074 u.target_suite = target_suite
1075 u.changes = db_changes
1076 u.source = db_source
1077 u.binaries = db_binaries
1078 self.transaction.session.add(u)
1079 self.transaction.session.flush()
1081 dst = os.path.join(policy_queue.path, self.changes.filename)
1082 self.transaction.fs.copy(self.changes.path, dst, mode=policy_queue.change_perms)
1086 def try_autobyhand(self):
1089 Try to handle byhand packages automatically.
1091 @rtype: list of L{daklib.upload.HashedFile}
1092 @return: list of remaining byhand files
1094 assert len(self.reject_reasons) == 0
1095 assert self.changes.valid_signature
1096 assert self.final_suites is not None
1097 assert self._checked
1099 byhand = self.changes.byhand_files
1100 if len(byhand) == 0:
1103 suites = list(self.final_suites)
1104 assert len(suites) == 1, "BYHAND uploads must be to a single suite"
1108 control = self.changes.changes
1109 automatic_byhand_packages = cnf.subtree("AutomaticByHandPackages")
1113 if '_' in f.filename:
1114 parts = f.filename.split('_', 2)
1116 print "W: unexpected byhand filename {0}. No automatic processing.".format(f.filename)
1120 package, version, archext = parts
1121 arch, ext = archext.split('.', 1)
1123 parts = f.filename.split('.')
1125 print "W: unexpected byhand filename {0}. No automatic processing.".format(f.filename)
1135 rule = automatic_byhand_packages.subtree(package)
1140 if rule['Source'] != self.changes.source_name \
1141 or rule['Section'] != f.section \
1142 or ('Extension' in rule and rule['Extension'] != ext):
1146 script = rule['Script']
1147 retcode = daklib.daksubprocess.call([script, os.path.join(self.directory, f.filename), control['Version'], arch, os.path.join(self.directory, self.changes.filename)], shell=False)
1149 print "W: error processing {0}.".format(f.filename)
1152 return len(remaining) == 0
1154 def _install_byhand(self, policy_queue_upload, hashed_file):
1155 """install byhand file
1157 @type policy_queue_upload: L{daklib.dbconn.PolicyQueueUpload}
1159 @type hashed_file: L{daklib.upload.HashedFile}
1161 fs = self.transaction.fs
1162 session = self.transaction.session
1163 policy_queue = policy_queue_upload.policy_queue
1165 byhand_file = PolicyQueueByhandFile()
1166 byhand_file.upload = policy_queue_upload
1167 byhand_file.filename = hashed_file.filename
1168 session.add(byhand_file)
1171 src = os.path.join(self.directory, hashed_file.filename)
1172 dst = os.path.join(policy_queue.path, hashed_file.filename)
1173 fs.copy(src, dst, mode=policy_queue.change_perms)
1177 def _do_bts_versiontracking(self):
1179 fs = self.transaction.fs
1181 btsdir = cnf.get('Dir::BTSVersionTrack')
1182 if btsdir is None or btsdir == '':
1185 base = os.path.join(btsdir, self.changes.filename[:-8])
1188 sourcedir = self.unpacked_source()
1189 if sourcedir is not None:
1190 fh = open(os.path.join(sourcedir, 'debian', 'changelog'), 'r')
1191 versions = fs.create("{0}.versions".format(base), mode=0o644)
1192 for line in fh.readlines():
1193 if re_changelog_versions.match(line):
1194 versions.write(line)
1198 # binary -> source mapping
1199 debinfo = fs.create("{0}.debinfo".format(base), mode=0o644)
1200 for binary in self.changes.binaries:
1201 control = binary.control
1202 source_package, source_version = binary.source
1203 line = " ".join([control['Package'], control['Version'], control['Architecture'], source_package, source_version])
1204 print >>debinfo, line
1207 def _policy_queue(self, suite):
1208 if suite.policy_queue is not None:
1209 return suite.policy_queue
        Install upload to a suite or policy queue. This method does B{not}
        handle uploads to NEW.

        You need to have called the C{check} method before calling this method.
        assert len(self.reject_reasons) == 0
        assert self.changes.valid_signature
        assert self.final_suites is not None
        assert self._checked
        # Record the .changes file once; the same record is linked from every
        # suite/policy queue the upload goes to.
        db_changes = self._install_changes()
        for suite in self.final_suites:
            # Overrides may live in a different suite (e.g. for *-updates).
            overridesuite = suite
            if suite.overridesuite is not None:
                overridesuite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
            policy_queue = self._policy_queue(suite)
            # When a policy queue applies, packages are installed into the
            # queue's own suite first instead of the final target suite.
            redirected_suite = suite
            if policy_queue is not None:
                redirected_suite = policy_queue.suite
            # source can be in the suite we install to or any suite we enhance
            source_suite_ids = set([suite.suite_id, redirected_suite.suite_id])
            for enhanced_suite_id, in self.session.query(VersionCheck.reference_id) \
                .filter(VersionCheck.suite_id.in_(source_suite_ids)) \
                .filter(VersionCheck.check == 'Enhances'):
                source_suite_ids.add(enhanced_suite_id)
            source_suites = self.session.query(Suite).filter(Suite.suite_id.in_(source_suite_ids)).subquery()
            # Components come from the override suite determined above.
            source_component_func = lambda source: self._source_override(overridesuite, source).component
            binary_component_func = lambda binary: self._binary_component(overridesuite, binary)
            (db_source, db_binaries) = self._install_to_suite(redirected_suite, source_component_func, binary_component_func, source_suites=source_suites, extra_source_archives=[suite.archive])
            if policy_queue is not None:
                self._install_policy(policy_queue, suite, db_changes, db_source, db_binaries)
            # copy to build queues
            if policy_queue is None or policy_queue.send_to_build_queues:
                for build_queue in suite.copy_queues:
                    self._install_to_suite(build_queue.suite, source_component_func, binary_component_func, source_suites=source_suites, extra_source_archives=[suite.archive])
        self._do_bts_versiontracking()
1263 def install_to_new(self):
1264 """install upload to NEW
1266 Install upload to NEW. This method does B{not} handle regular uploads
1267 to suites or policy queues.
1269 You need to have called the C{check} method before calling this method.
1271 # Uploads to NEW are special as we don't have overrides.
1272 assert len(self.reject_reasons) == 0
1273 assert self.changes.valid_signature
1274 assert self.final_suites is not None
1276 source = self.changes.source
1277 binaries = self.changes.binaries
1278 byhand = self.changes.byhand_files
1280 # we need a suite to guess components
1281 suites = list(self.final_suites)
1282 assert len(suites) == 1, "NEW uploads must be to a single suite"
1285 # decide which NEW queue to use
1286 if suite.new_queue is None:
1287 new_queue = self.transaction.session.query(PolicyQueue).filter_by(queue_name='new').one()
1289 new_queue = suite.new_queue
1291 # There is only one global BYHAND queue
1292 new_queue = self.transaction.session.query(PolicyQueue).filter_by(queue_name='byhand').one()
1293 new_suite = new_queue.suite
1296 def binary_component_func(binary):
1297 return self._binary_component(suite, binary, only_overrides=False)
1299 # guess source component
1300 # XXX: should be moved into an extra method
1301 binary_component_names = set()
1302 for binary in binaries:
1303 component = binary_component_func(binary)
1304 binary_component_names.add(component.component_name)
1305 source_component_name = None
1306 for c in self.session.query(Component).order_by(Component.component_id):
1307 guess = c.component_name
1308 if guess in binary_component_names:
1309 source_component_name = guess
1311 if source_component_name is None:
1312 source_component = self.session.query(Component).order_by(Component.component_id).first()
1314 source_component = self.session.query(Component).filter_by(component_name=source_component_name).one()
1315 source_component_func = lambda source: source_component
1317 db_changes = self._install_changes()
1318 (db_source, db_binaries) = self._install_to_suite(new_suite, source_component_func, binary_component_func, source_suites=True, extra_source_archives=[suite.archive])
1319 policy_upload = self._install_policy(new_queue, suite, db_changes, db_source, db_binaries)
1322 self._install_byhand(policy_upload, f)
1324 self._do_bts_versiontracking()
1327 """commit changes"""
1328 self.transaction.commit()
1331 """rollback changes"""
1332 self.transaction.rollback()
    def __enter__(self):
    def __exit__(self, type, value, traceback):
        # Remove the temporary upload directory, if one is still present.
        if self.directory is not None:
            shutil.rmtree(self.directory)
            self.directory = None
        # Roll back anything that was not explicitly committed.
        self.transaction.rollback()