1 # Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
3 # This program is free software; you can redistribute it and/or modify
4 # it under the terms of the GNU General Public License as published by
5 # the Free Software Foundation; either version 2 of the License, or
6 # (at your option) any later version.
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
13 # You should have received a copy of the GNU General Public License along
14 # with this program; if not, write to the Free Software Foundation, Inc.,
15 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
17 """module to manipulate the archive
19 This module provides classes to manipulate the archive.
22 from daklib.dbconn import *
23 import daklib.checks as checks
24 from daklib.config import Config
25 import daklib.upload as upload
26 import daklib.utils as utils
27 from daklib.fstransactions import FilesystemTransaction
28 from daklib.regexes import re_changelog_versions, re_bin_only_nmu
31 from datetime import datetime
35 from sqlalchemy.orm.exc import NoResultFound
class ArchiveException(Exception):
    """Base exception for errors while manipulating the archive."""
class HashMismatchException(ArchiveException):
    """Raised when a file's size or checksums do not match the database entry."""
class ArchiveTransaction(object):
    """manipulate the archive in a transaction
    """
    def __init__(self):
        # Filesystem changes are staged in a transaction so they can be
        # committed or rolled back together with the database session.
        self.fs = FilesystemTransaction()
        self.session = DBConn().session()
52 def get_file(self, hashed_file, source_name, check_hashes=True):
53 """Look for file C{hashed_file} in database
55 @type hashed_file: L{daklib.upload.HashedFile}
56 @param hashed_file: file to look for in the database
58 @type source_name: str
59 @param source_name: source package name
61 @type check_hashes: bool
62 @param check_hashes: check size and hashes match
64 @raise KeyError: file was not found in the database
65 @raise HashMismatchException: hash mismatch
67 @rtype: L{daklib.dbconn.PoolFile}
68 @return: database entry for the file
70 poolname = os.path.join(utils.poolify(source_name), hashed_file.filename)
72 poolfile = self.session.query(PoolFile).filter_by(filename=poolname).one()
73 if check_hashes and (poolfile.filesize != hashed_file.size
74 or poolfile.md5sum != hashed_file.md5sum
75 or poolfile.sha1sum != hashed_file.sha1sum
76 or poolfile.sha256sum != hashed_file.sha256sum):
77 raise HashMismatchException('{0}: Does not match file already existing in the pool.'.format(hashed_file.filename))
80 raise KeyError('{0} not found in database.'.format(poolname))
82 def _install_file(self, directory, hashed_file, archive, component, source_name):
85 Will not give an error when the file is already present.
87 @rtype: L{daklib.dbconn.PoolFile}
88 @return: batabase object for the new file
90 session = self.session
92 poolname = os.path.join(utils.poolify(source_name), hashed_file.filename)
94 poolfile = self.get_file(hashed_file, source_name)
96 poolfile = PoolFile(filename=poolname, filesize=hashed_file.size)
97 poolfile.md5sum = hashed_file.md5sum
98 poolfile.sha1sum = hashed_file.sha1sum
99 poolfile.sha256sum = hashed_file.sha256sum
100 session.add(poolfile)
104 session.query(ArchiveFile).filter_by(archive=archive, component=component, file=poolfile).one()
105 except NoResultFound:
106 archive_file = ArchiveFile(archive, component, poolfile)
107 session.add(archive_file)
110 path = os.path.join(archive.path, 'pool', component.component_name, poolname)
111 hashed_file_path = os.path.join(directory, hashed_file.filename)
112 self.fs.copy(hashed_file_path, path, link=False, mode=archive.mode)
    def install_binary(self, directory, binary, suite, component, allow_tainted=False, fingerprint=None, source_suites=None, extra_source_archives=None):
        """Install a binary package

        @param directory: directory the binary package is located in

        @type  binary: L{daklib.upload.Binary}
        @param binary: binary package to install

        @type  suite: L{daklib.dbconn.Suite}
        @param suite: target suite

        @type  component: L{daklib.dbconn.Component}
        @param component: target component

        @type  allow_tainted: bool
        @param allow_tainted: allow to copy additional files from tainted archives

        @type  fingerprint: L{daklib.dbconn.Fingerprint}
        @param fingerprint: optional fingerprint

        @type  source_suites: SQLAlchemy subquery for C{daklib.dbconn.Suite} or C{True}
        @param source_suites: suites to copy the source from if they are not
                              in C{suite} or C{True} to allow copying from any
                              suite

        @type  extra_source_archives: list of L{daklib.dbconn.Archive}
        @param extra_source_archives: extra archives to copy Built-Using sources from

        @rtype:  L{daklib.dbconn.DBBinary}
        @return: database object for the new package
        """
        # NOTE(review): this listing appears to be a sampled copy — several
        # structural lines (guards, dict constructors, try headers) seem to be
        # elided below; confirm each NOTE against the full file.
        session = self.session
        control = binary.control
        maintainer = get_or_set_maintainer(control['Maintainer'], session)
        architecture = get_architecture(control['Architecture'], session)

        (source_name, source_version) = binary.source
        source_query = session.query(DBSource).filter_by(source=source_name, version=source_version)
        # First try to find the source in the target suite itself.
        source = source_query.filter(DBSource.suites.contains(suite)).first()
        # NOTE(review): an `if source is None:` guard presumably wraps the
        # fallback lookup, the exception and the copy below — elided here.
        if source_suites != True:
            # Restrict the fallback lookup to the allowed source suites.
            source_query = source_query.join(DBSource.suites) \
                .filter(Suite.suite_id == source_suites.c.id)
        source = source_query.first()
        # NOTE(review): an `if source is None:` check presumably precedes this raise.
        raise ArchiveException('{0}: trying to install to {1}, but could not find source'.format(binary.hashed_file.filename, suite.suite_name))
        self.copy_source(source, suite, component)

        db_file = self._install_file(directory, binary.hashed_file, suite.archive, component, source_name)

        # NOTE(review): the keyword pairs below presumably belong to elided
        # `unique = dict(...)` / `rest = dict(...)` constructors used further down.
        package=control['Package'],
        version=control['Version'],
        architecture=architecture,
        maintainer=maintainer,
        binarytype=binary.type,
        fingerprint=fingerprint,
        # NOTE(review): a `try:` presumably wraps the lookup below, paired with
        # the `except NoResultFound:` branch.
        db_binary = session.query(DBBinary).filter_by(**unique).one()
        for key, value in rest.iteritems():
            # An existing row must agree with the upload on every field.
            if getattr(db_binary, key) != value:
                raise ArchiveException('{0}: Does not match binary in database.'.format(binary.hashed_file.filename))
        except NoResultFound:
            db_binary = DBBinary(**unique)
            for key, value in rest.iteritems():
                setattr(db_binary, key, value)
            session.add(db_binary)

            import_metadata_into_db(db_binary, session)

            self._add_built_using(db_binary, binary.hashed_file.filename, control, suite, extra_archives=extra_source_archives)

        if suite not in db_binary.suites:
            db_binary.suites.append(suite)
    def _ensure_extra_source_exists(self, filename, source, archive, extra_archives=None):
        """ensure source exists in the given archive

        This is intended to be used to check that Built-Using sources exist.

        @param filename: filename to use in error messages

        @type  source: L{daklib.dbconn.DBSource}
        @param source: source to look for

        @type  archive: L{daklib.dbconn.Archive}
        @param archive: archive to look in

        @type  extra_archives: list of L{daklib.dbconn.Archive}
        @param extra_archives: list of archives to copy the source package from
                               if it is not yet present in C{archive}
        """
        session = self.session
        db_file = session.query(ArchiveFile).filter_by(file=source.poolfile, archive=archive).first()
        if db_file is not None:
            # NOTE(review): branch body (presumably an early return) appears
            # elided in this listing.

        # Try to copy file from one extra archive
        if extra_archives is None:
            # NOTE(review): branch body (presumably `extra_archives = []`)
            # appears elided.
        db_file = session.query(ArchiveFile).filter_by(file=source.poolfile).filter(ArchiveFile.archive_id.in_([ a.archive_id for a in extra_archives])).first()
        # NOTE(review): an `if db_file is None:` guard presumably precedes this raise.
        raise ArchiveException('{0}: Built-Using refers to package {1} (= {2}) not in target archive {3}.'.format(filename, source.source, source.version, archive.archive_name))

        source_archive = db_file.archive
        for dsc_file in source.srcfiles:
            af = session.query(ArchiveFile).filter_by(file=dsc_file.poolfile, archive=source_archive, component=db_file.component).one()
            # We were given an explicit list of archives so it is okay to copy from tainted archives.
            self._copy_file(af.file, archive, db_file.component, allow_tainted=True)
238 def _add_built_using(self, db_binary, filename, control, suite, extra_archives=None):
239 """Add Built-Using sources to C{db_binary.extra_sources}
241 session = self.session
242 built_using = control.get('Built-Using', None)
244 if built_using is not None:
245 for dep in apt_pkg.parse_depends(built_using):
246 assert len(dep) == 1, 'Alternatives are not allowed in Built-Using field'
247 bu_source_name, bu_source_version, comp = dep[0]
248 assert comp == '=', 'Built-Using must contain strict dependencies'
250 bu_source = session.query(DBSource).filter_by(source=bu_source_name, version=bu_source_version).first()
251 if bu_source is None:
252 raise ArchiveException('{0}: Built-Using refers to non-existing source package {1} (= {2})'.format(filename, bu_source_name, bu_source_version))
254 self._ensure_extra_source_exists(filename, bu_source, suite.archive, extra_archives=extra_archives)
256 db_binary.extra_sources.append(bu_source)
    def install_source(self, directory, source, suite, component, changed_by, allow_tainted=False, fingerprint=None):
        """Install a source package

        @param directory: directory the source package is located in

        @type  source: L{daklib.upload.Source}
        @param source: source package to install

        @type  suite: L{daklib.dbconn.Suite}
        @param suite: target suite

        @type  component: L{daklib.dbconn.Component}
        @param component: target component

        @type  changed_by: L{daklib.dbconn.Maintainer}
        @param changed_by: person who prepared this version of the package

        @type  allow_tainted: bool
        @param allow_tainted: allow to copy additional files from tainted archives

        @type  fingerprint: L{daklib.dbconn.Fingerprint}
        @param fingerprint: optional fingerprint

        @rtype:  L{daklib.dbconn.DBSource}
        @return: database object for the new source
        """
        session = self.session
        archive = suite.archive
        # NOTE(review): `control` is read below but its assignment (presumably
        # `control = source.dsc`) appears elided in this listing.
        maintainer = get_or_set_maintainer(control['Maintainer'], session)
        source_name = control['Source']

        ### Add source package to database

        # We need to install the .dsc first as the DBSource object refers to it.
        db_file_dsc = self._install_file(directory, source._dsc_file, archive, component, source_name)

        # NOTE(review): the keyword pairs below presumably belong to elided
        # `unique = dict(...)` / `rest = dict(...)` constructors used further down.
        version=control['Version'],
        maintainer=maintainer,
        changedby=changed_by,
        #install_date=datetime.now().date(),
        poolfile=db_file_dsc,
        fingerprint=fingerprint,
        dm_upload_allowed=(control.get('DM-Upload-Allowed', 'no') == 'yes'),

        # NOTE(review): a `try:` presumably wraps the lookup below, paired with
        # the `except NoResultFound:` branch.
        db_source = session.query(DBSource).filter_by(**unique).one()
        for key, value in rest.iteritems():
            # An existing row must agree with the upload on every field.
            if getattr(db_source, key) != value:
                raise ArchiveException('{0}: Does not match source in database.'.format(source._dsc_file.filename))
        except NoResultFound:
            db_source = DBSource(**unique)
            for key, value in rest.iteritems():
                setattr(db_source, key, value)
            # XXX: set as default in postgres?
            db_source.install_date = datetime.now().date()
            session.add(db_source)

            # Add .dsc file. Other files will be added later.
            db_dsc_file = DSCFile()
            db_dsc_file.source = db_source
            db_dsc_file.poolfile = db_file_dsc
            session.add(db_dsc_file)

        if suite in db_source.suites:
            # NOTE(review): branch body (presumably `return db_source`) appears elided.

        db_source.suites.append(suite)

        ### Now add remaining files and copy them to the archive.

        for hashed_file in source.files.itervalues():
            hashed_file_path = os.path.join(directory, hashed_file.filename)
            if os.path.exists(hashed_file_path):
                db_file = self._install_file(directory, hashed_file, archive, component, source_name)
            # NOTE(review): an `else:` presumably introduces the copy-from-pool
            # path below for files not present in the upload directory.
                db_file = self.get_file(hashed_file, source_name)
                self._copy_file(db_file, archive, component, allow_tainted=allow_tainted)

            db_dsc_file = DSCFile()
            db_dsc_file.source = db_source
            db_dsc_file.poolfile = db_file
            session.add(db_dsc_file)

        # Importing is safe as we only arrive here when we did not find the source already installed earlier.
        import_metadata_into_db(db_source, session)

        # Uploaders are the maintainer and co-maintainers from the Uploaders field
        db_source.uploaders.append(maintainer)
        if 'Uploaders' in control:
            from daklib.textutils import split_uploaders
            for u in split_uploaders(control['Uploaders']):
                db_source.uploaders.append(get_or_set_maintainer(u, session))
        # NOTE(review): trailing lines (presumably `session.flush()` and
        # `return db_source`) appear elided.
    def _copy_file(self, db_file, archive, component, allow_tainted=False):
        """Copy a file to the given archive and component

        @type  db_file: L{daklib.dbconn.PoolFile}
        @param db_file: file to copy

        @type  archive: L{daklib.dbconn.Archive}
        @param archive: target archive

        @type  component: L{daklib.dbconn.Component}
        @param component: target component

        @type  allow_tainted: bool
        @param allow_tainted: allow to copy from tainted archives (such as NEW)
        """
        session = self.session

        # Nothing to do if the file is already registered for this
        # archive/component.
        if session.query(ArchiveFile).filter_by(archive=archive, component=component, file=db_file).first() is None:
            query = session.query(ArchiveFile).filter_by(file=db_file, component=component)
            if not allow_tainted:
                # Only copy from archives we trust unless explicitly allowed.
                query = query.join(Archive).filter(Archive.tainted == False)

            source_af = query.first()
            if source_af is None:
                raise ArchiveException('cp: Could not find {0} in component {1} in any archive.'.format(db_file.filename, component.component_name))
            target_af = ArchiveFile(archive, component, db_file)
            session.add(target_af)
            # NOTE(review): a `session.flush()` may be elided here in this listing.
            self.fs.copy(source_af.path, target_af.path, link=False, mode=archive.mode)
    def copy_binary(self, db_binary, suite, component, allow_tainted=False, extra_archives=None):
        """Copy a binary package to the given suite and component

        @type  db_binary: L{daklib.dbconn.DBBinary}
        @param db_binary: binary to copy

        @type  suite: L{daklib.dbconn.Suite}
        @param suite: target suite

        @type  component: L{daklib.dbconn.Component}
        @param component: target component

        @type  allow_tainted: bool
        @param allow_tainted: allow to copy from tainted archives (such as NEW)

        @type  extra_archives: list of L{daklib.dbconn.Archive}
        @param extra_archives: extra archives to copy Built-Using sources from
        """
        session = self.session
        archive = suite.archive
        # NOTE(review): a couple of lines appear elided here (possibly a
        # `if archive.tainted: allow_tainted = True` special case) — confirm
        # against the full file.

        filename = db_binary.poolfile.filename

        # make sure source is present in target archive
        db_source = db_binary.source
        if session.query(ArchiveFile).filter_by(archive=archive, file=db_source.poolfile).first() is None:
            raise ArchiveException('{0}: cannot copy to {1}: source is not present in target archive'.format(filename, suite.suite_name))

        # make sure built-using packages are present in target archive
        for db_source in db_binary.extra_sources:
            self._ensure_extra_source_exists(filename, db_source, archive, extra_archives=extra_archives)

        # copy the binary itself and register the suite association
        db_file = db_binary.poolfile
        self._copy_file(db_file, suite.archive, component, allow_tainted=allow_tainted)
        if suite not in db_binary.suites:
            db_binary.suites.append(suite)
    def copy_source(self, db_source, suite, component, allow_tainted=False):
        """Copy a source package to the given suite and component

        @type  db_source: L{daklib.dbconn.DBSource}
        @param db_source: source to copy

        @type  suite: L{daklib.dbconn.Suite}
        @param suite: target suite

        @type  component: L{daklib.dbconn.Component}
        @param component: target component

        @type  allow_tainted: bool
        @param allow_tainted: allow to copy from tainted archives (such as NEW)
        """
        archive = suite.archive
        # NOTE(review): two lines appear elided here (possibly a
        # `if archive.tainted: allow_tainted = True` special case).
        # Copy every file referenced by the .dsc, then record suite membership.
        for db_dsc_file in db_source.srcfiles:
            self._copy_file(db_dsc_file.poolfile, archive, component, allow_tainted=allow_tainted)
        if suite not in db_source.suites:
            db_source.suites.append(suite)
466 def remove_file(self, db_file, archive, component):
467 """Remove a file from a given archive and component
469 @type db_file: L{daklib.dbconn.PoolFile}
470 @param db_file: file to remove
472 @type archive: L{daklib.dbconn.Archive}
473 @param archive: archive to remove the file from
475 @type component: L{daklib.dbconn.Component}
476 @param component: component to remove the file from
478 af = self.session.query(ArchiveFile).filter_by(file=db_file, archive=archive, component=component)
479 self.fs.unlink(af.path)
480 self.session.delete(af)
    def remove_binary(self, binary, suite):
        """Remove a binary from a given suite and component

        @type  binary: L{daklib.dbconn.DBBinary}
        @param binary: binary to remove

        @type  suite: L{daklib.dbconn.Suite}
        @param suite: suite to remove the package from
        """
        # Only the suite association is dropped; the package and its files
        # stay in the database/pool.
        binary.suites.remove(suite)
494 def remove_source(self, source, suite):
495 """Remove a source from a given suite and component
497 @type source: L{daklib.dbconn.DBSource}
498 @param source: source to remove
500 @type suite: L{daklib.dbconn.Suite}
501 @param suite: suite to remove the package from
503 @raise ArchiveException: source package is still referenced by other
504 binaries in the suite
506 session = self.session
508 query = session.query(DBBinary).filter_by(source=source) \
509 .filter(DBBinary.suites.contains(suite))
510 if query.first() is not None:
511 raise ArchiveException('src:{0} is still used by binaries in suite {1}'.format(source.source, suite.suite_name))
513 source.suites.remove(suite)
519 self.session.commit()
522 self.session.rollback()
526 """rollback changes"""
527 self.session.rollback()
533 def __exit__(self, type, value, traceback):
class ArchiveUpload(object):
    """handle an upload

    This class can be used in a with-statement::

       with ArchiveUpload(...) as upload:
          ...

    Doing so will automatically run any required cleanup and also rollback the
    transaction if it was not committed.
    """
    def __init__(self, directory, changes, keyrings):
        # NOTE(review): several attribute assignments (e.g. C{self.changes},
        # C{self.warnings}, C{self.new}) appear elided in this listing.
        self.transaction = ArchiveTransaction()
        """transaction used to handle the upload
        @type: L{daklib.archive.ArchiveTransaction}
        """

        self.session = self.transaction.session
        """database session"""

        self.original_directory = directory
        self.original_changes = changes

        """@type: L{daklib.upload.Changes}
        """

        self.directory = None
        """directory with temporary copy of files. set by C{prepare}
        """

        self.keyrings = keyrings

        self.fingerprint = self.session.query(Fingerprint).filter_by(fingerprint=changes.primary_fingerprint).one()
        """fingerprint of the key used to sign the upload
        @type: L{daklib.dbconn.Fingerprint}
        """

        self.reject_reasons = []
        """reasons why the upload cannot by accepted
        """

        self.final_suites = None

        """upload is NEW. set by C{check}
        """

        self._new_queue = self.session.query(PolicyQueue).filter_by(queue_name='new').one()
        self._new = self._new_queue.suite
602 """prepare upload for further processing
604 This copies the files involved to a temporary directory. If you use
605 this method directly, you have to remove the directory given by the
606 C{directory} attribute later on your own.
608 Instead of using the method directly, you can also use a with-statement::
610 with ArchiveUpload(...) as upload:
613 This will automatically handle any required cleanup.
615 assert self.directory is None
616 assert self.original_changes.valid_signature
619 session = self.transaction.session
621 self.directory = tempfile.mkdtemp(dir=cnf.get('Dir::TempPath'))
622 with FilesystemTransaction() as fs:
623 src = os.path.join(self.original_directory, self.original_changes.filename)
624 dst = os.path.join(self.directory, self.original_changes.filename)
627 self.changes = upload.Changes(self.directory, self.original_changes.filename, self.keyrings)
629 for f in self.changes.files.itervalues():
630 src = os.path.join(self.original_directory, f.filename)
631 dst = os.path.join(self.directory, f.filename)
632 if not os.path.exists(src):
636 source = self.changes.source
637 if source is not None:
638 for f in source.files.itervalues():
639 src = os.path.join(self.original_directory, f.filename)
640 dst = os.path.join(self.directory, f.filename)
641 if not os.path.exists(dst):
643 db_file = self.transaction.get_file(f, source.dsc['Source'], check_hashes=False)
644 db_archive_file = session.query(ArchiveFile).filter_by(file=db_file).first()
645 fs.copy(db_archive_file.path, dst, symlink=True)
647 # Ignore if get_file could not find it. Upload will
648 # probably be rejected later.
    def unpacked_source(self):
        """Path to unpacked source

        Get path to the unpacked source. This method does unpack the source
        into a temporary directory under C{self.directory} if it has not
        been done so already.

        @rtype:  str or C{None}
        @return: string giving the path to the unpacked source directory
                 or C{None} if no source was included in the upload.
        """
        assert self.directory is not None

        source = self.changes.source
        # NOTE(review): a `if source is None: return None` guard appears
        # elided here.
        dsc_path = os.path.join(self.directory, source._dsc_file.filename)

        sourcedir = os.path.join(self.directory, 'source')
        if not os.path.exists(sourcedir):
            devnull = open('/dev/null', 'w')
            subprocess.check_call(["dpkg-source", "--no-copy", "--no-check", "-x", dsc_path, sourcedir], shell=False, stdout=devnull)
        if not os.path.isdir(sourcedir):
            raise Exception("{0} is not a directory after extracting source package".format(sourcedir))
        # NOTE(review): trailing `return sourcedir` appears elided.
677 def _map_suite(self, suite_name):
678 for rule in Config().value_list("SuiteMappings"):
679 fields = rule.split()
681 if rtype == "map" or rtype == "silent-map":
682 (src, dst) = fields[1:3]
683 if src == suite_name:
685 if rtype != "silent-map":
686 self.warnings.append('Mapping {0} to {0}.'.format(src, dst))
687 elif rtype == "ignore":
689 if suite_name == ignored:
690 self.warnings.append('Ignoring target suite {0}.'.format(ignored))
692 elif rtype == "reject":
694 if suite_name == rejected:
695 self.reject_reasons.append('Uploads to {0} are not accepted.'.format(suite))
696 ## XXX: propup-version and map-unreleased not yet implemented
699 def _mapped_suites(self):
700 """Get target suites after mappings
702 @rtype: list of L{daklib.dbconn.Suite}
703 @return: list giving the mapped target suites of this upload
705 session = self.session
708 for dist in self.changes.distributions:
709 suite_name = self._map_suite(dist)
710 if suite_name is not None:
711 suite_names.append(suite_name)
713 suites = session.query(Suite).filter(Suite.suite_name.in_(suite_names))
    def _check_new(self, suite):
        """Check if upload is NEW

        An upload is NEW if it has binary or source packages that do not have
        an override in C{suite} OR if it references files ONLY in a tainted
        archive (eg. when it references files in NEW).

        @rtype:  bool
        @return: C{True} if the upload is NEW, C{False} otherwise
        """
        session = self.session

        # Check for missing overrides
        for b in self.changes.binaries:
            override = self._binary_override(suite, b)
            # NOTE(review): the branch returning C{True} when the override is
            # missing appears elided in this listing.

        if self.changes.source is not None:
            override = self._source_override(suite, self.changes.source)
            # NOTE(review): same elided missing-override check here.

        # Check if we reference a file only in a tainted archive
        files = self.changes.files.values()
        if self.changes.source is not None:
            files.extend(self.changes.source.files.values())
        # NOTE(review): a `for f in files:` loop header appears elided around
        # the per-file queries below.
        query = session.query(ArchiveFile).join(PoolFile).filter(PoolFile.sha1sum == f.sha1sum)
        query_untainted = query.join(Archive).filter(Archive.tainted == False)

        in_archive = (query.first() is not None)
        in_untainted_archive = (query_untainted.first() is not None)

        if in_archive and not in_untainted_archive:
            # NOTE(review): branch body (returning C{True}) and the final
            # `return False` appear elided.
    def _final_suites(self):
        # Resolve the upload's distributions to the set of suites we will
        # actually install into, honouring override suites and NEW detection.
        session = self.session

        mapped_suites = self._mapped_suites()
        # NOTE(review): initialisation of the result set (e.g.
        # `final_suites = set()`) appears elided in this listing.

        for suite in mapped_suites:
            overridesuite = suite
            if suite.overridesuite is not None:
                # Overrides may live in a different suite (e.g. unstable for
                # experimental).
                overridesuite = session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
            if self._check_new(overridesuite):
                # NOTE(review): branch body (presumably flagging the upload as
                # NEW) appears elided.
            final_suites.add(suite)
        # NOTE(review): trailing `return final_suites` appears elided.
769 def _binary_override(self, suite, binary):
770 """Get override entry for a binary
772 @type suite: L{daklib.dbconn.Suite}
773 @param suite: suite to get override for
775 @type binary: L{daklib.upload.Binary}
776 @param binary: binary to get override for
778 @rtype: L{daklib.dbconn.Override} or C{None}
779 @return: override for the given binary or C{None}
781 if suite.overridesuite is not None:
782 suite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
784 query = self.session.query(Override).filter_by(suite=suite, package=binary.control['Package']) \
785 .join(Component).filter(Component.component_name == binary.component) \
786 .join(OverrideType).filter(OverrideType.overridetype == binary.type)
790 except NoResultFound:
793 def _source_override(self, suite, source):
794 """Get override entry for a source
796 @type suite: L{daklib.dbconn.Suite}
797 @param suite: suite to get override for
799 @type source: L{daklib.upload.Source}
800 @param source: source to get override for
802 @rtype: L{daklib.dbconn.Override} or C{None}
803 @return: override for the given source or C{None}
805 if suite.overridesuite is not None:
806 suite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
808 # XXX: component for source?
809 query = self.session.query(Override).filter_by(suite=suite, package=source.dsc['Source']) \
810 .join(OverrideType).filter(OverrideType.overridetype == 'dsc')
814 except NoResultFound:
817 def _binary_component(self, suite, binary, only_overrides=True):
818 """get component for a binary
820 By default this will only look at overrides to get the right component;
821 if C{only_overrides} is C{False} this method will also look at the
824 @type suite: L{daklib.dbconn.Suite}
826 @type binary: L{daklib.upload.Binary}
828 @type only_overrides: bool
829 @param only_overrides: only use overrides to get the right component
831 @rtype: L{daklib.dbconn.Component} or C{None}
833 override = self._binary_override(suite, binary)
834 if override is not None:
835 return override.component
838 return get_mapped_component(binary.component, self.session)
    def check(self, force=False):
        """run checks against the upload

        @type  force: bool
        @param force: ignore failing forcable checks

        @rtype:  bool
        @return: C{True} if all checks passed, C{False} otherwise
        """
        # XXX: needs to be better structured.
        assert self.changes.valid_signature

        # NOTE(review): a `try:` block and the list/loop scaffolding around the
        # check classes below appear elided in this listing; the classes are
        # presumably collected in lists and each instantiated and run.
        # Validate signatures and hashes before we do any real work:
        checks.SignatureCheck,
        checks.BinaryTimestampCheck,
        checks.SingleDistributionCheck,

        final_suites = self._final_suites()
        if len(final_suites) == 0:
            self.reject_reasons.append('No target suite found. Please check your target distribution and that you uploaded to the right archive.')
        checks.TransitionCheck,
        checks.UploadBlockCheck,
        checks.NoSourceOnlyCheck,
        checks.SourceFormatCheck,
        checks.SuiteArchitectureCheck,
        # Per-suite checks run once for every final target suite.
        for suite in final_suites:
            chk().per_suite_check(self, suite)

        if len(self.reject_reasons) != 0:
            # NOTE(review): branch body (`return False`) appears elided.

        self.final_suites = final_suites
        # NOTE(review): `return True` appears elided before the handlers below.
        except checks.Reject as e:
            self.reject_reasons.append(unicode(e))
        except Exception as e:
            self.reject_reasons.append("Processing raised an exception: {0}.\n{1}".format(e, traceback.format_exc()))
    def _install_to_suite(self, suite, source_component_func, binary_component_func, source_suites=None, extra_source_archives=None):
        """Install upload to the given suite

        @type  suite: L{daklib.dbconn.Suite}
        @param suite: suite to install the package into. This is the real suite,
                      ie. after any redirection to NEW or a policy queue

        @param source_component_func: function to get the L{daklib.dbconn.Component}
                                      for a L{daklib.upload.Source} object

        @param binary_component_func: function to get the L{daklib.dbconn.Component}
                                      for a L{daklib.upload.Binary} object

        @param source_suites: see L{daklib.archive.ArchiveTransaction.install_binary}

        @param extra_source_archives: see L{daklib.archive.ArchiveTransaction.install_binary}

        @return: tuple with two elements. The first is a L{daklib.dbconn.DBSource}
                 object for the install source or C{None} if no source was
                 included. The second is a list of L{daklib.dbconn.DBBinary}
                 objects for the installed binary packages.
        """
        # XXX: move this function to ArchiveTransaction?

        control = self.changes.changes
        changed_by = get_or_set_maintainer(control.get('Changed-By', control['Maintainer']), self.session)

        if source_suites is None:
            # Default source-suite set: suites related to the target via an
            # 'Enhances' version check.
            source_suites = self.session.query(Suite).join((VersionCheck, VersionCheck.reference_id == Suite.suite_id)).filter(VersionCheck.check == 'Enhances').filter(VersionCheck.suite == suite).subquery()

        source = self.changes.source
        if source is not None:
            component = source_component_func(source)
            db_source = self.transaction.install_source(self.directory, source, suite, component, changed_by, fingerprint=self.fingerprint)
        # NOTE(review): an `else: db_source = None` branch and the
        # `db_binaries = []` initialisation appear elided here.

        for binary in self.changes.binaries:
            component = binary_component_func(binary)
            db_binary = self.transaction.install_binary(self.directory, binary, suite, component, fingerprint=self.fingerprint, source_suites=source_suites, extra_source_archives=extra_source_archives)
            db_binaries.append(db_binary)

        if suite.copychanges:
            # Keep a copy of the .changes file in the suite's dists directory.
            src = os.path.join(self.directory, self.changes.filename)
            dst = os.path.join(suite.archive.path, 'dists', suite.suite_name, self.changes.filename)
            self.transaction.fs.copy(src, dst)

        return (db_source, db_binaries)
    def _install_changes(self):
        # Record the .changes file itself in the database: a DBChange row
        # plus, for sourceful uploads and binNMUs, the changelog text.
        assert self.changes.valid_signature
        control = self.changes.changes
        session = self.transaction.session
        # NOTE(review): initialisation of `changelog_id` (presumably to None)
        # appears elided here.

        # Only add changelog for sourceful uploads and binNMUs
        if 'source' in self.changes.architectures or re_bin_only_nmu.search(control['Version']):
            query = 'INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id'
            changelog_id = session.execute(query, {'changelog': control['Changes']}).scalar()
            assert changelog_id is not None

        db_changes = DBChange()
        db_changes.changesname = self.changes.filename
        db_changes.source = control['Source']
        db_changes.binaries = control.get('Binary', None)
        db_changes.architecture = control['Architecture']
        db_changes.version = control['Version']
        db_changes.distribution = control['Distribution']
        db_changes.urgency = control['Urgency']
        db_changes.maintainer = control['Maintainer']
        db_changes.changedby = control.get('Changed-By', control['Maintainer'])
        db_changes.date = control['Date']
        db_changes.fingerprint = self.fingerprint.fingerprint
        db_changes.changelog_id = changelog_id
        db_changes.closes = self.changes.closed_bugs

        self.transaction.session.add(db_changes)
        self.transaction.session.flush()
        # NOTE(review): trailing `return db_changes` appears elided.
    def _install_policy(self, policy_queue, target_suite, db_changes, db_source, db_binaries):
        # Record the upload in a policy queue (e.g. NEW) instead of installing
        # it directly into a suite; copy the .changes into the queue directory.
        u = PolicyQueueUpload()
        u.policy_queue = policy_queue
        u.target_suite = target_suite
        u.changes = db_changes
        # NOTE(review): the assignment `u.source = db_source` appears elided
        # in this listing.
        u.binaries = db_binaries
        self.transaction.session.add(u)
        self.transaction.session.flush()

        dst = os.path.join(policy_queue.path, self.changes.filename)
        self.transaction.fs.copy(self.changes.path, dst)
        # NOTE(review): trailing `return u` appears elided.
    def try_autobyhand(self):
        """Try AUTOBYHAND

        Try to handle byhand packages automatically.

        @rtype:  list of L{daklib.upload.HashedFile}
        @return: list of remaining byhand files
        """
        assert len(self.reject_reasons) == 0
        assert self.changes.valid_signature
        assert self.final_suites is not None

        byhand = self.changes.byhand_files
        if len(byhand) == 0:
            # NOTE(review): early-exit body (presumably `return True`) appears
            # elided in this listing.

        suites = list(self.final_suites)
        assert len(suites) == 1, "BYHAND uploads must be to a single suite"
        # NOTE(review): `suite = suites[0]`, a `cnf = Config()` assignment and
        # the `remaining = []` / `for f in byhand:` scaffolding appear elided
        # around the per-file handling below.

        control = self.changes.changes
        automatic_byhand_packages = cnf.subtree("AutomaticByHandPackages")

        parts = f.filename.split('_', 2)
        # NOTE(review): a length check on `parts` appears elided before this
        # warning.
        print "W: unexpected byhand filename {0}. No automatic processing.".format(f.filename)

        package, version, archext = parts
        arch, ext = archext.split('.', 1)

        rule = automatic_byhand_packages.subtree(package)
        if rule['Source'] != control['Source'] or rule['Section'] != f.section or rule['Extension'] != ext:
            # NOTE(review): branch body (presumably keeping the file in
            # `remaining`) appears elided.

        script = rule['Script']
        retcode = subprocess.call([script, os.path.join(self.directory, f.filename), control['Version'], arch, os.path.join(self.directory, self.changes.filename)], shell=False)
        # NOTE(review): a `retcode != 0` check appears elided before this
        # warning.
        print "W: error processing {0}.".format(f.filename)

        return len(remaining) == 0
    def _install_byhand(self, policy_queue_upload, hashed_file):
        """install byhand file

        Records the byhand file against the policy queue upload and
        prepares its source/destination paths in the queue directory.

        @type  policy_queue_upload: L{daklib.dbconn.PolicyQueueUpload}
        @param policy_queue_upload: policy queue upload the byhand file belongs to

        @type  hashed_file: L{daklib.upload.HashedFile}
        @param hashed_file: byhand file to install
        """
        fs = self.transaction.fs
        session = self.transaction.session
        policy_queue = policy_queue_upload.policy_queue
        byhand_file = PolicyQueueByhandFile()
        byhand_file.upload = policy_queue_upload
        byhand_file.filename = hashed_file.filename
        session.add(byhand_file)
        src = os.path.join(self.directory, hashed_file.filename)
        dst = os.path.join(policy_queue.path, hashed_file.filename)
        # NOTE(review): the actual copy of src to dst (and any return value)
        # appears to be elided from this view — confirm.
    def _do_bts_versiontracking(self):
        """export version-tracking information for the BTS

        Writes C{<changes-base>.versions} (changelog versions of the
        unpacked source) and C{<changes-base>.debinfo} (binary -> source
        mapping) into the C{Dir::BTSVersionTrack} directory, if configured.
        """
        fs = self.transaction.fs
        # NOTE(review): `cnf` is not bound in the visible body (a
        # `cnf = Config()` line is presumably elided) — confirm.
        btsdir = cnf.get('Dir::BTSVersionTrack')
        if btsdir is None or btsdir == '':
        # strip the ".changes" suffix (8 characters) to get the base name
        base = os.path.join(btsdir, self.changes.filename[:-8])
        # version history from the unpacked source's debian/changelog
        sourcedir = self.unpacked_source()
        if sourcedir is not None:
            # NOTE(review): fh is never closed in the visible body — confirm
            # that a close/cleanup is merely elided from this view.
            fh = open(os.path.join(sourcedir, 'debian', 'changelog'), 'r')
            versions = fs.create("{0}.versions".format(base), mode=0o644)
            # keep only the version lines of the changelog
            for line in fh.readlines():
                if re_changelog_versions.match(line):
                    versions.write(line)
        # binary -> source mapping
        debinfo = fs.create("{0}.debinfo".format(base), mode=0o644)
        for binary in self.changes.binaries:
            control = binary.control
            source_package, source_version = binary.source
            line = " ".join([control['Package'], control['Version'], source_package, source_version])
            print >>debinfo, line
1102 def _policy_queue(self, suite):
1103 if suite.policy_queue is not None:
1104 return suite.policy_queue
        # NOTE(review): the `def install(self):` line appears to be elided
        # above this docstring text in the provided view.
        Install upload to a suite or policy queue. This method does B{not}
        handle uploads to NEW.
        You need to have called the C{check} method before calling this method.
        # Preconditions: check() passed cleanly and the signature verified.
        assert len(self.reject_reasons) == 0
        assert self.changes.valid_signature
        assert self.final_suites is not None
        db_changes = self._install_changes()
        for suite in self.final_suites:
            # Overrides may live in a different suite (suite.overridesuite).
            overridesuite = suite
            if suite.overridesuite is not None:
                overridesuite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
            policy_queue = self._policy_queue(suite)
            # If the suite has a policy queue, install to that queue's suite
            # instead of the target suite itself.
            redirected_suite = suite
            if policy_queue is not None:
                redirected_suite = policy_queue.suite
            # Sources already present in the target or redirected suite may
            # be reused when installing binaries.
            source_suites = self.session.query(Suite).filter(Suite.suite_id.in_([suite.suite_id, redirected_suite.suite_id])).subquery()
            source_component_func = lambda source: self._source_override(overridesuite, source).component
            binary_component_func = lambda binary: self._binary_component(overridesuite, binary)
            (db_source, db_binaries) = self._install_to_suite(redirected_suite, source_component_func, binary_component_func, source_suites=source_suites, extra_source_archives=[suite.archive])
            if policy_queue is not None:
                self._install_policy(policy_queue, suite, db_changes, db_source, db_binaries)
            # copy to build queues
            if policy_queue is None or policy_queue.send_to_build_queues:
                for build_queue in suite.copy_queues:
                    self._install_to_suite(build_queue.suite, source_component_func, binary_component_func, source_suites=source_suites, extra_source_archives=[suite.archive])
        self._do_bts_versiontracking()
    def install_to_new(self):
        """install upload to NEW

        Install upload to NEW. This method does B{not} handle regular uploads
        to suites or policy queues.

        You need to have called the C{check} method before calling this method.
        """
        # Uploads to NEW are special as we don't have overrides.
        assert len(self.reject_reasons) == 0
        assert self.changes.valid_signature
        assert self.final_suites is not None
        source = self.changes.source
        binaries = self.changes.binaries
        byhand = self.changes.byhand_files
        new_queue = self.transaction.session.query(PolicyQueue).filter_by(queue_name='new').one()
        # NOTE(review): a guard selecting the 'byhand' queue (presumably
        # `if len(byhand) > 0:`) appears elided; as shown, the next line
        # unconditionally overwrites the 'new' queue chosen above — confirm
        # against the full file.
        new_queue = self.transaction.session.query(PolicyQueue).filter_by(queue_name='byhand').one()
        new_suite = new_queue.suite
        # we need a suite to guess components
        suites = list(self.final_suites)
        assert len(suites) == 1, "NEW uploads must be to a single suite"
        # NOTE(review): `suite = suites[0]` appears elided here — `suite` is
        # referenced below but never assigned in the visible body.
        def binary_component_func(binary):
            # without overrides, let _binary_component guess the component
            return self._binary_component(suite, binary, only_overrides=False)
        # guess source component
        # XXX: should be moved into an extra method
        binary_component_names = set()
        for binary in binaries:
            component = binary_component_func(binary)
            binary_component_names.add(component.component_name)
        # pick a component (by ascending component id) that one of the
        # binaries maps to
        source_component_name = None
        for c in self.session.query(Component).order_by(Component.component_id):
            guess = c.component_name
            if guess in binary_component_names:
                source_component_name = guess
        if source_component_name is None:
            # no match: fall back to the first component by id
            source_component = self.session.query(Component).order_by(Component.component_id).first()
            # NOTE(review): an `else:` introducing the next line appears to be
            # elided from this view — confirm.
            source_component = self.session.query(Component).filter_by(component_name=source_component_name).one()
        source_component_func = lambda source: source_component
        db_changes = self._install_changes()
        (db_source, db_binaries) = self._install_to_suite(new_suite, source_component_func, binary_component_func, source_suites=True, extra_source_archives=[suite.archive])
        policy_upload = self._install_policy(new_queue, suite, db_changes, db_source, db_binaries)
        # NOTE(review): a `for f in byhand:` loop header appears to be elided
        # above the next line — `f` is not bound in the visible body.
            self._install_byhand(policy_upload, f)
        self._do_bts_versiontracking()
        """commit changes"""
        # NOTE(review): the `def commit(self):` line appears elided above.
        # Delegates to the underlying ArchiveTransaction.
        self.transaction.commit()
        """rollback changes"""
        # NOTE(review): the `def rollback(self):` line appears elided above.
        # Delegates to the underlying ArchiveTransaction.
        self.transaction.rollback()
    def __enter__(self):
        # NOTE(review): the body of __enter__ (presumably setup work plus
        # `return self`) appears to be elided from this view — confirm.
    def __exit__(self, type, value, traceback):
        """context manager exit: clean up and roll back

        Removes the temporary directory, if one is still present, and rolls
        back the archive transaction. No visible C{return} statement, so any
        in-flight exception propagates to the caller.
        """
        if self.directory is not None:
            shutil.rmtree(self.directory)
            self.directory = None
        self.transaction.rollback()