1 # Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
3 # This program is free software; you can redistribute it and/or modify
4 # it under the terms of the GNU General Public License as published by
5 # the Free Software Foundation; either version 2 of the License, or
6 # (at your option) any later version.
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
13 # You should have received a copy of the GNU General Public License along
14 # with this program; if not, write to the Free Software Foundation, Inc.,
15 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
17 """module to manipulate the archive
19 This module provides classes to manipulate the archive.
23 import daklib.checks as checks
24 from daklib.config import Config
25 import daklib.upload as upload
26 import daklib.utils as utils
27 from .fstransactions import FilesystemTransaction
28 from .regexes import re_changelog_versions, re_bin_only_nmu
31 from datetime import datetime
35 from sqlalchemy.orm.exc import NoResultFound
class ArchiveException(Exception):
    """Base class for errors raised while manipulating the archive."""
    pass
class HashMismatchException(ArchiveException):
    """Raised when a file's size or checksums do not match the database."""
    pass
class ArchiveTransaction(object):
    """manipulate the archive in a transaction

    Couples a database session with a filesystem transaction so that
    database and pool changes are committed or rolled back together.
    """
    def __init__(self):
        # Filesystem changes are staged via FilesystemTransaction so they
        # can be committed/rolled back together with the DB session.
        self.fs = FilesystemTransaction()
        self.session = DBConn().session()
def get_file(self, hashed_file, source_name):
    """Look for file `hashed_file` in database

    Args:
       hashed_file (daklib.upload.HashedFile): file to look for in the database
       source_name (str): source package name; used to derive the pool path

    Raises:
       KeyError: file was not found in the database
       HashMismatchException: hash mismatch

    Returns:
       `daklib.dbconn.PoolFile` object for the database
    """
    poolname = os.path.join(utils.poolify(source_name), hashed_file.filename)
    try:
        poolfile = self.session.query(PoolFile).filter_by(filename=poolname).one()
        # A file with the same pool name must match in size and all hashes.
        if poolfile.filesize != hashed_file.size or poolfile.md5sum != hashed_file.md5sum or poolfile.sha1sum != hashed_file.sha1sum or poolfile.sha256sum != hashed_file.sha256sum:
            raise HashMismatchException('{0}: Does not match file already existing in the pool.'.format(hashed_file.filename))
        return poolfile
    except NoResultFound:
        raise KeyError('{0} not found in database.'.format(poolname))
def _install_file(self, directory, hashed_file, archive, component, source_name):
    """Install a file in the pool of `archive`

    Will not give an error when the file is already present.

    Args:
       directory (str): directory the file is currently located in
       hashed_file (daklib.upload.HashedFile): file to install
       archive (daklib.dbconn.Archive): target archive
       component (daklib.dbconn.Component): target component
       source_name (str): source package name; used to derive the pool path

    Returns:
       `daklib.dbconn.PoolFile` object for the new file
    """
    session = self.session

    poolname = os.path.join(utils.poolify(source_name), hashed_file.filename)
    try:
        # get_file raises HashMismatchException if a different file with
        # the same pool name already exists.
        poolfile = self.get_file(hashed_file, source_name)
    except KeyError:
        poolfile = PoolFile(filename=poolname, filesize=hashed_file.size)
        poolfile.md5sum = hashed_file.md5sum
        poolfile.sha1sum = hashed_file.sha1sum
        poolfile.sha256sum = hashed_file.sha256sum
        session.add(poolfile)
        session.flush()

    try:
        session.query(ArchiveFile).filter_by(archive=archive, component=component, file=poolfile).one()
    except NoResultFound:
        # Not yet in this archive/component: register it and copy the
        # actual file into the pool.
        archive_file = ArchiveFile(archive, component, poolfile)
        session.add(archive_file)
        session.flush()

        path = os.path.join(archive.path, 'pool', component.component_name, poolname)
        hashed_file_path = os.path.join(directory, hashed_file.filename)
        self.fs.copy(hashed_file_path, path, link=False, mode=archive.mode)

    return poolfile
def install_binary(self, directory, binary, suite, component, allow_tainted=False, fingerprint=None, source_suites=None, extra_source_archives=None):
    """Install a binary package

    Args:
       directory (str): directory the binary package is located in
       binary (daklib.upload.Binary): binary package to install
       suite (daklib.dbconn.Suite): target suite
       component (daklib.dbconn.Component): target component

    Kwargs:
       allow_tainted (bool): allow to copy additional files from tainted archives
       fingerprint (daklib.dbconn.Fingerprint): optional fingerprint
       source_suites (list of daklib.dbconn.Suite or True): suites to copy
          the source from if they are not in `suite` or True to allow
          copying from any suite.
          This can also be a SQLAlchemy (sub)query object.
       extra_source_archives (list of daklib.dbconn.Archive): extra archives to copy Built-Using sources from

    Returns:
       `daklib.dbconn.DBBinary` object for the new package
    """
    session = self.session
    control = binary.control
    maintainer = get_or_set_maintainer(control['Maintainer'], session)
    architecture = get_architecture(control['Architecture'], session)

    # The matching source must be (or get) installed in the target suite.
    (source_name, source_version) = binary.source
    source_query = session.query(DBSource).filter_by(source=source_name, version=source_version)
    source = source_query.filter(DBSource.suites.contains(suite)).first()
    if source is None:
        # Source not in target suite: try to copy it from one of the
        # allowed source suites (True means "any suite").
        if source_suites != True:
            source_query = source_query.filter(DBSource.suites.any(source_suites))
        source = source_query.first()
        if source is None:
            raise ArchiveException('{0}: trying to install to {1}, but could not find source'.format(binary.hashed_file.filename, suite.suite_name))
        self.copy_source(source, suite, component)

    db_file = self._install_file(directory, binary.hashed_file, suite.archive, component, source_name)

    # `unique` identifies the binary; `rest` are fields that must merely
    # agree when the binary is already known.
    unique = dict(
        package=control['Package'],
        version=control['Version'],
        architecture=architecture,
        )
    rest = dict(
        source=source,
        maintainer=maintainer,
        poolfile=db_file,
        binarytype=binary.type,
        fingerprint=fingerprint,
        )

    try:
        db_binary = session.query(DBBinary).filter_by(**unique).one()
        for key, value in rest.iteritems():
            if getattr(db_binary, key) != value:
                raise ArchiveException('{0}: Does not match binary in database.'.format(binary.hashed_file.filename))
    except NoResultFound:
        db_binary = DBBinary(**unique)
        for key, value in rest.iteritems():
            setattr(db_binary, key, value)
        session.add(db_binary)
        session.flush()

        import_metadata_into_db(db_binary, session)

        self._add_built_using(db_binary, binary.hashed_file.filename, control, suite, extra_archives=extra_source_archives)

    if suite not in db_binary.suites:
        db_binary.suites.append(suite)

    return db_binary
def _ensure_extra_source_exists(self, filename, source, archive, extra_archives=None):
    """ensure source exists in the given archive

    This is intended to be used to check that Built-Using sources exist.

    Args:
       filename (str): filename to use in error messages
       source (daklib.dbconn.DBSource): source to look for
       archive (daklib.dbconn.Archive): archive to look in

    Kwargs:
       extra_archives (list of daklib.dbconn.Archive): list of archives to copy
          the source package from if it is not yet present in `archive`
    """
    session = self.session
    db_file = session.query(ArchiveFile).filter_by(file=source.poolfile, archive=archive).first()
    if db_file is not None:
        # Already present in the target archive; nothing to do.
        return

    # Try to copy file from one extra archive
    if extra_archives is None:
        extra_archives = []
    db_file = session.query(ArchiveFile).filter_by(file=source.poolfile).filter(ArchiveFile.archive_id.in_([ a.archive_id for a in extra_archives])).first()
    if db_file is None:
        raise ArchiveException('{0}: Built-Using refers to package {1} (= {2}) not in target archive {3}.'.format(filename, source.source, source.version, archive.archive_name))

    source_archive = db_file.archive
    for dsc_file in source.srcfiles:
        af = session.query(ArchiveFile).filter_by(file=dsc_file.poolfile, archive=source_archive, component=db_file.component).one()
        # We were given an explicit list of archives so it is okay to copy from tainted archives.
        self._copy_file(af.file, archive, db_file.component, allow_tainted=True)
def _add_built_using(self, db_binary, filename, control, suite, extra_archives=None):
    """Add Built-Using sources to `db_binary.extra_sources`

    Args:
       db_binary (daklib.dbconn.DBBinary): binary to attach the extra sources to
       filename (str): filename to use in error messages
       control (dict-like): control fields of the binary package
       suite (daklib.dbconn.Suite): suite the binary is installed into;
          Built-Using sources must exist in its archive

    Kwargs:
       extra_archives (list of daklib.dbconn.Archive): archives Built-Using
          sources may be copied from if missing in the target archive
    """
    session = self.session
    built_using = control.get('Built-Using', None)

    if built_using is not None:
        for dep in apt_pkg.parse_depends(built_using):
            # Built-Using entries must be exactly "package (= version)".
            assert len(dep) == 1, 'Alternatives are not allowed in Built-Using field'
            bu_source_name, bu_source_version, comp = dep[0]
            assert comp == '=', 'Built-Using must contain strict dependencies'

            bu_source = session.query(DBSource).filter_by(source=bu_source_name, version=bu_source_version).first()
            if bu_source is None:
                raise ArchiveException('{0}: Built-Using refers to non-existing source package {1} (= {2})'.format(filename, bu_source_name, bu_source_version))

            # Copy the referenced source into the target archive if needed.
            self._ensure_extra_source_exists(filename, bu_source, suite.archive, extra_archives=extra_archives)

            db_binary.extra_sources.append(bu_source)
def install_source(self, directory, source, suite, component, changed_by, allow_tainted=False, fingerprint=None):
    """Install a source package

    Args:
       directory (str): directory the source package is located in
       source (daklib.upload.Source): source package to install
       suite (daklib.dbconn.Suite): target suite
       component (daklib.dbconn.Component): target component
       changed_by (daklib.dbconn.Maintainer): person who prepared this version of the package

    Kwargs:
       allow_tainted (bool): allow to copy additional files from tainted archives
       fingerprint (daklib.dbconn.Fingerprint): optional fingerprint

    Returns:
       `daklib.dbconn.DBSource` object for the new source
    """
    session = self.session
    archive = suite.archive
    # NOTE(review): a line defining `control` (used below; presumably
    # `control = source.dsc`) was lost in extraction -- TODO restore.
    maintainer = get_or_set_maintainer(control['Maintainer'], session)
    source_name = control['Source']

    ### Add source package to database

    # We need to install the .dsc first as the DBSource object refers to it.
    db_file_dsc = self._install_file(directory, source._dsc_file, archive, component, source_name)

    # NOTE(review): the `unique = dict(...)` / `rest = dict(...)` constructor
    # lines are missing here; the keyword arguments below are their surviving
    # members (`unique` identifies the source, `rest` must merely match).
        version=control['Version'],
        maintainer=maintainer,
        changedby=changed_by,
        #install_date=datetime.now().date(),
        poolfile=db_file_dsc,
        fingerprint=fingerprint,
        dm_upload_allowed=(control.get('DM-Upload-Allowed', 'no') == 'yes'),

    # NOTE(review): a `try:` opening this lookup is missing from this view.
    db_source = session.query(DBSource).filter_by(**unique).one()
    for key, value in rest.iteritems():
        if getattr(db_source, key) != value:
            raise ArchiveException('{0}: Does not match source in database.'.format(source._dsc_file.filename))
    except NoResultFound:
        db_source = DBSource(**unique)
        for key, value in rest.iteritems():
            setattr(db_source, key, value)
        # XXX: set as default in postgres?
        db_source.install_date = datetime.now().date()
        session.add(db_source)
        # NOTE(review): a session.flush() likely followed here -- confirm.

        # Add .dsc file. Other files will be added later.
        db_dsc_file = DSCFile()
        db_dsc_file.source = db_source
        db_dsc_file.poolfile = db_file_dsc
        session.add(db_dsc_file)

    if suite in db_source.suites:
        # NOTE(review): an early `return db_source` appears to be missing here.
    db_source.suites.append(suite)

    ### Now add remaining files and copy them to the archive.

    for hashed_file in source.files.itervalues():
        hashed_file_path = os.path.join(directory, hashed_file.filename)
        if os.path.exists(hashed_file_path):
            db_file = self._install_file(directory, hashed_file, archive, component, source_name)
        # NOTE(review): an `else:` header is missing -- the two lines below
        # fetch an already-known pool file and copy it into the archive.
            db_file = self.get_file(hashed_file, source_name)
            self._copy_file(db_file, archive, component, allow_tainted=allow_tainted)

        db_dsc_file = DSCFile()
        db_dsc_file.source = db_source
        db_dsc_file.poolfile = db_file
        session.add(db_dsc_file)

    # Importing is safe as we only arrive here when we did not find the source already installed earlier.
    import_metadata_into_db(db_source, session)

    # Uploaders are the maintainer and co-maintainers from the Uploaders field
    db_source.uploaders.append(maintainer)
    if 'Uploaders' in control:
        def split_uploaders(field):
            # Split on "," that separates maintainer entries; ">" guards
            # against commas inside "Name <addr>" tokens.
            for u in re.sub(">[ ]*,", ">\t", field).split("\t"):
                # NOTE(review): the loop body (presumably `yield u.strip()`)
                # is missing from this view.

        for u in split_uploaders(control['Uploaders']):
            db_source.uploaders.append(get_or_set_maintainer(u, session))
    # NOTE(review): a final `return db_source` is missing from this view.
def _copy_file(self, db_file, archive, component, allow_tainted=False):
    """Copy a file to the given archive and component

    Args:
       db_file (daklib.dbconn.PoolFile): file to copy
       archive (daklib.dbconn.Archive): target archive
       component (daklib.dbconn.Component): target component

    Kwargs:
       allow_tainted (bool): allow to copy from tainted archives (such as NEW)
    """
    session = self.session

    if session.query(ArchiveFile).filter_by(archive=archive, component=component, file=db_file).first() is None:
        # Find a copy of the file in some (untainted, unless allowed)
        # archive to use as the source of the copy.
        query = session.query(ArchiveFile).filter_by(file=db_file, component=component)
        if not allow_tainted:
            query = query.join(Archive).filter(Archive.tainted == False)

        source_af = query.first()
        if source_af is None:
            raise ArchiveException('cp: Could not find {0} in component {1} in any archive.'.format(db_file.filename, component.component_name))
        target_af = ArchiveFile(archive, component, db_file)
        session.add(target_af)
        # flush so target_af.path is usable below (restored line -- confirm)
        session.flush()
        self.fs.copy(source_af.path, target_af.path, link=False, mode=archive.mode)
def copy_binary(self, db_binary, suite, component, allow_tainted=False, extra_archives=None):
    """Copy a binary package to the given suite and component

    Args:
       db_binary (daklib.dbconn.DBBinary): binary to copy
       suite (daklib.dbconn.Suite): target suite
       component (daklib.dbconn.Component): target component

    Kwargs:
       allow_tainted (bool): allow to copy from tainted archives (such as NEW)
       extra_archives (list of daklib.dbconn.Archive): extra archives to copy Built-Using sources from
    """
    session = self.session
    archive = suite.archive
    # A tainted target archive may freely reference tainted files.
    # (restored lines -- confirm against upstream)
    if archive.tainted:
        allow_tainted = True

    # make sure built-using packages are present in target archive
    filename = db_binary.poolfile.filename
    for db_source in db_binary.extra_sources:
        self._ensure_extra_source_exists(filename, db_source, archive, extra_archives=extra_archives)

    # copy the binary itself and register it in the suite
    db_file = db_binary.poolfile
    self._copy_file(db_file, suite.archive, component, allow_tainted=allow_tainted)
    if suite not in db_binary.suites:
        db_binary.suites.append(suite)
def copy_source(self, db_source, suite, component, allow_tainted=False):
    """Copy a source package to the given suite and component

    Args:
       db_source (daklib.dbconn.DBSource): source to copy
       suite (daklib.dbconn.Suite): target suite
       component (daklib.dbconn.Component): target component

    Kwargs:
       allow_tainted (bool): allow to copy from tainted archives (such as NEW)
    """
    archive = suite.archive
    # A tainted target archive may freely reference tainted files.
    # (restored lines -- confirm against upstream)
    if archive.tainted:
        allow_tainted = True
    # Copy every file belonging to the source package, including the .dsc.
    for db_dsc_file in db_source.srcfiles:
        self._copy_file(db_dsc_file.poolfile, archive, component, allow_tainted=allow_tainted)
    if suite not in db_source.suites:
        db_source.suites.append(suite)
def remove_file(self, db_file, archive, component):
    """Remove a file from a given archive and component

    Args:
       db_file (daklib.dbconn.PoolFile): file to remove
       archive (daklib.dbconn.Archive): archive to remove the file from
       component (daklib.dbconn.Component): component to remove the file from
    """
    # .one() is required: filter_by alone returns a Query object, which
    # has no .path attribute and cannot be passed to session.delete().
    af = self.session.query(ArchiveFile).filter_by(file=db_file, archive=archive, component=component).one()
    self.fs.unlink(af.path)
    self.session.delete(af)
def remove_binary(self, binary, suite):
    """Remove a binary from a given suite

    Only detaches the binary from the suite; the pool file itself is not
    removed here.

    Args:
       binary (daklib.dbconn.DBBinary): binary to remove
       suite (daklib.dbconn.Suite): suite to remove the package from
    """
    binary.suites.remove(suite)
def remove_source(self, source, suite):
    """Remove a source from a given suite

    Only detaches the source from the suite; pool files are not removed.

    Raises:
       ArchiveException: source package is still referenced by other
          binaries in the suite

    Args:
       source (daklib.dbconn.DBSource): source to remove
       suite (daklib.dbconn.Suite): suite to remove the package from
    """
    session = self.session

    # Refuse to remove a source that binaries in the suite still need.
    query = session.query(DBBinary).filter_by(source=source) \
        .filter(DBBinary.suites.contains(suite))
    if query.first() is not None:
        raise ArchiveException('src:{0} is still used by binaries in suite {1}'.format(source.source, suite.suite_name))

    source.suites.remove(suite)
# NOTE(review): the `def commit(self):` header and docstring were lost in
# extraction; the line below is (part of) the commit body, presumably inside
# a try/finally that also commits/rolls back self.fs -- TODO restore.
self.session.commit()
# NOTE(review): likely part of commit()'s cleanup (finally) path.
self.session.rollback()
# NOTE(review): the `def rollback(self):` header is missing above this
# docstring; the rollback body follows (self.fs.rollback() not visible).
"""rollback changes"""
self.session.rollback()
# NOTE(review): a `def __enter__(self):` returning self appears to be missing
# before this context-manager exit hook; the __exit__ body (commit on success,
# rollback otherwise) is also not visible.
def __exit__(self, type, value, traceback):
class ArchiveUpload(object):
    """handle an upload

    This class can be used in a with-statement:

       with ArchiveUpload(...) as upload:
          ...

    Doing so will automatically run any required cleanup and also rollback the
    transaction if it was not committed.

    Attributes:
       changes (daklib.upload.Changes): upload to process
       directory (str): directory with temporary copy of files. set by `prepare`
       fingerprint (daklib.dbconn.Fingerprint): fingerprint used to sign the upload
       new (bool): upload is NEW. set by `check`
       reject_reasons (list of str): reasons why the upload cannot be accepted
       session: database session
       transaction (daklib.archive.ArchiveTransaction): transaction used to handle the upload
       warnings (list of str): warnings (NOT USED YET)
    """
def __init__(self, directory, changes, keyrings):
    """Set up an upload handler.

    Args:
       directory (str): directory the upload's files are located in
       changes (daklib.upload.Changes): signed .changes file to process
       keyrings: keyrings used to verify signatures
    """
    self.transaction = ArchiveTransaction()
    self.session = self.transaction.session

    self.original_directory = directory
    self.original_changes = changes
    # Re-parsed copy of the .changes; set by `prepare`.
    self.changes = None
    self.directory = None
    self.keyrings = keyrings

    self.fingerprint = self.session.query(Fingerprint).filter_by(fingerprint=changes.primary_fingerprint).one()

    # Restored initializers (lost in extraction): `warnings` is appended to
    # by _map_suite, `new` is documented as set by `check`.
    self.reject_reasons = []
    self.warnings = []
    self.final_suites = None
    self.new = False

    self._new_queue = self.session.query(PolicyQueue).filter_by(queue_name='new').one()
    self._new = self._new_queue.suite
525 """prepare upload for further processing
527 This copies the files involved to a temporary directory. If you use
528 this method directly, you have to remove the directory given by the
529 `directory` attribute later on your own.
531 Instead of using the method directly, you can also use a with-statement:
533 with ArchiveUpload(...) as upload:
536 This will automatically handle any required cleanup.
538 assert self.directory is None
539 assert self.original_changes.valid_signature
542 session = self.transaction.session
544 self.directory = tempfile.mkdtemp(dir=cnf.get('Dir::TempPath'))
545 with FilesystemTransaction() as fs:
546 src = os.path.join(self.original_directory, self.original_changes.filename)
547 dst = os.path.join(self.directory, self.original_changes.filename)
550 self.changes = upload.Changes(self.directory, self.original_changes.filename, self.keyrings)
552 for f in self.changes.files.itervalues():
553 src = os.path.join(self.original_directory, f.filename)
554 dst = os.path.join(self.directory, f.filename)
557 source = self.changes.source
558 if source is not None:
559 for f in source.files.itervalues():
560 src = os.path.join(self.original_directory, f.filename)
561 dst = os.path.join(self.directory, f.filename)
562 if f.filename not in self.changes.files:
563 db_file = self.transaction.get_file(f, source.dsc['Source'])
564 db_archive_file = session.query(ArchiveFile).filter_by(file=db_file).first()
565 fs.copy(db_archive_file.path, dst, symlink=True)
def unpacked_source(self):
    """Path to unpacked source

    Get path to the unpacked source. This method does unpack the source
    into a temporary directory under `self.directory` if it has not
    been done so already.

    Returns:
       String giving the path to the unpacked source directory
       or None if no source was included in the upload.
    """
    assert self.directory is not None

    source = self.changes.source
    if source is None:
        return None

    dsc_path = os.path.join(self.directory, source._dsc_file.filename)

    sourcedir = os.path.join(self.directory, 'source')
    if not os.path.exists(sourcedir):
        # --no-copy: extract in place without duplicating the tarballs.
        subprocess.check_call(["dpkg-source", "--no-copy", "-x", dsc_path, sourcedir], shell=False)
    if not os.path.isdir(sourcedir):
        raise Exception("{0} is not a directory after extracting source package".format(sourcedir))
    return sourcedir
def _map_suite(self, suite_name):
    """Apply the configured SuiteMappings rules to a target suite name.

    Args:
       suite_name (str): suite name from the .changes Distribution field

    Returns:
       mapped suite name, or None when the target suite is to be ignored
    """
    for rule in Config().value_list("SuiteMappings"):
        fields = rule.split()
        rtype = fields[0]
        if rtype == "map" or rtype == "silent-map":
            (src, dst) = fields[1:3]
            if src == suite_name:
                suite_name = dst
                if rtype != "silent-map":
                    # fix: message used '{0} to {0}', repeating the source
                    self.warnings.append('Mapping {0} to {1}.'.format(src, dst))
        elif rtype == "ignore":
            ignored = fields[1]
            if suite_name == ignored:
                self.warnings.append('Ignoring target suite {0}.'.format(ignored))
                suite_name = None
        elif rtype == "reject":
            rejected = fields[1]
            if suite_name == rejected:
                # fix: used undefined name `suite` in the format call
                self.reject_reasons.append('Uploads to {0} are not accepted.'.format(rejected))
        ## XXX: propup-version and map-unreleased not yet implemented
    return suite_name
def _mapped_suites(self):
    """Get target suites after mappings

    Returns:
       list of daklib.dbconn.Suite giving the mapped target suites of this upload
    """
    session = self.session

    suite_names = []
    for dist in self.changes.distributions:
        suite_name = self._map_suite(dist)
        # _map_suite returns None for suites configured to be ignored.
        if suite_name is not None:
            suite_names.append(suite_name)

    suites = session.query(Suite).filter(Suite.suite_name.in_(suite_names))
    return suites
def _check_new(self, suite):
    """Check if upload is NEW

    An upload is NEW if it has binary or source packages that do not have
    an override in `suite` OR if it references files ONLY in a tainted
    archive (eg. when it references files in NEW).

    Args:
       suite (daklib.dbconn.Suite): suite (or override suite) to check against

    Returns:
       True if the upload is NEW, False otherwise
    """
    session = self.session

    # Check for missing overrides
    for b in self.changes.binaries:
        override = self._binary_override(suite, b)
        if override is None:
            return True

    if self.changes.source is not None:
        override = self._source_override(suite, self.changes.source)
        if override is None:
            return True

    # Check if we reference a file only in a tainted archive
    files = self.changes.files.values()
    if self.changes.source is not None:
        files.extend(self.changes.source.files.values())
    for f in files:
        query = session.query(ArchiveFile).join(PoolFile).filter(PoolFile.sha1sum == f.sha1sum)
        query_untainted = query.join(Archive).filter(Archive.tainted == False)

        in_archive = (query.first() is not None)
        in_untainted_archive = (query_untainted.first() is not None)

        if in_archive and not in_untainted_archive:
            return True

    return False
def _final_suites(self):
    """Compute the set of suites this upload will finally be installed to.

    Also sets `self.new` when any target suite requires NEW processing.

    Returns:
       set of daklib.dbconn.Suite
    """
    session = self.session

    mapped_suites = self._mapped_suites()
    final_suites = set()

    for suite in mapped_suites:
        # Overrides may live in a different suite (e.g. for *-updates).
        overridesuite = suite
        if suite.overridesuite is not None:
            overridesuite = session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
        if self._check_new(overridesuite):
            self.new = True
        final_suites.add(suite)

    return final_suites
def _binary_override(self, suite, binary):
    """Get override entry for a binary

    Args:
       suite (daklib.dbconn.Suite)
       binary (daklib.upload.Binary)

    Returns:
       daklib.dbconn.Override or None
    """
    if suite.overridesuite is not None:
        # fix: was a bare `session`, which is undefined in this method
        suite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()

    query = self.session.query(Override).filter_by(suite=suite, package=binary.control['Package']) \
        .join(Component).filter(Component.component_name == binary.component) \
        .join(OverrideType).filter(OverrideType.overridetype == binary.type)

    try:
        return query.one()
    except NoResultFound:
        return None
def _source_override(self, suite, source):
    """Get override entry for a source

    Args:
       suite (daklib.dbconn.Suite)
       source (daklib.upload.Source)

    Returns:
       daklib.dbconn.Override or None
    """
    if suite.overridesuite is not None:
        # fix: was a bare `session`, which is undefined in this method
        suite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()

    # XXX: component for source?
    query = self.session.query(Override).filter_by(suite=suite, package=source.dsc['Source']) \
        .join(OverrideType).filter(OverrideType.overridetype == 'dsc')

    try:
        return query.one()
    except NoResultFound:
        return None
def check(self, force=False):
    """run checks against the upload

    Kwargs:
       force (bool): ignore failing forcable checks

    Returns:
       True if all checks passed, False otherwise
    """
    # XXX: needs to be better structured.
    assert self.changes.valid_signature

    # NOTE(review): a `try:` plus the `for chk in (` tuple header are missing
    # from this view; the class references below are surviving members of the
    # per-upload check tuple (each presumably invoked as chk().check(self)).
    checks.SignatureCheck,
    checks.SingleDistributionCheck,
    checks.NoSourceOnlyCheck,

    final_suites = self._final_suites()
    if len(final_suites) == 0:
        self.reject_reasons.append('Ended with no suite to install to.')
        # NOTE(review): an early `return False` presumably followed here.

    # NOTE(review): the per-suite check tuple header is missing as well; the
    # two class references below belong to it.
    checks.SourceFormatCheck,
    checks.SuiteArchitectureCheck,
    for suite in final_suites:
        # NOTE(review): `chk` comes from the missing enclosing tuple loop.
        chk().per_suite_check(self, suite)

    if len(self.reject_reasons) != 0:
    # NOTE(review): the `return False` body of this branch is missing.
    self.final_suites = final_suites
    # NOTE(review): a `return True` is missing here.

    # NOTE(review): these handlers belong to the missing `try:` above.
    except checks.Reject as e:
        self.reject_reasons.append(unicode(e))
    except Exception as e:
        self.reject_reasons.append("Processing raised an exception: {0}.\n{1}".format(e, traceback.format_exc()))
    # NOTE(review): a final `return False` after the handlers is missing.
def _install_to_suite(self, suite, source_component_func, binary_component_func, source_suites=None, extra_source_archives=None):
    """Install upload to the given suite

    Args:
       suite (daklib.dbconn.Suite): suite to install the package into.
          This is the real suite, ie. after any redirection to NEW or a policy queue
       source_component_func: function to get the `daklib.dbconn.Component`
          for a `daklib.upload.Source` object
       binary_component_func: function to get the `daklib.dbconn.Component`
          for a `daklib.upload.Binary` object

    Kwargs:
       source_suites: see `daklib.archive.ArchiveTransaction.install_binary`
       extra_source_archives: see `daklib.archive.ArchiveTransaction.install_binary`

    Returns:
       tuple with two elements. The first is a `daklib.dbconn.DBSource`
       object for the install source or None if no source was included.
       The second is a list of `daklib.dbconn.DBBinary` objects for the
       installed binary packages.
    """
    # XXX: move this function to ArchiveTransaction?

    control = self.changes.changes
    changed_by = get_or_set_maintainer(control.get('Changed-By', control['Maintainer']), self.session)

    if source_suites is None:
        # Default: sources may be copied from suites `suite` has version
        # checks against.
        source_suites = self.session.query(Suite).join(VersionCheck, VersionCheck.reference_id == Suite.suite_id).filter(VersionCheck.suite == suite).subquery()

    source = self.changes.source
    if source is not None:
        component = source_component_func(source)
        db_source = self.transaction.install_source(self.directory, source, suite, component, changed_by, fingerprint=self.fingerprint)
    else:
        db_source = None

    db_binaries = []
    for binary in self.changes.binaries:
        component = binary_component_func(binary)
        db_binary = self.transaction.install_binary(self.directory, binary, suite, component, fingerprint=self.fingerprint, source_suites=source_suites, extra_source_archives=extra_source_archives)
        db_binaries.append(db_binary)

    if suite.copychanges:
        src = os.path.join(self.directory, self.changes.filename)
        dst = os.path.join(suite.archive.path, 'dists', suite.suite_name, self.changes.filename)
        self.transaction.fs.copy(src, dst)

    return (db_source, db_binaries)
def _install_changes(self):
    """Add the upload's .changes file to the database.

    Returns:
       `daklib.dbconn.DBChange` object for the upload
    """
    assert self.changes.valid_signature
    control = self.changes.changes
    session = self.transaction.session

    # changelog_id must be initialized: binary-only non-binNMU uploads get
    # no changelog entry.
    changelog_id = None
    # Only add changelog for sourceful uploads and binNMUs
    if 'source' in self.changes.architectures or re_bin_only_nmu.search(control['Version']):
        query = 'INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id'
        changelog_id = session.execute(query, {'changelog': control['Changes']}).scalar()
        assert changelog_id is not None

    db_changes = DBChange()
    db_changes.changesname = self.changes.filename
    db_changes.source = control['Source']
    db_changes.binaries = control.get('Binary', None)
    db_changes.architecture = control['Architecture']
    db_changes.version = control['Version']
    db_changes.distribution = control['Distribution']
    db_changes.urgency = control['Urgency']
    db_changes.maintainer = control['Maintainer']
    db_changes.changedby = control.get('Changed-By', control['Maintainer'])
    db_changes.date = control['Date']
    db_changes.fingerprint = self.fingerprint.fingerprint
    db_changes.changelog_id = changelog_id
    db_changes.closes = self.changes.closed_bugs

    self.transaction.session.add(db_changes)
    self.transaction.session.flush()

    return db_changes
def _install_policy(self, policy_queue, target_suite, db_changes, db_source, db_binaries):
    """Register the upload with a policy queue.

    Args:
       policy_queue (daklib.dbconn.PolicyQueue): policy queue to add the upload to
       target_suite (daklib.dbconn.Suite): suite the upload is ultimately destined for
       db_changes (daklib.dbconn.DBChange): database record for the .changes file
       db_source (daklib.dbconn.DBSource or None): installed source, if any
       db_binaries (list of daklib.dbconn.DBBinary): installed binaries

    Returns:
       `daklib.dbconn.PolicyQueueUpload` object for the queued upload
    """
    u = PolicyQueueUpload()
    u.policy_queue = policy_queue
    u.target_suite = target_suite
    u.changes = db_changes
    # restored: db_source was otherwise unused by this method
    u.source = db_source
    u.binaries = db_binaries
    self.transaction.session.add(u)
    self.transaction.session.flush()

    dst = os.path.join(policy_queue.path, self.changes.filename)
    self.transaction.fs.copy(self.changes.path, dst)

    # install_to_new binds this return value as `policy_upload`.
    return u
def try_autobyhand(self):
    """
    Try to handle byhand packages automatically.

    Returns:
       list of `daklib.upload.hashed_file` for the remaining byhand packages
    """
    assert len(self.reject_reasons) == 0
    assert self.changes.valid_signature
    assert self.final_suites is not None

    byhand = self.changes.byhand_files
    # NOTE(review): an early return for uploads without byhand files is
    # missing from this view.

    suites = list(self.final_suites)
    assert len(suites) == 1, "BYHAND uploads must be to a single suite"
    # NOTE(review): `suite = suites[0]` presumably followed here.

    control = self.changes.changes
    # NOTE(review): a `cnf = Config()` line is missing -- `cnf` used below.
    automatic_byhand_packages = cnf.subtree("AutomaticByHandPackages")

    # NOTE(review): a `remaining = []` accumulator and a `for f in byhand:`
    # loop header are missing; the lines below form that loop's body.
    package, version, archext = f.filename.split('_', 2)
    arch, ext = archext.split('.', 1)

    rule = automatic_byhand_packages.get(package)
    # NOTE(review): handling for a missing rule (append f to `remaining` and
    # continue) is not visible here.

    if rule['Source'] != control['Source'] or rule['Section'] != f.section or rule['Extension'] != ext:
    # NOTE(review): branch body missing (append f to `remaining`; continue).

    script = rule['Script']
    retcode = subprocess.call([script, os.path.join(self.directory, f.filename), control['Version'], arch, os.path.join(self.directory, self.changes.filename)], shell=False)
    # NOTE(review): a retcode != 0 check presumably guards the warning below.
    print "W: error processing {0}.".format(f.filename)

    return len(remaining) == 0
def _install_byhand(self, policy_queue_upload, hashed_file):
    """Install a byhand file into its policy queue.

    Args:
       policy_queue_upload (daklib.dbconn.PolicyQueueUpload): upload the byhand file belongs to
       hashed_file (daklib.upload.HashedFile): byhand file to install

    Returns:
       `daklib.dbconn.PolicyQueueByhandFile` object for the new file
    """
    fs = self.transaction.fs
    session = self.transaction.session
    policy_queue = policy_queue_upload.policy_queue

    byhand_file = PolicyQueueByhandFile()
    byhand_file.upload = policy_queue_upload
    byhand_file.filename = hashed_file.filename
    session.add(byhand_file)
    session.flush()

    src = os.path.join(self.directory, hashed_file.filename)
    dst = os.path.join(policy_queue.path, hashed_file.filename)
    # NOTE(review): copy restored from truncation; confirm whether upstream
    # passes an explicit mode here.
    fs.copy(src, dst)

    return byhand_file
def _do_bts_versiontracking(self):
    """Export version-tracking data for the BTS (Dir::BTSVersionTrack)."""
    fs = self.transaction.fs
    # NOTE(review): a `cnf = Config()` line is missing -- `cnf` used below.
    btsdir = cnf.get('Dir::BTSVersionTrack')
    if btsdir is None or btsdir == '':
    # NOTE(review): an early `return` body for this branch is missing.

    # Strip the trailing ".changes" (8 chars) for the per-upload base name.
    base = os.path.join(btsdir, self.changes.filename[:-8])

    # version history
    sourcedir = self.unpacked_source()
    if sourcedir is not None:
        fh = open(os.path.join(sourcedir, 'debian', 'changelog'), 'r')
        versions = fs.create("{0}.versions".format(base), mode=0o644)
        for line in fh.readlines():
            if re_changelog_versions.match(line):
            # NOTE(review): the write of `line` to `versions`, plus the
            # close() calls for fh/versions, are missing from this view.

    # binary -> source mapping
    debinfo = fs.create("{0}.debinfo".format(base), mode=0o644)
    for binary in self.changes.binaries:
        control = binary.control
        source_package, source_version = binary.source
        line = " ".join([control['Package'], control['Version'], source_package, source_version])
        print >>debinfo, line
    # NOTE(review): a debinfo.close() is not visible here.
# NOTE(review): the `def install(self):` header and docstring opening were
# lost in extraction; the text below is the surviving docstring body.
    """
    Install upload to a suite or policy queue. This method does *not*
    handle uploads to NEW.

    You need to have called the `check` method before calling this method.
    """
    assert len(self.reject_reasons) == 0
    assert self.changes.valid_signature
    assert self.final_suites is not None

    db_changes = self._install_changes()

    for suite in self.final_suites:
        # Overrides may live in a separate override suite.
        overridesuite = suite
        if suite.overridesuite is not None:
            overridesuite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()

        # Suites with a policy queue install to the queue's suite instead.
        redirected_suite = suite
        if suite.policy_queue is not None:
            redirected_suite = suite.policy_queue.suite

        source_component_func = lambda source: self._source_override(overridesuite, source).component
        binary_component_func = lambda binary: self._binary_override(overridesuite, binary).component

        (db_source, db_binaries) = self._install_to_suite(redirected_suite, source_component_func, binary_component_func, extra_source_archives=[suite.archive])

        if suite.policy_queue is not None:
            self._install_policy(suite.policy_queue, suite, db_changes, db_source, db_binaries)

        # copy to build queues
        if suite.policy_queue is None or suite.policy_queue.send_to_build_queues:
            for build_queue in suite.copy_queues:
                self._install_to_suite(build_queue.suite, source_component_func, binary_component_func, extra_source_archives=[suite.archive])

    self._do_bts_versiontracking()
def install_to_new(self):
    """install upload to NEW

    Install upload to NEW. This method does *not* handle regular uploads
    to suites or policy queues.

    You need to have called the `check` method before calling this method.
    """
    # Uploads to NEW are special as we don't have overrides.
    assert len(self.reject_reasons) == 0
    assert self.changes.valid_signature
    assert self.final_suites is not None

    source = self.changes.source
    binaries = self.changes.binaries
    byhand = self.changes.byhand_files

    new_queue = self.transaction.session.query(PolicyQueue).filter_by(queue_name='new').one()
    # NOTE(review): a condition is missing around the next line -- presumably
    # the 'byhand' queue is used when the upload contains byhand files.
    new_queue = self.transaction.session.query(PolicyQueue).filter_by(queue_name='byhand').one()
    new_suite = new_queue.suite

    # we need a suite to guess components
    suites = list(self.final_suites)
    assert len(suites) == 1, "NEW uploads must be to a single suite"
    # NOTE(review): `suite = suites[0]` is missing -- `suite` is used below.

    def binary_component_func(binary):
        # Prefer an existing override; otherwise fall back to the component
        # the binary claims for itself.
        override = self._binary_override(suite, binary)
        if override is not None:
            return override.component
        component_name = binary.component
        component = self.session.query(Component).filter_by(component_name=component_name).one()
        # NOTE(review): a `return component` is missing from this view.

    # guess source component
    # XXX: should be moved into an extra method
    binary_component_names = set()
    for binary in binaries:
        component = binary_component_func(binary)
        binary_component_names.add(component.component_name)
    source_component_name = None
    for guess in ('main', 'contrib', 'non-free'):
        if guess in binary_component_names:
            source_component_name = guess
            # NOTE(review): a `break` presumably followed here.
    if source_component_name is None:
        raise Exception('Could not guess source component.')
    source_component = self.session.query(Component).filter_by(component_name=source_component_name).one()
    source_component_func = lambda source: source_component

    db_changes = self._install_changes()
    (db_source, db_binaries) = self._install_to_suite(new_suite, source_component_func, binary_component_func, source_suites=True, extra_source_archives=[suite.archive])
    policy_upload = self._install_policy(new_queue, suite, db_changes, db_source, db_binaries)

    # NOTE(review): a `for f in byhand:` loop header is missing here.
    self._install_byhand(policy_upload, f)

    self._do_bts_versiontracking()
1073 """commit changes"""
1074 self.transaction.commit()
1077 """rollback changes"""
1078 self.transaction.rollback()
1080 def __enter__(self):
1084 def __exit__(self, type, value, traceback):
1085 if self.directory is not None:
1086 shutil.rmtree(self.directory)
1087 self.directory = None
1089 self.transaction.rollback()