5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
47 import simplejson as json
49 from datetime import datetime, timedelta
50 from errno import ENOENT
51 from tempfile import mkstemp, mkdtemp
53 from inspect import getargspec
56 from sqlalchemy import create_engine, Table, MetaData, Column, Integer
57 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, backref
58 from sqlalchemy import types as sqltypes
60 # Don't remove this, we re-export the exceptions to scripts which import us
61 from sqlalchemy.exc import *
62 from sqlalchemy.orm.exc import NoResultFound
64 # Only import Config until Queue stuff is changed to store its config
66 from config import Config
67 from textutils import fix_maintainer
68 from dak_exceptions import NoSourceFieldError
70 # suppress some deprecation warnings in squeeze related to sqlalchemy
72 warnings.filterwarnings('ignore', \
73 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
75 # TODO: sqlalchemy needs some extra configuration to correctly reflect
76 # the ind_deb_contents_* indexes - we ignore the warnings at the moment
77 warnings.filterwarnings("ignore", 'Predicate of partial index', SAWarning)
80 ################################################################################
82 # Patch in support for the debversion field type so that it works during
86 # that is for sqlalchemy 0.6
87 UserDefinedType = sqltypes.UserDefinedType
89 # this one for sqlalchemy 0.5
90 UserDefinedType = sqltypes.TypeEngine
92 class DebVersion(UserDefinedType):
93 def get_col_spec(self):
96 def bind_processor(self, dialect):
99 # ' = None' is needed for sqlalchemy 0.5:
100 def result_processor(self, dialect, coltype = None):
103 sa_major_version = sqlalchemy.__version__[0:3]
104 if sa_major_version in ["0.5", "0.6"]:
105 from sqlalchemy.databases import postgres
106 postgres.ischema_names['debversion'] = DebVersion
108 raise Exception("dak only ported to SQLA versions 0.5 and 0.6. See daklib/dbconn.py")
110 ################################################################################
112 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
114 ################################################################################
116 def session_wrapper(fn):
118 Wrapper around common ".., session=None):" handling. If the wrapped
119 function is called without passing 'session', we create a local one
120 and destroy it when the function ends.
122 Also attaches a commit_or_flush method to the session; if we created a
123 local session, this is a synonym for session.commit(), otherwise it is a
124 synonym for session.flush().
127 def wrapped(*args, **kwargs):
128 private_transaction = False
130 # Find the session object
131 session = kwargs.get('session')
134 if len(args) <= len(getargspec(fn)[0]) - 1:
135 # No session specified as last argument or in kwargs
136 private_transaction = True
137 session = kwargs['session'] = DBConn().session()
139 # Session is last argument in args
143 session = args[-1] = DBConn().session()
144 private_transaction = True
146 if private_transaction:
147 session.commit_or_flush = session.commit
149 session.commit_or_flush = session.flush
152 return fn(*args, **kwargs)
154 if private_transaction:
155 # We created a session; close it.
158 wrapped.__doc__ = fn.__doc__
159 wrapped.func_name = fn.func_name
163 __all__.append('session_wrapper')
165 ################################################################################
167 class ORMObject(object):
169 ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
170 derived classes must implement the summary() method.
173 def properties(self):
175 This method should be implemented by all derived classes and returns a
176 list of the important properties. The properties 'created' and
177 'modified' will be added automatically. A suffix '_count' should be
178 added to properties that are lists or query objects. The most important
179 property name should be returned as the first element in the list
180 because it is used by repr().
186 Returns a JSON representation of the object based on the properties
187 returned from the properties() method.
190 # add created and modified
191 all_properties = self.properties() + ['created', 'modified']
192 for property in all_properties:
193 # check for list or query
194 if property[-6:] == '_count':
195 value = getattr(self, property[:-6])
196 if hasattr(value, '__len__'):
199 elif hasattr(value, 'count'):
201 value = value.count()
203 raise KeyError('Do not understand property %s.' % property)
206 value = getattr(self, property)
210 elif isinstance(value, ORMObject):
211 # use repr() for ORMObject types
214 # we want a string for all other types because json cannot
217 data[property] = value
218 return json.dumps(data)
222 Returns the name of the class.
224 return type(self).__name__
228 Returns a short string representation of the object using the first
229 element from the properties() method.
231 primary_property = self.properties()[0]
232 value = getattr(self, primary_property)
233 return '<%s %s>' % (self.classname(), str(value))
237 Returns a human readable form of the object using the properties()
240 return '<%s %s>' % (self.classname(), self.json())
242 __all__.append('ORMObject')
244 ################################################################################
class Architecture(ORMObject):
    """ORM class for an entry in the architecture table.

    Instances compare equal (or unequal) directly against plain strings,
    so callers can write e.g. ``arch == 'amd64'``.
    """

    def __init__(self, arch_string = None, description = None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # Support direct comparison against a bare architecture name.
        if not isinstance(val, str):
            # Signal Python to fall back to the normal comparison operator.
            return NotImplemented
        return self.arch_string == val

    def __ne__(self, val):
        # Mirror of __eq__ for inequality against a bare string.
        if not isinstance(val, str):
            # Signal Python to fall back to the normal comparison operator.
            return NotImplemented
        return self.arch_string != val

    def properties(self):
        # 'arch_string' comes first because ORMObject.__repr__ uses the
        # first element as the primary property.
        return ['arch_string', 'arch_id', 'suites_count']
266 __all__.append('Architecture')
269 def get_architecture(architecture, session=None):
271 Returns Architecture object for given C{architecture} name.
273 @type architecture: string
274 @param architecture: The name of the architecture
276 @type session: Session
277 @param session: Optional SQLA session object (a temporary one will be
278 generated if not supplied)
281 @return: Architecture object for the given arch (None if not present)
284 q = session.query(Architecture).filter_by(arch_string=architecture)
288 except NoResultFound:
291 __all__.append('get_architecture')
293 # TODO: should be removed because the implementation is too trivial
295 def get_architecture_suites(architecture, session=None):
297 Returns list of Suite objects for given C{architecture} name
299 @type architecture: str
300 @param architecture: Architecture name to search for
302 @type session: Session
303 @param session: Optional SQL session object (a temporary one will be
304 generated if not supplied)
307 @return: list of Suite objects for the given name (may be empty)
310 return get_architecture(architecture, session).suites
312 __all__.append('get_architecture_suites')
314 ################################################################################
316 class Archive(object):
317 def __init__(self, *args, **kwargs):
321 return '<Archive %s>' % self.archive_name
323 __all__.append('Archive')
326 def get_archive(archive, session=None):
328 returns database id for given C{archive}.
330 @type archive: string
331 @param archive: the name of the archive
333 @type session: Session
334 @param session: Optional SQLA session object (a temporary one will be
335 generated if not supplied)
338 @return: Archive object for the given name (None if not present)
341 archive = archive.lower()
343 q = session.query(Archive).filter_by(archive_name=archive)
347 except NoResultFound:
350 __all__.append('get_archive')
352 ################################################################################
354 class BinAssociation(object):
355 def __init__(self, *args, **kwargs):
359 return '<BinAssociation %s (%s, %s)>' % (self.ba_id, self.binary, self.suite)
361 __all__.append('BinAssociation')
363 ################################################################################
365 class BinContents(object):
366 def __init__(self, *args, **kwargs):
370 return '<BinContents (%s, %s)>' % (self.binary, self.filename)
372 __all__.append('BinContents')
374 ################################################################################
376 class DBBinary(object):
377 def __init__(self, *args, **kwargs):
381 return '<DBBinary %s (%s, %s)>' % (self.package, self.version, self.architecture)
383 __all__.append('DBBinary')
386 def get_suites_binary_in(package, session=None):
388 Returns list of Suite objects which given C{package} name is in
391 @param package: DBBinary package name to search for
394 @return: list of Suite objects for the given package
397 return session.query(Suite).join(BinAssociation).join(DBBinary).filter_by(package=package).all()
399 __all__.append('get_suites_binary_in')
402 def get_binary_from_id(binary_id, session=None):
404 Returns DBBinary object for given C{id}
407 @param binary_id: Id of the required binary
409 @type session: Session
410 @param session: Optional SQLA session object (a temporary one will be
411 generated if not supplied)
414 @return: DBBinary object for the given binary (None if not present)
417 q = session.query(DBBinary).filter_by(binary_id=binary_id)
421 except NoResultFound:
424 __all__.append('get_binary_from_id')
427 def get_binaries_from_name(package, version=None, architecture=None, session=None):
429 Returns list of DBBinary objects for given C{package} name
432 @param package: DBBinary package name to search for
434 @type version: str or None
435 @param version: Version to search for (or None)
437 @type architecture: str, list or None
438 @param architecture: Architectures to limit to (or None if no limit)
440 @type session: Session
441 @param session: Optional SQL session object (a temporary one will be
442 generated if not supplied)
445 @return: list of DBBinary objects for the given name (may be empty)
448 q = session.query(DBBinary).filter_by(package=package)
450 if version is not None:
451 q = q.filter_by(version=version)
453 if architecture is not None:
454 if not isinstance(architecture, list):
455 architecture = [architecture]
456 q = q.join(Architecture).filter(Architecture.arch_string.in_(architecture))
462 __all__.append('get_binaries_from_name')
465 def get_binaries_from_source_id(source_id, session=None):
467 Returns list of DBBinary objects for given C{source_id}
470 @param source_id: source_id to search for
472 @type session: Session
473 @param session: Optional SQL session object (a temporary one will be
474 generated if not supplied)
477 @return: list of DBBinary objects for the given name (may be empty)
480 return session.query(DBBinary).filter_by(source_id=source_id).all()
482 __all__.append('get_binaries_from_source_id')
485 def get_binary_from_name_suite(package, suitename, session=None):
486 ### For dak examine-package
487 ### XXX: Doesn't use object API yet
489 sql = """SELECT DISTINCT(b.package), b.version, c.name, su.suite_name
490 FROM binaries b, files fi, location l, component c, bin_associations ba, suite su
491 WHERE b.package='%(package)s'
493 AND fi.location = l.id
494 AND l.component = c.id
497 AND su.suite_name %(suitename)s
498 ORDER BY b.version DESC"""
500 return session.execute(sql % {'package': package, 'suitename': suitename})
502 __all__.append('get_binary_from_name_suite')
505 def get_binary_components(package, suitename, arch, session=None):
506 # Check for packages that have moved from one component to another
507 query = """SELECT c.name FROM binaries b, bin_associations ba, suite s, location l, component c, architecture a, files f
508 WHERE b.package=:package AND s.suite_name=:suitename
509 AND (a.arch_string = :arch OR a.arch_string = 'all')
510 AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id
511 AND f.location = l.id
512 AND l.component = c.id
515 vals = {'package': package, 'suitename': suitename, 'arch': arch}
517 return session.execute(query, vals)
519 __all__.append('get_binary_components')
521 ################################################################################
523 class BinaryACL(object):
524 def __init__(self, *args, **kwargs):
528 return '<BinaryACL %s>' % self.binary_acl_id
530 __all__.append('BinaryACL')
532 ################################################################################
534 class BinaryACLMap(object):
535 def __init__(self, *args, **kwargs):
539 return '<BinaryACLMap %s>' % self.binary_acl_map_id
541 __all__.append('BinaryACLMap')
543 ################################################################################
548 ArchiveDir "%(archivepath)s";
549 OverrideDir "%(overridedir)s";
550 CacheDir "%(cachedir)s";
555 Packages::Compress ". bzip2 gzip";
556 Sources::Compress ". bzip2 gzip";
561 bindirectory "incoming"
566 BinOverride "override.sid.all3";
567 BinCacheDB "packages-accepted.db";
569 FileList "%(filelist)s";
572 Packages::Extensions ".deb .udeb";
575 bindirectory "incoming/"
578 BinOverride "override.sid.all3";
579 SrcOverride "override.sid.all3.src";
580 FileList "%(filelist)s";
584 class BuildQueue(object):
585 def __init__(self, *args, **kwargs):
589 return '<BuildQueue %s>' % self.queue_name
591 def write_metadata(self, starttime, force=False):
592 # Do we write out metafiles?
593 if not (force or self.generate_metadata):
596 session = DBConn().session().object_session(self)
598 fl_fd = fl_name = ac_fd = ac_name = None
600 arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
601 startdir = os.getcwd()
604 # Grab files we want to include
605 newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
606 # Write file list with newer files
607 (fl_fd, fl_name) = mkstemp()
609 os.write(fl_fd, '%s\n' % n.fullpath)
614 # Write minimal apt.conf
615 # TODO: Remove hardcoding from template
616 (ac_fd, ac_name) = mkstemp()
617 os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
619 'cachedir': cnf["Dir::Cache"],
620 'overridedir': cnf["Dir::Override"],
624 # Run apt-ftparchive generate
625 os.chdir(os.path.dirname(ac_name))
626 os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))
628 # Run apt-ftparchive release
629 # TODO: Eww - fix this
630 bname = os.path.basename(self.path)
634 # We have to remove the Release file otherwise it'll be included in the
637 os.unlink(os.path.join(bname, 'Release'))
641 os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))
643 # Crude hack with open and append, but this whole section is and should be redone.
644 if self.notautomatic:
645 release=open("Release", "a")
646 release.write("NotAutomatic: yes")
651 keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
652 if cnf.has_key("Dinstall::SigningPubKeyring"):
653 keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]
655 os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))
657 # Move the files if we got this far
658 os.rename('Release', os.path.join(bname, 'Release'))
660 os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))
662 # Clean up any left behind files
689 def clean_and_update(self, starttime, Logger, dryrun=False):
690 """WARNING: This routine commits for you"""
691 session = DBConn().session().object_session(self)
693 if self.generate_metadata and not dryrun:
694 self.write_metadata(starttime)
696 # Grab files older than our execution time
697 older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
703 Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
705 Logger.log(["I: Removing %s from the queue" % o.fullpath])
706 os.unlink(o.fullpath)
709 # If it wasn't there, don't worry
710 if e.errno == ENOENT:
713 # TODO: Replace with proper logging call
714 Logger.log(["E: Could not remove %s" % o.fullpath])
721 for f in os.listdir(self.path):
722 if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release') or f.startswith('advisory'):
726 r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
727 except NoResultFound:
728 fp = os.path.join(self.path, f)
730 Logger.log(["I: Would remove unused link %s" % fp])
732 Logger.log(["I: Removing unused link %s" % fp])
736 Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])
738 def add_file_from_pool(self, poolfile):
739 """Copies a file into the pool. Assumes that the PoolFile object is
740 attached to the same SQLAlchemy session as the Queue object is.
742 The caller is responsible for committing after calling this function."""
743 poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]
745 # Check if we have a file of this name or this ID already
746 for f in self.queuefiles:
747 if f.fileid is not None and f.fileid == poolfile.file_id or \
748 f.poolfile.filename == poolfile_basename:
749 # In this case, update the BuildQueueFile entry so we
750 # don't remove it too early
751 f.lastused = datetime.now()
752 DBConn().session().object_session(poolfile).add(f)
755 # Prepare BuildQueueFile object
756 qf = BuildQueueFile()
757 qf.build_queue_id = self.queue_id
758 qf.lastused = datetime.now()
759 qf.filename = poolfile_basename
761 targetpath = poolfile.fullpath
762 queuepath = os.path.join(self.path, poolfile_basename)
766 # We need to copy instead of symlink
768 utils.copy(targetpath, queuepath)
769 # NULL in the fileid field implies a copy
772 os.symlink(targetpath, queuepath)
773 qf.fileid = poolfile.file_id
777 # Get the same session as the PoolFile is using and add the qf to it
778 DBConn().session().object_session(poolfile).add(qf)
783 __all__.append('BuildQueue')
786 def get_build_queue(queuename, session=None):
788 Returns BuildQueue object for given C{queue name}, creating it if it does not
791 @type queuename: string
792 @param queuename: The name of the queue
794 @type session: Session
795 @param session: Optional SQLA session object (a temporary one will be
796 generated if not supplied)
799 @return: BuildQueue object for the given queue
802 q = session.query(BuildQueue).filter_by(queue_name=queuename)
806 except NoResultFound:
809 __all__.append('get_build_queue')
811 ################################################################################
813 class BuildQueueFile(object):
814 def __init__(self, *args, **kwargs):
818 return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)
822 return os.path.join(self.buildqueue.path, self.filename)
825 __all__.append('BuildQueueFile')
827 ################################################################################
829 class ChangePendingBinary(object):
830 def __init__(self, *args, **kwargs):
834 return '<ChangePendingBinary %s>' % self.change_pending_binary_id
836 __all__.append('ChangePendingBinary')
838 ################################################################################
840 class ChangePendingFile(object):
841 def __init__(self, *args, **kwargs):
845 return '<ChangePendingFile %s>' % self.change_pending_file_id
847 __all__.append('ChangePendingFile')
849 ################################################################################
851 class ChangePendingSource(object):
852 def __init__(self, *args, **kwargs):
856 return '<ChangePendingSource %s>' % self.change_pending_source_id
858 __all__.append('ChangePendingSource')
860 ################################################################################
862 class Component(object):
863 def __init__(self, *args, **kwargs):
866 def __eq__(self, val):
867 if isinstance(val, str):
868 return (self.component_name == val)
869 # This signals to use the normal comparison operator
870 return NotImplemented
872 def __ne__(self, val):
873 if isinstance(val, str):
874 return (self.component_name != val)
875 # This signals to use the normal comparison operator
876 return NotImplemented
879 return '<Component %s>' % self.component_name
882 __all__.append('Component')
885 def get_component(component, session=None):
887 Returns database id for given C{component}.
889 @type component: string
890 @param component: The name of the override type
893 @return: the database id for the given component
896 component = component.lower()
898 q = session.query(Component).filter_by(component_name=component)
902 except NoResultFound:
905 __all__.append('get_component')
907 ################################################################################
909 class DBConfig(object):
910 def __init__(self, *args, **kwargs):
914 return '<DBConfig %s>' % self.name
916 __all__.append('DBConfig')
918 ################################################################################
921 def get_or_set_contents_file_id(filename, session=None):
923 Returns database id for given filename.
925 If no matching file is found, a row is inserted.
927 @type filename: string
928 @param filename: The filename
929 @type session: SQLAlchemy
930 @param session: Optional SQL session object (a temporary one will be
931 generated if not supplied). If not passed, a commit will be performed at
932 the end of the function, otherwise the caller is responsible for committing.
935 @return: the database id for the given component
938 q = session.query(ContentFilename).filter_by(filename=filename)
941 ret = q.one().cafilename_id
942 except NoResultFound:
943 cf = ContentFilename()
944 cf.filename = filename
946 session.commit_or_flush()
947 ret = cf.cafilename_id
951 __all__.append('get_or_set_contents_file_id')
954 def get_contents(suite, overridetype, section=None, session=None):
956 Returns contents for a suite / overridetype combination, limiting
957 to a section if not None.
960 @param suite: Suite object
962 @type overridetype: OverrideType
963 @param overridetype: OverrideType object
965 @type section: Section
966 @param section: Optional section object to limit results to
968 @type session: SQLAlchemy
969 @param session: Optional SQL session object (a temporary one will be
970 generated if not supplied)
973 @return: ResultsProxy object set up to return tuples of (filename, section,
977 # find me all of the contents for a given suite
978 contents_q = """SELECT (p.path||'/'||n.file) AS fn,
982 FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
983 JOIN content_file_names n ON (c.filename=n.id)
984 JOIN binaries b ON (b.id=c.binary_pkg)
985 JOIN override o ON (o.package=b.package)
986 JOIN section s ON (s.id=o.section)
987 WHERE o.suite = :suiteid AND o.type = :overridetypeid
988 AND b.type=:overridetypename"""
990 vals = {'suiteid': suite.suite_id,
991 'overridetypeid': overridetype.overridetype_id,
992 'overridetypename': overridetype.overridetype}
994 if section is not None:
995 contents_q += " AND s.id = :sectionid"
996 vals['sectionid'] = section.section_id
998 contents_q += " ORDER BY fn"
1000 return session.execute(contents_q, vals)
1002 __all__.append('get_contents')
1004 ################################################################################
1006 class ContentFilepath(object):
1007 def __init__(self, *args, **kwargs):
1011 return '<ContentFilepath %s>' % self.filepath
1013 __all__.append('ContentFilepath')
1016 def get_or_set_contents_path_id(filepath, session=None):
1018 Returns database id for given path.
1020 If no matching file is found, a row is inserted.
1022 @type filepath: string
1023 @param filepath: The filepath
1025 @type session: SQLAlchemy
1026 @param session: Optional SQL session object (a temporary one will be
1027 generated if not supplied). If not passed, a commit will be performed at
1028 the end of the function, otherwise the caller is responsible for committing.
1031 @return: the database id for the given path
1034 q = session.query(ContentFilepath).filter_by(filepath=filepath)
1037 ret = q.one().cafilepath_id
1038 except NoResultFound:
1039 cf = ContentFilepath()
1040 cf.filepath = filepath
1042 session.commit_or_flush()
1043 ret = cf.cafilepath_id
1047 __all__.append('get_or_set_contents_path_id')
1049 ################################################################################
1051 class ContentAssociation(object):
1052 def __init__(self, *args, **kwargs):
1056 return '<ContentAssociation %s>' % self.ca_id
1058 __all__.append('ContentAssociation')
1060 def insert_content_paths(binary_id, fullpaths, session=None):
1062 Make sure given path is associated with given binary id
1064 @type binary_id: int
1065 @param binary_id: the id of the binary
1066 @type fullpaths: list
1067 @param fullpaths: the list of paths of the file being associated with the binary
1068 @type session: SQLAlchemy session
1069 @param session: Optional SQLAlchemy session. If this is passed, the caller
1070 is responsible for ensuring a transaction has begun and committing the
1071 results or rolling back based on the result code. If not passed, a commit
1072 will be performed at the end of the function, otherwise the caller is
1073 responsible for committing.
1075 @return: True upon success
1078 privatetrans = False
1080 session = DBConn().session()
1085 def generate_path_dicts():
1086 for fullpath in fullpaths:
1087 if fullpath.startswith( './' ):
1088 fullpath = fullpath[2:]
1090 yield {'filename':fullpath, 'id': binary_id }
1092 for d in generate_path_dicts():
1093 session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
1102 traceback.print_exc()
1104 # Only rollback if we set up the session ourself
1111 __all__.append('insert_content_paths')
1113 ################################################################################
1115 class DSCFile(object):
1116 def __init__(self, *args, **kwargs):
1120 return '<DSCFile %s>' % self.dscfile_id
1122 __all__.append('DSCFile')
1125 def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
1127 Returns a list of DSCFiles which may be empty
1129 @type dscfile_id: int (optional)
1130 @param dscfile_id: the dscfile_id of the DSCFiles to find
1132 @type source_id: int (optional)
1133 @param source_id: the source id related to the DSCFiles to find
1135 @type poolfile_id: int (optional)
1136 @param poolfile_id: the poolfile id related to the DSCFiles to find
1139 @return: Possibly empty list of DSCFiles
1142 q = session.query(DSCFile)
1144 if dscfile_id is not None:
1145 q = q.filter_by(dscfile_id=dscfile_id)
1147 if source_id is not None:
1148 q = q.filter_by(source_id=source_id)
1150 if poolfile_id is not None:
1151 q = q.filter_by(poolfile_id=poolfile_id)
1155 __all__.append('get_dscfiles')
1157 ################################################################################
1159 class PoolFile(ORMObject):
1160 def __init__(self, filename = None, location = None, filesize = -1, \
1162 self.filename = filename
1163 self.location = location
1164 self.filesize = filesize
1165 self.md5sum = md5sum
1169 return os.path.join(self.location.path, self.filename)
1171 def is_valid(self, filesize = -1, md5sum = None):\
1172 return self.filesize == filesize and self.md5sum == md5sum
1174 def properties(self):
1175 return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
1176 'sha256sum', 'location', 'source', 'last_used']
1178 __all__.append('PoolFile')
1181 def check_poolfile(filename, filesize, md5sum, location_id, session=None):
1184 (ValidFileFound [boolean], PoolFile object or None)
1186 @type filename: string
1187 @param filename: the filename of the file to check against the DB
1190 @param filesize: the size of the file to check against the DB
1192 @type md5sum: string
1193 @param md5sum: the md5sum of the file to check against the DB
1195 @type location_id: int
1196 @param location_id: the id of the location to look in
1199 @return: Tuple of length 2.
1200 - If valid pool file found: (C{True}, C{PoolFile object})
1201 - If valid pool file not found:
1202 - (C{False}, C{None}) if no file found
1203 - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
1206 poolfile = session.query(Location).get(location_id). \
1207 files.filter_by(filename=filename).first()
1209 if poolfile and poolfile.is_valid(filesize = filesize, md5sum = md5sum):
1212 return (valid, poolfile)
1214 __all__.append('check_poolfile')
1216 # TODO: the implementation can trivially be inlined at the place where the
1217 # function is called
1219 def get_poolfile_by_id(file_id, session=None):
1221 Returns a PoolFile objects or None for the given id
1224 @param file_id: the id of the file to look for
1226 @rtype: PoolFile or None
1227 @return: either the PoolFile object or None
1230 return session.query(PoolFile).get(file_id)
1232 __all__.append('get_poolfile_by_id')
1235 def get_poolfile_like_name(filename, session=None):
1237 Returns an array of PoolFile objects which are like the given name
1239 @type filename: string
1240 @param filename: the filename of the file to check against the DB
1243 @return: array of PoolFile objects
1246 # TODO: There must be a way of properly using bind parameters with %FOO%
1247 q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))
1251 __all__.append('get_poolfile_like_name')
1254 def add_poolfile(filename, datadict, location_id, session=None):
1256 Add a new file to the pool
1258 @type filename: string
1259 @param filename: filename
1261 @type datadict: dict
1262 @param datadict: dict with needed data
1264 @type location_id: int
1265 @param location_id: database id of the location
1268 @return: the PoolFile object created
1270 poolfile = PoolFile()
1271 poolfile.filename = filename
1272 poolfile.filesize = datadict["size"]
1273 poolfile.md5sum = datadict["md5sum"]
1274 poolfile.sha1sum = datadict["sha1sum"]
1275 poolfile.sha256sum = datadict["sha256sum"]
1276 poolfile.location_id = location_id
1278 session.add(poolfile)
1279 # Flush to get a file id (NB: This is not a commit)
1284 __all__.append('add_poolfile')
1286 ################################################################################
class Fingerprint(object):
    """ORM class for a row of the C{fingerprint} table (a GPG key fingerprint)."""

    def __init__(self, fingerprint = None):
        self.fingerprint = fingerprint

    def __repr__(self):
        # The repr body had been fused into __init__ by truncation; restored
        # as a proper __repr__ method.
        return '<Fingerprint %s>' % self.fingerprint
1295 __all__.append('Fingerprint')
def get_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    @type fpr: string
    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).

    @rtype: Fingerprint
    @return: the Fingerprint object for the given fpr or None
    """
    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    # The `except` clause had lost its try/return wrapper; restored to the
    # standard one-or-None lookup pattern used throughout this module.
    try:
        ret = q.one()
    except NoResultFound:
        ret = None

    return ret
1322 __all__.append('get_fingerprint')
def get_or_set_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    If no matching fpr is found, a row is inserted.

    @type fpr: string
    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @rtype: Fingerprint
    @return: the Fingerprint object for the given fpr
    """
    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    try:
        ret = q.one()
    except NoResultFound:
        # Not present yet: insert a new row and make it visible to this
        # session (commit_or_flush honours the caller-owned-session contract).
        fingerprint = Fingerprint()
        fingerprint.fingerprint = fpr
        session.add(fingerprint)
        session.commit_or_flush()
        ret = fingerprint

    return ret
1357 __all__.append('get_or_set_fingerprint')
1359 ################################################################################
# Helper routine for Keyring class
def get_ldap_name(entry):
    """Extract the LDAP name (cn + mn + sn) from *entry* and return it as a
    single space-joined string.

    @type entry: dict
    @param entry: LDAP result entry mapping attribute name -> list of values
    """
    # Truncation had lost the accumulator and lookup assignments, leaving
    # `name` and `ret` undefined; restored.
    name = []
    for k in ["cn", "mn", "sn"]:
        ret = entry.get(k)
        # Skip missing attributes, empty strings and the "-" placeholder.
        if ret and ret[0] != "" and ret[0] != "-":
            name.append(ret[0])
    return " ".join(name)
1370 ################################################################################
class Keyring(object):
    """Represents one keyring from the C{keyrings} table and provides helpers
    to parse the keys it contains via gpg and map them to uids."""

    # Command template; '%s' is substituted with the keyring path in load_keys().
    gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
                     " --with-colons --fingerprint --fingerprint"

    def __init__(self, *args, **kwargs):
        # NOTE(review): the following return looks like the body of a lost
        # __repr__ method — an __init__ should not return a value; confirm
        # against the full source.
        return '<Keyring %s>' % self.keyring_name

    def de_escape_gpg_str(self, txt):
        """Decode gpg's \\xNN escape sequences in *txt* back to characters."""
        esclist = re.split(r'(\\x..)', txt)
        for x in range(1,len(esclist),2):
            # Odd entries are the captured '\xNN' escapes; replace each with
            # the character for its hex code.
            esclist[x] = "%c" % (int(esclist[x][2:],16))
        return "".join(esclist)

    def parse_address(self, uid):
        """parses uid and returns a tuple of real name and email address"""
        (name, address) = email.Utils.parseaddr(uid)
        # Strip any parenthesised comment, then undo gpg escaping.
        name = re.sub(r"\s*[(].*[)]", "", name)
        name = self.de_escape_gpg_str(name)
        return (name, address)

    def load_keys(self, keyring):
        """Parse *keyring* with gpg --with-colons and populate the key
        metadata and fingerprint lookup tables on this object."""
        if not self.keyring_id:
            raise Exception('Must be initialized with database information')

        k = os.popen(self.gpg_invocation % keyring, "r")

        for line in k.xreadlines():
            field = line.split(":")
            if field[0] == "pub":
                # New public key record: field 9 carries the primary uid.
                (name, addr) = self.parse_address(field[9])
                self.keys[key]["email"] = addr
                self.keys[key]["name"] = name
                self.keys[key]["fingerprints"] = []
            elif key and field[0] == "sub" and len(field) >= 12:
                # Subkey: remember whether it has the signing capability ('s').
                signingkey = ("s" in field[11])
            elif key and field[0] == "uid":
                (name, addr) = self.parse_address(field[9])
                # Only keep the first plausible email address per key.
                if "email" not in self.keys[key] and "@" in addr:
                    self.keys[key]["email"] = addr
                    self.keys[key]["name"] = name
            elif signingkey and field[0] == "fpr":
                # Fingerprint of a signing-capable key: index it for lookup.
                self.keys[key]["fingerprints"].append(field[9])
                self.fpr_lookup[field[9]] = key

    def import_users_from_ldap(self, session):
        """Look up the loaded keys in LDAP and attach uids to them; returns
        (byname, byuid) dictionaries of the uids seen."""
        LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
        LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]

        # Anonymous bind is sufficient for this read-only search.
        l = ldap.open(LDAPServer)
        l.simple_bind_s("","")
        Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
               "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
               ["uid", "keyfingerprint", "cn", "mn", "sn"])

        ldap_fin_uid_id = {}

        uid = entry["uid"][0]
        name = get_ldap_name(entry)
        fingerprints = entry["keyFingerPrint"]
        for f in fingerprints:
            key = self.fpr_lookup.get(f, None)
            if key not in self.keys:
                # NOTE(review): a `continue` appears to have been lost here;
                # skipping fingerprints we did not load is presumably intended.
                self.keys[key]["uid"] = uid

            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, name)
            byname[uid] = (keyid, name)

        return (byname, byuid)

    def generate_users_from_keyring(self, format, session):
        """Generate uid entries for all loaded keys using *format* (a string
        with one '%s' placeholder); returns (byname, byuid) dictionaries."""
        for x in self.keys.keys():
            if "email" not in self.keys[x]:
                # Key without a usable email address gets the invalid marker.
                self.keys[x]["uid"] = format % "invalid-uid"

            # NOTE(review): an `else:` guard around the next block appears to
            # have been lost in this excerpt — confirm against full source.
            uid = format % self.keys[x]["email"]
            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, self.keys[x]["name"])
            byname[uid] = (keyid, self.keys[x]["name"])
            self.keys[x]["uid"] = uid

        # Record a catch-all entry for keys whose uid could not be generated.
        uid = format % "invalid-uid"
        keyid = get_or_set_uid(uid, session).uid_id
        byuid[keyid] = (uid, "ungeneratable user id")
        byname[uid] = (keyid, "ungeneratable user id")

        return (byname, byuid)
1492 __all__.append('Keyring')
def get_keyring(keyring, session=None):
    """
    If C{keyring} does not have an entry in the C{keyrings} table yet, return None
    If C{keyring} already has an entry, simply return the existing Keyring

    @type keyring: string
    @param keyring: the keyring name

    @rtype: Keyring
    @return: the Keyring object for this keyring
    """
    q = session.query(Keyring).filter_by(keyring_name=keyring)

    # Restored the try/return wrapper around the dangling except clause.
    try:
        return q.one()
    except NoResultFound:
        return None
1514 __all__.append('get_keyring')
1516 ################################################################################
class KeyringACLMap(object):
    """ORM class for a row of the C{keyring_acl_map} table."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<KeyringACLMap %s>' % self.keyring_acl_map_id
1525 __all__.append('KeyringACLMap')
1527 ################################################################################
class DBChange(object):
    """ORM class for a row of the C{changes} table (an uploaded .changes file)."""

    def __init__(self, *args, **kwargs):
        # NOTE(review): the following return looks like the body of a lost
        # __repr__ method — an __init__ should not return a value.
        return '<DBChange %s>' % self.changesname

    def clean_from_queue(self):
        """Detach this changes entry from its policy queue."""
        session = DBConn().session().object_session(self)

        # Remove changes_pool_files entries

        # Remove changes_pending_files references

        # Clear out of queue
        self.in_queue = None
        self.approved_for_id = None
1549 __all__.append('DBChange')
def get_dbchange(filename, session=None):
    """
    returns DBChange object for given C{filename}.

    @type filename: string
    @param filename: the name of the file

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: DBChange
    @return: DBChange object for the given filename (C{None} if not present)
    """
    q = session.query(DBChange).filter_by(changesname=filename)

    # Restored the try/return wrapper around the dangling except clause;
    # also repaired the garbled "C(unknown)" docstring reference.
    try:
        return q.one()
    except NoResultFound:
        return None
1574 __all__.append('get_dbchange')
1576 ################################################################################
class Location(object):
    """ORM class for a row of the C{location} table (a pool directory)."""

    def __init__(self, path = None):
        # Truncation had dropped this assignment; __repr__ reads self.path.
        self.path = path
        # the column 'type' should go away, see comment at mapper
        self.archive_type = 'pool'

    def __repr__(self):
        return '<Location %s (%s)>' % (self.path, self.location_id)
1587 __all__.append('Location')
def get_location(location, component=None, archive=None, session=None):
    """
    Returns Location object for the given combination of location, component
    and archive

    @type location: string
    @param location: the path of the location, e.g. I{/srv/ftp-master.debian.org/ftp/pool/}

    @type component: string
    @param component: the component name (if None, no restriction applied)

    @type archive: string
    @param archive: the archive name (if None, no restriction applied)

    @rtype: Location / None
    @return: Either a Location object or None if one can't be found
    """
    q = session.query(Location).filter_by(path=location)

    if archive is not None:
        q = q.join(Archive).filter_by(archive_name=archive)

    if component is not None:
        q = q.join(Component).filter_by(component_name=component)

    # Restored the try/return wrapper around the dangling except clause.
    try:
        return q.one()
    except NoResultFound:
        return None
1621 __all__.append('get_location')
1623 ################################################################################
class Maintainer(object):
    """ORM class for a row of the C{maintainer} table."""

    def __init__(self, name = None):
        # Truncation had dropped this assignment; restored.
        self.name = name

    def __repr__(self):
        return '''<Maintainer '%s' (%s)>''' % (self.name, self.maintainer_id)

    def get_split_maintainer(self):
        """Return the (name, email, ...) tuple from fix_maintainer, or four
        empty strings if no name is set."""
        if not hasattr(self, 'name') or self.name is None:
            return ('', '', '', '')

        return fix_maintainer(self.name.strip())
1638 __all__.append('Maintainer')
def get_or_set_maintainer(name, session=None):
    """
    Returns Maintainer object for given maintainer name.

    If no matching maintainer name is found, a row is inserted.

    @type name: string
    @param name: The maintainer name to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @rtype: Maintainer
    @return: the Maintainer object for the given maintainer
    """
    q = session.query(Maintainer).filter_by(name=name)

    try:
        ret = q.one()
    except NoResultFound:
        # Not present yet: insert and make visible per the session contract.
        maintainer = Maintainer()
        maintainer.name = name
        session.add(maintainer)
        session.commit_or_flush()
        ret = maintainer

    return ret
1672 __all__.append('get_or_set_maintainer')
def get_maintainer(maintainer_id, session=None):
    """
    Return the name of the maintainer behind C{maintainer_id} or None if that
    maintainer_id is invalid.

    @type maintainer_id: int
    @param maintainer_id: the id of the maintainer

    @rtype: Maintainer
    @return: the Maintainer with this C{maintainer_id}
    """
    return session.query(Maintainer).get(maintainer_id)
1689 __all__.append('get_maintainer')
1691 ################################################################################
class NewComment(object):
    """ORM class for a row of the C{new_comments} table (a NEW-queue comment)."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)
1700 __all__.append('NewComment')
def has_new_comment(package, version, session=None):
    """
    Returns true if the given combination of C{package}, C{version} has a comment.

    @type package: string
    @param package: name of the package

    @type version: string
    @param version: package version

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: boolean
    @return: true/false
    """
    q = session.query(NewComment)
    q = q.filter_by(package=package)
    q = q.filter_by(version=version)

    # The comparison already yields a bool; the redundant bool() wrapper
    # was dropped.
    return q.count() > 0
1727 __all__.append('has_new_comment')
def get_new_comments(package=None, version=None, comment_id=None, session=None):
    """
    Returns (possibly empty) list of NewComment objects for the given
    parameters

    @type package: string (optional)
    @param package: name of the package

    @type version: string (optional)
    @param version: package version

    @type comment_id: int (optional)
    @param comment_id: An id of a comment

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of NewComment objects will be returned
    """
    q = session.query(NewComment)
    if package is not None: q = q.filter_by(package=package)
    if version is not None: q = q.filter_by(version=version)
    if comment_id is not None: q = q.filter_by(comment_id=comment_id)

    # The truncated block lost its return statement; restored.
    return q.all()
1759 __all__.append('get_new_comments')
1761 ################################################################################
class Override(object):
    """ORM class for a row of the C{override} table."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<Override %s (%s)>' % (self.package, self.suite_id)
1770 __all__.append('Override')
def get_override(package, suite=None, component=None, overridetype=None, session=None):
    """
    Returns Override object for the given parameters

    @type package: string
    @param package: The name of the package

    @type suite: string, list or None
    @param suite: The name of the suite (or suites if a list) to limit to.  If
                  None, don't limit.  Defaults to None.

    @type component: string, list or None
    @param component: The name of the component (or components if a list) to
                      limit to.  If None, don't limit.  Defaults to None.

    @type overridetype: string, list or None
    @param overridetype: The name of the overridetype (or overridetypes if a list) to
                         limit to.  If None, don't limit.  Defaults to None.

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of Override objects will be returned
    """
    q = session.query(Override)
    q = q.filter_by(package=package)

    # Each restriction accepts a scalar or a list; normalise to a list first.
    if suite is not None:
        if not isinstance(suite, list): suite = [suite]
        q = q.join(Suite).filter(Suite.suite_name.in_(suite))

    if component is not None:
        if not isinstance(component, list): component = [component]
        q = q.join(Component).filter(Component.component_name.in_(component))

    if overridetype is not None:
        if not isinstance(overridetype, list): overridetype = [overridetype]
        q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))

    # The truncated block lost its return statement; restored.
    return q.all()
1817 __all__.append('get_override')
1820 ################################################################################
class OverrideType(object):
    """ORM class for a row of the C{override_type} table."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<OverrideType %s>' % self.overridetype
1829 __all__.append('OverrideType')
def get_override_type(override_type, session=None):
    """
    Returns OverrideType object for given C{override type}.

    @type override_type: string
    @param override_type: The name of the override type

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: OverrideType
    @return: the OverrideType object for the given override type (docstring
             previously claimed an id was returned; an object is)
    """
    q = session.query(OverrideType).filter_by(overridetype=override_type)

    # Restored the try/return wrapper around the dangling except clause.
    try:
        return q.one()
    except NoResultFound:
        return None
1854 __all__.append('get_override_type')
1856 ################################################################################
class DebContents(object):
    """ORM class for a row of the deb C{contents} table (one file of a deb)."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # Fixed repr typo: '<DebConetnts ...>' -> '<DebContents ...>'.
        return '<DebContents %s: %s>' % (self.package.package,self.file)
1865 __all__.append('DebContents')
class UdebContents(object):
    """ORM class for a row of the udeb C{contents} table (one file of a udeb)."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # Fixed repr typo: '<UdebConetnts ...>' -> '<UdebContents ...>'.
        return '<UdebContents %s: %s>' % (self.package.package,self.file)
1875 __all__.append('UdebContents')
class PendingBinContents(object):
    """ORM class for a row of the C{pending_bin_contents} table."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<PendingBinContents %s>' % self.contents_id
1884 __all__.append('PendingBinContents')
def insert_pending_content_paths(package,
    """
    Make sure given paths are temporarily associated with given
    binary package.

    @param package: the package to associate with should have been read in from the binary control file
    @type fullpaths: list
    @param fullpaths: the list of paths of the file being associated with the binary
    @type session: SQLAlchemy session
    @param session: Optional SQLAlchemy session.  If this is passed, the caller
    is responsible for ensuring a transaction has begun and committing the
    results or rolling back based on the result code.  If not passed, a commit
    will be performed at the end of the function

    @return: True upon success, False if there is a problem
    """

    # NOTE(review): the remainder of the parameter list (presumably is_udeb,
    # fullpaths, session=None) is missing from this excerpt.
    privatetrans = False

    # Create our own session when the caller did not supply one.
    session = DBConn().session()

    arch = get_architecture(package['Architecture'], session)
    arch_id = arch.arch_id

    # Remove any already existing recorded files for this package
    q = session.query(PendingBinContents)
    q = q.filter_by(package=package['Package'])
    q = q.filter_by(version=package['Version'])
    q = q.filter_by(architecture=arch_id)

    for fullpath in fullpaths:
        # Normalise "./path" entries to "path".
        if fullpath.startswith( "./" ):
            fullpath = fullpath[2:]

        pca = PendingBinContents()
        pca.package = package['Package']
        pca.version = package['Version']
        pca.architecture = arch_id
        # Magic content-type ids, per the original authors' own assessment:
        pca.type = 8 # gross
        pca.type = 7 # also gross

    # Only commit if we set up the session ourself
    # NOTE(review): the enclosing try: for this except was lost in excerpting.
    except Exception, e:
        traceback.print_exc()

        # Only rollback if we set up the session ourself
1959 __all__.append('insert_pending_content_paths')
1961 ################################################################################
class PolicyQueue(object):
    """ORM class for a row of the C{policy_queue} table."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<PolicyQueue %s>' % self.queue_name
1970 __all__.append('PolicyQueue')
def get_policy_queue(queuename, session=None):
    """
    Returns PolicyQueue object for given C{queue name}

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """
    q = session.query(PolicyQueue).filter_by(queue_name=queuename)

    # Restored the try/return wrapper around the dangling except clause.
    try:
        return q.one()
    except NoResultFound:
        return None
1995 __all__.append('get_policy_queue')
def get_policy_queue_from_path(pathname, session=None):
    """
    Returns PolicyQueue object for given C{path name}

    @type pathname: string
    @param pathname: The path

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """
    q = session.query(PolicyQueue).filter_by(path=pathname)

    # Restored the try/return wrapper; docstring fixed to document the
    # actual parameter name (pathname, not queuename).
    try:
        return q.one()
    except NoResultFound:
        return None
2020 __all__.append('get_policy_queue_from_path')
2022 ################################################################################
class Priority(object):
    """ORM class for a row of the C{priority} table; compares equal to its
    priority-name string for convenience."""

    def __init__(self, *args, **kwargs):
        pass

    def __eq__(self, val):
        # Allow comparing a Priority directly against a priority-name string.
        if isinstance(val, str):
            return (self.priority == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.priority != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __repr__(self):
        return '<Priority %s (%s)>' % (self.priority, self.priority_id)
2043 __all__.append('Priority')
def get_priority(priority, session=None):
    """
    Returns Priority object for given C{priority name}.

    @type priority: string
    @param priority: The name of the priority

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Priority
    @return: Priority object for the given priority
    """
    q = session.query(Priority).filter_by(priority=priority)

    # Restored the try/return wrapper around the dangling except clause.
    try:
        return q.one()
    except NoResultFound:
        return None
2068 __all__.append('get_priority')
def get_priorities(session=None):
    """
    Returns dictionary of priority names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of priority names -> id mappings
    """
    ret = {}
    q = session.query(Priority)
    for x in q:
        ret[x.priority] = x.priority_id

    # Restored the accumulator setup / loop header / return lost to truncation.
    return ret
2090 __all__.append('get_priorities')
2092 ################################################################################
class Section(object):
    """ORM class for a row of the C{section} table; compares equal to its
    section-name string for convenience."""

    def __init__(self, *args, **kwargs):
        pass

    def __eq__(self, val):
        # Allow comparing a Section directly against a section-name string.
        if isinstance(val, str):
            return (self.section == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.section != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __repr__(self):
        return '<Section %s>' % self.section
2113 __all__.append('Section')
def get_section(section, session=None):
    """
    Returns Section object for given C{section name}.

    @type section: string
    @param section: The name of the section

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Section
    @return: Section object for the given section name
    """
    q = session.query(Section).filter_by(section=section)

    # Restored the try/return wrapper around the dangling except clause.
    try:
        return q.one()
    except NoResultFound:
        return None
2138 __all__.append('get_section')
def get_sections(session=None):
    """
    Returns dictionary of section names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of section names -> id mappings
    """
    ret = {}
    q = session.query(Section)
    for x in q:
        ret[x.section] = x.section_id

    # Restored the accumulator setup / loop header / return lost to truncation.
    return ret
2160 __all__.append('get_sections')
2162 ################################################################################
class DBSource(object):
    """ORM class for a row of the C{source} table (a source package)."""

    def __init__(self, source = None, version = None, maintainer = None, \
        changedby = None, poolfile = None, install_date = None):
        self.source = source
        self.version = version
        self.maintainer = maintainer
        self.changedby = changedby
        self.poolfile = poolfile
        self.install_date = install_date

    def __repr__(self):
        # The repr body had been fused onto the end of __init__ by
        # truncation; restored as a proper __repr__ method.
        return '<DBSource %s (%s)>' % (self.source, self.version)
2177 __all__.append('DBSource')
def source_exists(source, source_version, suites = ["any"], session=None):
    """
    Ensure that source exists somewhere in the archive for the binary
    upload being processed.
      1. exact match     => 1.0-3
      2. bin-only NMU    => 1.0-3+b1 , 1.0-3.1+b1

    @type source: string
    @param source: source name

    @type source_version: string
    @param source_version: expected source version

    @type suites: list
    @param suites: list of suites to check in, default I{any}

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: int
    @return: returns 1 if a source with expected version is found, otherwise 0
    """

    # Strip a binNMU suffix so binary-only NMUs still match their source.
    from daklib.regexes import re_bin_only_nmu
    orig_source_version = re_bin_only_nmu.sub('', source_version)

    for suite in suites:
        q = session.query(DBSource).filter_by(source=source). \
            filter(DBSource.version.in_([source_version, orig_source_version]))

        # source must exist in suite X, or in some other suite that's
        # mapped to X, recursively... silent-maps are counted too,
        # unreleased-maps aren't.
        maps = cnf.ValueList("SuiteMappings")[:]
        maps = [ m.split() for m in maps ]
        maps = [ (x[1], x[2]) for x in maps
                 if x[0] == "map" or x[0] == "silent-map" ]

        # NOTE(review): the loop that builds the transitive suite set `s`
        # appears to have been lost here; only its inner test survives.
        if x[1] in s and x[0] not in s:

        q = q.filter(DBSource.suites.any(Suite.suite_name.in_(s)))

    # No source found so return not ok
2238 __all__.append('source_exists')
def get_suites_source_in(source, session=None):
    """
    Returns list of Suite objects which given C{source} name is in

    @type source: str
    @param source: DBSource package name to search for

    @rtype: list
    @return: list of Suite objects for the given source
    """
    return session.query(Suite).filter(Suite.sources.any(source=source)).all()
2254 __all__.append('get_suites_source_in')
def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
    """
    Returns list of DBSource objects for given C{source} name and other parameters

    @type source: str
    @param source: DBSource package name to search for

    @type version: str or None
    @param version: DBSource version name to search for or None if not applicable

    @type dm_upload_allowed: bool
    @param dm_upload_allowed: If None, no effect.  If True or False, only
    return packages with that dm_upload_allowed setting

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of DBSource objects for the given name (may be empty)
    """
    q = session.query(DBSource).filter_by(source=source)

    if version is not None:
        q = q.filter_by(version=version)

    if dm_upload_allowed is not None:
        q = q.filter_by(dm_upload_allowed=dm_upload_allowed)

    # The truncated block lost its return statement; restored.
    return q.all()
2289 __all__.append('get_sources_from_name')
2291 # FIXME: This function fails badly if it finds more than 1 source package and
2292 # its implementation is trivial enough to be inlined.
def get_source_in_suite(source, suite, session=None):
    """
    Returns a DBSource object for a combination of C{source} and C{suite}.

      - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
      - B{suite} - a suite name, eg. I{unstable}

    @type source: string
    @param source: source package name

    @type suite: string
    @param suite: the suite name

    @rtype: string
    @return: the version for I{source} in I{suite}
    """
    q = get_suite(suite, session).get_sources(source)
    # NOTE(review): the try:/q.one() wrapper for this except clause was lost
    # in this excerpt.
    except NoResultFound:
2318 __all__.append('get_source_in_suite')
2320 ################################################################################
def add_dsc_to_db(u, filename, session=None):
    """Record the source package described by upload *u* and its .dsc file
    *filename* in the database; returns (source, dsc_component,
    dsc_location_id, pfs).

    NOTE(review): several statements (e.g. the DBSource construction and some
    branch headers) are missing from this excerpt; the hedged notes below mark
    where code was evidently lost."""
    entry = u.pkg.files[filename]

    # NOTE(review): `source = DBSource()` presumably preceded these
    # assignments — confirm against the full source.
    source.source = u.pkg.dsc["source"]
    source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
    source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
    source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
    source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    source.install_date = datetime.now().date()

    dsc_component = entry["component"]
    dsc_location_id = entry["location id"]

    source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")

    # Set up a new poolfile if necessary
    if not entry.has_key("files id") or not entry["files id"]:
        filename = entry["pool name"] + filename
        poolfile = add_poolfile(filename, entry, dsc_location_id, session)
        pfs.append(poolfile)
        entry["files id"] = poolfile.file_id

    source.poolfile_id = entry["files id"]

    # Associate the source with every suite listed in the .changes file.
    suite_names = u.pkg.changes["distribution"].keys()
    source.suites = session.query(Suite). \
        filter(Suite.suite_name.in_(suite_names)).all()

    # Add the source files to the DB (files and dsc_files)
    dscfile.source_id = source.source_id
    dscfile.poolfile_id = entry["files id"]
    session.add(dscfile)

    for dsc_file, dentry in u.pkg.dsc_files.items():
        df.source_id = source.source_id

        # If the .orig tarball is already in the pool, it's
        # files id is stored in dsc_files by check_dsc().
        files_id = dentry.get("files id", None)

        # Find the entry in the files hash
        # TODO: Bail out here properly
        # NOTE(review): the body of this loop was lost in this excerpt.
        for f, e in u.pkg.files.items():

        if files_id is None:
            filename = dfentry["pool name"] + dsc_file

            (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
            # FIXME: needs to check for -1/-2 and or handle exception
            if found and obj is not None:
                files_id = obj.file_id

            # If still not found, add it
            if files_id is None:
                # HACK: Force sha1sum etc into dentry
                dentry["sha1sum"] = dfentry["sha1sum"]
                dentry["sha256sum"] = dfentry["sha256sum"]
                poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
                pfs.append(poolfile)
                files_id = poolfile.file_id

            # NOTE(review): an `else:` branch header for the pre-existing
            # poolfile case appears to have been lost here.
            poolfile = get_poolfile_by_id(files_id, session)
            if poolfile is None:
                utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
            pfs.append(poolfile)

        df.poolfile_id = files_id

    # Add the src_uploaders to the DB
    uploader_ids = [source.maintainer_id]
    if u.pkg.dsc.has_key("uploaders"):
        # Uploaders are comma-separated; the replace/split keeps commas
        # inside "Name <addr>" tokens intact.
        for up in u.pkg.dsc["uploaders"].replace(">, ", ">\t").split("\t"):
            uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)

    for up_id in uploader_ids:
        if added_ids.has_key(up_id):
            # Skip duplicate uploader entries.
            utils.warn("Already saw uploader %s for source %s" % (up_id, source.source))

        su.maintainer_id = up_id
        su.source_id = source.source_id

    return source, dsc_component, dsc_location_id, pfs
2428 __all__.append('add_dsc_to_db')
def add_deb_to_db(u, filename, session=None):
    """
    Contrary to what you might expect, this routine deals with both
    debs and udebs.  That info is in 'dbtype', whilst 'type' is
    'deb' for both of them
    """
    entry = u.pkg.files[filename]

    # NOTE(review): `bin = DBBinary()` presumably preceded these assignments
    # — confirm against the full source.
    bin.package = entry["package"]
    bin.version = entry["version"]
    bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
    bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    bin.arch_id = get_architecture(entry["architecture"], session).arch_id
    bin.binarytype = entry["dbtype"]

    # Find the location the file will live in within the pool.
    filename = entry["pool name"] + filename
    fullpath = os.path.join(cnf["Dir::Pool"], filename)
    if not entry.get("location id", None):
        entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id

    if entry.get("files id", None):
        poolfile = get_poolfile_by_id(bin.poolfile_id)
        bin.poolfile_id = entry["files id"]

        # NOTE(review): an `else:` branch header appears to have been lost
        # before the add_poolfile path below.
        poolfile = add_poolfile(filename, entry, entry["location id"], session)
        bin.poolfile_id = entry["files id"] = poolfile.file_id

    # A binary must map to exactly one source package.
    bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
    if len(bin_sources) != 1:
        raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
                                  (bin.package, bin.version, entry["architecture"],
                                   filename, bin.binarytype, u.pkg.changes["fingerprint"])

    bin.source_id = bin_sources[0].source_id

    # Add and flush object so it has an ID

    # Add BinAssociations
    for suite_name in u.pkg.changes["distribution"].keys():
        ba = BinAssociation()
        ba.binary_id = bin.binary_id
        ba.suite_id = get_suite(suite_name).suite_id

    # Deal with contents - disabled for now
    #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
    #    print "REJECT\nCould not determine contents of package %s" % bin.package
    #    session.rollback()
    #    raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)
2492 __all__.append('add_deb_to_db')
2494 ################################################################################
class SourceACL(object):
    """ORM class for a row of the C{source_acl} table."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SourceACL %s>' % self.source_acl_id
2503 __all__.append('SourceACL')
2505 ################################################################################
class SrcFormat(object):
    """ORM class for a row of the C{src_format} table (a source format name)."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SrcFormat %s>' % (self.format_name)
2514 __all__.append('SrcFormat')
2516 ################################################################################
class SrcUploader(object):
    """ORM class for a row of the C{src_uploaders} table."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SrcUploader %s>' % self.uploader_id
2525 __all__.append('SrcUploader')
2527 ################################################################################
# Mapping of human-readable suite attribute labels to Suite ORM attribute
# names, used to render a suite as text.
# NOTE(review): some entries may have been elided from this excerpt.
SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
                 ('SuiteID', 'suite_id'),
                 ('Version', 'version'),
                 ('Origin', 'origin'),
                 ('Description', 'description'),
                 ('Untouchable', 'untouchable'),
                 ('Announce', 'announce'),
                 ('Codename', 'codename'),
                 ('OverrideCodename', 'overridecodename'),
                 ('ValidTime', 'validtime'),
                 ('Priority', 'priority'),
                 ('NotAutomatic', 'notautomatic'),
                 ('CopyChanges', 'copychanges'),
                 ('OverrideSuite', 'overridesuite')]
2545 # Why the heck don't we have any UNIQUE constraints in table suite?
2546 # TODO: Add UNIQUE constraints for appropriate columns.
class Suite(object):
    """ORM class for a row of the C{suite} table; compares equal to its
    suite-name string for convenience."""

    def __init__(self, suite_name = None, version = None):
        self.suite_name = suite_name
        self.version = version
        # NOTE(review): the following return looks like the body of a lost
        # __repr__ method — an __init__ should not return a value.
        return '<Suite %s>' % self.suite_name

    def __eq__(self, val):
        # Allow comparing a Suite directly against a suite-name string.
        if isinstance(val, str):
            return (self.suite_name == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.suite_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented

        # NOTE(review): the lines below appear to be the body of a lost
        # details()-style method rendering SUITE_FIELDS as text.
        for disp, field in SUITE_FIELDS:
            val = getattr(self, field, None)
            ret.append("%s: %s" % (disp, val))

        return "\n".join(ret)

    def get_architectures(self, skipsrc=False, skipall=False):
        """
        Returns list of Architecture objects

        @type skipsrc: boolean
        @param skipsrc: Whether to skip returning the 'source' architecture entry

        @type skipall: boolean
        @param skipall: Whether to skip returning the 'all' architecture entry

        @rtype: list
        @return: list of Architecture objects for the given name (may be empty)
        """
        q = object_session(self).query(Architecture).with_parent(self)
        # NOTE(review): these filters are presumably guarded by `if skipsrc:`
        # and `if skipall:` in the full source — confirm.
        q = q.filter(Architecture.arch_string != 'source')
        q = q.filter(Architecture.arch_string != 'all')
        return q.order_by(Architecture.arch_string).all()

    def get_sources(self, source):
        """
        Returns a query object representing DBSource that is part of C{suite}.

          - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}

        @type source: string
        @param source: source package name

        @rtype: sqlalchemy.orm.query.Query
        @return: a query of DBSource
        """
        session = object_session(self)
        return session.query(DBSource).filter_by(source = source). \
# Export Suite through the module's public symbol list.
__all__.append('Suite')
# NOTE(review): the @session_wrapper decorator and the try/except body lines
# appear dropped from this extract; restored (without the wrapper a call with
# session=None would dereference None).  Confirm against VCS.
@session_wrapper
def get_suite(suite, session=None):
    """
    Returns Suite object for given C{suite name}.

    @type suite: string
    @param suite: The name of the suite

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Suite
    @return: Suite object for the requested suite name (None if not present)
    """

    q = session.query(Suite).filter_by(suite_name=suite)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_suite')

################################################################################
# TODO: should be removed because the implementation is too trivial
# NOTE(review): @session_wrapper restored -- the decorator line appears
# dropped from this extract; confirm against VCS.
@session_wrapper
def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
    """
    Returns list of Architecture objects for given C{suite} name

    @type suite: str
    @param suite: Suite name to search for

    @type skipsrc: boolean
    @param skipsrc: Whether to skip returning the 'source' architecture entry
    (Default False)

    @type skipall: boolean
    @param skipall: Whether to skip returning the 'all' architecture entry
    (Default False)

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Architecture objects for the given name (may be empty)
    """

    # Delegates entirely to Suite.get_architectures().
    return get_suite(suite, session).get_architectures(skipsrc, skipall)

__all__.append('get_suite_architectures')

################################################################################
class SuiteSrcFormat(object):
    """
    ORM class for a row of the suite_src_formats association table (which
    source formats are allowed in which suite).  Attributes (suite_id,
    src_format_id, ...) are attached by the SQLAlchemy mapper.
    """
    def __init__(self, *args, **kwargs):
        # NOTE(review): restored -- the 'pass' body and the __repr__ header
        # appear dropped from this extract.  Confirm against VCS.
        pass

    def __repr__(self):
        return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)
# Export SuiteSrcFormat through the module's public symbol list.
__all__.append('SuiteSrcFormat')
# NOTE(review): the @session_wrapper decorator and the final 'return q.all()'
# appear dropped from this extract; restored.  Confirm against VCS.
@session_wrapper
def get_suite_src_formats(suite, session=None):
    """
    Returns list of allowed SrcFormat for C{suite}.

    @type suite: str
    @param suite: Suite name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: the list of allowed source formats for I{suite}
    """

    q = session.query(SrcFormat)
    q = q.join(SuiteSrcFormat)
    q = q.join(Suite).filter_by(suite_name=suite)
    q = q.order_by('format_name')

    return q.all()

__all__.append('get_suite_src_formats')
################################################################################

class Uid(object):
    """
    ORM class for a row of the uid table: a GPG uid string plus the
    associated real name.
    """
    # NOTE(review): the 'class Uid(object):' header and the __init__/__repr__
    # body lines are not visible in this extract; restored so the methods
    # below have their enclosing scope.  Confirm against VCS.
    def __init__(self, uid = None, name = None):
        self.uid = uid
        self.name = name

    def __eq__(self, val):
        # Allow direct comparison against a plain uid string.
        if isinstance(val, str):
            return (self.uid == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.uid != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __repr__(self):
        return '<Uid %s (%s)>' % (self.uid, self.name)
# Export Uid through the module's public symbol list.
__all__.append('Uid')
# NOTE(review): the @session_wrapper decorator and the try/insert body lines
# appear dropped from this extract; restored.  Confirm against VCS.
@session_wrapper
def get_or_set_uid(uidname, session=None):
    """
    Returns uid object for given uidname.

    If no matching uidname is found, a row is inserted.

    @type uidname: string
    @param uidname: The uid to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: Uid
    @return: the uid object for the given uidname
    """

    q = session.query(Uid).filter_by(uid=uidname)

    try:
        ret = q.one()
    except NoResultFound:
        # Not there yet: insert a new row and hand it back.
        uid = Uid()
        uid.uid = uidname
        session.add(uid)
        session.commit_or_flush()
        ret = uid

    return ret

__all__.append('get_or_set_uid')
# NOTE(review): the @session_wrapper decorator and the try/except body lines
# appear dropped from this extract; restored.  Confirm against VCS.
@session_wrapper
def get_uid_from_fingerprint(fpr, session=None):
    """
    Returns the Uid attached to the given fingerprint, or None if the
    fingerprint is unknown or carries no uid.

    @type fpr: string
    @param fpr: Fingerprint to look up

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)
    """
    q = session.query(Uid)
    q = q.join(Fingerprint).filter_by(fingerprint=fpr)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_uid_from_fingerprint')

################################################################################
class UploadBlock(object):
    """
    ORM class for a row of the upload_blocks table (a block on uploads of a
    given source, tied to a fingerprint/uid).  Attributes are attached by
    the SQLAlchemy mapper in DBConn.__setupmappers().
    """
    def __init__(self, *args, **kwargs):
        # NOTE(review): restored -- the 'pass' body and the __repr__ header
        # appear dropped from this extract.  Confirm against VCS.
        pass

    def __repr__(self):
        return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)
# Export UploadBlock through the module's public symbol list.
__all__.append('UploadBlock')

################################################################################
class DBConn(object):
    """
    database module init.
    """
    # Borg pattern: every DBConn instance shares this state dict, so the
    # engine/metadata/mappers are created exactly once per process.
    # NOTE(review): __shared_state and the __createconn() call appear
    # dropped from this extract; restored.  Confirm against VCS.
    __shared_state = {}

    def __init__(self, *args, **kwargs):
        self.__dict__ = self.__shared_state

        if not getattr(self, 'initialised', False):
            self.initialised = True
            # 'in' replaces the deprecated dict.has_key(); same behaviour.
            self.debug = 'debug' in kwargs
            self.__createconn()
    def __setuptables(self):
        """
        Reflect the archive database schema, storing each table as
        self.tbl_<name> and each database view as self.view_<name>.
        """
        # NOTE(review): these tuples look truncated in this extract (several
        # entries, the closing parentheses and the 'views = (' assignment
        # referenced by the last loop are not visible).  Reproduced as-is;
        # confirm against VCS.
        tables_with_primary = (
                'changes_pending_binaries',
                'changes_pending_files',
                'changes_pending_source',
                'pending_bin_contents',
                # The following tables have primary keys but sqlalchemy
                # version 0.5 fails to reflect them correctly with database
                # versions before upgrade #41.
                #'build_queue_files',
        tables_no_primary = (
                'changes_pending_files_map',
                'changes_pending_source_files',
                'changes_pool_files',
                'suite_architectures',
                'suite_src_formats',
                'suite_build_queue_copy',
                # see the comment above
                'build_queue_files',
                'almost_obsolete_all_associations',
                'almost_obsolete_src_associations',
                'any_associations_source',
                'bin_assoc_by_arch',
                'bin_associations_binaries',
                'binaries_suite_arch',
                'binfiles_suite_component_arch',
                'newest_all_associations',
                'newest_any_associations',
                'newest_src_association',
                'obsolete_all_associations',
                'obsolete_any_associations',
                'obsolete_any_by_all_associations',
                'obsolete_src_associations',
                'src_associations_bin',
                'src_associations_src',
                'suite_arch_by_name',
        # Sqlalchemy version 0.5 fails to reflect the SERIAL type
        # correctly and that is why we have to use a workaround. It can
        # be removed as soon as we switch to version 0.6.
        for table_name in tables_with_primary:
            # Reflect with an explicit integer 'id' primary key column.
            table = Table(table_name, self.db_meta, \
                Column('id', Integer, primary_key = True), \
                autoload=True, useexisting=True)
            setattr(self, 'tbl_%s' % table_name, table)

        for table_name in tables_no_primary:
            table = Table(table_name, self.db_meta, autoload=True)
            setattr(self, 'tbl_%s' % table_name, table)

        for view_name in views:
            view = Table(view_name, self.db_meta, autoload=True)
            setattr(self, 'view_%s' % view_name, view)
    def __setupmappers(self):
        """
        Bind each ORM class to its reflected table via SQLAlchemy's
        classical mapper() API, naming columns and wiring up relations.
        """
        mapper(Architecture, self.tbl_architecture,
               properties = dict(arch_id = self.tbl_architecture.c.id,
                                 suites = relation(Suite, secondary=self.tbl_suite_architectures,
                                     order_by='suite_name',
                                     backref=backref('architectures', order_by='arch_string'))))

        mapper(Archive, self.tbl_archive,
               properties = dict(archive_id = self.tbl_archive.c.id,
                                 archive_name = self.tbl_archive.c.name))

        mapper(BinAssociation, self.tbl_bin_associations,
               properties = dict(ba_id = self.tbl_bin_associations.c.id,
                                 suite_id = self.tbl_bin_associations.c.suite,
                                 suite = relation(Suite),
                                 binary_id = self.tbl_bin_associations.c.bin,
                                 binary = relation(DBBinary)))

        mapper(PendingBinContents, self.tbl_pending_bin_contents,
               properties = dict(contents_id =self.tbl_pending_bin_contents.c.id,
                                 filename = self.tbl_pending_bin_contents.c.filename,
                                 package = self.tbl_pending_bin_contents.c.package,
                                 version = self.tbl_pending_bin_contents.c.version,
                                 arch = self.tbl_pending_bin_contents.c.arch,
                                 otype = self.tbl_pending_bin_contents.c.type))

        mapper(DebContents, self.tbl_deb_contents,
               properties = dict(binary_id=self.tbl_deb_contents.c.binary_id,
                                 package=self.tbl_deb_contents.c.package,
                                 suite=self.tbl_deb_contents.c.suite,
                                 arch=self.tbl_deb_contents.c.arch,
                                 section=self.tbl_deb_contents.c.section,
                                 filename=self.tbl_deb_contents.c.filename))

        mapper(UdebContents, self.tbl_udeb_contents,
               properties = dict(binary_id=self.tbl_udeb_contents.c.binary_id,
                                 package=self.tbl_udeb_contents.c.package,
                                 suite=self.tbl_udeb_contents.c.suite,
                                 arch=self.tbl_udeb_contents.c.arch,
                                 section=self.tbl_udeb_contents.c.section,
                                 filename=self.tbl_udeb_contents.c.filename))

        mapper(BuildQueue, self.tbl_build_queue,
               properties = dict(queue_id = self.tbl_build_queue.c.id))

        mapper(BuildQueueFile, self.tbl_build_queue_files,
               properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
                                 poolfile = relation(PoolFile, backref='buildqueueinstances')))

        mapper(DBBinary, self.tbl_binaries,
               properties = dict(binary_id = self.tbl_binaries.c.id,
                                 package = self.tbl_binaries.c.package,
                                 version = self.tbl_binaries.c.version,
                                 maintainer_id = self.tbl_binaries.c.maintainer,
                                 maintainer = relation(Maintainer),
                                 source_id = self.tbl_binaries.c.source,
                                 source = relation(DBSource),
                                 arch_id = self.tbl_binaries.c.architecture,
                                 architecture = relation(Architecture),
                                 poolfile_id = self.tbl_binaries.c.file,
                                 poolfile = relation(PoolFile),
                                 binarytype = self.tbl_binaries.c.type,
                                 fingerprint_id = self.tbl_binaries.c.sig_fpr,
                                 fingerprint = relation(Fingerprint),
                                 install_date = self.tbl_binaries.c.install_date,
                                 binassociations = relation(BinAssociation,
                                                            primaryjoin=(self.tbl_binaries.c.id==self.tbl_bin_associations.c.bin))))

        mapper(BinaryACL, self.tbl_binary_acl,
               properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))

        mapper(BinaryACLMap, self.tbl_binary_acl_map,
               properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
                                 fingerprint = relation(Fingerprint, backref="binary_acl_map"),
                                 architecture = relation(Architecture)))

        mapper(Component, self.tbl_component,
               properties = dict(component_id = self.tbl_component.c.id,
                                 component_name = self.tbl_component.c.name))

        mapper(DBConfig, self.tbl_config,
               properties = dict(config_id = self.tbl_config.c.id))

        mapper(DSCFile, self.tbl_dsc_files,
               properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
                                 source_id = self.tbl_dsc_files.c.source,
                                 source = relation(DBSource),
                                 poolfile_id = self.tbl_dsc_files.c.file,
                                 poolfile = relation(PoolFile)))

        mapper(PoolFile, self.tbl_files,
               properties = dict(file_id = self.tbl_files.c.id,
                                 filesize = self.tbl_files.c.size,
                                 location_id = self.tbl_files.c.location,
                                 location = relation(Location,
                                     # using lazy='dynamic' in the back
                                     # reference because we have A LOT of
                                     # files in one location
                                     backref=backref('files', lazy='dynamic'))))

        mapper(Fingerprint, self.tbl_fingerprint,
               properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
                                 uid_id = self.tbl_fingerprint.c.uid,
                                 uid = relation(Uid),
                                 keyring_id = self.tbl_fingerprint.c.keyring,
                                 keyring = relation(Keyring),
                                 source_acl = relation(SourceACL),
                                 binary_acl = relation(BinaryACL)))

        mapper(Keyring, self.tbl_keyrings,
               properties = dict(keyring_name = self.tbl_keyrings.c.name,
                                 keyring_id = self.tbl_keyrings.c.id))

        mapper(DBChange, self.tbl_changes,
               properties = dict(change_id = self.tbl_changes.c.id,
                                 poolfiles = relation(PoolFile,
                                                      secondary=self.tbl_changes_pool_files,
                                                      backref="changeslinks"),
                                 seen = self.tbl_changes.c.seen,
                                 source = self.tbl_changes.c.source,
                                 binaries = self.tbl_changes.c.binaries,
                                 architecture = self.tbl_changes.c.architecture,
                                 distribution = self.tbl_changes.c.distribution,
                                 urgency = self.tbl_changes.c.urgency,
                                 maintainer = self.tbl_changes.c.maintainer,
                                 changedby = self.tbl_changes.c.changedby,
                                 date = self.tbl_changes.c.date,
                                 version = self.tbl_changes.c.version,
                                 files = relation(ChangePendingFile,
                                                  secondary=self.tbl_changes_pending_files_map,
                                                  backref="changesfile"),
                                 in_queue_id = self.tbl_changes.c.in_queue,
                                 in_queue = relation(PolicyQueue,
                                                     primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
                                 approved_for_id = self.tbl_changes.c.approved_for))

        mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
               properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))

        mapper(ChangePendingFile, self.tbl_changes_pending_files,
               properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
                                 filename = self.tbl_changes_pending_files.c.filename,
                                 size = self.tbl_changes_pending_files.c.size,
                                 md5sum = self.tbl_changes_pending_files.c.md5sum,
                                 sha1sum = self.tbl_changes_pending_files.c.sha1sum,
                                 sha256sum = self.tbl_changes_pending_files.c.sha256sum))

        mapper(ChangePendingSource, self.tbl_changes_pending_source,
               properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
                                 change = relation(DBChange),
                                 maintainer = relation(Maintainer,
                                                       primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
                                 changedby = relation(Maintainer,
                                                      primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
                                 fingerprint = relation(Fingerprint),
                                 source_files = relation(ChangePendingFile,
                                                         secondary=self.tbl_changes_pending_source_files,
                                                         backref="pending_sources")))

        mapper(KeyringACLMap, self.tbl_keyring_acl_map,
               properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
                                 keyring = relation(Keyring, backref="keyring_acl_map"),
                                 architecture = relation(Architecture)))

        mapper(Location, self.tbl_location,
               properties = dict(location_id = self.tbl_location.c.id,
                                 component_id = self.tbl_location.c.component,
                                 component = relation(Component),
                                 archive_id = self.tbl_location.c.archive,
                                 archive = relation(Archive),
                                 # FIXME: the 'type' column is old cruft and
                                 # should be removed in the future.
                                 archive_type = self.tbl_location.c.type))

        mapper(Maintainer, self.tbl_maintainer,
               properties = dict(maintainer_id = self.tbl_maintainer.c.id,
                                 maintains_sources = relation(DBSource, backref='maintainer',
                                                              primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
                                 changed_sources = relation(DBSource, backref='changedby',
                                                            primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))))

        mapper(NewComment, self.tbl_new_comments,
               properties = dict(comment_id = self.tbl_new_comments.c.id))

        mapper(Override, self.tbl_override,
               properties = dict(suite_id = self.tbl_override.c.suite,
                                 suite = relation(Suite),
                                 package = self.tbl_override.c.package,
                                 component_id = self.tbl_override.c.component,
                                 component = relation(Component),
                                 priority_id = self.tbl_override.c.priority,
                                 priority = relation(Priority),
                                 section_id = self.tbl_override.c.section,
                                 section = relation(Section),
                                 overridetype_id = self.tbl_override.c.type,
                                 overridetype = relation(OverrideType)))

        mapper(OverrideType, self.tbl_override_type,
               properties = dict(overridetype = self.tbl_override_type.c.type,
                                 overridetype_id = self.tbl_override_type.c.id))

        mapper(PolicyQueue, self.tbl_policy_queue,
               properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))

        mapper(Priority, self.tbl_priority,
               properties = dict(priority_id = self.tbl_priority.c.id))

        mapper(Section, self.tbl_section,
               properties = dict(section_id = self.tbl_section.c.id,
                                 section=self.tbl_section.c.section))

        mapper(DBSource, self.tbl_source,
               properties = dict(source_id = self.tbl_source.c.id,
                                 version = self.tbl_source.c.version,
                                 maintainer_id = self.tbl_source.c.maintainer,
                                 poolfile_id = self.tbl_source.c.file,
                                 poolfile = relation(PoolFile, backref=backref('source', uselist = False)),
                                 fingerprint_id = self.tbl_source.c.sig_fpr,
                                 fingerprint = relation(Fingerprint),
                                 changedby_id = self.tbl_source.c.changedby,
                                 srcfiles = relation(DSCFile,
                                                     primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
                                 suites = relation(Suite, secondary=self.tbl_src_associations,
                                 # NOTE(review): the continuation of the
                                 # relation() call above (its closing
                                 # arguments, likely a backref) is not
                                 # visible in this extract.
                                 srcuploaders = relation(SrcUploader)))

        mapper(SourceACL, self.tbl_source_acl,
               properties = dict(source_acl_id = self.tbl_source_acl.c.id))

        mapper(SrcFormat, self.tbl_src_format,
               properties = dict(src_format_id = self.tbl_src_format.c.id,
                                 format_name = self.tbl_src_format.c.format_name))

        mapper(SrcUploader, self.tbl_src_uploaders,
               properties = dict(uploader_id = self.tbl_src_uploaders.c.id,
                                 source_id = self.tbl_src_uploaders.c.source,
                                 source = relation(DBSource,
                                                   primaryjoin=(self.tbl_src_uploaders.c.source==self.tbl_source.c.id)),
                                 maintainer_id = self.tbl_src_uploaders.c.maintainer,
                                 maintainer = relation(Maintainer,
                                                       primaryjoin=(self.tbl_src_uploaders.c.maintainer==self.tbl_maintainer.c.id))))

        mapper(Suite, self.tbl_suite,
               properties = dict(suite_id = self.tbl_suite.c.id,
                                 policy_queue = relation(PolicyQueue),
                                 copy_queues = relation(BuildQueue, secondary=self.tbl_suite_build_queue_copy)))

        mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
               properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
                                 suite = relation(Suite, backref='suitesrcformats'),
                                 src_format_id = self.tbl_suite_src_formats.c.src_format,
                                 src_format = relation(SrcFormat)))

        mapper(Uid, self.tbl_uid,
               properties = dict(uid_id = self.tbl_uid.c.id,
                                 fingerprint = relation(Fingerprint)))

        mapper(UploadBlock, self.tbl_upload_blocks,
               properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
                                 fingerprint = relation(Fingerprint, backref="uploadblocks"),
                                 uid = relation(Uid, backref="uploadblocks")))
3171 ## Connection functions
3172 def __createconn(self):
3173 from config import Config
3177 connstr = "postgres://%s" % cnf["DB::Host"]
3178 if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
3179 connstr += ":%s" % cnf["DB::Port"]
3180 connstr += "/%s" % cnf["DB::Name"]
3183 connstr = "postgres:///%s" % cnf["DB::Name"]
3184 if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
3185 connstr += "?port=%s" % cnf["DB::Port"]
3187 self.db_pg = create_engine(connstr, echo=self.debug)
3188 self.db_meta = MetaData()
3189 self.db_meta.bind = self.db_pg
3190 self.db_smaker = sessionmaker(bind=self.db_pg,
3194 self.__setuptables()
3195 self.__setupmappers()
3198 return self.db_smaker()
# Export DBConn through the module's public symbol list.
__all__.append('DBConn')