1 # Copyright (C) 2015, Ansgar Burchardt <ansgar@debian.org>
3 # This program is free software; you can redistribute it and/or modify
4 # it under the terms of the GNU General Public License as published by
5 # the Free Software Foundation; either version 2 of the License, or
6 # (at your option) any later version.
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
13 # You should have received a copy of the GNU General Public License along
14 # with this program; if not, write to the Free Software Foundation, Inc.,
15 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import os
import shutil
import tempfile
import urllib2

import apt_pkg
from sqlalchemy.orm import object_session

import daklib.compress
import daklib.config
import daklib.dbconn
import daklib.gpg
import daklib.regexes
import daklib.upload
from daklib.dbconn import DBSource, PoolFile
# Hmm, maybe use APT directly for all of this?

# Hash fields read from a Release file's per-file checksum lists.
_release_hashes_fields = ('MD5Sum', 'SHA1', 'SHA256')
class Release(object):
    """Parsed representation of a suite's (signature-checked) Release file."""

    def __init__(self, base, suite_name, data):
        """Initialize from Release data.

        @param base: base URL or path of the repository
        @param suite_name: name of the suite below dists/
        @param data: contents of the Release file (signature already verified)
        """
        # 'base' is needed later to fetch files referenced by this Release
        # (see obtain_release_file, which reads release._base).
        self._base = base
        self._suite_name = suite_name
        self._dict = apt_pkg.TagSection(data)
        # Index of all files referenced from the Release file, keyed by
        # their path relative to the suite directory.
        self._hashes = daklib.upload.parse_file_list(self._dict, False, daklib.regexes.re_file_safe_slash, _release_hashes_fields)

    def architectures(self):
        """Return list of architectures from the Architectures field."""
        return self._dict['Architectures'].split()

    def components(self):
        """Return list of components from the Components field."""
        return self._dict['Components'].split()

    def packages(self, component, architecture):
        """Return apt_pkg.TagFile for the Packages index of
        'component'/'architecture'."""
        fn = '{0}/binary-{1}/Packages'.format(component, architecture)
        tmp = obtain_release_file(self, fn)
        return apt_pkg.TagFile(tmp.fh())

    def sources(self, component):
        """Return apt_pkg.TagFile for the Sources index of 'component'."""
        fn = '{0}/source/Sources'.format(component)
        tmp = obtain_release_file(self, fn)
        return apt_pkg.TagFile(tmp.fh())

    def suite(self):
        return self._dict['Suite']

    def codename(self):
        return self._dict['Codename']

    # TODO: Handle Date/Valid-Until to make sure we import
    # a newer version than before
class File(object):
    """Temporary local copy of a (possibly remote) file."""

    def __init__(self):
        config = daklib.config.Config()
        # Store the data in a temporary file below Dir::TempPath; it is
        # removed automatically when this object is garbage collected.
        self._tmp = tempfile.NamedTemporaryFile(dir=config['Dir::TempPath'])

    def fh(self):
        """Return a file object for the local copy, rewound to the start."""
        self._tmp.seek(0)
        return self._tmp

    def hashes(self):
        """Return apt_pkg.Hashes for the local copy."""
        return apt_pkg.Hashes(self.fh())
def obtain_file(base, path):
    """Obtain a file 'path' located below 'base'

    Returns: daklib.import_repository.File

    Note: return type can still change
    """
    fn = '{0}/{1}'.format(base, path)
    tmp = File()
    if fn.startswith('http://'):
        # Remote repository: download with a timeout so a stuck server
        # cannot block the import forever; close the response explicitly
        # as urllib2 handles are not context managers in Python 2.
        fh = urllib2.urlopen(fn, timeout=300)
        try:
            shutil.copyfileobj(fh, tmp._tmp)
        finally:
            fh.close()
    else:
        # Local repository: plain file copy.
        with open(fn, 'r') as fh:
            shutil.copyfileobj(fh, tmp._tmp)
    return tmp
def obtain_release(base, suite_name, keyring, fingerprint=None):
    """Obtain release information

    Fetches dists/<suite>/InRelease, verifies its inline signature against
    'keyring' and checks that it really describes 'suite_name'.

    Returns: daklib.import_repository.Release
    """
    # TODO(review): 'fingerprint' is accepted but not checked against the
    # signature here — confirm whether callers rely on that check.
    tmp = obtain_file(base, 'dists/{0}/InRelease'.format(suite_name))
    data = tmp.fh().read()
    # SignedFile verifies the signature; it raises when verification fails.
    f = daklib.gpg.SignedFile(data, [keyring])
    r = Release(base, suite_name, f.contents)
    # Guard against a repository serving us the wrong suite.
    if r.suite() != suite_name and r.codename() != suite_name:
        raise Exception("Suite {0} doesn't match suite or codename from Release file.".format(suite_name))
    return r
# Compressed index variants, tried in this order of preference.
_compressions = ('.xz', '.gz', '.bz2')
def obtain_release_file(release, filename):
    """Obtain file referenced from Release

    A compressed version is automatically selected and decompressed if it exists.

    @type release: daklib.import_repository.Release
    @param filename: path of the file relative to the suite directory

    Raises IOError when 'filename' is not referenced in the Release file.

    Returns: daklib.import_repository.File
    """
    if filename not in release._hashes:
        raise IOError("File {0} not referenced in Release".format(filename))

    # Prefer a compressed variant if the Release file references one.
    compressed = False
    for ext in _compressions:
        compressed_file = filename + ext
        if compressed_file in release._hashes:
            compressed = True
            filename = compressed_file
            break

    # Obtain file and check hashes
    tmp = obtain_file(release._base, 'dists/{0}/{1}'.format(release._suite_name, filename))
    hashedfile = release._hashes[filename]
    hashedfile.check_fh(tmp.fh())

    if compressed:
        # Decompress into a second temporary file; the extension of
        # 'filename' selects the decompression algorithm.
        tmp2 = File()
        daklib.compress.decompress(tmp.fh(), tmp2.fh(), filename)
        tmp = tmp2

    return tmp
def import_source_to_archive(base, entry, transaction, archive, component):
    """Import source package described by 'entry' into the given 'archive' and 'component'

    'entry' needs to be a dict-like object with at least the following
    keys as used in a Sources index: Directory, Files, Checksums-Sha1,
    Checksums-Sha256

    Return: daklib.dbconn.DBSource
    """
    # Obtain and verify files
    if not daklib.regexes.re_file_safe_slash.match(entry['Directory']):
        raise Exception("Unsafe path in Directory field")
    hashed_files = daklib.upload.parse_file_list(entry, False)
    # Keep references to the temporary files so they are not garbage
    # collected (and thus deleted) before the transaction copied them
    # into the archive.
    tmp_files = []
    for f in hashed_files.values():
        path = os.path.join(entry['Directory'], f.filename)
        tmp = obtain_file(base, path)
        # Reject files whose checksums do not match the Sources index.
        f.check_fh(tmp.fh())
        tmp_files.append(tmp)
        directory, f.input_filename = os.path.split(tmp.fh().name)

    # Inject files into archive
    source = daklib.upload.Source(directory, hashed_files.values(), [], require_signature=False)
    for f in hashed_files.keys():
        if f.endswith('.dsc'):
            # The .dsc keeps the name chosen by daklib.upload.Source.
            continue
        source.files[f].input_filename = hashed_files[f].input_filename

    # TODO: allow changed_by to be NULL
    changed_by = source.dsc['Maintainer']
    db_changed_by = daklib.dbconn.get_or_set_maintainer(changed_by, transaction.session)
    db_source = transaction.install_source_to_archive(directory, source, archive, component, db_changed_by)

    return db_source
def import_package_to_suite(base, entry, transaction, suite, component):
    """Import binary package described by 'entry' into the given 'suite' and 'component'

    'entry' needs to be a dict-like object with at least the following
    keys as used in a Packages index: Filename, Size, MD5sum, SHA1,
    SHA256

    Returns: daklib.dbconn.DBBinary
    """
    # Obtain and verify file
    filename = entry['Filename']
    tmp = obtain_file(base, filename)
    directory, fn = os.path.split(tmp.fh().name)
    hashedfile = daklib.upload.HashedFile(os.path.basename(filename), long(entry['Size']), entry['MD5sum'], entry['SHA1'], entry['SHA256'], input_filename=fn)
    # Reject the download if it does not match the hashes from the index.
    hashedfile.check_fh(tmp.fh())

    # Inject file into archive
    binary = daklib.upload.Binary(directory, hashedfile)
    db_binary = transaction.install_binary(directory, binary, suite, component)

    return db_binary
def import_source_to_suite(base, entry, transaction, suite, component):
    """Import source package described by 'entry' into the given 'suite' and 'component'

    'entry' needs to be a dict-like object with at least the following
    keys as used in a Sources index: Directory, Files, Checksums-Sha1,
    Checksums-Sha256

    Returns: daklib.dbconn.DBSource
    """
    # Import into the suite's archive first, then register the suite
    # association on the resulting source package.
    source = import_source_to_archive(base, entry, transaction, suite.archive, component)
    source.suites.append(suite)
    return source
def source_in_archive(source, version, archive, component=None):
    """Check that source package 'source' with version 'version' exists in 'archive',
    with an optional check for the given component 'component'.

    @type source: str
    @type version: str
    @type archive: daklib.dbconn.Archive
    @type component: daklib.dbconn.Component or None
    @rtype: bool

    Note: This should probably be moved somewhere else
    """
    session = object_session(archive)
    candidates = (
        session.query(DBSource)
        .filter_by(source=source, version=version)
        .join(DBSource.poolfile)
        .join(PoolFile.archives)
        .filter_by(archive=archive)
    )
    if component is not None:
        candidates = candidates.filter_by(component=component)
    # Run as an EXISTS subquery so the database never returns actual rows.
    return session.query(candidates.exists()).scalar()