# Fragment of ts(): emit a wall-clock timestamp line used to build
# dinstall stat graphs. $1 is a label naming the stage being stamped.
# NOTE(review): the enclosing function header is elided in this excerpt.
2 # Timestamp. Used for dinstall stat graphs
4 echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"
# --- Lock management helpers ------------------------------------------
# NOTE(review): several bodies below are elided in this excerpt; only the
# visible lines are documented.

# Remove the daily (dinstall) lock file.
8 function remove_daily_lock() {

# Remove the changelog-export lock file.
12 # Remove changelog lock
13 function remove_changelog_lock() {

# Remove both the daily and the accepted-queue lock files at once.
# NOTE(review): $LOCK_DAILY / $LOCK_ACCEPTED are unquoted — assumes the
# paths contain no whitespace or glob characters.
18 function remove_all_locks() {
19 rm -f $LOCK_DAILY $LOCK_ACCEPTED

# Drop all locks, clear the signal traps installed by lockaccepted, and
# log that the locked part of the run is over.
22 function remove_locks {
24 trap - EXIT TERM HUP INT QUIT
25 ts "locked part finished"

# Take the accepted-queue lock and arrange for all locks to be removed
# on exit or on a fatal signal.
28 function lockaccepted {
29 lockfile "$LOCK_ACCEPTED"
30 trap remove_all_locks EXIT TERM HUP INT QUIT
# Error handler fragment: invoked when a dinstall stage fails, before the
# lock cleanup traps run. Builds a subject line describing whether the run
# is being continued or was interrupted (based on ${error}) and mails the
# failing stage's log (if readable) to cron@ftp-master.debian.org.
# NOTE(review): the function header, the `else` arm and the `fi` of the
# first conditional are elided in this excerpt.
33 # If we error out this one is called, *FOLLOWED* by cleanup above
35 ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S")
37 subject="ATTENTION ATTENTION!"
38 if [ "${error}" = "false" ]; then
39 subject="${subject} (continued)"
41 subject="${subject} (interrupted)"
43 subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE} - (Be quiet, Brain, or I'll stab you with a Q-tip)"
# Pipe either the stage log or a "does not exist" notice into mail; the
# whole if/else block feeds the pipeline.
45 if [ -r "${STAGEFILE}.log" ]; then
46 cat "${STAGEFILE}.log"
48 echo "file ${STAGEFILE}.log does not exist, sorry"
49 fi | mail -s "${subject}" -a "X-Debian: DAK" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" cron@ftp-master.debian.org
52 ########################################################################
53 # the actual dinstall functions follow #
54 ########################################################################

# Fragment: notify the QA host that dinstall is starting by running a
# trivial ssh command (sleep 1) as qa@qa.debian.org with a dedicated key.
# BatchMode prevents interactive prompts; timeouts cap a hung connection.
56 # pushing merkel's QA user, part one
58 log "Telling QA user that we start dinstall"
59 ssh -n -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@qa.debian.org sleep 1
# Fragment: refresh various generated files by running the helper scripts
# in $scriptsdir (bug docs, mirror lists, mailing-lists.txt,
# pseudo-packages). NOTE(review): exit statuses are not checked here.
62 # Updating various files
64 log "Updating Bugs docu, Mirror list and mailing-lists.txt"
66 $scriptsdir/update-bugdoctxt
67 $scriptsdir/update-mirrorlists
68 $scriptsdir/update-mailingliststxt
69 $scriptsdir/update-pseudopackages.sh
# Fragment: first i18n stage — pull fresh package-description translations
# from the ddtp sync host, verify the signed timestamp they ship, validate
# the files, and copy them into the public archive per-suite i18n dirs.
# On any failure a complaint mail goes to debian-l10n-devel.
72 # The first i18n one, syncing new descriptions
74 log "Synchronizing i18n package descriptions"
75 # First sync their newest data
76 cd ${scriptdir}/i18nsync
# "|| true": a failed sync is tolerated; the signature check below gates
# everything that follows.
77 rsync -aq --delete --delete-after ddtp-sync:/does/not/matter . || true

79 # Now check if we still know about the packages for which they created the files
80 # is the timestamp signed by us?
# NOTE(review): `if $(gpgv ...)` runs gpgv inside a command substitution
# and then executes its (expected-empty) stdout; the test only works
# because gpgv writes to stderr. Plain `if gpgv ...; then` would be the
# conventional form — left unchanged here as surrounding lines are elided.
81 if $(gpgv --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp); then
82 # now read it. As it's signed by us we are sure the content is what we expect, no need
83 # to do more here. And we only test -d a directory on it anyway.
84 TSTAMP=$(cat timestamp)
85 # do we have the dir still?
86 if [ -d ${scriptdir}/i18n/${TSTAMP} ]; then
# Run the sanity/validation script before publishing anything.
88 if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
89 # Yay, worked, let's copy around
90 for dir in stretch sid; do
91 if [ -d dists/${dir}/ ]; then
92 cd dists/${dir}/main/i18n
93 rsync -aq --delete --delete-after --exclude Translation-en.bz2 --exclude Translation-*.diff/ . ${ftpdir}/dists/${dir}/main/i18n/.
95 cd ${scriptdir}/i18nsync
# Validation failed: complain loudly by mail.
98 echo "ARRRR, bad guys, wrong files, ARRR"
99 echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
# Timestamp directory vanished: skip the update and complain by mail.
102 echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
103 echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
# Signature verification failed: refuse the data and complain by mail.
106 echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
107 echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -a "X-Debian: DAK" -s "You can't keep blaming yourself. Just blame yourself once, and move on." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
# Fragment: sync AppStream/DEP-11 metadata from the dep11 host, validate
# it, and publish it into dists/<suite>/<component>/dep11 for the listed
# suites/components. A validation failure is reported by mail.
111 # Syncing AppStream/DEP-11 data
113 log "Synchronizing AppStream metadata"
114 # First sync their newest data
115 mkdir -p ${scriptdir}/dep11
116 cd ${scriptdir}/dep11
# "|| true": tolerate a failed sync; validation below gates publication.
117 rsync -aq --delete --delete-after dep11-sync:/does/not/matter . || true

120 if ${scriptsdir}/dep11-basic-validate.py . ${scriptdir}/dep11/; then
121 # Yay, worked, let's copy around
122 for dir in stretch sid; do
123 if [ -d ${dir}/ ]; then
124 for comp in main contrib non-free; do
125 mkdir -p ${ftpdir}/dists/${dir}/${comp}/dep11
# NOTE(review): --exclude *.tmp is unquoted, so the shell may glob it in
# the current directory before rsync sees it — confirm intent.
127 rsync -aq --delete --delete-after --exclude *.tmp . ${ftpdir}/dists/${dir}/${comp}/dep11/.
128 cd ${scriptdir}/dep11
# Validation failed: report by mail.
133 echo "ARRRR, bad guys, wrong files, ARRR"
134 echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" mak@debian.org
# Fragment: override-cruft check (function header elided in this excerpt).
139 log "Checking for cruft in overrides"

# Drop obsolete source/binary associations and maintain the debug suites.
143 function dominate() {
144 log "Removing obsolete source and binary associations"
146 dak manage-debug-suites unstable-debug experimental-debug

# Remove obsolete binary packages; packages in experimental superseded by
# a newer version in unstable are removed with the "NVIU" message.
149 function autocruft() {
150 log "Check for obsolete binary packages"
151 dak auto-decruft -s unstable
152 dak auto-decruft -s experimental --if-newer-version-in unstable --if-newer-version-in-rm-msg "NVIU"
# Import the debian-keyring and debian-maintainers keyrings into dak.
# Changes produced by the DM keyring import are captured in ${OUTFILE};
# if non-empty, they are announced to debian-project via sendmail.
# NOTE(review): the heredoc terminator and closing brace are elided in
# this excerpt; no comments may be inserted inside the mail body below.
155 function fingerprints() {
156 log "Updating fingerprints"
157 dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
160 dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}"
# -s: only mail if the import actually reported changes.
162 if [ -s "${OUTFILE}" ]; then
163 /usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org <<EOF
164 From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>
165 To: <debian-project@lists.debian.org>
166 Subject: Debian Maintainers Keyring changes
167 Content-Type: text/plain; charset=utf-8
171 The following changes to the debian-maintainers keyring have just been activated:
175 Debian distribution maintenance software,
176 on behalf of the Keyring maintainers
# Write the override data out as text files.
183 function overrides() {
184 log "Writing overrides into text files"

# Fragment (enclosing function header elided): generate the package ->
# pool-file mapping for every public archive, bzip2-compressed into
# indices/package-file.map.bz2 under each archive root.
192 log "Generating package / file mapping"
193 for archive in "${public_archives[@]}"; do
194 archiveroot="$(get_archiveroot "${archive}")"
195 dak make-pkg-file-mapping "${archive}" | bzip2 -9 > "${archiveroot}/indices/package-file.map.bz2"
# Generate Packages, Sources and Contents indices for every public archive.
199 function packages() {
200 log "Generating Packages and Sources files"
201 for archive in "${public_archives[@]}"; do
202 log " Generating Packages/Sources for ${archive}"
203 dak generate-packages-sources2 -a "${archive}"
204 log " Generating Contents for ${archive}"
205 dak contents generate -a "${archive}"

# Fragment: generate pdiff (incremental index diff) files.
210 log "Generating pdiff files"
211 dak generate-index-diffs

# Fragment: generate (signed) Release files for every public archive.
215 log "Generating Release files"
216 for archive in "${public_archives[@]}"; do
217 dak generate-releases -a "${archive}"
# Clean out old packages/files from the suites and the unchecked queue.
# -m 10000 caps how much clean-suites processes in one run; -i points
# clean-queues at the unchecked queue directory.
221 function dakcleanup() {
222 log "Cleanup old packages/files"
223 dak clean-suites -m 10000
224 dak clean-queues -i "$unchecked"
# Fragment (enclosing function header elided): per public archive, delete
# stray core files, check symlinks, and produce a gzipped recursive
# ls -lR listing (UTC timestamps for reproducibility, --rsyncable for
# efficient mirror transfer).
231 for archive in "${public_archives[@]}"; do
232 archiveroot="$(get_archiveroot "${archive}")"

235 log "Removing any core files ..."
236 find -type f -name core -print -delete

238 log "Checking symlinks ..."

241 log "Creating recursive directory listing ... "
243 TZ=UTC ls -lR | gzip -9c --rsyncable > ${FILENAME}.gz
# Build the Maintainers and Uploaders indices for each public archive and
# publish gzipped copies into the archive's indices/ directory.
247 function mkmaintainers() {

251 log 'Creating Maintainers index ... '

253 for archive in "${public_archives[@]}"; do
254 archiveroot="$(get_archiveroot "${archive}")"
255 indices="${archiveroot}/indices"
# Skip archives that have no indices directory.
256 if ! [ -d "${indices}" ]; then

# Include pseudo-package maintainers alongside the real ones.
261 dak make-maintainers -a "${archive}" ${scriptdir}/masterfiles/pseudo-packages.maintainers
262 gzip -9v --rsyncable <Maintainers >Maintainers.gz
263 gzip -9v --rsyncable <Uploaders >Uploaders.gz
# Publish gzipped copies of the per-suite override files into the public
# indices directory, group-writable.
267 function copyoverrides() {
268 log 'Copying override files into public view ...'

# Brace expansion enumerates suite x {plain,extra} x component variants.
272 for ofile in ${overridedir}/override.{squeeze,wheezy,jessie,stretch,sid}.{,extra.}{main,contrib,non-free}*; do
274 gzip -9cv --rsyncable ${ofile} > ${indices}/${bname}.gz
275 chmod g+w ${indices}/${bname}.gz
# Build the per-architecture / per-suite / per-source file lists under
# ftp/indices/files/components, driven by SQL queries against the
# projectb database (archive 'ftp-master') plus filesystem scans.
# NOTE(review): many lines (query string delimiters, helper definitions,
# loop closers) are elided in this excerpt; comments are only placed at
# clear statement boundaries to avoid landing inside SQL strings.
280 function mkfilesindices() {

283 cd $base/ftp/indices/files/components

287 log "Querying postgres"
289 SELECT './pool/' || c.name || '/' || f.filename AS path, a.arch_string AS arch_string
291 JOIN files_archive_map af ON f.id = af.file_id
292 JOIN component c ON af.component_id = c.id
293 JOIN archive ON af.archive_id = archive.id
296 JOIN architecture a ON b.architecture = a.id)
298 WHERE archive.name = 'ftp-master'
299 ORDER BY path, arch_string
301 psql -At -c "$query" >$ARCHLIST
304 perl -ne 'print; while (m,/[^/]+$,) { $_=$`; print $_ . "\n" unless $d{$_}++; }'
307 perl -e '@nonpool=(); while (<>) { if (m,^\./pool/,) { print; } else { push @nonpool, $_; } } print for (@nonpool);'

# Source file list: pool paths without an arch suffix, plus top-level
# dists entries and everything under */source/.
310 log "Generating sources list"
312 sed -n 's/|$//p' $ARCHLIST
314 find ./dists -maxdepth 1 \! -type d
315 find ./dists \! -type d | grep "/source/"
316 ) | sort -u | gzip -9 > source.list.gz

# Per-arch lists: arch-specific pool paths, arch:all paths, and the
# matching dists subtrees (installer, Contents, binary-<arch>, ...).
318 log "Generating arch lists"
320 ARCHES=$( (<$ARCHLIST sed -n 's/^.*|//p'; echo amd64) | grep . | grep -v all | sort -u)
322 (sed -n "s/|$a$//p" $ARCHLIST
323 sed -n 's/|all$//p' $ARCHLIST
326 find ./dists -maxdepth 1 \! -type d
327 find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
328 ) | sort -u | gzip -9 > arch-$a.list.gz

# Per-suite lists: pool files referenced by the suite's sources
# (including extra source references) and binaries, plus the suite's
# dists tree (following the codename symlink).
331 log "Generating suite lists"
334 local suite_id="$(printf %d $1)"
337 SELECT DISTINCT './pool/' || c.name || '/' || f.filename
339 (SELECT sa.source AS source
340 FROM src_associations sa
341 WHERE sa.suite = $suite_id
344 FROM extra_src_references esr
345 JOIN bin_associations ba ON esr.bin_id = ba.bin
346 WHERE ba.suite = $suite_id
348 SELECT b.source AS source
349 FROM bin_associations ba
350 JOIN binaries b ON ba.bin = b.id WHERE ba.suite = $suite_id) s
351 JOIN dsc_files df ON s.source = df.source
352 JOIN files f ON df.file = f.id
353 JOIN files_archive_map af ON f.id = af.file_id
354 JOIN component c ON af.component_id = c.id
355 JOIN archive ON af.archive_id = archive.id
356 WHERE archive.name = 'ftp-master'
358 psql -F' ' -A -t -c "$query"
361 SELECT './pool/' || c.name || '/' || f.filename
362 FROM bin_associations ba
363 JOIN binaries b ON ba.bin = b.id
364 JOIN files f ON b.file = f.id
365 JOIN files_archive_map af ON f.id = af.file_id
366 JOIN component c ON af.component_id = c.id
367 JOIN archive ON af.archive_id = archive.id
368 WHERE ba.suite = $suite_id AND archive.name = 'ftp-master'
370 psql -F' ' -A -t -c "$query"
373 psql -F' ' -At -c "SELECT id, suite_name FROM suite" |
374 while read id suite; do
375 [ -e $base/ftp/dists/$suite ] || continue
378 distname=$(cd dists; readlink $suite || echo $suite)
379 find ./dists/$distname \! -type d
380 for distdir in ./dists/*; do
381 [ "$(readlink $distdir)" != "$distname" ] || echo $distdir
385 ) | sort -u | gzip -9 > suite-${suite}.list.gz

# Sundries: everything on the ftp site not covered by any generated list.
388 log "Finding everything on the ftp site to generate sundries"
389 (cd $base/ftp; find . \! -type d \! -name 'Archive_Maintenance_In_Progress' | sort) >$ARCHLIST
392 zcat *.list.gz | cat - *.list | sort -u |
393 diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list

# Combine per-arch, sundries, dists, project, docs and indices lists,
# pool entries first.
395 log "Generating files list"
398 (echo ./project/trace; zcat arch-$a.list.gz source.list.gz) |
399 cat - sundries.list dists.list project.list docs.list indices.list |
400 sort -u | poolfirst > ../arch-$a.files

404 for dist in sid jessie stretch; do
405 find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip -9 > $base/ftp/indices/files/components/translation-$dist.list.gz

# "typical" mirror profile: i386+amd64 plus proposed-updates and the
# translation lists.
409 (cat ../arch-i386.files ../arch-amd64.files; zcat suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-jessie.list.gz ; zcat translation-stretch.list.gz) |
410 sort -u | poolfirst > ../typical.files
# Build the dsync file list and the gzipped md5sums index for each public
# archive, then let dsync-flist hardlink duplicate files (best-effort:
# link-dups failures are tolerated via || true).
417 function mkchecksums() {
418 local archiveroot dsynclist md5list

420 for archive in "${public_archives[@]}"; do
421 archiveroot="$(get_archiveroot "${archive}")"
422 dsynclist=$dbdir/dsync.${archive}.list
423 md5list=${archiveroot}/indices/md5sums

425 log -n "Creating md5 / dsync index file for ${archive}... "
428 ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
429 ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n > ${md5list}.gz
430 ${bindir}/dsync-flist -q link-dups $dsynclist || true
# Fragment (enclosing function header elided): regenerate the "public"
# hardlinked mirror copy of each archive. The trace file records date,
# host and a monotonically increasing serial: the larger of today's
# YYYYMMDD01 stamp and (previous serial + 1).
435 local archiveroot mirrordir targetpath TRACEFILE

437 for archive in "${public_archives[@]}"; do
438 archiveroot="$(get_archiveroot "${archive}")"
439 mirrordir="${archiveroot}/../mirror"
440 targetpath="${mirrordir}/${archive}"
441 TRACEFILE="${archiveroot}/project/trace/ftp-master.debian.org"
442 mkdir -p "${archiveroot}/project/trace/"

444 log "Regenerating \"public\" mirror/${archive} hardlink fun"
445 DATE_SERIAL=$(date +"%Y%m%d01")
# Read the old serial from the trace file and bump it by one.
446 FILESOAPLUS1=$(awk '/serial/ { print $3+1 }' ${TRACEFILE} )
447 if [ "$DATE_SERIAL" -gt "$FILESOAPLUS1" ]; then
448 SERIAL="$DATE_SERIAL"
450 SERIAL="$FILESOAPLUS1"
452 date -u > ${TRACEFILE}
453 echo "Using dak v1" >> ${TRACEFILE}
454 echo "Running on host: $(hostname -f)" >> ${TRACEFILE}
455 echo "Archive serial: ${SERIAL}" >> ${TRACEFILE}

457 mkdir -p ${targetpath}
# --link-dest hardlinks unchanged files against the archive root instead
# of copying them; in-progress .new index files are excluded.
459 rsync -aH --link-dest ${archiveroot} --delete --delete-after --delete-excluded --exclude Packages.*.new --exclude Sources.*.new --ignore-errors ${archiveroot}/. .
# Fragment: expire old database dumps in the current directory matching
# "dump_*" (enclosing function header elided in this excerpt).
464 log "Expiring old database dumps..."
466 $scriptsdir/expire_dumps -d . -p -f "dump_*"

# Clean up transitions that are no longer applicable.
469 function transitionsclean() {
470 log "Removing out of date transitions..."
472 dak transitions -c -a
# Fragment: export the Debian Maintainer per-source upload permissions
# to a public text file (enclosing function header elided).
476 log "Updating DM permissions page"
477 dak acl export-per-source dm >$exportdir/dm.txt

# Fragment: auto-categorize uncategorized ftp.debian.org bugs, run as
# the unprivileged dak user.
481 log "Categorizing uncategorized bugs filed against ftp.debian.org"
482 sudo -u dak-unpriv dak bts-categorize

# Trigger the developer-accessible mirror to sync itself (including the
# ftp directory).
485 function ddaccess() {
486 # Tell our dd accessible mirror to sync itself up. Including ftp dir.
487 log "Trigger dd accessible parts sync including ftp dir"
488 ${scriptsdir}/sync-dd dd-sync dd-sync1 dd-sync2 sync
# Verify the public mirror copy of each archive against its InRelease
# files (existence, size, SHA1 of every listed entry), then kick off the
# mirror push via archvsync's runmirrors. A non-zero error count aborts
# the push for that archive.
# NOTE(review): several else/fi/done lines are elided in this excerpt.
491 function mirrorpush() {
492 log "Checking the public archive copies..."

494 local archiveroot mirrordir targetpath

496 for archive in "${public_archives[@]}"; do
497 log "... archive: ${archive}"
498 archiveroot="$(get_archiveroot "${archive}")"
499 mirrordir="${archiveroot}/../mirror"
500 targetpath="${mirrordir}/${archive}"
501 cd ${targetpath}/dists

# Iterate over every InRelease; its SHA1 section (extracted by the sed
# pipeline at the bottom of the loop) drives the per-file checks.
504 for release in $(find . -name "InRelease"); do
505 echo "Processing: ${release}"
506 subdir=${release%/InRelease}
507 while read SHASUM SIZE NAME; do
508 if ! [ -f "${subdir}/${NAME}" ]; then
509 bname=$(basename ${NAME})
# Uncompressed index variants are listed in InRelease but only shipped
# packed, so a missing plain file of these names is fine.
510 if [[ "${bname}" =~ ^(Packages|Sources|Contents-[a-zA-Z0-9-]+|Translation-[a-zA-Z_]+|Components-(amd64|i386)\.yml|icons-(128x128|64x64)\.tar)$ ]]; then

512 # We don't keep unpacked files, don't check for their existence.
513 # We might want to go and check their unpacked shasum, but right now
514 # I don't care. I believe it should be enough if all the packed shasums
518 broken=$(( broken + 1 ))
519 echo "File ${subdir}/${NAME} is missing"

523 # We do have symlinks in the tree (see the contents files currently).
524 # So we use "readlink -f" to check the size of the target, as that's basically
525 # what gen-releases does
526 fsize=$(stat -c %s $(readlink -f "${subdir}/${NAME}"))
527 if [ ${fsize} -ne ${SIZE} ]; then
528 broken=$(( broken + 1 ))
529 echo "File ${subdir}/${NAME} has size ${fsize}, expected is ${SIZE}"

533 fshasum=$(sha1sum $(readlink -f "${subdir}/${NAME}"))
534 fshasum=${fshasum%% *}
535 if [ "${fshasum}" != "${SHASUM}" ]; then
536 broken=$(( broken + 1 ))
537 echo "File ${subdir}/${NAME} has checksum ${fshasum}, expected is ${SHASUM}"
# Feed the loop with only the SHA1 stanza of the InRelease file.
540 done < <(sed '1,/SHA1:/d' "${release}" | sed '/SHA256:/,$d')

543 if [ $broken -gt 0 ]; then
544 log_error "Trouble with the public mirror for ${archive}, found ${broken} errors"

547 log "Starting the mirrorpush for ${archive}"
# NOTE(review): context elided — pusharg is presumably set per archive;
# only the backports branch is visible here.
557 pusharg="-a backports"

560 fname="mirrorstart.${archive}"
# Publish a mirrorstart stamp, then fire runmirrors in the background.
563 date -u > /srv/ftp.debian.org/web/${fname}
564 echo "Using dak v1" >> /srv/ftp.debian.org/web/${fname}
565 echo "Running on host $(hostname -f)" >> /srv/ftp.debian.org/web/${fname}
566 sudo -H -u archvsync /home/archvsync/runmirrors ${pusharg} > ~dak/runmirrors-${archive}.log 2>&1 &
# Sync the backports mirror by running its update script as the
# backports user.
571 function mirrorpush-backports() {
572 log "Syncing backports mirror"
573 sudo -u backports /home/backports/bin/update-archive
# Fragment: second i18n stage (enclosing function header elided) — export
# per-suite package lists for the i18n project into a timestamped
# directory, sign the timestamp with the archive key, and point the
# stable "i18n" symlink at the new export. Old exports (>2 days, not the
# current stamp) are pruned.
577 log "Exporting package data foo for i18n project"
578 STAMP=$(date "+%Y%m%d%H%M")
579 mkdir -p ${scriptdir}/i18n/${STAMP}
580 cd ${scriptdir}/i18n/${STAMP}
581 for suite in stable testing unstable; do
# Resolve the suite name to its codename via `dak admin`.
582 codename=$(dak admin s show ${suite}|grep '^Codename')
583 codename=${codename##* }
584 echo "Codename is ${codename}"
585 dak control-suite -l ${suite} >${codename}
# The detached signature over the timestamp is what i18n1 verifies with
# gpgv on the consumer side.
587 echo "${STAMP}" > timestamp
588 gpg --secret-keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 473041FA --detach-sign -o timestamp.gpg timestamp

# -T: replace the symlink atomically even if it exists.
592 ln -sfT ${scriptdir}/i18n/${STAMP} i18n

595 find ./i18n -mindepth 1 -maxdepth 1 -mtime +2 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
# Fragment: regenerate ftp traffic statistics (data file + R plot input)
# and the archive space / package count stats pages.
599 log "Updating stats data"
601 $scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
602 R --slave --vanilla < $base/misc/ftpstats.R
603 dak stats arch-space > $webdir/arch-space
604 dak stats pkg-nums > $webdir/pkg-nums

# Delete transaction-id files older than ~3 months (90 days).
607 function cleantransactions() {
608 log "Cleanup transaction ids older than 3 months"
610 find -maxdepth 1 -mindepth 1 -type f -name 'txid_*' -mtime +90 -delete
# Run the log-statistics tool over the given log file ($1).
613 function logstats() {
614 $masterdir/tools/logs.py "$1"

# Record the dinstall start time in ${dbdir}/dinstallstart; NOW is also
# read later by maillogfile/renamelogfile.
617 # save timestamp when we start
618 function savetimestamp() {
619 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
620 echo ${NOW} > "${dbdir}/dinstallstart"

# Mail the complete dinstall log to cron@ftp-master.debian.org.
623 function maillogfile() {
624 cat "$LOGFILE" | mail -a "X-Debian: DAK" -s "Log for dinstall run of ${NOW}" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" cron@ftp-master.debian.org
# Rename the current log file to dinstall_<starttime>.log and compress
# it. If the start-time stamp file exists, also run log statistics on it;
# otherwise fall back to the current time and skip the statistics.
627 function renamelogfile() {
628 if [ -f "${dbdir}/dinstallstart" ]; then
629 NOW=$(cat "${dbdir}/dinstallstart")
631 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
632 logstats "$logdir/dinstall_${NOW}.log"
633 bzip2 -9 "$logdir/dinstall_${NOW}.log"
635 error "Problem, I don't know when dinstall started, unable to do log statistics."
636 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
638 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
639 bzip2 -9 "$logdir/dinstall_${NOW}.log"
# Export the list of source packages in testing (heidi format) to the
# web directory.
643 function testingsourcelist() {
644 dak ls -s testing -f heidi -r .| egrep 'source$' > ${webdir}/testing.list

# Final run of the unchecked queue processing before dinstall proper;
# "-p" skips taking the (already held) lock.
647 # do a last run of process-unchecked before dinstall is on.
648 function process_unchecked() {
649 log "Processing the unchecked queue"
650 UNCHECKED_WITHOUT_LOCK="-p"
# Fragment: write a small state file (${DINSTALLSTATE}) describing the
# dinstall run: when it began and which action started when. The heredoc
# terminator is elided in this excerpt — no comments inside the body.
655 # Function to update a "statefile" telling people what we are doing

658 # This should be called with the argument(s)
659 # - Status name we want to show.

662 RIGHTNOW="$(date -u +"%a %b %d %T %Z %Y (%s)")"
663 cat >"${DINSTALLSTATE}" <<EOF
664 Dinstall start: ${DINSTALLBEGIN}
666 Action start: ${RIGHTNOW}
# Extract changelogs for ftp-master and backports and publish them to the
# public export trees. Guarded by $LOCK_CHANGELOG (lockfile retries 3
# times); the lock is removed and the removal re-armed via trap at the end.
670 # extract changelogs and stuff
671 function changelogs() {
672 if lockfile -r3 $LOCK_CHANGELOG; then
673 log "Extracting changelogs"
674 dak make-changelog -e -a ftp-master
# Compress the filelist after regeneration, if present.
675 [ -f ${exportdir}/changelogs/filelist.yaml ] && xz -f ${exportdir}/changelogs/filelist.yaml
676 mkdir -p ${exportpublic}/changelogs
677 cd ${exportpublic}/changelogs
678 rsync -aHW --delete --delete-after --ignore-errors ${exportdir}/changelogs/. .
# Fire-and-forget static component update for the metadata host.
679 sudo -H -u staticsync /usr/local/bin/static-update-component metadata.ftp-master.debian.org >/dev/null 2>&1 &

# Same dance for backports-master.
681 dak make-changelog -e -a backports
682 [ -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml ] && xz -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml
683 mkdir -p /srv/backports-master.debian.org/rsync/export/changelogs
684 cd /srv/backports-master.debian.org/rsync/export/changelogs
685 rsync -aHW --delete --delete-after --ignore-errors /srv/backports-master.debian.org/export/changelogs/. .
686 remove_changelog_lock
687 trap remove_changelog_lock EXIT TERM HUP INT QUIT
# Maintain a git tree of the pdiff-relevant index files (Contents,
# Packages, Sources) so their history can be diffed/committed.
# NOTE(review): large parts of this function are elided in this excerpt;
# $garchs is built but the find below reads $archs — confirm against the
# full source which variable is intended.
691 function gitpdiff() {
692 # Might be that we want to change this to have more than one git repository.
693 # Advantage of one is that we do not need much space in terms of storage in git itself,
694 # git gc is pretty good on our input.
695 # But it might be faster. Well, lets test.
696 log "Adjusting the git tree for pdiffs"

# Build an alternation (a\|b\|c) of all architectures for the regex below.
699 # The regex needs the architectures separated with \|
700 garchs=$(dak admin a list|sed -e ':q;N;s/\n/\\|/g;t q')

702 # First, get all the files we want to work on. ../dists/ is a symlink to the real dists/ we
704 # Also, we only want contents, packages and sources.
705 for file in $(find ../dists/ -regex ".*/\(Contents-\($archs\)\|\(Packages\|Sources\)\).gz"); do

714 # Second, add all there is into git
717 # Maybe we want to make this the same for tag and commit? But well, shouldn't matter
719 TAGD=$(date +%Y-%m-%d-%H-%M)
720 git commit -m "Commit of ${COMD}"