2 # Timestamp. Used for dinstall stat graphs
# $1 presumably names the dinstall stage being stamped — TODO confirm against caller.
4 echo "Archive maintenance timestamp ($1): $(date +%H:%M:%S)"
# Lock-file cleanup helpers.
# NOTE(review): this extract is sampled — the bodies and closing braces of
# remove_daily_lock and remove_changelog_lock are not visible here.
8 function remove_daily_lock() {
12 # Remove changelog lock
13 function remove_changelog_lock() {
# Drops both the daily and the accepted lock files (paths come from
# $LOCK_DAILY / $LOCK_ACCEPTED, defined elsewhere in the file).
18 function remove_all_locks() {
19 rm -f $LOCK_DAILY $LOCK_ACCEPTED
22 # If we error out this one is called, *FOLLOWED* by cleanup above
# Error handler: builds a subject line describing where dinstall failed and
# mails the failing stage's log (if readable) to cron@ftp-master.debian.org.
# NOTE(review): the `else` keywords matching the branches below are among the
# lines missing from this extract.
24 ERRDATE=$(date "+%Y.%m.%d-%H:%M:%S")
26 subject="ATTENTION ATTENTION!"
# ${error} and ${STAGEFILE} are presumably set by the stage driver — TODO confirm.
27 if [ "${error}" = "false" ]; then
28 subject="${subject} (continued)"
30 subject="${subject} (interrupted)"
32 subject="${subject} dinstall error at ${ERRDATE} in ${STAGEFILE} - (Be quiet, Brain, or I'll stab you with a Q-tip)"
# The whole if/else output is piped into mail so the log (or the "does not
# exist" note) becomes the message body.
34 if [ -r "${STAGEFILE}.log" ]; then
35 cat "${STAGEFILE}.log"
37 echo "file ${STAGEFILE}.log does not exist, sorry"
38 fi | mail -s "${subject}" -a "X-Debian: DAK" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" cron@ftp-master.debian.org
41 ########################################################################
42 # the actual dinstall functions follow #
43 ########################################################################
45 # pushing merkels QA user, part one
# Pings the QA host over ssh (BatchMode, 90s timeouts) so it knows dinstall
# has started; the remote command is just `sleep 1`.
47 log "Telling QA user that we start dinstall"
48 ssh -2 -i ~dak/.ssh/push_merkel_qa -o BatchMode=yes -o SetupTimeOut=90 -o ConnectTimeout=90 qa@qa.debian.org sleep 1
51 # Updating various files
# Runs the helper scripts in $scriptsdir that refresh bug docs, mirror lists,
# mailing-list text and pseudo-package data.
53 log "Updating Bugs docu, Mirror list and mailing-lists.txt"
55 $scriptsdir/update-bugdoctxt
56 $scriptsdir/update-mirrorlists
57 $scriptsdir/update-mailingliststxt
58 $scriptsdir/update-pseudopackages.sh
61 # The first i18n one, syncing new descriptions
# Pulls the latest translated package descriptions from the DDTP host,
# verifies their signed timestamp against our own keyring, validates the
# files, and copies them into the public dists/ tree. On any failure a
# complaint mail goes to debian-l10n-devel.
# NOTE(review): several `else`/`fi`/`done` lines of this section are missing
# from this extract; the visible lines imply the structure described above.
63 log "Synchronizing i18n package descriptions"
64 # First sync their newest data
65 cd ${scriptdir}/i18nsync
66 rsync -aq --delete --delete-after ddtp-sync:/does/not/matter . || true
68 # Now check if we still know about the packages for which they created the files
69 # is the timestamp signed by us?
70 if $(gpgv --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg timestamp.gpg timestamp); then
71 # now read it. As it's signed by us we are sure the content is what we expect, no need
72 # to do more here. And we only test -d a directory on it anyway.
73 TSTAMP=$(cat timestamp)
74 # do we have the dir still?
75 if [ -d ${scriptdir}/i18n/${TSTAMP} ]; then
# Run the sanity check script before copying anything into the archive.
77 if ${scriptsdir}/ddtp-i18n-check.sh . ${scriptdir}/i18n/${TSTAMP}; then
78 # Yay, worked, lets copy around
79 for dir in stretch sid; do
80 if [ -d dists/${dir}/ ]; then
81 cd dists/${dir}/main/i18n
82 rsync -aq --delete --delete-after --exclude Translation-en.bz2 --exclude Translation-*.diff/ . ${ftpdir}/dists/${dir}/main/i18n/.
84 cd ${scriptdir}/i18nsync
# Failure branch: validation rejected the files.
87 echo "ARRRR, bad guys, wrong files, ARRR"
88 echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
# Failure branch: the referenced timestamp directory no longer exists.
91 echo "ARRRR, missing the timestamp ${TSTAMP} directory, not updating i18n, ARRR"
92 echo "Arf, Arf, Arf, missing the timestamp ${TSTAMP} directory, not updating i18n, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Lisa, if you don't like your job you don't strike. You just go in every day and do it really half-assed. That's the American way." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
# Failure branch: gpgv could not verify the timestamp signature.
95 echo "ARRRRRRR, could not verify our timestamp signature, ARRR. Don't mess with our files, i18n guys, ARRRRR."
96 echo "Arf, Arf, Arf, could not verify our timestamp signature, arf. Don't mess with our files, i18n guys, arf, arf, arf" | mail -a "X-Debian: DAK" -s "You can't keep blaming yourself. Just blame yourself once, and move on." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" debian-l10n-devel@lists.alioth.debian.org
100 # Syncing AppStream/DEP-11 data
# Pulls AppStream/DEP-11 metadata from the dep11 sync host, validates it, and
# copies it per-suite/per-component into the public archive; on validation
# failure a complaint mail goes to mak@debian.org.
# NOTE(review): the `done`/`else`/`fi` lines of this section are missing from
# this extract.
102 log "Synchronizing AppStream metadata"
103 # First sync their newest data
104 mkdir -p ${scriptdir}/dep11
105 cd ${scriptdir}/dep11
106 rsync -aq --delete --delete-after dep11-sync:/does/not/matter . || true
# Validate before publishing anything.
109 if ${scriptsdir}/dep11-basic-validate.py . ${scriptdir}/dep11/; then
110 # Yay, worked, lets copy around
111 for dir in stretch sid; do
112 if [ -d ${dir}/ ]; then
113 for comp in main contrib non-free; do
114 mkdir -p ${ftpdir}/dists/${dir}/${comp}/dep11
# NOTE(review): --exclude *.tmp is unquoted here; the shell may glob it
# against the cwd before rsync sees it — verify against the full script.
116 rsync -aq --delete --delete-after --exclude *.tmp . ${ftpdir}/dists/${dir}/${comp}/dep11/.
117 cd ${scriptdir}/dep11
# Failure branch: validation rejected the files.
122 echo "ARRRR, bad guys, wrong files, ARRR"
123 echo "Arf, Arf, Arf, bad guys, wrong files, arf, arf, arf" | mail -a "X-Debian: DAK" -s "Don't you kids take anything. I'm watching you. I've got eye implants in the back of my head." -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" mak@debian.org
128 log "Checking for cruft in overrides"
# Removes obsolete source/binary associations and manages the debug suites.
# NOTE(review): closing braces for these functions are missing from this extract.
132 function dominate() {
133 log "Removing obsolete source and binary associations"
135 dak manage-debug-suites unstable-debug experimental-debug
# Automatic decrufting of obsolete binaries in unstable and experimental;
# the experimental pass removes packages superseded by a newer version in
# unstable ("NVIU" removal message).
138 function autocruft() {
139 log "Check for obsolete binary packages"
140 dak auto-decruft -s unstable
141 dak auto-decruft -s experimental --if-newer-version-in unstable --if-newer-version-in-rm-msg "NVIU"
# Imports the Debian and Debian Maintainers keyrings; if the DM import
# produced output (changes), it is mailed to debian-project via sendmail.
# NOTE(review): the creation of ${OUTFILE} and the tail of the here-doc /
# function are missing from this extract.
144 function fingerprints() {
145 log "Updating fingerprints"
146 dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
149 dak import-keyring --generate-users "%s" /srv/keyring.debian.org/keyrings/debian-maintainers.gpg >"${OUTFILE}"
# Only mail if the import actually reported changes (file is non-empty).
151 if [ -s "${OUTFILE}" ]; then
152 /usr/sbin/sendmail -odq -oi -t -f envelope@ftp-master.debian.org <<EOF
153 From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>
154 To: <debian-project@lists.debian.org>
155 Subject: Debian Maintainers Keyring changes
156 Content-Type: text/plain; charset=utf-8
160 The following changes to the debian-maintainers keyring have just been activated:
164 Debian distribution maintenance software,
165 on behalf of the Keyring maintainers
# Dumps override data to text files (body not visible in this extract).
172 function overrides() {
173 log "Writing overrides into text files"
# For every public archive, generate a bzip2-compressed package-to-file
# mapping under the archive's indices/ directory.
181 log "Generating package / file mapping"
182 for archive in "${public_archives[@]}"; do
183 archiveroot="$(get_archiveroot "${archive}")"
184 dak make-pkg-file-mapping "${archive}" | bzip2 -9 > "${archiveroot}/indices/package-file.map.bz2"
# Generates Packages/Sources and Contents indices for each public archive.
# NOTE(review): loop/function terminators are missing from this extract.
188 function packages() {
189 log "Generating Packages and Sources files"
190 for archive in "${public_archives[@]}"; do
191 log " Generating Packages/Sources for ${archive}"
192 dak generate-packages-sources2 -a "${archive}"
193 log " Generating Contents for ${archive}"
194 dak contents generate -a "${archive}"
# Incremental index diffs (pdiffs) for apt.
199 log "Generating pdiff files"
200 dak generate-index-diffs
# Release files per archive.
204 log "Generating Release files"
205 for archive in "${public_archives[@]}"; do
206 dak generate-releases -a "${archive}"
# Cleans up old packages/files from suites and queues.
210 function dakcleanup() {
211 log "Cleanup old packages/files"
212 dak clean-suites -m 10000
213 dak clean-queues -i "$unchecked"
# Per-archive housekeeping: remove stray core files, check symlinks, and
# write a gzipped recursive directory listing (${FILENAME}.gz, rsyncable).
# NOTE(review): the cd into the archive root and the definition of
# ${FILENAME} are among the lines missing from this extract.
220 for archive in "${public_archives[@]}"; do
221 archiveroot="$(get_archiveroot "${archive}")"
224 log "Removing any core files ..."
225 find -type f -name core -print -delete
227 log "Checking symlinks ..."
230 log "Creating recursive directory listing ... "
# TZ=UTC keeps the listing timestamps reproducible across runs.
232 TZ=UTC ls -lR | gzip -9c --rsyncable > ${FILENAME}.gz
# Builds the Maintainers and Uploaders indices for each public archive and
# compresses them with rsync-friendly gzip.
236 function mkmaintainers() {
240 log 'Creating Maintainers index ... '
242 for archive in "${public_archives[@]}"; do
243 archiveroot="$(get_archiveroot "${archive}")"
244 indices="${archiveroot}/indices"
# Skip archives that have no indices directory.
245 if ! [ -d "${indices}" ]; then
250 dak make-maintainers -a "${archive}" ${scriptdir}/masterfiles/pseudo-packages.maintainers
251 gzip -9v --rsyncable <Maintainers >Maintainers.gz
252 gzip -9v --rsyncable <Uploaders >Uploaders.gz
# Publishes the override files (per suite/component, incl. extra overrides)
# into the public indices directory as group-writable .gz files.
# NOTE(review): ${indices}, ${overridedir} and ${bname} are set elsewhere;
# their definitions are not visible in this extract.
256 function copyoverrides() {
257 log 'Copying override files into public view ...'
261 for ofile in ${overridedir}/override.{squeeze,wheezy,jessie,stretch,sid}.{,extra.}{main,contrib,non-free}*; do
263 gzip -9cv --rsyncable ${ofile} > ${indices}/${bname}.gz
264 chmod g+w ${indices}/${bname}.gz
# Builds the per-architecture / per-suite / source file lists published under
# ftp/indices/files/components. It queries postgres for pool paths plus their
# architectures, combines that with `find` output over dists/, and writes a
# family of gzipped list files (arch-*.list.gz, suite-*.list.gz,
# source.list.gz, translation-*.list.gz, typical.files, ...).
# NOTE(review): many intermediate lines (umask, helper definitions such as
# `poolfirst`, subshell openers and `done` terminators) are missing from this
# extract; comments below describe only what the visible lines show.
269 function mkfilesindices() {
272 cd $base/ftp/indices/files/components
# Main query: every pool path in the ftp-master archive with its arch.
276 log "Querying postgres"
278 SELECT './pool/' || c.name || '/' || f.filename AS path, a.arch_string AS arch_string
280 JOIN files_archive_map af ON f.id = af.file_id
281 JOIN component c ON af.component_id = c.id
282 JOIN archive ON af.archive_id = archive.id
285 JOIN architecture a ON b.architecture = a.id)
287 WHERE archive.name = 'ftp-master'
288 ORDER BY path, arch_string
290 psql -At -c "$query" >$ARCHLIST
# Emit each path plus all of its parent directories exactly once.
293 perl -ne 'print; while (m,/[^/]+$,) { $_=$`; print $_ . "\n" unless $d{$_}++; }'
# Order pool/ paths before everything else.
296 perl -e '@nonpool=(); while (<>) { if (m,^\./pool/,) { print; } else { push @nonpool, $_; } } print for (@nonpool);'
299 log "Generating sources list"
# Lines ending in '|' (no arch) in $ARCHLIST are source files.
301 sed -n 's/|$//p' $ARCHLIST
303 find ./dists -maxdepth 1 \! -type d
304 find ./dists \! -type d | grep "/source/"
305 ) | sort -u | gzip -9 > source.list.gz
307 log "Generating arch lists"
# All architectures seen in $ARCHLIST, plus amd64, minus 'all'.
309 ARCHES=$( (<$ARCHLIST sed -n 's/^.*|//p'; echo amd64) | grep . | grep -v all | sort -u)
311 (sed -n "s/|$a$//p" $ARCHLIST
312 sed -n 's/|all$//p' $ARCHLIST
315 find ./dists -maxdepth 1 \! -type d
316 find ./dists \! -type d | grep -E "(proposed-updates.*_$a.changes$|/main/disks-$a/|/main/installer-$a/|/Contents-$a|/binary-$a/)"
317 ) | sort -u | gzip -9 > arch-$a.list.gz
320 log "Generating suite lists"
# Per-suite pool paths: sources referenced by the suite (directly, via
# extra_src_references, or via its binaries) ...
323 local suite_id="$(printf %d $1)"
326 SELECT DISTINCT './pool/' || c.name || '/' || f.filename
328 (SELECT sa.source AS source
329 FROM src_associations sa
330 WHERE sa.suite = $suite_id
333 FROM extra_src_references esr
334 JOIN bin_associations ba ON esr.bin_id = ba.bin
335 WHERE ba.suite = $suite_id
337 SELECT b.source AS source
338 FROM bin_associations ba
339 JOIN binaries b ON ba.bin = b.id WHERE ba.suite = $suite_id) s
340 JOIN dsc_files df ON s.source = df.source
341 JOIN files f ON df.file = f.id
342 JOIN files_archive_map af ON f.id = af.file_id
343 JOIN component c ON af.component_id = c.id
344 JOIN archive ON af.archive_id = archive.id
345 WHERE archive.name = 'ftp-master'
347 psql -F' ' -A -t -c "$query"
# ... plus the binary files themselves.
350 SELECT './pool/' || c.name || '/' || f.filename
351 FROM bin_associations ba
352 JOIN binaries b ON ba.bin = b.id
353 JOIN files f ON b.file = f.id
354 JOIN files_archive_map af ON f.id = af.file_id
355 JOIN component c ON af.component_id = c.id
356 JOIN archive ON af.archive_id = archive.id
357 WHERE ba.suite = $suite_id AND archive.name = 'ftp-master'
359 psql -F' ' -A -t -c "$query"
# Iterate over all suites known to the database that exist under dists/.
362 psql -F' ' -At -c "SELECT id, suite_name FROM suite" |
363 while read id suite; do
364 [ -e $base/ftp/dists/$suite ] || continue
# Resolve symlinked suite names (e.g. codename aliases) to the real dir.
367 distname=$(cd dists; readlink $suite || echo $suite)
368 find ./dists/$distname \! -type d
369 for distdir in ./dists/*; do
370 [ "$(readlink $distdir)" != "$distname" ] || echo $distdir
374 ) | sort -u | gzip -9 > suite-${suite}.list.gz
377 log "Finding everything on the ftp site to generate sundries"
378 (cd $base/ftp; find . \! -type d \! -name 'Archive_Maintenance_In_Progress' | sort) >$ARCHLIST
# Sundries = everything on disk that no generated list accounts for.
381 zcat *.list.gz | cat - *.list | sort -u |
382 diff - $ARCHLIST | sed -n 's/^> //p' > sundries.list
384 log "Generating files list"
387 (echo ./project/trace; zcat arch-$a.list.gz source.list.gz) |
388 cat - sundries.list dists.list project.list docs.list indices.list |
389 sort -u | poolfirst > ../arch-$a.files
# Per-suite translation lists for the i18n files.
393 for dist in sid jessie stretch; do
394 find ./dists/$dist/main/i18n/ \! -type d | sort -u | gzip -9 > $base/ftp/indices/files/components/translation-$dist.list.gz
# "typical" mirror profile: i386+amd64, proposed-updates and translations.
398 (cat ../arch-i386.files ../arch-amd64.files; zcat suite-proposed-updates.list.gz ; zcat translation-sid.list.gz ; zcat translation-jessie.list.gz ; zcat translation-stretch.list.gz) |
399 sort -u > ../typical.files
# Builds md5 / dsync index files for each public archive and hardlinks
# duplicate files via dsync-flist.
406 function mkchecksums() {
407 local archiveroot dsynclist md5list
409 for archive in "${public_archives[@]}"; do
410 archiveroot="$(get_archiveroot "${archive}")"
411 dsynclist=$dbdir/dsync.${archive}.list
412 md5list=${archiveroot}/indices/md5sums
414 log -n "Creating md5 / dsync index file for ${archive}... "
# Generate the file list (excluding the list file itself), publish the
# gzipped md5sums, then link duplicate files; link-dups failures are
# deliberately non-fatal.
417 ${bindir}/dsync-flist -q generate $dsynclist --exclude $dsynclist --md5
418 ${bindir}/dsync-flist -q md5sums $dsynclist | gzip -9n > ${md5list}.gz
419 ${bindir}/dsync-flist -q link-dups $dsynclist || true
# Regenerates the "public" hardlinked mirror copy of each archive and bumps
# the trace file serial (date-based serial, or last serial + 1, whichever is
# greater).
# NOTE(review): the enclosing function line and several terminators are
# missing from this extract.
424 local archiveroot mirrordir targetpath TRACEFILE
426 for archive in "${public_archives[@]}"; do
427 archiveroot="$(get_archiveroot "${archive}")"
428 mirrordir="${archiveroot}/../mirror"
429 targetpath="${mirrordir}/${archive}"
430 TRACEFILE="${archiveroot}/project/trace/ftp-master.debian.org"
431 mkdir -p "${archiveroot}/project/trace/"
433 log "Regenerating \"public\" mirror/${archive} hardlink fun"
# Candidate serials: today's date with a 01 suffix, vs. previous serial + 1
# read back out of the existing trace file.
434 DATE_SERIAL=$(date +"%Y%m%d01")
435 FILESOAPLUS1=$(awk '/serial/ { print $3+1 }' ${TRACEFILE} )
436 if [ "$DATE_SERIAL" -gt "$FILESOAPLUS1" ]; then
437 SERIAL="$DATE_SERIAL"
439 SERIAL="$FILESOAPLUS1"
# Rewrite the trace file from scratch.
441 date -u > ${TRACEFILE}
442 echo "Using dak v1" >> ${TRACEFILE}
443 echo "Running on host: $(hostname -f)" >> ${TRACEFILE}
444 echo "Archive serial: ${SERIAL}" >> ${TRACEFILE}
446 mkdir -p ${targetpath}
# Hardlink-copy the archive into the mirror dir; *.new index files are
# excluded so in-progress generations are not published.
448 rsync -aH --link-dest ${archiveroot} --delete --delete-after --delete-excluded --exclude Packages.*.new --exclude Sources.*.new --ignore-errors ${archiveroot}/. .
# Expire old database dumps via the helper script.
453 log "Expiring old database dumps..."
455 $scriptsdir/expire_dumps -d . -p -f "dump_*"
# Clean out-of-date entries from the transitions file.
458 function transitionsclean() {
459 log "Removing out of date transitions..."
461 dak transitions -c -a
# Export per-source DM upload permissions to the public web dir.
465 log "Updating DM permissions page"
466 dak acl export-per-source dm >$exportdir/dm.txt
# Categorize uncategorized ftp.debian.org bugs, as an unprivileged user.
470 log "Categorizing uncategorized bugs filed against ftp.debian.org"
471 sudo -u dak-unpriv dak bts-categorize
474 function ddaccess() {
475 # Tell our dd accessible mirror to sync itself up. Including ftp dir.
476 log "Trigger dd accessible parts sync including ftp dir"
477 ${scriptsdir}/sync-dd dd-sync dd-sync1 dd-sync2 sync
# Verifies the public mirror copy of each archive against its InRelease
# files (presence, size, SHA1 of every listed index file), then — if nothing
# is broken — writes a mirrorstart stamp and kicks off runmirrors in the
# background as the archvsync user.
# NOTE(review): loop terminators, the ${broken} initialization and parts of
# the pusharg selection are missing from this extract.
480 function mirrorpush() {
481 log "Checking the public archive copies..."
483 local archiveroot mirrordir targetpath
485 for archive in "${public_archives[@]}"; do
486 log "... archive: ${archive}"
487 archiveroot="$(get_archiveroot "${archive}")"
488 mirrordir="${archiveroot}/../mirror"
489 targetpath="${mirrordir}/${archive}"
490 cd ${targetpath}/dists
493 for release in $(find . -name "InRelease"); do
494 echo "Processing: ${release}"
495 subdir=${release%/InRelease}
# The while loop below reads the SHA1 section of the InRelease file
# (stripped out by the sed pipeline at its `done`).
496 while read SHASUM SIZE NAME; do
497 if ! [ -f "${subdir}/${NAME}" ]; then
498 bname=$(basename ${NAME})
# Uncompressed variants of these index files are never kept on disk,
# so a missing file of this shape is expected, not an error.
499 if [[ "${bname}" =~ ^(Packages|Sources|Contents-[a-zA-Z0-9-]+|Translation-[a-zA-Z_]+|Components-(amd64|i386)\.yml|icons-(128x128|64x64)\.tar)$ ]]; then
501 # We don't keep unpacked files, don't check for their existence.
502 # We might want to go and check their unpacked shasum, but right now
503 # I don't care. I believe it should be enough if all the packed shasums
# Any other missing file counts as breakage.
507 broken=$(( broken + 1 ))
508 echo "File ${subdir}/${NAME} is missing"
512 # We do have symlinks in the tree (see the contents files currently).
513 # So we use "readlink -f" to check the size of the target, as that's basically
514 # what gen-releases does
515 fsize=$(stat -c %s $(readlink -f "${subdir}/${NAME}"))
516 if [ ${fsize} -ne ${SIZE} ]; then
517 broken=$(( broken + 1 ))
518 echo "File ${subdir}/${NAME} has size ${fsize}, expected is ${SIZE}"
# Size matched; now verify the SHA1 checksum.
522 fshasum=$(sha1sum $(readlink -f "${subdir}/${NAME}"))
523 fshasum=${fshasum%% *}
524 if [ "${fshasum}" != "${SHASUM}" ]; then
525 broken=$(( broken + 1 ))
526 echo "File ${subdir}/${NAME} has checksum ${fshasum}, expected is ${SHASUM}"
# Feed the loop with just the SHA1 block of the InRelease file.
529 done < <(sed '1,/SHA1:/d' "${release}" | sed '/SHA256:/,$d')
532 if [ $broken -gt 0 ]; then
533 log_error "Trouble with the public mirror for ${archive}, found ${broken} errors"
536 log "Starting the mirrorpush for ${archive}"
546 pusharg="-a backports"
549 fname="mirrorstart.${archive}"
# Stamp file that downstream mirrors read to detect a new push.
552 date -u > /srv/ftp.debian.org/web/${fname}
553 echo "Using dak v1" >> /srv/ftp.debian.org/web/${fname}
554 echo "Running on host $(hostname -f)" >> /srv/ftp.debian.org/web/${fname}
# Fire-and-forget: the actual push runs in the background, logging to ~dak.
555 sudo -H -u archvsync /home/archvsync/runmirrors ${pusharg} > ~dak/runmirrors-${archive}.log 2>&1 &
# Triggers the backports mirror sync as the backports user.
559 function mirrorpush-backports() {
560 log "Syncing backports mirror"
561 sudo -u backports /home/backports/bin/update-archive
# Exports per-suite package lists for the i18n project into a timestamped
# directory, signs the timestamp (so the i18n1 step above can later verify
# it with gpgv), repoints the i18n symlink, and prunes exports older than
# two days.
565 log "Exporting package data foo for i18n project"
566 STAMP=$(date "+%Y%m%d%H%M")
567 mkdir -p ${scriptdir}/i18n/${STAMP}
568 cd ${scriptdir}/i18n/${STAMP}
569 for suite in stable testing unstable; do
# Map the suite name to its codename via `dak admin`.
570 codename=$(dak admin s show ${suite}|grep '^Codename')
571 codename=${codename##* }
572 echo "Codename is ${codename}"
573 dak control-suite -l ${suite} >${codename}
575 echo "${STAMP}" > timestamp
# Detached-sign the timestamp with our archive key (473041FA).
576 gpg --secret-keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp-master.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 473041FA --detach-sign -o timestamp.gpg timestamp
# Atomically repoint the i18n symlink to the new export.
580 ln -sfT ${scriptdir}/i18n/${STAMP} i18n
# Remove exports older than 2 days, keeping the one we just made.
583 find ./i18n -mindepth 1 -maxdepth 1 -mtime +2 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
# Refresh dinstall statistics: regenerate ftpstats data from the logs, run
# the R plotting script, and export arch-space / package-count stats.
587 log "Updating stats data"
589 $scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
590 R --slave --vanilla < $base/misc/ftpstats.R
591 dak stats arch-space > $webdir/arch-space
592 dak stats pkg-nums > $webdir/pkg-nums
# Delete txid_* marker files older than ~3 months.
595 function cleantransactions() {
596 log "Cleanup transaction ids older than 3 months"
598 find -maxdepth 1 -mindepth 1 -type f -name 'txid_*' -mtime +90 -delete
# Feed a finished logfile to the stats tool.
601 function logstats() {
602 $masterdir/tools/logs.py "$1"
605 # save timestamp when we start
# Records the dinstall start time in ${dbdir}/dinstallstart; read back later
# by renamelogfile.
606 function savetimestamp() {
607 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
608 echo ${NOW} > "${dbdir}/dinstallstart"
# Mails the full dinstall log to cron@ftp-master.
611 function maillogfile() {
612 cat "$LOGFILE" | mail -a "X-Debian: DAK" -s "Log for dinstall run of ${NOW}" -a "From: Debian FTP Masters <ftpmaster@ftp-master.debian.org>" cron@ftp-master.debian.org
# Renames $LOGFILE to a timestamped name using the start time saved by
# savetimestamp, runs log statistics on it, and compresses it. If the start
# timestamp is missing, fall back to "now" and skip the statistics.
615 function renamelogfile() {
616 if [ -f "${dbdir}/dinstallstart" ]; then
617 NOW=$(cat "${dbdir}/dinstallstart")
619 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
620 logstats "$logdir/dinstall_${NOW}.log"
621 bzip2 -9 "$logdir/dinstall_${NOW}.log"
# Fallback branch: no recorded start time.
623 error "Problem, I don't know when dinstall started, unable to do log statistics."
624 NOW=`date "+%Y.%m.%d-%H:%M:%S"`
626 mv "$LOGFILE" "$logdir/dinstall_${NOW}.log"
627 bzip2 -9 "$logdir/dinstall_${NOW}.log"
# Exports the list of source packages currently in testing (heidi format).
631 function testingsourcelist() {
632 dak ls -s testing -f heidi -r .| egrep 'source$' > ${webdir}/testing.list
635 # do a last run of process-unchecked before dinstall is on.
# NOTE(review): the rest of this function's body is not visible in this
# extract; only the lock-skipping flag setup is shown.
636 function process_unchecked() {
637 log "Processing the unchecked queue"
638 UNCHECKED_WITHOUT_LOCK="-p"
643 # Function to update a "statefile" telling people what we are doing
646 # This should be called with the argument(s)
647 # - Status name we want to show.
# Writes the current dinstall state (start time + current action + time) to
# ${DINSTALLSTATE}; the here-doc terminator is not visible in this extract.
650 RIGHTNOW="$(date -u +"%a %b %d %T %Z %Y (%s)")"
651 cat >"${DINSTALLSTATE}" <<EOF
652 Dinstall start: ${DINSTALLBEGIN}
654 Action start: ${RIGHTNOW}
658 # extract changelogs and stuff
# Under the changelog lock: extract changelogs for ftp-master and backports,
# compress the filelist metadata, publish via rsync, and trigger the static
# site update in the background.
659 function changelogs() {
# Take the changelog lock with 3 retries; skip everything if unavailable.
660 if lockfile -r3 $LOCK_CHANGELOG; then
661 log "Extracting changelogs"
662 dak make-changelog -e -a ftp-master
663 [ -f ${exportdir}/changelogs/filelist.yaml ] && xz -f ${exportdir}/changelogs/filelist.yaml
664 mkdir -p ${exportpublic}/changelogs
665 cd ${exportpublic}/changelogs
666 rsync -aHW --delete --delete-after --ignore-errors ${exportdir}/changelogs/. .
# Background static-site publish; output intentionally discarded.
667 sudo -H -u staticsync /usr/local/bin/static-update-component metadata.ftp-master.debian.org >/dev/null 2>&1 &
# Same dance for the backports archive.
669 dak make-changelog -e -a backports
670 [ -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml ] && xz -f /srv/backports-master.debian.org/export/changelogs/filelist.yaml
671 mkdir -p /srv/backports-master.debian.org/rsync/export/changelogs
672 cd /srv/backports-master.debian.org/rsync/export/changelogs
673 rsync -aHW --delete --delete-after --ignore-errors /srv/backports-master.debian.org/export/changelogs/. .
674 remove_changelog_lock
# NOTE(review): the trap is installed *after* the explicit lock removal in
# the visible lines — verify the full script; intervening lines are missing
# from this extract.
675 trap remove_changelog_lock EXIT TERM HUP INT QUIT
679 function gitpdiff() {
680 # Might be that we want to change this to have more than one git repository.
681 # Advantage of one is that we do not need much space in terms of storage in git itself,
682 # git gc is pretty good on our input.
683 # But it might be faster. Well, lets test.
684 log "Adjusting the git tree for pdiffs"
687 # The regex needs the architectures separated with \|
# Build an alternation of all known architectures for the find -regex below.
688 garchs=$(dak admin a list|sed -e ':q;N;s/\n/\\|/g;t q')
690 # First, get all the files we want to work on. ../dists/ is a symlink to the real dists/ we
692 # Also, we only want contents, packages and sources.
# NOTE(review): the loop body and its `done` are missing from this extract;
# the pattern uses $archs here while $garchs is computed above — verify
# against the full script.
693 for file in $(find ../dists/ -regex ".*/\(Contents-\($archs\)\|\(Packages\|Sources\)\).gz"); do
702 # Second, add all there is into git
705 # Maybe we want to make this the same for tag and commit? But well, shouldn't matter
707 TAGD=$(date +%Y-%m-%d-%H-%M)
# NOTE(review): the commit message references ${COMD}, whose definition is
# not visible in this extract.
708 git commit -m "Commit of ${COMD}"