[vector] add AMI data preparation scripts

pull/1335/head
qingen 3 years ago
parent 2775235c8d
commit 9d32f62f48

@@ -1,5 +0,0 @@
# [AMI](https://groups.inf.ed.ac.uk/ami/corpus/)
The AMI Meeting Corpus is a multi-modal data set consisting of 100 hours of meeting recordings. For a gentle introduction to the corpus, see the [corpus overview](https://groups.inf.ed.ac.uk/ami/corpus/overview.shtml). To access the data, follow the directions given [there](https://groups.inf.ed.ac.uk/ami/download). Around two-thirds of the data has been elicited using a scenario in which the participants play different roles in a design team, taking a design project from kick-off to completion over the course of a day. The rest consists of naturally occurring meetings in a range of domains.
Detailed information can be found in the [documentation section](http://groups.inf.ed.ac.uk/ami/corpus/datasets.shtml).

@@ -0,0 +1,13 @@
# Speaker Diarization on AMI corpus
## About the AMI corpus
"The AMI Meeting Corpus consists of 100 hours of meeting recordings. The recordings use a range of signals synchronized to a common timeline. These include close-talking and far-field microphones, individual and room-view video cameras, and output from a slide projector and an electronic whiteboard. During the meetings, the participants also have unsynchronized pens available to them that record what is written. The meetings were recorded in English using three different rooms with different acoustic properties, and include mostly non-native speakers." See [ami overview](http://groups.inf.ed.ac.uk/ami/corpus/overview.shtml) for more details.
## About the example
The script performs speaker diarization using x-vectors (TDNN, ECAPA-TDNN) on the AMI Mix-Headset data. We demonstrate two clustering methods: AHC and spectral clustering.
## How to Run
Use the following command to run diarization on the AMI corpus.
`bash ./run.sh`
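If the corpus is not under the default paths hard-coded in `local/data.sh`, the variables at the top of that script can be overridden on the command line. A minimal sketch, assuming the usual `utils/parse_options.sh` behaviour of exposing each variable as a `--flag` (the paths below are placeholders):

```bash
# Run only the data-preparation stage, pointing at local copies of the
# corpus and the manual annotations (placeholder paths).
bash ./local/data.sh --stage 1 \
    --data_folder /path/to/amicorpus \
    --manual_annot_folder /path/to/ami_public_manual_1.6.2
```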
## Results (DER)
Coming soon! :)

@@ -567,7 +567,6 @@ if __name__ == '__main__':
help='Overlap duration in seconds between adjacent subsegments')
args = parser.parse_args()
print(args)
prepare_ami(args.data_folder, args.manual_annot_folder, args.save_folder,
args.ref_rttm_dir, args.meta_data_dir)

@@ -0,0 +1,47 @@
#!/bin/bash
stage=1
data_folder=/home/data/ami/amicorpus #e.g., /path/to/amicorpus/
manual_annot_folder=/home/data/ami/ami_public_manual_1.6.2 #e.g., /path/to/ami_public_manual_1.6.2/
save_folder=results
ref_rttm_dir=results/ref_rttms
meta_data_dir=results/metadata
set=L
. ${MAIN_ROOT}/utils/parse_options.sh || exit 1;
set -u
set -o pipefail
mkdir -p ${save_folder}
if [ ${stage} -le 0 ]; then
# Download the AMI corpus. You need around 10 GB of free space for the full dataset.
# The signals are too large to download as a single package,
# so use the download chooser to select the sessions you need.
echo "Please follow https://groups.inf.ed.ac.uk/ami/download/ to download the data."
echo "Annotations: AMI manual annotations v1.6.2"
echo "Signals: Scenario Meetings / Non-Scenario Meetings (a subset of sessions is fine; you do not need all of them)"
echo "Media streams: Headset mix (recommended to start with)"
exit 0;
fi
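# NOTE (assumption, not verified against the prepare script): after downloading,
# the Mix-Headset signals are typically laid out as
#   ${data_folder}/<meeting_id>/audio/<meeting_id>.Mix-Headset.wav
# and the manual annotations unpack into ${manual_annot_folder}/words, segments, etc.
# Adjust data_folder and manual_annot_folder above if your layout differs.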
if [ ${stage} -le 1 ]; then
echo "AMI Data preparation"
python local/ami_prepare.py --data_folder ${data_folder} \
--manual_annot_folder ${manual_annot_folder} \
--save_folder ${save_folder} --ref_rttm_dir ${ref_rttm_dir} \
--meta_data_dir ${meta_data_dir}
if [ $? -ne 0 ]; then
echo "Prepare AMI failed. Please check log message."
exit 1
fi
fi
echo "AMI data preparation done."
exit 0
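With the default `stage=1`, `local/data.sh` assumes the corpus and annotations are already on disk and goes straight to preparation; `--stage 0` only prints the download checklist above and exits. A quick way to check the outcome, assuming the default paths set at the top of the script:

bash ./local/data.sh --stage 0          # print the download checklist only
bash ./local/data.sh                    # default stage=1: run the preparation
ls results/ref_rttms results/metadata   # expected outputs with the default paths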

@@ -0,0 +1,15 @@
export MAIN_ROOT=`realpath ${PWD}/../../../`
export PATH=${MAIN_ROOT}:${MAIN_ROOT}/utils:${PATH}
export LC_ALL=C
export PYTHONDONTWRITEBYTECODE=1
# Use UTF-8 in Python to avoid UnicodeDecodeError when LC_ALL=C
export PYTHONIOENCODING=UTF-8
export PYTHONPATH=${MAIN_ROOT}:${PYTHONPATH}
export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:/usr/local/lib/
# model exp
#MODEL=ECAPA_TDNN
#export BIN_DIR=${MAIN_ROOT}/paddlespeech/vector/exps/${MODEL}/bin
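path.sh is sourced by run.sh before anything else; a minimal sanity check of the environment it sets up could look like this (a sketch, not part of the recipe):

source ./path.sh
echo "MAIN_ROOT=${MAIN_ROOT}"             # repository root resolved via realpath
ls ${MAIN_ROOT}/utils/parse_options.sh    # the option parser the scripts source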

@@ -0,0 +1,14 @@
#!/bin/bash
. path.sh || exit 1;
set -e
stage=1
. ${MAIN_ROOT}/utils/parse_options.sh || exit 1;
if [ ${stage} -le 1 ]; then
# prepare data
bash ./local/data.sh || exit 1
fi
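# Later stages (embedding extraction, clustering, scoring) would follow the same
# stage-gated pattern; the script name below is purely illustrative and does not
# exist in this commit.
# if [ ${stage} -le 2 ]; then
#     bash ./local/diarize.sh || exit 1
# fi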