Commit f62e51ea authored by Dan Povey


trunk: Updating svn:ignore properties.  These are now extracted from the .gitignore file using the script misc/maintenance/svnignore.sh.
    Adding 'unlink' commands to various tests that write to files (this process is not complete).
    Some changes to the online-decoding setup as it relates to pitch (this work is not yet finished, but checking it in with other changes,
      as it shouldn't break anything that currently runs).



git-svn-id: https://svn.code.sf.net/p/kaldi/code/trunk@4365 5e6a8d80-dfce-4ca6-a32a-6e07a63d50c8
parent d0ef62c7
@@ -12,24 +12,37 @@
*.la
*.a
# some weird thing that MacOS creates.
*.dSYM
# /tools/
tools/ATLAS/ /tools/ATLAS/
tools/atlas3.8.3.tar.gz /tools/atlas3.8.3.tar.gz
tools/irstlm/ /tools/irstlm/
tools/openfst /tools/openfst
tools/openfst-1.3.2.tar.gz /tools/openfst-1.3.2.tar.gz
tools/openfst-1.3.2/ /tools/openfst-1.3.2/
tools/openfst-1.3.4.tar.gz /tools/openfst-1.3.4.tar.gz
tools/openfst-1.3.4/ /tools/openfst-1.3.4/
tools/pa_stable_v19_20111121.tgz /tools/pa_stable_v19_20111121.tgz
tools/portaudio/ /tools/portaudio/
tools/sctk-2.4.0-20091110-0958.tar.bz2 /tools/sctk-2.4.0-20091110-0958.tar.bz2
tools/sctk-2.4.0/ /tools/sctk-2.4.0/
tools/sph2pipe_v2.5.tar.gz /tools/sph2pipe_v2.5.tar.gz
tools/sph2pipe_v2.5/ /tools/sph2pipe_v2.5/
/tools/kaldi_lm.tar.gz
/tools/sctk-2.4.8-20130429-2145.tar.bz2
/tools/OpenBLAS
/tools/sctk
/tools/sctk-2.4.8
/tools/srilm
/tools/CLAPACK_include
/tools/kaldi_lm
# /src/
/src/kaldi.mk* /src/kaldi.mk
/src/kaldi.mk.tmp
/src/kaldi.mk.bak
# /src/base/
/src/base/.depend.mk
@@ -627,3 +640,200 @@ tools/sph2pipe_v2.5/
/src/util/kaldi-io-test
/src/util/text-utils-test
/src/util/parse-options-test
# we should probably have more things like this in
# the .gitignore:
/egs/callhome_egyptian/s5/mfcc
/egs/callhome_egyptian/s5/data
/egs/callhome_egyptian/s5/exp
/egs/ami/s5/mfcc
/egs/ami/s5/plp
/egs/ami/s5/exp
/egs/ami/s5/data
/egs/aurora4/s5/mfcc
/egs/aurora4/s5/plp
/egs/aurora4/s5/exp
/egs/aurora4/s5/data
/egs/babel/s5/mfcc
/egs/babel/s5/plp
/egs/babel/s5/exp
/egs/babel/s5/data
/egs/babel/s5b/mfcc
/egs/babel/s5b/plp
/egs/babel/s5b/exp
/egs/babel/s5b/data
/egs/callhome_egyptian/s5/mfcc
/egs/callhome_egyptian/s5/plp
/egs/callhome_egyptian/s5/exp
/egs/callhome_egyptian/s5/data
/egs/chime_wsj0/s5/mfcc
/egs/chime_wsj0/s5/plp
/egs/chime_wsj0/s5/exp
/egs/chime_wsj0/s5/data
/egs/fisher_english/s5/mfcc
/egs/fisher_english/s5/plp
/egs/fisher_english/s5/exp
/egs/fisher_english/s5/data
/egs/fisher_swbd/s5/mfcc
/egs/fisher_swbd/s5/plp
/egs/fisher_swbd/s5/exp
/egs/fisher_swbd/s5/data
/egs/gale_arabic/s5/mfcc
/egs/gale_arabic/s5/plp
/egs/gale_arabic/s5/exp
/egs/gale_arabic/s5/data
/egs/gp/s1/mfcc
/egs/gp/s1/plp
/egs/gp/s1/exp
/egs/gp/s1/data
/egs/gp/s5/mfcc
/egs/gp/s5/plp
/egs/gp/s5/exp
/egs/gp/s5/data
/egs/hkust/s5/mfcc
/egs/hkust/s5/plp
/egs/hkust/s5/exp
/egs/hkust/s5/data
/egs/rm/s4/mfcc
/egs/rm/s4/plp
/egs/rm/s4/exp
/egs/rm/s4/data
/egs/rm/s5/mfcc
/egs/rm/s5/plp
/egs/rm/s5/exp
/egs/rm/s5/data
/egs/rm/s6/mfcc
/egs/rm/s6/plp
/egs/rm/s6/exp
/egs/rm/s6/data
/egs/sprakbanken/s5/mfcc
/egs/sprakbanken/s5/plp
/egs/sprakbanken/s5/exp
/egs/sprakbanken/s5/data
/egs/swbd/s5/mfcc
/egs/swbd/s5/plp
/egs/swbd/s5/exp
/egs/swbd/s5/data
/egs/swbd/s5b/mfcc
/egs/swbd/s5b/plp
/egs/swbd/s5b/exp
/egs/swbd/s5b/data
/egs/tedlium/s5/mfcc
/egs/tedlium/s5/plp
/egs/tedlium/s5/exp
/egs/tedlium/s5/data
/egs/tidigits/s5/mfcc
/egs/tidigits/s5/plp
/egs/tidigits/s5/exp
/egs/tidigits/s5/data
/egs/timit/s3/mfcc
/egs/timit/s3/plp
/egs/timit/s3/exp
/egs/timit/s3/data
/egs/timit/s4/mfcc
/egs/timit/s4/plp
/egs/timit/s4/exp
/egs/timit/s4/data
/egs/timit/s5/mfcc
/egs/timit/s5/plp
/egs/timit/s5/exp
/egs/timit/s5/data
/egs/voxforge/s5/mfcc
/egs/voxforge/s5/plp
/egs/voxforge/s5/exp
/egs/voxforge/s5/data
/egs/vystadial_cz/s5/mfcc
/egs/vystadial_cz/s5/plp
/egs/vystadial_cz/s5/exp
/egs/vystadial_cz/s5/data
/egs/vystadial_en/s5/mfcc
/egs/vystadial_en/s5/plp
/egs/vystadial_en/s5/exp
/egs/vystadial_en/s5/data
/egs/wsj/s5/mfcc
/egs/wsj/s5/plp
/egs/wsj/s5/exp
/egs/wsj/s5/data
/egs/yesno/s5/mfcc
/egs/yesno/s5/plp
/egs/yesno/s5/exp
/egs/yesno/s5/data
/src/bin/est-pca
/src/bin/matrix-sum-rows
/src/bin/sum-lda-accs
/src/bin/sum-mllt-accs
/src/bin/transform-vec
/src/cudamatrix/cu-array-test
/src/cudamatrix/cu-block-matrix-test
/src/cudamatrix/cu-math-test
/src/cudamatrix/cu-matrix-speed-test
/src/cudamatrix/cu-matrix-test
/src/cudamatrix/cu-packed-matrix-test
/src/cudamatrix/cu-sp-matrix-speed-test
/src/cudamatrix/cu-sp-matrix-test
/src/cudamatrix/cu-test
/src/cudamatrix/cu-tp-matrix-test
/src/cudamatrix/cu-vector-speed-test
/src/cudamatrix/cu-vector-test
/src/feat/feature-functions-test
/src/feat/feature-sdc-test
/src/feat/pitch-functions-test
/src/featbin/append-vector-to-feats
/src/featbin/compute-and-process-kaldi-pitch-feats
/src/featbin/modify-cmvn-stats
/src/featbin/wav-copy
/src/fstext/push-special-test
/src/gmmbin/gmm-acc-mllt-global
/src/gmmbin/gmm-est-fmllr-global
/src/gmmbin/gmm-global-est-lvtln-trans
/src/gmmbin/gmm-global-get-post
/src/gmmbin/gmm-global-gselect-to-post
/src/gmmbin/gmm-latgen-faster-regtree-fmllr
/src/gmmbin/gmm-transform-means-global
/src/ivector/ivector-extractor-test
/src/ivector/logistic-regression-test
/src/ivector/plda-test
/src/ivectorbin/create-split-from-vad
/src/ivectorbin/ivector-adapt-plda
/src/ivectorbin/ivector-extract-online
/src/ivectorbin/logistic-regression-copy
/src/ivectorbin/logistic-regression-eval
/src/ivectorbin/logistic-regression-train
/src/lat/determinize-lattice-pruned-test
/src/lat/minimize-lattice-test
/src/lat/push-lattice-test
/src/latbin/lattice-confidence
/src/latbin/lattice-expand-ngram
/src/nnet/nnet-component-test
/src/nnet/nnet-randomizer-test
/src/nnet2/nnet-example-functions-test
/src/nnet2/nnet-precondition-online-test
/src/nnet2/nnet-precondition-test
/src/nnet2bin/cuda-compiled
/src/nnet2bin/nnet-adjust-priors
/src/nnet2bin/nnet-am-switch-preconditioning
/src/nnet2bin/nnet-replace-last-layers
/src/nnet2bin/nnet-to-raw-nnet
/src/nnet2bin/nnet-train-parallel-perturbed
/src/nnet2bin/nnet-train-simple-perturbed
/src/nnet2bin/nnet1-to-raw-nnet
/src/nnet2bin/raw-nnet-copy
/src/online2bin/apply-cmvn-online
/src/online2bin/compress-uncompress-speex
/src/online2bin/extend-wav-with-silence
/src/online2bin/ivector-extract-online2
/src/online2bin/ivector-randomize
/src/online2bin/online2-wav-dump-features
/src/online2bin/online2-wav-gmm-latgen-faster
/src/online2bin/online2-wav-nnet2-latgen-faster
/src/sgmm/estimate-am-sgmm-multi-test
/src/sgmm2/am-sgmm2-test
/src/sgmm2/estimate-am-sgmm2-test
/src/sgmm2/fmllr-sgmm2-test
/src/thread/kaldi-task-sequence-test
/src/thread/kaldi-thread-test
/src/transform/fmllr-raw-test
/src/util/simple-options-test
## This config is given by conf/make_pitch_online.sh to the program compute-and-process-kaldi-pitch-feats,
## and is copied by steps/online/nnet2/prepare_online_decoding.sh and similar scripts, to be given
## to programs like online2-wav-nnet2-latgen-faster.
## The program compute-and-process-kaldi-pitch-feats will use it to compute pitch features that
## are the same as those which will be generated in online decoding; this enables us to train
## in a way that's compatible with online decoding.
##
## most of these options relate to the post-processing rather than the pitch
## extraction itself.
--add-raw-log-pitch=true ## this is intended for input to neural nets, so our
## approach is "throw everything in and see what
## sticks".
--normalization-left-context=100
--normalization-right-context=10 # We're removing almost all the right-context
# for the normalization. The reason why we
# include a small nonzero right-context (of
# just 0.1 second) is that by adding a little
# latency to the computation, it enables us to
# get a more accurate estimate of the pitch on
# the frame we're currently computing the
# normalized pitch of. We know for the current
# frame that we will have at least 10 frames to
# the right, and those extra 10 frames will
# increase the quality of the Viterbi
# backtrace.
#
# Note: our changes to the (left,right) context
# from the defaults of (75,75) to (100,10) will
# almost certainly worsen results, but will
# reduce latency.
--frames-per-chunk=10 ## relates to offline simulation of online decoding; 1
## would be equivalent to getting in samples one by
## one.
--simulate-first-pass-online=true ## this makes the online-pitch-extraction code
## output the 'first-pass' features, which
## are less accurate than the final ones, and
## which are the only features the neural-net
## decoding would ever see (since we can't
## afford to do lattice rescoring in the
## neural-net code).
--delay=5 ## We delay all the pitch information by 5 frames. This is almost
## certainly not helpful, but it helps to reduce the overall latency
## added by the pitch computation, from 10 (given by
## --normalization-right-context) to 10 - 5 = 5.
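##
## For reference, a minimal sketch (not part of this commit) of how a config
## like this might be passed to compute-and-process-kaldi-pitch-feats on the
## command line; the wav.scp path and the output wspecifier are illustrative:
##
##   compute-and-process-kaldi-pitch-feats --config=conf/online_pitch.conf \
##     scp:data/train/wav.scp ark:pitch.ark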
@@ -10,8 +10,10 @@ add_pitch=false
mfcc_config=conf/mfcc.conf # you can override any of these you need to override.
plp_config=conf/plp.conf
fbank_config=conf/fbank.conf
pitch_config=conf/pitch.conf # online_pitch_config is the config file for both pitch extraction and
pitch_process_config=conf/pitch_process.conf # post-processing; we combine them into one because during training this
# is given to the program compute-and-process-kaldi-pitch-feats.
online_pitch_config=conf/online_pitch.conf
# Below are some options that affect the iVectors, and should probably
# match those used in extract_ivectors_online.sh.
@@ -132,12 +134,15 @@ fi
if $add_pitch; then
echo "$0: enabling pitch features (note: this has not been tested)"
echo "--add-pitch=true" >>$conf
echo "$0: creating $dir/conf/pitch.conf" echo "$0: creating $dir/conf/online_pitch.conf"
echo "--pitch-config=$dir/conf/pitch.conf" >>$conf if [ ! -f $online_pitch_config ]; then
cp $pitch_config $dir/conf/pitch.conf || exit 1; echo "$0: expected file '$online_pitch_config' to exist.";
echo "--pitch-process-config=$dir/conf/pitch_process.conf" >>$conf exit 1;
cp $pitch_process_config $dir/conf/pitch_process.conf || exit 1; fi
cp $online_pitch_config $dir/conf/online_pitch.conf || exit 1;
echo "--online-pitch-config=$dir/conf/online_pitch.conf" >>$conf
fi
silphonelist=`cat $lang/phones/silence.csl` || exit 1;
echo "--endpoint.silence-phones=$silphonelist" >>$conf
echo "$0: created config file $conf"
...
@@ -3,7 +3,7 @@
# Copyright 2014 Brno University of Technology (Author: Karel Vesely)
# Copyright 2012 Johns Hopkins University (Author: Daniel Povey)
# Apache 2.0
# This script appends the features in two data directories. # This script appends the features in two or more data directories.
# To be run from .. (one directory up from here)
# see ../run.sh for example
@@ -22,6 +22,7 @@ if [ -f path.sh ]; then . ./path.sh; fi
if [ $# -lt 5 ]; then
echo "usage: $0 [options] <src-data-dir1> <src-data-dir2> [<src-data-dirN>] <dest-data-dir> <log-dir> <path-to-storage-dir>";
echo "e.g.: $0 data/train_mfcc data/train_bottleneck data/train_combined exp/append_mfcc_plp mfcc"
echo "options: " echo "options: "
echo " --cmd (utils/run.pl|utils/queue.pl <queue opts>) # how to run jobs." echo " --cmd (utils/run.pl|utils/queue.pl <queue opts>) # how to run jobs."
exit 1; exit 1;
@@ -71,7 +72,7 @@ done > $data/feats.scp || exit 1;
nf=`cat $data/feats.scp | wc -l`
nu=`cat $data/utt2spk | wc -l`
if [ $nf -ne $nu ]; then
echo "It seems not all of the feature files were successfully ($nf != $nu);" echo "It seems not all of the feature files were successfully processed ($nf != $nu);"
echo "consider using utils/fix_data_dir.sh $data" echo "consider using utils/fix_data_dir.sh $data"
fi fi
......
#!/bin/bash
# Copyright 2014 Johns Hopkins University (Author: Daniel Povey)
# Apache 2.0
# This script selects some specified dimensions of the features in the
# input data directory.
# To be run from .. (one directory up from here)
# see ../run.sh for example
# Begin configuration section.
cmd=run.pl
nj=4
compress=true
# End configuration section.
echo "$0 $@" # Print the command line for logging
if [ -f path.sh ]; then . ./path.sh; fi
. parse_options.sh || exit 1;
if [ $# -ne 5 ]; then
echo "usage: $0 [options] <selector> <src-data-dir> <dest-data-dir> <log-dir> <path-to-storage-dir>";
echo "e.g.: $0 0-12 data/train_mfcc_pitch data/train_mfcconly exp/select_pitch_train mfcc"
echo "options: "
echo " --cmd (utils/run.pl|utils/queue.pl <queue opts>) # how to run jobs."
exit 1;
fi
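# The <selector> argument uses the same syntax that the select-feats binary
# accepts: a comma-separated list of dimension indices and ranges, e.g. "0-12"
# (as in the example above) keeps the first 13 dimensions of each feature row.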
selector="$1"
data_in=$2
data=$3
logdir=$4
ark_dir=$5
# make $ark_dir an absolute pathname.
ark_dir=`perl -e '($dir,$pwd)= @ARGV; if($dir!~m:^/:) { $dir = "$pwd/$dir"; } print $dir; ' $ark_dir ${PWD}`
utils/split_data.sh $data_in $nj || exit 1;
mkdir -p $ark_dir $logdir
mkdir -p $data
cp $data_in/* $data/ 2>/dev/null # so we get the other files, such as utt2spk.
rm $data/cmvn.scp 2>/dev/null
rm $data/feats.scp 2>/dev/null
# use "name" as part of name of the archive.
name=`basename $data`
$cmd JOB=1:$nj $logdir/select.JOB.log \
select-feats "$selector" scp:$data_in/split$nj/JOB/feats.scp ark:- \| \
copy-feats --compress=$compress ark:- \
ark,scp:$ark_dir/pasted_$name.JOB.ark,$ark_dir/pasted_$name.JOB.scp || exit 1;
# concatenate the .scp files together.
for ((n=1; n<=nj; n++)); do
cat $ark_dir/pasted_$name.$n.scp || exit 1;
done > $data/feats.scp || exit 1;
nf=`cat $data/feats.scp | wc -l`
nu=`cat $data/utt2spk | wc -l`
if [ $nf -ne $nu ]; then
echo "It seems not all of the feature files were successfully processed ($nf != $nu);"
exit 1;
fi
echo "Succeeded selecting features for $name into $data"
# This directory contains various scripts that can be used to maintain certain
# aspects of the Kaldi code: various things that are supposed to be run
# automatically.
# There are some things that are not included here that relate to maintenance and
# checking:
# ../src/doc/README,
# which Dan generally runs by doing:
# cd ../src
# . doc/README
# (but this won't work for most people, as it relies on certain things being set up right
# on Sourceforge).
# also ../src/doc/cpplint.py is useful as a style checker.
#!/bin/bash
# This is really just some notes on how I updated .gitignore (at top level)
# using the previously listed things in the svn:ignore properties.
# # we'll first get a list of all directories in svn.
# svn list -R > listing
# grep '/$' listing > dirs
# for dir in $(cat dirs); do
# for prop in $(svn propget svn:ignore $dir); do
# echo $dir$prop
# done
# done > bar
# # Then I edited the file after I noticed some things that shouldn't have been in svn:ignore.
# for x in $(cat bar); do if ! $(grep "^/$x$" .gitignore >/dev/null); then echo $x; fi; done
# # this is all I got.
# egs/callhome_egyptian/s5/mfcc
# egs/callhome_egyptian/s5/data
# egs/callhome_egyptian/s5/exp/egs
# egs/callhome_egyptian/s5/exp/src
# the rest of this file updates the .gitignore with the names of new binaries
svn list -R > listing
for f in $(grep '.cc$' listing); do
binary=$(echo $f | sed s:.cc$::)
if [ -f $binary ] && ! grep "^/$binary$" .gitignore >/dev/null; then
echo /$binary
fi
done > new_binaries
cat new_binaries >> .gitignore
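# For example, after a change like this one, new_binaries would contain lines
# such as /src/bin/est-pca and /src/featbin/wav-copy, matching the new entries
# added to .gitignore above.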
#!/bin/bash
# This script should be run from two levels up, as:
# misc/maintenance/svnignore.sh
# It takes the things listed in the .gitignore file (which is at
# the top level) and converts them into svn:ignore properties
# in the subdirectories.
svn list -R > listing
grep '/$' listing > dirs
grep '^\*' .gitignore > patterns
for dir in $(cat dirs); do
cp patterns cur_ignore
grep -v '#' .gitignore | grep ^/$dir | sed s:^/$dir:: | sed s:/$:: >> cur_ignore
svn propset -F cur_ignore svn:ignore $dir
done
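# As an illustration (the directory is just an example): for dir=src/base/,
# cur_ignore would contain the wildcard patterns from .gitignore (*.a, *.la,
# *.dSYM, ...) followed by the /src/base/ entries with that prefix stripped
# (e.g. ".depend.mk"), and the propset line installs that list as the
# svn:ignore property of src/base.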
@@ -118,6 +118,7 @@ void UnitTestIo(bool binary) {
KALDI_ASSERT(Peek(infile, binary_in) == -1);
KALDI_ASSERT(PeekToken(infile, binary_in) == -1);
}
unlink(filename);
}
}
...
@@ -35,7 +35,7 @@ int main(int argc, char *argv[]) {
const char *usage =
"Select certain dimensions of the feature file; think of it as the unix\n"
"command cut -f ...\n"
"Usage: select-feats selection in-rspecifier out-wspecifier\n" "Usage: select-feats <selection> <in-rspecifier> <out-wspecifier>\n"
" e.g. select-feats 0,24-22,3-12 scp:feats.scp ark,scp:feat-red.ark,feat-red.scp\n" " e.g. select-feats 0,24-22,3-12 scp:feats.scp ark,scp:feat-red.ark,feat-red.scp\n"
"See also copy-feats, extract-rows, subset-feats, subsample-feats\n"; "See also copy-feats, extract-rows, subset-feats, subsample-feats\n";
@@ -60,8 +60,8 @@ int main(int argc, char *argv[]) {
return 0;
}
int32 dimIn = kaldi_reader.Value().NumCols(); int32 dim_in = kaldi_reader.Value().NumCols();
int32 dimOut = 0; int32 dim_out = 0;
// figure out the selected dimensions
istringstream iss(sspecifier);
@@ -75,10 +75,10 @@ int main(int argc, char *argv[]) {
istringstream(token.substr(0, token.length() - p - 1)) >> s;
istringstream(token.substr(p+1)) >> e;
if (s < 0 || s > (dimIn-1)) { if (s < 0 || s > (dim_in-1)) {
KALDI_ERR << "Invalid range start: " << s; KALDI_ERR << "Invalid range start: " << s;
return 1; return 1;
} else if (e < 0 || e > (dimIn-1)) { } else if (e < 0 || e > (dim_in-1)) {
KALDI_ERR << "Invalid range end: " << e; KALDI_ERR << "Invalid range end: " << e;
return 1; return 1;
} }
@@ -87,26 +87,26 @@ int main(int argc, char *argv[]) {
if (s > e) {
for (int32 i = s; i >= e; --i) {
ranges.push_back(pair<int32, int32>(i, i));
offsets.push_back(dimOut); offsets.push_back(dim_out);
dimOut += 1; dim_out += 1;
}
} else {
ranges.push_back(pair<int32, int32>(s, e));
offsets.push_back(dimOut); offsets.push_back(dim_out);
dimOut += (e - s + 1); dim_out += (e - s + 1);
}
} else {
int i;
istringstream(token) >> i;
if (i < 0 || i > (dimIn-1)) { if (i < 0 || i > (dim_in - 1)) {
KALDI_ERR << "Invalid selection index: " << i; KALDI_ERR << "Invalid selection index: " << i;
return 1; return 1;
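// Worked example (illustrative, assuming a 25-dimensional input and the
// selector "0,24-22,3-12" from the usage string above): the single index 0
// contributes one output dimension, the descending range 24-22 is broken into
// the single-element ranges (24,24), (23,23), (22,22), and the ascending
// range 3-12 becomes the single range (3,12), so dim_out ends up as
// 1 + 3 + 10 = 14.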