repo_id: string (18–103 chars)
file_path: string (30–136 chars)
content: string (2–3.36M chars)
__index_level_0__: int64 (always 0)
coqui_public_repos/inference-engine/third_party/openfst-1.6.9-win/src/include/fst
coqui_public_repos/inference-engine/third_party/openfst-1.6.9-win/src/include/fst/script/print.h
// See www.openfst.org for extensive documentation on this weighted // finite-state transducer library. #ifndef FST_SCRIPT_PRINT_H_ #define FST_SCRIPT_PRINT_H_ #include <ostream> #include <fst/flags.h> #include <fst/script/fst-class.h> #include <fst/script/print-impl.h> DECLARE_string(fst_field_separator); namespace fst { namespace script { // Note: it is safe to pass these strings as references because // this struct is only used to pass them deeper in the call graph. // Be sure you understand why this is so before using this struct // for anything else! struct FstPrinterArgs { const FstClass &fst; const SymbolTable *isyms; const SymbolTable *osyms; const SymbolTable *ssyms; const bool accept; const bool show_weight_one; std::ostream *ostrm; const string &dest; const string &sep; // NOLINT const string &missing_symbol; FstPrinterArgs(const FstClass &fst, const SymbolTable *isyms, const SymbolTable *osyms, const SymbolTable *ssyms, bool accept, bool show_weight_one, std::ostream *ostrm, const string &dest, const string &sep, const string &missing_sym = "") : fst(fst), isyms(isyms), osyms(osyms), ssyms(ssyms), accept(accept), show_weight_one(show_weight_one), ostrm(ostrm), dest(dest), sep(sep), missing_symbol(missing_sym) {} }; template <class Arc> void PrintFst(FstPrinterArgs *args) { const Fst<Arc> &fst = *(args->fst.GetFst<Arc>()); FstPrinter<Arc> fstprinter(fst, args->isyms, args->osyms, args->ssyms, args->accept, args->show_weight_one, args->sep, args->missing_symbol); fstprinter.Print(args->ostrm, args->dest); } void PrintFst(const FstClass &fst, std::ostream &ostrm, const string &dest, const SymbolTable *isyms, const SymbolTable *osyms, const SymbolTable *ssyms, bool accept, bool show_weight_one, const string &missing_sym = ""); // The same, but with more sensible defaults. template <class Arc> void PrintFst(const Fst<Arc> &fst, std::ostream &ostrm, const string &dest = "", const SymbolTable *isyms = nullptr, const SymbolTable *osyms = nullptr, const SymbolTable *ssyms = nullptr) { const string sep = FLAGS_fst_field_separator.substr(0, 1); FstPrinter<Arc> fstprinter(fst, isyms, osyms, ssyms, true, true, sep); fstprinter.Print(&ostrm, dest); } } // namespace script } // namespace fst #endif // FST_SCRIPT_PRINT_H_
0
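The `print.h` record above exposes two entry points: a scripting-level `PrintFst` over `FstClass` and a templated overload with sensible defaults. A minimal usage sketch of the templated overload (an illustration, not code from the repo; the input path `a.fst` is hypothetical):

```cpp
#include <iostream>
#include <memory>
#include <fst/fstlib.h>
#include <fst/script/print.h>

int main() {
  // Read a binary FST over the standard tropical arc (hypothetical file).
  std::unique_ptr<fst::StdVectorFst> f(fst::StdVectorFst::Read("a.fst"));
  if (!f) return 1;
  // Prints the FST in AT&T text format to stdout; with no symbol tables
  // supplied, labels are printed as integers.
  fst::script::PrintFst<fst::StdArc>(*f, std::cout);
  return 0;
}
```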
coqui_public_repos/STT-models/polish/jaco-assistant
coqui_public_repos/STT-models/polish/jaco-assistant/v0.0.1/MODEL_CARD.md
# Model card for Polish STT Jump to section: - [Model details](#model-details) - [Intended use](#intended-use) - [Performance Factors](#performance-factors) - [Metrics](#metrics) - [Training data](#training-data) - [Evaluation data](#evaluation-data) - [Ethical considerations](#ethical-considerations) - [Caveats and recommendations](#caveats-and-recommendations) ## Model details - Person or organization developing model: Originally trained by [DANBER](https://gitlab.com/DANBER) and released under the [Jaco-Assistant](https://gitlab.com/Jaco-Assistant) project. - Model date: Accessed from [Gitlab](https://gitlab.com/Jaco-Assistant/Scribosermo) on March 31, 2021 - Model type: `Speech-to-Text` - Model version: `v0.0.1` - Compatible with 🐸 STT version: `v0.9.3` - Code: [scribosermo](https://gitlab.com/Jaco-Assistant/Scribosermo/-/tree/master/#old-experiments) - License: GNU Lesser General Public License - Citation details: `@misc{polish-jaco, author = {DANBER}, title = {Polish Jaco-Assistant}, publisher = {Jaco-Assistant}, journal = {Gitlab}, howpublished = {\url{https://gitlab.com/Jaco-Assistant/Scribosermo}}, commit = {dfc541d2} }` - Where to send questions or comments about the model: You can leave an issue on [`STT-model` issues](https://github.com/coqui-ai/STT-models/issues), open a new discussion on [`STT-model` discussions](https://github.com/coqui-ai/STT-models/discussions), or chat with us on [Gitter](https://gitter.im/coqui-ai/). ## Intended use Speech-to-Text for the [Polish Language](https://en.wikipedia.org/wiki/Polish_language) on 16kHz, mono-channel audio. ## Performance Factors Factors relevant to Speech-to-Text performance include but are not limited to speaker demographics, recording quality, and background noise. Read more about STT performance factors [here](https://stt.readthedocs.io/en/latest/DEPLOYMENT.html#how-will-a-model-perform-on-my-data). ## Metrics STT models are usually evaluated in terms of their transcription accuracy, deployment Real-Time Factor, and model size on disk. #### Transcription Accuracy The following Word Error Rates and Character Error Rates are reported on [Jaco-Assistant](https://gitlab.com/Jaco-Assistant/Scribosermo/-/tree/master#old-experiments). |Test Corpus|WER|CER| |-----------|---|---| |Common Voice|3.4\%|2.0\%| #### Real-Time Factor Real-Time Factor (RTF) is defined as `processing-time / length-of-audio`. The exact real-time factor of an STT model will depend on the hardware setup, so you may experience a different RTF. Recorded average RTF on laptop CPU: `` #### Model Size `model.pbmm`: 181M `model.tflite`: 46M ### Approaches to uncertainty and variability Confidence scores and multiple paths from the decoding beam can be used to measure model uncertainty and provide multiple, variable transcripts for any processed audio. ## Training data This model was trained on the following corpora: Common Voice + LinguaLibre + Mailabs. Read more about training [here](https://gitlab.com/Jaco-Assistant/Scribosermo/-/tree/master#old-experiments). ## Evaluation data The Model was evaluated on the Common Voice corpus. Read more about evaluation [here](https://gitlab.com/Jaco-Assistant/Scribosermo/-/tree/master#old-experiments). ## Ethical considerations Deploying a Speech-to-Text model into any production setting has ethical implications. You should consider these implications before use. ### Demographic Bias You should assume every machine learning model has demographic bias unless proven otherwise. 
For STT models, it is often the case that transcription accuracy is better for men than it is for women. If you are using this model in production, you should acknowledge this as a potential issue. ### Surveillance Speech-to-Text may be misused to invade the privacy of others by recording and mining information from private conversations. This kind of individual privacy is protected by law in many countries. You should not assume consent to record and analyze private speech. ## Caveats and recommendations Machine learning models (like this STT model) perform best on data that is similar to the data on which they were trained. Read about what to expect from an STT model with regard to your data [here](https://stt.readthedocs.io/en/latest/DEPLOYMENT.html#how-will-a-model-perform-on-my-data). In most applications, it is recommended that you [train your own language model](https://stt.readthedocs.io/en/latest/LANGUAGE_MODEL.html) to improve transcription accuracy on your speech data.
0
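The Real-Time Factor definition quoted in the model card (`processing-time / length-of-audio`) is plain arithmetic; a minimal sketch with made-up numbers, not measurements of this model:

```cpp
#include <iostream>

int main() {
  const double processing_seconds = 2.5;  // hypothetical decode time
  const double audio_seconds = 10.0;      // hypothetical clip duration
  // RTF below 1.0 means faster than real time; here 2.5 / 10.0 = 0.25.
  std::cout << "RTF = " << processing_seconds / audio_seconds << "\n";
  return 0;
}
```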
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.9-win/src/extensions
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.9-win/src/extensions/mpdt/Makefile.am
AM_CPPFLAGS = -I$(srcdir)/../../include $(ICU_CPPFLAGS) if HAVE_BIN bin_PROGRAMS = mpdtcompose mpdtexpand mpdtinfo mpdtreverse LDADD = libfstmpdtscript.la \ ../pdt/libfstpdtscript.la \ ../../script/libfstscript.la \ ../../lib/libfst.la -lm $(DL_LIBS) mpdtcompose_SOURCES = mpdtcompose.cc mpdtexpand_SOURCES = mpdtexpand.cc mpdtinfo_SOURCES = mpdtinfo.cc mpdtreverse_SOURCES = mpdtreverse.cc endif if HAVE_SCRIPT lib_LTLIBRARIES = libfstmpdtscript.la libfstmpdtscript_la_SOURCES = mpdtscript.cc libfstmpdtscript_la_LDFLAGS = -version-info 13:0:0 libfstmpdtscript_la_LIBADD = ../../script/libfstscript.la \ ../../lib/libfst.la -lm $(DL_LIBS) endif
0
coqui_public_repos/STT
coqui_public_repos/STT/taskcluster/test-python_38_tflite_16k-linux-amd64-opt.yml
build: template_file: test-linux-opt-base.tyml dependencies: - "linux-amd64-tflite-opt" - "test-training_16k-linux-amd64-py36m-opt" test_model_task: "test-training_16k-linux-amd64-py36m-opt" args: tests_cmdline: "${system.homedir.linux}/DeepSpeech/ds/taskcluster/tc-python_tflite-tests.sh 3.8.1: 16k" workerType: "${docker.dsTests}" metadata: name: "DeepSpeech Linux AMD64 TFLite Python v3.8 tests (16kHz)" description: "Testing DeepSpeech for Linux/AMD64 on Python v3.8 TFLite, optimized version (16kHz)"
0
coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src/extensions
coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src/extensions/const/const16-fst.cc
// See www.openfst.org for extensive documentation on this weighted // finite-state transducer library. #include <fst/fst.h> #include <fst/const-fst.h> namespace fst { static FstRegisterer<ConstFst<StdArc, uint16>> ConstFst_StdArc_uint16_registerer; static FstRegisterer<ConstFst<LogArc, uint16>> ConstFst_LogArc_uint16_registerer; static FstRegisterer<ConstFst<Log64Arc, uint16>> ConstFst_Log64Arc_uint16_registerer; } // namespace fst
0
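The record above registers 16-bit `ConstFst` variants so that generic readers such as `Fst::Read` can instantiate them by type name. A minimal sketch of the same registration pattern for a `uint8`-indexed variant (OpenFST ships an analogous `const8-fst.cc`; this block is illustrative, not taken from the repo):

```cpp
#include <fst/fst.h>
#include <fst/const-fst.h>

namespace fst {

// Registering the (arc type, index width) combination adds a factory
// entry keyed by the FST's type name, enabling deserialization by name.
static FstRegisterer<ConstFst<StdArc, uint8>>
    ConstFst_StdArc_uint8_registerer;

}  // namespace fst
```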
coqui_public_repos/STT/native_client/ctcdecode/third_party
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.9-win/Makefile.in
# Makefile.in generated by automake 1.15.1 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2017 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = { \ if test -z '$(MAKELEVEL)'; then \ false; \ elif test -n '$(MAKE_HOST)'; then \ true; \ elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \ true; \ else \ false; \ fi; \ } am__make_running_with_option = \ case $${target_option-} in \ ?) ;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) pkgdatadir = $(datadir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ pkglibdir = $(libdir)/@PACKAGE@ pkglibexecdir = $(libexecdir)/@PACKAGE@ am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = . 
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/m4/ac_python_devel.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) DIST_COMMON = $(srcdir)/Makefile.am $(top_srcdir)/configure \ $(am__configure_deps) $(am__DIST_COMMON) am__CONFIG_DISTCLEAN_FILES = config.status config.cache config.log \ configure.lineno config.status.lineno mkinstalldirs = $(install_sh) -d CONFIG_HEADER = config.h $(top_builddir)/src/include/fst/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = RECURSIVE_TARGETS = all-recursive check-recursive cscopelist-recursive \ ctags-recursive dvi-recursive html-recursive info-recursive \ install-data-recursive install-dvi-recursive \ install-exec-recursive install-html-recursive \ install-info-recursive install-pdf-recursive \ install-ps-recursive install-recursive installcheck-recursive \ installdirs-recursive pdf-recursive ps-recursive \ tags-recursive uninstall-recursive am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \ distclean-recursive maintainer-clean-recursive am__recursive_targets = \ $(RECURSIVE_TARGETS) \ $(RECURSIVE_CLEAN_TARGETS) \ $(am__extra_recursive_targets) AM_RECURSIVE_TARGETS = $(am__recursive_targets:-recursive=) TAGS CTAGS \ cscope distdir dist dist-all distcheck am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) \ $(LISP)config.h.in # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags CSCOPE = cscope DIST_SUBDIRS = $(SUBDIRS) am__DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/config.h.in \ $(top_srcdir)/src/include/fst/config.h.in AUTHORS COPYING \ INSTALL NEWS README ar-lib compile config.guess config.sub \ install-sh ltmain.sh missing DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) distdir = $(PACKAGE)-$(VERSION) top_distdir = $(distdir) am__remove_distdir = \ if test -d "$(distdir)"; then \ find "$(distdir)" -type d ! 
-perm -200 -exec chmod u+w {} ';' \ && rm -rf "$(distdir)" \ || { sleep 5 && rm -rf "$(distdir)"; }; \ else :; fi am__post_remove_distdir = $(am__remove_distdir) am__relativize = \ dir0=`pwd`; \ sed_first='s,^\([^/]*\)/.*$$,\1,'; \ sed_rest='s,^[^/]*/*,,'; \ sed_last='s,^.*/\([^/]*\)$$,\1,'; \ sed_butlast='s,/*[^/]*$$,,'; \ while test -n "$$dir1"; do \ first=`echo "$$dir1" | sed -e "$$sed_first"`; \ if test "$$first" != "."; then \ if test "$$first" = ".."; then \ dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \ dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \ else \ first2=`echo "$$dir2" | sed -e "$$sed_first"`; \ if test "$$first2" = "$$first"; then \ dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \ else \ dir2="../$$dir2"; \ fi; \ dir0="$$dir0"/"$$first"; \ fi; \ fi; \ dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \ done; \ reldir="$$dir2" DIST_ARCHIVES = $(distdir).tar.gz GZIP_ENV = --best DIST_TARGETS = dist-gzip distuninstallcheck_listfiles = find . -type f -print am__distuninstallcheck_listfiles = $(distuninstallcheck_listfiles) \ | sed 's|^\./|$(prefix)/|' | grep -v '$(infodir)/dir$$' distcleancheck_listfiles = find . -type f -print ACLOCAL = @ACLOCAL@ AMTAR = @AMTAR@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DL_LIBS = @DL_LIBS@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GREP = @GREP@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ LD = @LD@ LDFLAGS = @LDFLAGS@ LIBOBJS = @LIBOBJS@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBOBJS = @LTLIBOBJS@ LT_SYS_LIBRARY_PATH = @LT_SYS_LIBRARY_PATH@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ NM = @NM@ NMEDIT = @NMEDIT@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PYTHON = @PYTHON@ PYTHON_CPPFLAGS = @PYTHON_CPPFLAGS@ PYTHON_EXEC_PREFIX = @PYTHON_EXEC_PREFIX@ PYTHON_EXTRA_LDFLAGS = @PYTHON_EXTRA_LDFLAGS@ PYTHON_EXTRA_LIBS = @PYTHON_EXTRA_LIBS@ PYTHON_LDFLAGS = @PYTHON_LDFLAGS@ PYTHON_PLATFORM = @PYTHON_PLATFORM@ PYTHON_PREFIX = @PYTHON_PREFIX@ PYTHON_SITE_PKG = @PYTHON_SITE_PKG@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ STRIP = @STRIP@ VERSION = @VERSION@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ bindir = @bindir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ datadir = @datadir@ datarootdir = 
@datarootdir@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libfstdir = @libfstdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgpyexecdir = @pkgpyexecdir@ pkgpythondir = @pkgpythondir@ prefix = @prefix@ program_transform_name = @program_transform_name@ psdir = @psdir@ pyexecdir = @pyexecdir@ pythondir = @pythondir@ runstatedir = @runstatedir@ sbindir = @sbindir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ SUBDIRS = src ACLOCAL_AMFLAGS = -I m4 all: config.h $(MAKE) $(AM_MAKEFLAGS) all-recursive .SUFFIXES: am--refresh: Makefile @: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ echo ' cd $(srcdir) && $(AUTOMAKE) --foreign'; \ $(am__cd) $(srcdir) && $(AUTOMAKE) --foreign \ && exit 0; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ echo ' $(SHELL) ./config.status'; \ $(SHELL) ./config.status;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) $(SHELL) ./config.status --recheck $(top_srcdir)/configure: $(am__configure_deps) $(am__cd) $(srcdir) && $(AUTOCONF) $(ACLOCAL_M4): $(am__aclocal_m4_deps) $(am__cd) $(srcdir) && $(ACLOCAL) $(ACLOCAL_AMFLAGS) $(am__aclocal_m4_deps): config.h: stamp-h1 @test -f $@ || rm -f stamp-h1 @test -f $@ || $(MAKE) $(AM_MAKEFLAGS) stamp-h1 stamp-h1: $(srcdir)/config.h.in $(top_builddir)/config.status @rm -f stamp-h1 cd $(top_builddir) && $(SHELL) ./config.status config.h $(srcdir)/config.h.in: $(am__configure_deps) ($(am__cd) $(top_srcdir) && $(AUTOHEADER)) rm -f stamp-h1 touch $@ src/include/fst/config.h: src/include/fst/stamp-h2 @test -f $@ || rm -f src/include/fst/stamp-h2 @test -f $@ || $(MAKE) $(AM_MAKEFLAGS) src/include/fst/stamp-h2 src/include/fst/stamp-h2: $(top_srcdir)/src/include/fst/config.h.in $(top_builddir)/config.status @rm -f src/include/fst/stamp-h2 cd $(top_builddir) && $(SHELL) ./config.status src/include/fst/config.h distclean-hdr: -rm -f config.h stamp-h1 src/include/fst/config.h src/include/fst/stamp-h2 mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs distclean-libtool: -rm -f libtool config.lt # This directory's subdirectories are mostly independent; you can cd # into them and run 'make' without going through this Makefile. # To change the values of 'make' variables: instead of editing Makefiles, # (1) if the variable is set in 'config.status', edit 'config.status' # (which will cause the Makefiles to be regenerated when you run 'make'); # (2) otherwise, pass the desired values on the 'make' command line. 
$(am__recursive_targets): @fail=; \ if $(am__make_keepgoing); then \ failcom='fail=yes'; \ else \ failcom='exit 1'; \ fi; \ dot_seen=no; \ target=`echo $@ | sed s/-recursive//`; \ case "$@" in \ distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \ *) list='$(SUBDIRS)' ;; \ esac; \ for subdir in $$list; do \ echo "Making $$target in $$subdir"; \ if test "$$subdir" = "."; then \ dot_seen=yes; \ local_target="$$target-am"; \ else \ local_target="$$target"; \ fi; \ ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done; \ if test "$$dot_seen" = "no"; then \ $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \ fi; test -z "$$fail" ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-recursive TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ include_option=--etags-include; \ empty_fix=.; \ else \ include_option=--include; \ empty_fix=; \ fi; \ list='$(SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ test ! -f $$subdir/TAGS || \ set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \ fi; \ done; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-recursive CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscope: cscope.files test ! -s cscope.files \ || $(CSCOPE) -b -q $(AM_CSCOPEFLAGS) $(CSCOPEFLAGS) -i cscope.files $(CSCOPE_ARGS) clean-cscope: -rm -f cscope.files cscope.files: clean-cscope cscopelist cscopelist: cscopelist-recursive cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags -rm -f cscope.out cscope.in.out cscope.po.out cscope.files distdir: $(DISTFILES) $(am__remove_distdir) test -d "$(distdir)" || mkdir "$(distdir)" @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ $(am__make_dryrun) \ || test -d "$(distdir)/$$subdir" \ || $(MKDIR_P) "$(distdir)/$$subdir" \ || exit 1; \ dir1=$$subdir; dir2="$(distdir)/$$subdir"; \ $(am__relativize); \ new_distdir=$$reldir; \ dir1=$$subdir; dir2="$(top_distdir)"; \ $(am__relativize); \ new_top_distdir=$$reldir; \ echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \ echo " am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \ ($(am__cd) $$subdir && \ $(MAKE) $(AM_MAKEFLAGS) \ top_distdir="$$new_top_distdir" \ distdir="$$new_distdir" \ am__remove_distdir=: \ am__skip_length_check=: \ am__skip_mode_fix=: \ distdir) \ || exit 1; \ fi; \ done -test -n "$(am__skip_mode_fix)" \ || find "$(distdir)" -type d ! -perm -755 \ -exec chmod u+rwx,go+rx {} \; -o \ ! -type d ! -perm -444 -links 1 -exec chmod a+r {} \; -o \ ! -type d ! -perm -400 -exec chmod a+r {} \; -o \ ! -type d ! -perm -444 -exec $(install_sh) -c -m a+r {} {} \; \ || chmod -R a+r "$(distdir)" dist-gzip: distdir tardir=$(distdir) && $(am__tar) | eval GZIP= gzip $(GZIP_ENV) -c >$(distdir).tar.gz $(am__post_remove_distdir) dist-bzip2: distdir tardir=$(distdir) && $(am__tar) | BZIP2=$${BZIP2--9} bzip2 -c >$(distdir).tar.bz2 $(am__post_remove_distdir) dist-lzip: distdir tardir=$(distdir) && $(am__tar) | lzip -c $${LZIP_OPT--9} >$(distdir).tar.lz $(am__post_remove_distdir) dist-xz: distdir tardir=$(distdir) && $(am__tar) | XZ_OPT=$${XZ_OPT--e} xz -c >$(distdir).tar.xz $(am__post_remove_distdir) dist-tarZ: distdir @echo WARNING: "Support for distribution archives compressed with" \ "legacy program 'compress' is deprecated." >&2 @echo WARNING: "It will be removed altogether in Automake 2.0" >&2 tardir=$(distdir) && $(am__tar) | compress -c >$(distdir).tar.Z $(am__post_remove_distdir) dist-shar: distdir @echo WARNING: "Support for shar distribution archives is" \ "deprecated." >&2 @echo WARNING: "It will be removed altogether in Automake 2.0" >&2 shar $(distdir) | eval GZIP= gzip $(GZIP_ENV) -c >$(distdir).shar.gz $(am__post_remove_distdir) dist-zip: distdir -rm -f $(distdir).zip zip -rq $(distdir).zip $(distdir) $(am__post_remove_distdir) dist dist-all: $(MAKE) $(AM_MAKEFLAGS) $(DIST_TARGETS) am__post_remove_distdir='@:' $(am__post_remove_distdir) # This target untars the dist file and tries a VPATH configuration. Then # it guarantees that the distribution is self-contained by making another # tarfile. 
distcheck: dist case '$(DIST_ARCHIVES)' in \ *.tar.gz*) \ eval GZIP= gzip $(GZIP_ENV) -dc $(distdir).tar.gz | $(am__untar) ;;\ *.tar.bz2*) \ bzip2 -dc $(distdir).tar.bz2 | $(am__untar) ;;\ *.tar.lz*) \ lzip -dc $(distdir).tar.lz | $(am__untar) ;;\ *.tar.xz*) \ xz -dc $(distdir).tar.xz | $(am__untar) ;;\ *.tar.Z*) \ uncompress -c $(distdir).tar.Z | $(am__untar) ;;\ *.shar.gz*) \ eval GZIP= gzip $(GZIP_ENV) -dc $(distdir).shar.gz | unshar ;;\ *.zip*) \ unzip $(distdir).zip ;;\ esac chmod -R a-w $(distdir) chmod u+w $(distdir) mkdir $(distdir)/_build $(distdir)/_build/sub $(distdir)/_inst chmod a-w $(distdir) test -d $(distdir)/_build || exit 0; \ dc_install_base=`$(am__cd) $(distdir)/_inst && pwd | sed -e 's,^[^:\\/]:[\\/],/,'` \ && dc_destdir="$${TMPDIR-/tmp}/am-dc-$$$$/" \ && am__cwd=`pwd` \ && $(am__cd) $(distdir)/_build/sub \ && ../../configure \ $(AM_DISTCHECK_CONFIGURE_FLAGS) \ $(DISTCHECK_CONFIGURE_FLAGS) \ --srcdir=../.. --prefix="$$dc_install_base" \ && $(MAKE) $(AM_MAKEFLAGS) \ && $(MAKE) $(AM_MAKEFLAGS) dvi \ && $(MAKE) $(AM_MAKEFLAGS) check \ && $(MAKE) $(AM_MAKEFLAGS) install \ && $(MAKE) $(AM_MAKEFLAGS) installcheck \ && $(MAKE) $(AM_MAKEFLAGS) uninstall \ && $(MAKE) $(AM_MAKEFLAGS) distuninstallcheck_dir="$$dc_install_base" \ distuninstallcheck \ && chmod -R a-w "$$dc_install_base" \ && ({ \ (cd ../.. && umask 077 && mkdir "$$dc_destdir") \ && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" install \ && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" uninstall \ && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" \ distuninstallcheck_dir="$$dc_destdir" distuninstallcheck; \ } || { rm -rf "$$dc_destdir"; exit 1; }) \ && rm -rf "$$dc_destdir" \ && $(MAKE) $(AM_MAKEFLAGS) dist \ && rm -rf $(DIST_ARCHIVES) \ && $(MAKE) $(AM_MAKEFLAGS) distcleancheck \ && cd "$$am__cwd" \ || exit 1 $(am__post_remove_distdir) @(echo "$(distdir) archives ready for distribution: "; \ list='$(DIST_ARCHIVES)'; for i in $$list; do echo $$i; done) | \ sed -e 1h -e 1s/./=/g -e 1p -e 1x -e '$$p' -e '$$x' distuninstallcheck: @test -n '$(distuninstallcheck_dir)' || { \ echo 'ERROR: trying to run $@ with an empty' \ '$$(distuninstallcheck_dir)' >&2; \ exit 1; \ }; \ $(am__cd) '$(distuninstallcheck_dir)' || { \ echo 'ERROR: cannot chdir into $(distuninstallcheck_dir)' >&2; \ exit 1; \ }; \ test `$(am__distuninstallcheck_listfiles) | wc -l` -eq 0 \ || { echo "ERROR: files left after uninstall:" ; \ if test -n "$(DESTDIR)"; then \ echo " (check DESTDIR support)"; \ fi ; \ $(distuninstallcheck_listfiles) ; \ exit 1; } >&2 distcleancheck: distclean @if test '$(srcdir)' = . 
; then \ echo "ERROR: distcleancheck can only run from a VPATH build" ; \ exit 1 ; \ fi @test `$(distcleancheck_listfiles) | wc -l` -eq 0 \ || { echo "ERROR: files left in build directory after distclean:" ; \ $(distcleancheck_listfiles) ; \ exit 1; } >&2 check-am: all-am check: check-recursive all-am: Makefile config.h installdirs: installdirs-recursive installdirs-am: install: install-recursive install-exec: install-exec-recursive install-data: install-data-recursive uninstall: uninstall-recursive install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-recursive install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." clean: clean-recursive clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-recursive -rm -f $(am__CONFIG_DISTCLEAN_FILES) -rm -f Makefile distclean-am: clean-am distclean-generic distclean-hdr \ distclean-libtool distclean-tags dvi: dvi-recursive dvi-am: html: html-recursive html-am: info: info-recursive info-am: install-data-am: install-dvi: install-dvi-recursive install-dvi-am: install-exec-am: install-html: install-html-recursive install-html-am: install-info: install-info-recursive install-info-am: install-man: install-pdf: install-pdf-recursive install-pdf-am: install-ps: install-ps-recursive install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-recursive -rm -f $(am__CONFIG_DISTCLEAN_FILES) -rm -rf $(top_srcdir)/autom4te.cache -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-recursive mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-recursive pdf-am: ps: ps-recursive ps-am: uninstall-am: .MAKE: $(am__recursive_targets) all install-am install-strip .PHONY: $(am__recursive_targets) CTAGS GTAGS TAGS all all-am \ am--refresh check check-am clean clean-cscope clean-generic \ clean-libtool cscope cscopelist-am ctags ctags-am dist \ dist-all dist-bzip2 dist-gzip dist-lzip dist-shar dist-tarZ \ dist-xz dist-zip distcheck distclean distclean-generic \ distclean-hdr distclean-libtool distclean-tags distcleancheck \ distdir distuninstallcheck dvi dvi-am html html-am info \ info-am install install-am install-data install-data-am \ install-dvi install-dvi-am install-exec install-exec-am \ install-html install-html-am install-info install-info-am \ install-man install-pdf install-pdf-am install-ps \ install-ps-am install-strip installcheck installcheck-am \ installdirs installdirs-am maintainer-clean \ maintainer-clean-generic mostlyclean mostlyclean-generic \ mostlyclean-libtool pdf pdf-am ps ps-am tags tags-am uninstall \ uninstall-am .PRECIOUS: Makefile # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT:
0
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.7/src/include/fst/extensions
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.7/src/include/fst/extensions/pdt/replace.h
// See www.openfst.org for extensive documentation on this weighted // finite-state transducer library. // // Recursively replaces FST arcs with other FSTs, returning a PDT. #ifndef FST_EXTENSIONS_PDT_REPLACE_H_ #define FST_EXTENSIONS_PDT_REPLACE_H_ #include <map> #include <memory> #include <set> #include <unordered_map> #include <utility> #include <vector> #include <fst/replace.h> #include <fst/replace-util.h> #include <fst/symbol-table-ops.h> namespace fst { namespace internal { // Hash to paren IDs template <typename S> struct ReplaceParenHash { size_t operator()(const std::pair<size_t, S> &paren) const { static constexpr auto prime = 7853; return paren.first + paren.second * prime; } }; } // namespace internal // Parser types characterize the PDT construction method. When applied to a CFG, // each non-terminal is encoded as a DFA that accepts precisely the RHS's of // productions of that non-terminal. For parsing (rather than just recognition), // production numbers can be used as outputs (placed as early as possible) in the // DFAs promoted to DFTs. For more information on the strongly regular // construction, see: // // Mohri, M., and Pereira, F. 1998. Dynamic compilation of weighted context-free // grammars. In Proc. ACL, pages 891-897. enum PdtParserType { // Top-down construction. Applied to a simple LL(1) grammar (among others), // gives a DPDA. If promoted to a DPDT, with outputs being production // numbers, gives a leftmost derivation. Left recursive grammars are // problematic in use. PDT_LEFT_PARSER, // Top-down construction. Similar to PDT_LEFT_PARSER except bounded-stack // (expandable as an FST) result with regular or, more generally, strongly // regular grammars. Epsilons may replace some parentheses, which may // introduce some non-determinism. PDT_LEFT_SR_PARSER, /* TODO(riley): // Bottom-up construction. Applied to a LR(0) grammar, gives a DPDA. // If promoted to a DPDT, with outputs being the production numbers, // gives the reverse of a rightmost derivation. PDT_RIGHT_PARSER, */ }; template <class Arc> struct PdtReplaceOptions { using Label = typename Arc::Label; explicit PdtReplaceOptions(Label root, PdtParserType type = PDT_LEFT_PARSER, Label start_paren_labels = kNoLabel, string left_paren_prefix = "(_", string right_paren_prefix = ")_") : root(root), type(type), start_paren_labels(start_paren_labels), left_paren_prefix(std::move(left_paren_prefix)), right_paren_prefix(std::move(right_paren_prefix)) {} Label root; PdtParserType type; Label start_paren_labels; const string left_paren_prefix; const string right_paren_prefix; }; // PdtParser: Base PDT parser class common to specific parsers.
template <class Arc> class PdtParser { public: using Label = typename Arc::Label; using StateId = typename Arc::StateId; using Weight = typename Arc::Weight; using LabelFstPair = std::pair<Label, const Fst<Arc> *>; using LabelPair = std::pair<Label, Label>; using LabelStatePair = std::pair<Label, StateId>; using StateWeightPair = std::pair<StateId, Weight>; using ParenKey = std::pair<size_t, StateId>; using ParenMap = std::unordered_map<ParenKey, size_t, internal::ReplaceParenHash<StateId>>; PdtParser(const std::vector<LabelFstPair> &fst_array, const PdtReplaceOptions<Arc> &opts) : root_(opts.root), start_paren_labels_(opts.start_paren_labels), left_paren_prefix_(std::move(opts.left_paren_prefix)), right_paren_prefix_(std::move(opts.right_paren_prefix)), error_(false) { for (size_t i = 0; i < fst_array.size(); ++i) { if (!CompatSymbols(fst_array[0].second->InputSymbols(), fst_array[i].second->InputSymbols())) { FSTERROR() << "PdtParser: Input symbol table of input FST " << i << " does not match input symbol table of 0th input FST"; error_ = true; } if (!CompatSymbols(fst_array[0].second->OutputSymbols(), fst_array[i].second->OutputSymbols())) { FSTERROR() << "PdtParser: Output symbol table of input FST " << i << " does not match output symbol table of 0th input FST"; error_ = true; } fst_array_.emplace_back(fst_array[i].first, fst_array[i].second->Copy()); // Builds map from non-terminal label to FST ID. label2id_[fst_array[i].first] = i; } } virtual ~PdtParser() { for (auto &pair : fst_array_) delete pair.second; } // Constructs the output PDT, dependent on the derived parser type. virtual void GetParser(MutableFst<Arc> *ofst, std::vector<LabelPair> *parens) = 0; protected: const std::vector<LabelFstPair> &FstArray() const { return fst_array_; } Label Root() const { return root_; } // Maps from non-terminal label to corresponding FST ID, or returns // kNoStateId to signal lookup failure. StateId Label2Id(Label l) const { auto it = label2id_.find(l); return it == label2id_.end() ? kNoStateId : it->second; } // Maps from output state to input FST label, state pair, or returns a // (kNoLabel, kNoStateId) pair to signal lookup failure. LabelStatePair GetLabelStatePair(StateId os) const { if (os >= label_state_pairs_.size()) { static const LabelStatePair no_pair(kNoLabel, kNoLabel); return no_pair; } else { return label_state_pairs_[os]; } } // Maps to output state from input FST (label, state) pair, or returns // kNoStateId to signal lookup failure. StateId GetState(const LabelStatePair &lsp) const { auto it = state_map_.find(lsp); if (it == state_map_.end()) { return kNoStateId; } else { return it->second; } } // Builds a single FST combining all referenced input FSTs, leaving in the // non-terminals for now; also tabulates the PDT states that correspond to the // start and final states of the input FSTs. void CreateFst(MutableFst<Arc> *ofst, std::vector<StateId> *open_dest, std::vector<std::vector<StateWeightPair>> *close_src); // Assigns parenthesis labels from total allocated paren IDs. void AssignParenLabels(size_t total_nparens, std::vector<LabelPair> *parens) { parens->clear(); for (size_t paren_id = 0; paren_id < total_nparens; ++paren_id) { const auto open_paren = start_paren_labels_ + paren_id; const auto close_paren = open_paren + total_nparens; parens->emplace_back(open_paren, close_paren); } } // Determines how non-terminal instances are assigned parenthesis IDs.
virtual size_t AssignParenIds(const Fst<Arc> &ofst, ParenMap *paren_map) const = 0; // Changes a non-terminal transition to an open parenthesis transition // redirected to the PDT state specified in the open_dest argument, when // indexed by the input FST ID for the non-terminal. Adds close parenthesis // transitions (with specified weights) from the PDT states specified in the // close_src argument, when indexed by the input FST ID for the non-terminal, // to the former destination state of the non-terminal transition. The // paren_map argument gives the parenthesis ID for a given non-terminal FST ID // and destination state pair. The close_non_term_weight vector specifies // non-terminals for which the non-terminal arc weight should be applied on // the close parenthesis (multiplying the close_src weight above) rather than // on the open parenthesis. If no paren ID is found, then an epsilon replaces // the parenthesis that would carry the non-terminal arc weight and the other // parenthesis is omitted (appropriate for the strongly-regular case). void AddParensToFst( const std::vector<LabelPair> &parens, const ParenMap &paren_map, const std::vector<StateId> &open_dest, const std::vector<std::vector<StateWeightPair>> &close_src, const std::vector<bool> &close_non_term_weight, MutableFst<Arc> *ofst); // Ensures that parentheses arcs are added to the symbol table. void AddParensToSymbolTables(const std::vector<LabelPair> &parens, MutableFst<Arc> *ofst); private: std::vector<LabelFstPair> fst_array_; Label root_; // Index to use for the first parenthesis. Label start_paren_labels_; const string left_paren_prefix_; const string right_paren_prefix_; // Maps from non-terminal label to FST ID. std::unordered_map<Label, StateId> label2id_; // Given an output state, specifies the input FST (label, state) pair. std::vector<LabelStatePair> label_state_pairs_; // Given an FST (label, state) pair, specifies the output FST state ID. std::map<LabelStatePair, StateId> state_map_; bool error_; }; template <class Arc> void PdtParser<Arc>::CreateFst( MutableFst<Arc> *ofst, std::vector<StateId> *open_dest, std::vector<std::vector<StateWeightPair>> *close_src) { ofst->DeleteStates(); if (error_) { ofst->SetProperties(kError, kError); return; } open_dest->resize(fst_array_.size(), kNoStateId); close_src->resize(fst_array_.size()); // Queue of non-terminals to replace. std::deque<Label> non_term_queue; non_term_queue.push_back(root_); // Has a non-terminal been enqueued? 
std::vector<bool> enqueued(fst_array_.size(), false); enqueued[label2id_[root_]] = true; Label max_label = kNoLabel; for (StateId soff = 0; !non_term_queue.empty(); soff = ofst->NumStates()) { const auto label = non_term_queue.front(); non_term_queue.pop_front(); StateId fst_id = Label2Id(label); const auto *ifst = fst_array_[fst_id].second; for (StateIterator<Fst<Arc>> siter(*ifst); !siter.Done(); siter.Next()) { const auto is = siter.Value(); const auto os = ofst->AddState(); const LabelStatePair lsp(label, is); label_state_pairs_.push_back(lsp); state_map_[lsp] = os; if (is == ifst->Start()) { (*open_dest)[fst_id] = os; if (label == root_) ofst->SetStart(os); } if (ifst->Final(is) != Weight::Zero()) { if (label == root_) ofst->SetFinal(os, ifst->Final(is)); (*close_src)[fst_id].emplace_back(os, ifst->Final(is)); } for (ArcIterator<Fst<Arc>> aiter(*ifst, is); !aiter.Done(); aiter.Next()) { auto arc = aiter.Value(); arc.nextstate += soff; if (max_label == kNoLabel || arc.olabel > max_label) max_label = arc.olabel; const auto nfst_id = Label2Id(arc.olabel); if (nfst_id != kNoStateId) { if (fst_array_[nfst_id].second->Start() == kNoStateId) continue; if (!enqueued[nfst_id]) { non_term_queue.push_back(arc.olabel); enqueued[nfst_id] = true; } } ofst->AddArc(os, arc); } } } if (start_paren_labels_ == kNoLabel) start_paren_labels_ = max_label + 1; } template <class Arc> void PdtParser<Arc>::AddParensToFst( const std::vector<LabelPair> &parens, const ParenMap &paren_map, const std::vector<StateId> &open_dest, const std::vector<std::vector<StateWeightPair>> &close_src, const std::vector<bool> &close_non_term_weight, MutableFst<Arc> *ofst) { StateId dead_state = kNoStateId; using MIter = MutableArcIterator<MutableFst<Arc>>; for (StateIterator<Fst<Arc>> siter(*ofst); !siter.Done(); siter.Next()) { StateId os = siter.Value(); std::unique_ptr<MIter> aiter(new MIter(ofst, os)); for (auto n = 0; !aiter->Done(); aiter->Next(), ++n) { const auto arc = aiter->Value(); // A reference here may go stale. StateId nfst_id = Label2Id(arc.olabel); if (nfst_id != kNoStateId) { // Gets parentheses. const ParenKey paren_key(nfst_id, arc.nextstate); auto it = paren_map.find(paren_key); Label open_paren = 0; Label close_paren = 0; if (it != paren_map.end()) { const auto paren_id = it->second; open_paren = parens[paren_id].first; close_paren = parens[paren_id].second; } // Sets open parenthesis. if (open_paren != 0 || !close_non_term_weight[nfst_id]) { const auto open_weight = close_non_term_weight[nfst_id] ? Weight::One() : arc.weight; const Arc sarc(open_paren, open_paren, open_weight, open_dest[nfst_id]); aiter->SetValue(sarc); } else { if (dead_state == kNoStateId) { dead_state = ofst->AddState(); } const Arc sarc(0, 0, Weight::One(), dead_state); aiter->SetValue(sarc); } // Adds close parentheses. if (close_paren != 0 || close_non_term_weight[nfst_id]) { for (size_t i = 0; i < close_src[nfst_id].size(); ++i) { const auto &pair = close_src[nfst_id][i]; const auto close_weight = close_non_term_weight[nfst_id] ? Times(arc.weight, pair.second) : pair.second; const Arc farc(close_paren, close_paren, close_weight, arc.nextstate); ofst->AddArc(pair.first, farc); if (os == pair.first) { // Invalidated iterator. 
aiter.reset(new MIter(ofst, os)); aiter->Seek(n); } } } } } } } template <class Arc> void PdtParser<Arc>::AddParensToSymbolTables( const std::vector<LabelPair> &parens, MutableFst<Arc> *ofst) { auto size = parens.size(); if (ofst->InputSymbols()) { if (!AddAuxiliarySymbols(left_paren_prefix_, start_paren_labels_, size, ofst->MutableInputSymbols())) { ofst->SetProperties(kError, kError); return; } if (!AddAuxiliarySymbols(right_paren_prefix_, start_paren_labels_ + size, size, ofst->MutableInputSymbols())) { ofst->SetProperties(kError, kError); return; } } if (ofst->OutputSymbols()) { if (!AddAuxiliarySymbols(left_paren_prefix_, start_paren_labels_, size, ofst->MutableOutputSymbols())) { ofst->SetProperties(kError, kError); return; } if (!AddAuxiliarySymbols(right_paren_prefix_, start_paren_labels_ + size, size, ofst->MutableOutputSymbols())) { ofst->SetProperties(kError, kError); return; } } } // Builds a PDT by recursive replacement top-down, where the call and return are // encoded in the parentheses. template <class Arc> class PdtLeftParser final : public PdtParser<Arc> { public: using Label = typename Arc::Label; using StateId = typename Arc::StateId; using Weight = typename Arc::Weight; using LabelFstPair = typename PdtParser<Arc>::LabelFstPair; using LabelPair = typename PdtParser<Arc>::LabelPair; using LabelStatePair = typename PdtParser<Arc>::LabelStatePair; using StateWeightPair = typename PdtParser<Arc>::StateWeightPair; using ParenKey = typename PdtParser<Arc>::ParenKey; using ParenMap = typename PdtParser<Arc>::ParenMap; using PdtParser<Arc>::AddParensToFst; using PdtParser<Arc>::AddParensToSymbolTables; using PdtParser<Arc>::AssignParenLabels; using PdtParser<Arc>::CreateFst; using PdtParser<Arc>::FstArray; using PdtParser<Arc>::GetLabelStatePair; using PdtParser<Arc>::GetState; using PdtParser<Arc>::Label2Id; using PdtParser<Arc>::Root; PdtLeftParser(const std::vector<LabelFstPair> &fst_array, const PdtReplaceOptions<Arc> &opts) : PdtParser<Arc>(fst_array, opts) { } void GetParser(MutableFst<Arc> *ofst, std::vector<LabelPair> *parens) override; protected: // Assigns a unique parenthesis ID for each non-terminal, destination // state pair. size_t AssignParenIds(const Fst<Arc> &ofst, ParenMap *paren_map) const override; }; template <class Arc> void PdtLeftParser<Arc>::GetParser( MutableFst<Arc> *ofst, std::vector<LabelPair> *parens) { ofst->DeleteStates(); parens->clear(); const auto &fst_array = FstArray(); // Map that gives the paren ID for a (non-terminal, dest. state) pair // (which can be unique). ParenMap paren_map; // Specifies the open parenthesis destination state for a given non-terminal. // The source is the non-terminal instance source state. std::vector<StateId> open_dest(fst_array.size(), kNoStateId); // Specifies close parenthesis source states and weights for a given // non-terminal. The destination is the non-terminal instance destination // state. std::vector<std::vector<StateWeightPair>> close_src(fst_array.size()); // Specifies non-terminals for which the non-terminal arc weight // should be applied on the close parenthesis (multiplying the // 'close_src' weight above) rather than on the open parenthesis. 
std::vector<bool> close_non_term_weight(fst_array.size(), false); CreateFst(ofst, &open_dest, &close_src); auto total_nparens = AssignParenIds(*ofst, &paren_map); AssignParenLabels(total_nparens, parens); AddParensToFst(*parens, paren_map, open_dest, close_src, close_non_term_weight, ofst); if (!fst_array.empty()) { ofst->SetInputSymbols(fst_array[0].second->InputSymbols()); ofst->SetOutputSymbols(fst_array[0].second->OutputSymbols()); } AddParensToSymbolTables(*parens, ofst); } template <class Arc> size_t PdtLeftParser<Arc>::AssignParenIds( const Fst<Arc> &ofst, ParenMap *paren_map) const { // Number of distinct parenthesis pairs per FST. std::vector<size_t> nparens(FstArray().size(), 0); // Number of distinct parenthesis pairs overall. size_t total_nparens = 0; for (StateIterator<Fst<Arc>> siter(ofst); !siter.Done(); siter.Next()) { const auto os = siter.Value(); for (ArcIterator<Fst<Arc>> aiter(ofst, os); !aiter.Done(); aiter.Next()) { const auto &arc = aiter.Value(); const auto nfst_id = Label2Id(arc.olabel); if (nfst_id != kNoStateId) { const ParenKey paren_key(nfst_id, arc.nextstate); auto it = paren_map->find(paren_key); if (it == paren_map->end()) { // Assigns new paren ID for this (FST, dest state) pair. (*paren_map)[paren_key] = nparens[nfst_id]++; if (nparens[nfst_id] > total_nparens) total_nparens = nparens[nfst_id]; } } } } return total_nparens; } // Similar to PdtLeftParser but: // // 1. Uses epsilons rather than parentheses labels for any non-terminal // instances within a left- (right-) linear dependency SCC, // 2. Allocates a paren ID uniquely for each such dependency SCC (rather than // non-terminal = dependency state) and destination state. template <class Arc> class PdtLeftSRParser final : public PdtParser<Arc> { public: using Label = typename Arc::Label; using StateId = typename Arc::StateId; using Weight = typename Arc::Weight; using LabelFstPair = typename PdtParser<Arc>::LabelFstPair; using LabelPair = typename PdtParser<Arc>::LabelPair; using LabelStatePair = typename PdtParser<Arc>::LabelStatePair; using StateWeightPair = typename PdtParser<Arc>::StateWeightPair; using ParenKey = typename PdtParser<Arc>::ParenKey; using ParenMap = typename PdtParser<Arc>::ParenMap; using PdtParser<Arc>::AddParensToFst; using PdtParser<Arc>::AddParensToSymbolTables; using PdtParser<Arc>::AssignParenLabels; using PdtParser<Arc>::CreateFst; using PdtParser<Arc>::FstArray; using PdtParser<Arc>::GetLabelStatePair; using PdtParser<Arc>::GetState; using PdtParser<Arc>::Label2Id; using PdtParser<Arc>::Root; PdtLeftSRParser(const std::vector<LabelFstPair> &fst_array, const PdtReplaceOptions<Arc> &opts) : PdtParser<Arc>(fst_array, opts), replace_util_(fst_array, ReplaceUtilOptions(opts.root)) { } void GetParser(MutableFst<Arc> *ofst, std::vector<LabelPair> *parens) override; protected: // Assigns a unique parenthesis ID for each non-terminal, destination state // pair when the non-terminal refers to a non-linear FST. Otherwise, assigns // a unique parenthesis ID for each dependency SCC, destination state pair if // the non-terminal instance is between // SCCs. Otherwise does nothing. size_t AssignParenIds(const Fst<Arc> &ofst, ParenMap *paren_map) const override; // Returns dependency SCC for given label. size_t SCC(Label label) const { return replace_util_.SCC(label); } // Is a given dependency SCC left-linear? 
bool SCCLeftLinear(size_t scc_id) const { const auto ll_props = kReplaceSCCLeftLinear | kReplaceSCCNonTrivial; const auto scc_props = replace_util_.SCCProperties(scc_id); return (scc_props & ll_props) == ll_props; } // Is a given dependency SCC right-linear? bool SCCRightLinear(size_t scc_id) const { const auto lr_props = kReplaceSCCRightLinear | kReplaceSCCNonTrivial; const auto scc_props = replace_util_.SCCProperties(scc_id); return (scc_props & lr_props) == lr_props; } // Components of left- (right-) linear dependency SCC; empty otherwise. const std::vector<size_t> &SCCComps(size_t scc_id) const { if (scc_comps_.empty()) GetSCCComps(); return scc_comps_[scc_id]; } // Returns the representative state of an SCC. For left-linear grammars, it // is one of the initial states. For right-linear grammars, it is one of the // non-terminal destination states; otherwise, it is kNoStateId. StateId RepState(size_t scc_id) const { if (SCCComps(scc_id).empty()) return kNoStateId; const auto fst_id = SCCComps(scc_id).front(); const auto &fst_array = FstArray(); const auto label = fst_array[fst_id].first; const auto *ifst = fst_array[fst_id].second; if (SCCLeftLinear(scc_id)) { const LabelStatePair lsp(label, ifst->Start()); return GetState(lsp); } else { // Right-linear. const LabelStatePair lsp(label, *NonTermDests(fst_id).begin()); return GetState(lsp); } return kNoStateId; } private: // Merges initial (final) states of a left- (right-) linear dependency SCC // after dealing with the non-terminal arc and final weights. void ProcSCCs(MutableFst<Arc> *ofst, std::vector<StateId> *open_dest, std::vector<std::vector<StateWeightPair>> *close_src, std::vector<bool> *close_non_term_weight) const; // Computes components of left- (right-) linear dependency SCC. void GetSCCComps() const { const std::vector<LabelFstPair> &fst_array = FstArray(); for (size_t i = 0; i < fst_array.size(); ++i) { const auto label = fst_array[i].first; const auto scc_id = SCC(label); if (scc_comps_.size() <= scc_id) scc_comps_.resize(scc_id + 1); if (SCCLeftLinear(scc_id) || SCCRightLinear(scc_id)) { scc_comps_[scc_id].push_back(i); } } } const std::set<StateId> &NonTermDests(StateId fst_id) const { if (non_term_dests_.empty()) GetNonTermDests(); return non_term_dests_[fst_id]; } // Finds non-terminal destination states for right-linear FSTs, or does // nothing if not found. void GetNonTermDests() const; // Dependency SCC info. mutable ReplaceUtil<Arc> replace_util_; // Components of left- (right-) linear dependency SCCs, or empty otherwise. mutable std::vector<std::vector<size_t>> scc_comps_; // States that have non-terminals entering them for each (right-linear) FST. mutable std::vector<std::set<StateId>> non_term_dests_; }; template <class Arc> void PdtLeftSRParser<Arc>::GetParser( MutableFst<Arc> *ofst, std::vector<LabelPair> *parens) { ofst->DeleteStates(); parens->clear(); const auto &fst_array = FstArray(); // Map that gives the paren ID for a (non-terminal, dest. state) pair. ParenMap paren_map; // Specifies the open parenthesis destination state for a given non-terminal. // The source is the non-terminal instance source state. std::vector<StateId> open_dest(fst_array.size(), kNoStateId); // Specifies close parenthesis source states and weights for a given // non-terminal. The destination is the non-terminal instance destination // state.
std::vector<std::vector<StateWeightPair>> close_src(fst_array.size()); // Specifies non-terminals for which the non-terminal arc weight should be // applied on the close parenthesis (multiplying the close_src weight above) // rather than on the open parenthesis. std::vector<bool> close_non_term_weight(fst_array.size(), false); CreateFst(ofst, &open_dest, &close_src); ProcSCCs(ofst, &open_dest, &close_src, &close_non_term_weight); const auto total_nparens = AssignParenIds(*ofst, &paren_map); AssignParenLabels(total_nparens, parens); AddParensToFst(*parens, paren_map, open_dest, close_src, close_non_term_weight, ofst); if (!fst_array.empty()) { ofst->SetInputSymbols(fst_array[0].second->InputSymbols()); ofst->SetOutputSymbols(fst_array[0].second->OutputSymbols()); } AddParensToSymbolTables(*parens, ofst); Connect(ofst); } template <class Arc> void PdtLeftSRParser<Arc>::ProcSCCs( MutableFst<Arc> *ofst, std::vector<StateId> *open_dest, std::vector<std::vector<StateWeightPair>> *close_src, std::vector<bool> *close_non_term_weight) const { const auto &fst_array = FstArray(); for (StateIterator<Fst<Arc>> siter(*ofst); !siter.Done(); siter.Next()) { const auto os = siter.Value(); const auto label = GetLabelStatePair(os).first; const auto is = GetLabelStatePair(os).second; const auto fst_id = Label2Id(label); const auto scc_id = SCC(label); const auto rs = RepState(scc_id); const auto *ifst = fst_array[fst_id].second; // SCC LEFT-LINEAR: puts non-terminal weights on close parentheses. Merges // initial states into SCC representative state and updates open_dest. if (SCCLeftLinear(scc_id)) { (*close_non_term_weight)[fst_id] = true; if (is == ifst->Start() && os != rs) { for (ArcIterator<Fst<Arc>> aiter(*ofst, os); !aiter.Done(); aiter.Next()) { const auto &arc = aiter.Value(); ofst->AddArc(rs, arc); } ofst->DeleteArcs(os); if (os == ofst->Start()) ofst->SetStart(rs); (*open_dest)[fst_id] = rs; } } // SCC RIGHT-LINEAR: pushes back final weights onto non-terminals, if // possible, or adds weighted epsilons to the SCC representative state. // Merges final states into SCC representative state and updates close_src. 
    if (SCCRightLinear(scc_id)) {
      for (MutableArcIterator<MutableFst<Arc>> aiter(ofst, os); !aiter.Done();
           aiter.Next()) {
        auto arc = aiter.Value();
        const auto idest = GetLabelStatePair(arc.nextstate).second;
        if (NonTermDests(fst_id).count(idest) > 0) {
          if (ofst->Final(arc.nextstate) != Weight::Zero()) {
            ofst->SetFinal(arc.nextstate, Weight::Zero());
            ofst->SetFinal(rs, Weight::One());
          }
          arc.weight = Times(arc.weight, ifst->Final(idest));
          arc.nextstate = rs;
          aiter.SetValue(arc);
        }
      }
      const auto final_weight = ifst->Final(is);
      if (final_weight != Weight::Zero() &&
          NonTermDests(fst_id).count(is) == 0) {
        ofst->AddArc(os, Arc(0, 0, final_weight, rs));
        if (ofst->Final(os) != Weight::Zero()) {
          ofst->SetFinal(os, Weight::Zero());
          ofst->SetFinal(rs, Weight::One());
        }
      }
      if (is == ifst->Start()) {
        (*close_src)[fst_id].clear();
        (*close_src)[fst_id].emplace_back(rs, Weight::One());
      }
    }
  }
}

template <class Arc>
void PdtLeftSRParser<Arc>::GetNonTermDests() const {
  const auto &fst_array = FstArray();
  non_term_dests_.resize(fst_array.size());
  for (size_t fst_id = 0; fst_id < fst_array.size(); ++fst_id) {
    const auto label = fst_array[fst_id].first;
    const auto scc_id = SCC(label);
    if (SCCRightLinear(scc_id)) {
      const auto *ifst = fst_array[fst_id].second;
      for (StateIterator<Fst<Arc>> siter(*ifst); !siter.Done(); siter.Next()) {
        const auto is = siter.Value();
        for (ArcIterator<Fst<Arc>> aiter(*ifst, is); !aiter.Done();
             aiter.Next()) {
          const auto &arc = aiter.Value();
          if (Label2Id(arc.olabel) != kNoStateId) {
            non_term_dests_[fst_id].insert(arc.nextstate);
          }
        }
      }
    }
  }
}

template <class Arc>
size_t PdtLeftSRParser<Arc>::AssignParenIds(const Fst<Arc> &ofst,
                                            ParenMap *paren_map) const {
  const auto &fst_array = FstArray();
  // Number of distinct parenthesis pairs per FST.
  std::vector<size_t> nparens(fst_array.size(), 0);
  // Number of distinct parenthesis pairs overall.
  size_t total_nparens = 0;
  for (StateIterator<Fst<Arc>> siter(ofst); !siter.Done(); siter.Next()) {
    const auto os = siter.Value();
    const auto label = GetLabelStatePair(os).first;
    const auto scc_id = SCC(label);
    for (ArcIterator<Fst<Arc>> aiter(ofst, os); !aiter.Done(); aiter.Next()) {
      const auto &arc = aiter.Value();
      const auto nfst_id = Label2Id(arc.olabel);
      if (nfst_id != kNoStateId) {
        size_t nscc_id = SCC(arc.olabel);
        bool nscc_linear = !SCCComps(nscc_id).empty();
        // Assigns a parenthesis ID for the non-terminal transition if the
        // non-terminal belongs to a (left-/right-) linear dependency SCC or
        // if the transition is in an FST from a different SCC.
        if (!nscc_linear || scc_id != nscc_id) {
          // For (left-/right-) linear SCCs, instead of using nfst_id, we
          // will use its SCC prototype pfst_id for assigning distinct
          // parenthesis IDs.
          const auto pfst_id =
              nscc_linear ? SCCComps(nscc_id).front() : nfst_id;
          ParenKey paren_key(pfst_id, arc.nextstate);
          const auto it = paren_map->find(paren_key);
          if (it == paren_map->end()) {
            // Assigns a new paren ID for this (FST/SCC, dest. state) pair.
            if (nscc_linear) {
              // This is the mapping we'll need, but we also store it
              // (harmlessly) for the prototype below so we can easily keep
              // count per SCC.
              const ParenKey nparen_key(nfst_id, arc.nextstate);
              (*paren_map)[nparen_key] = nparens[pfst_id];
            }
            (*paren_map)[paren_key] = nparens[pfst_id]++;
            if (nparens[pfst_id] > total_nparens) {
              total_nparens = nparens[pfst_id];
            }
          }
        }
      }
    }
  }
  return total_nparens;
}

// Builds a pushdown transducer (PDT) from an RTN specification. The result is
// a PDT written to a mutable FST where some transitions are labeled with
// open or close parentheses.
// To be interpreted as a PDT, the parens must balance on a path (see
// PdtExpand()). The open/close parenthesis label pairs are returned in the
// parens argument.
template <class Arc>
void Replace(
    const std::vector<std::pair<typename Arc::Label, const Fst<Arc> *>>
        &ifst_array,
    MutableFst<Arc> *ofst,
    std::vector<std::pair<typename Arc::Label, typename Arc::Label>> *parens,
    const PdtReplaceOptions<Arc> &opts) {
  switch (opts.type) {
    case PDT_LEFT_PARSER: {
      PdtLeftParser<Arc> pr(ifst_array, opts);
      pr.GetParser(ofst, parens);
      return;
    }
    case PDT_LEFT_SR_PARSER: {
      PdtLeftSRParser<Arc> pr(ifst_array, opts);
      pr.GetParser(ofst, parens);
      return;
    }
    default:
      FSTERROR() << "Replace: Unknown PDT parser type: " << opts.type;
      ofst->DeleteStates();
      ofst->SetProperties(kError, kError);
      parens->clear();
      return;
  }
}

// Variant where the only user-controlled argument is the root ID.
template <class Arc>
void Replace(
    const std::vector<std::pair<typename Arc::Label, const Fst<Arc> *>>
        &ifst_array,
    MutableFst<Arc> *ofst,
    std::vector<std::pair<typename Arc::Label, typename Arc::Label>> *parens,
    typename Arc::Label root) {
  PdtReplaceOptions<Arc> opts(root);
  Replace(ifst_array, ofst, parens, opts);
}

}  // namespace fst

#endif  // FST_EXTENSIONS_PDT_REPLACE_H_
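// A minimal usage sketch of the PDT Replace() declared above, assuming a
// two-FST RTN in which a root FST references a sub-FST through a
// non-terminal label. The concrete label values (kRoot, kSub, and the
// terminal label 1) are illustrative assumptions, not part of any real
// grammar.
#include <utility>
#include <vector>
#include <fst/fstlib.h>

inline void PdtReplaceSketch() {
  using fst::StdArc;
  using fst::StdVectorFst;
  constexpr StdArc::Label kRoot = 1000000;  // Non-terminal for the root FST.
  constexpr StdArc::Label kSub = 1000001;   // Non-terminal for the sub-FST.

  StdVectorFst root;  // Accepts exactly the non-terminal kSub.
  root.AddState();
  root.AddState();
  root.SetStart(0);
  root.SetFinal(1, StdArc::Weight::One());
  root.AddArc(0, StdArc(kSub, kSub, StdArc::Weight::One(), 1));

  StdVectorFst sub;  // Accepts exactly the terminal label 1.
  sub.AddState();
  sub.AddState();
  sub.SetStart(0);
  sub.SetFinal(1, StdArc::Weight::One());
  sub.AddArc(0, StdArc(1, 1, StdArc::Weight::One(), 1));

  const std::vector<std::pair<StdArc::Label, const fst::Fst<StdArc> *>>
      fst_array = {{kRoot, &root}, {kSub, &sub}};
  StdVectorFst pdt;
  std::vector<std::pair<StdArc::Label, StdArc::Label>> parens;
  // Root-only variant; uses the default PdtReplaceOptions parser type.
  fst::Replace(fst_array, &pdt, &parens, kRoot);
}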
0
coqui_public_repos/inference-engine/third_party/kenlm/lm
coqui_public_repos/inference-engine/third_party/kenlm/lm/common/special.hh
#ifndef LM_COMMON_SPECIAL_H #define LM_COMMON_SPECIAL_H #include "lm/word_index.hh" namespace lm { class SpecialVocab { public: SpecialVocab(WordIndex bos, WordIndex eos) : bos_(bos), eos_(eos) {} bool IsSpecial(WordIndex word) const { return word == kUNK || word == bos_ || word == eos_; } WordIndex UNK() const { return kUNK; } WordIndex BOS() const { return bos_; } WordIndex EOS() const { return eos_; } private: WordIndex bos_; WordIndex eos_; }; } // namespace lm #endif // LM_COMMON_SPECIAL_H
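// A minimal usage sketch of SpecialVocab, assuming the conventional kenlm
// layout where <unk> has a fixed index (kUNK, from word_index.hh) and the
// <s>/</s> indices come from the vocabulary. The literal indices 1 and 2
// below are illustrative assumptions only.
#include "lm/common/special.hh"

inline bool IsStructuralToken(lm::WordIndex word) {
  const lm::SpecialVocab special(/*bos=*/1, /*eos=*/2);
  return special.IsSpecial(word);  // True for <unk>, <s>, or </s>.
}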
0
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.7/src/include
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.7/src/include/fst/isomorphic.h
// See www.openfst.org for extensive documentation on this weighted // finite-state transducer library. // // Function to test two FSTs are isomorphic, i.e., they are equal up to a state // and arc re-ordering. FSTs should be deterministic when viewed as // unweighted automata. #ifndef FST_ISOMORPHIC_H_ #define FST_ISOMORPHIC_H_ #include <algorithm> #include <list> #include <type_traits> #include <vector> #include <fst/log.h> #include <fst/fst.h> namespace fst { namespace internal { // Orders weights for equality checking. template <class Weight, typename std::enable_if< IsIdempotent<Weight>::value>::type * = nullptr> bool WeightCompare(const Weight &w1, const Weight &w2, float delta, bool *error) { return NaturalLess<Weight>()(w1, w2); } template <class Weight, typename std::enable_if< !IsIdempotent<Weight>::value>::type * = nullptr> bool WeightCompare(const Weight &w1, const Weight &w2, float delta, bool *error) { // No natural order; use hash. const auto q1 = w1.Quantize(delta); const auto q2 = w2.Quantize(delta); auto n1 = q1.Hash(); auto n2 = q2.Hash(); // Hash not unique; very unlikely to happen. if (n1 == n2 && q1 != q2) { VLOG(1) << "Isomorphic: Weight hash collision"; *error = true; } return n1 < n2; } template <class Arc> class Isomorphism { using StateId = typename Arc::StateId; public: Isomorphism(const Fst<Arc> &fst1, const Fst<Arc> &fst2, float delta) : fst1_(fst1.Copy()), fst2_(fst2.Copy()), delta_(delta), error_(false), comp_(delta, &error_) {} // Checks if input FSTs are isomorphic. bool IsIsomorphic() { if (fst1_->Start() == kNoStateId && fst2_->Start() == kNoStateId) { return true; } if (fst1_->Start() == kNoStateId || fst2_->Start() == kNoStateId) { return false; } PairState(fst1_->Start(), fst2_->Start()); while (!queue_.empty()) { const auto &pr = queue_.front(); if (!IsIsomorphicState(pr.first, pr.second)) return false; queue_.pop_front(); } return true; } bool Error() const { return error_; } private: // Orders arcs for equality checking. class ArcCompare { public: ArcCompare(float delta, bool *error) : delta_(delta), error_(error) {} bool operator()(const Arc &arc1, const Arc &arc2) const { if (arc1.ilabel < arc2.ilabel) return true; if (arc1.ilabel > arc2.ilabel) return false; if (arc1.olabel < arc2.olabel) return true; if (arc1.olabel > arc2.olabel) return false; return WeightCompare(arc1.weight, arc2.weight, delta_, error_); } private: float delta_; bool *error_; }; // Maintains state correspondences and queue. bool PairState(StateId s1, StateId s2) { if (state_pairs_.size() <= s1) state_pairs_.resize(s1 + 1, kNoStateId); if (state_pairs_[s1] == s2) { return true; // already seen this pair } else if (state_pairs_[s1] != kNoStateId) { return false; // s1 already paired with another s2 } state_pairs_[s1] = s2; queue_.push_back(std::make_pair(s1, s2)); return true; } // Checks if state pair is isomorphic bool IsIsomorphicState(StateId s1, StateId s2); std::unique_ptr<Fst<Arc>> fst1_; std::unique_ptr<Fst<Arc>> fst2_; float delta_; // Weight equality delta. std::vector<Arc> arcs1_; // For sorting arcs on FST1. std::vector<Arc> arcs2_; // For sorting arcs on FST2. std::vector<StateId> state_pairs_; // Maintains state correspondences. std::list<std::pair<StateId, StateId>> queue_; // Queue of state pairs. bool error_; // Error flag. 
  ArcCompare comp_;
};

template <class Arc>
bool Isomorphism<Arc>::IsIsomorphicState(StateId s1, StateId s2) {
  if (!ApproxEqual(fst1_->Final(s1), fst2_->Final(s2), delta_)) return false;
  auto narcs1 = fst1_->NumArcs(s1);
  auto narcs2 = fst2_->NumArcs(s2);
  if (narcs1 != narcs2) return false;
  ArcIterator<Fst<Arc>> aiter1(*fst1_, s1);
  ArcIterator<Fst<Arc>> aiter2(*fst2_, s2);
  arcs1_.clear();
  arcs1_.reserve(narcs1);
  arcs2_.clear();
  arcs2_.reserve(narcs2);
  for (; !aiter1.Done(); aiter1.Next(), aiter2.Next()) {
    arcs1_.push_back(aiter1.Value());
    arcs2_.push_back(aiter2.Value());
  }
  std::sort(arcs1_.begin(), arcs1_.end(), comp_);
  std::sort(arcs2_.begin(), arcs2_.end(), comp_);
  for (size_t i = 0; i < arcs1_.size(); ++i) {
    const auto &arc1 = arcs1_[i];
    const auto &arc2 = arcs2_[i];
    if (arc1.ilabel != arc2.ilabel) return false;
    if (arc1.olabel != arc2.olabel) return false;
    if (!ApproxEqual(arc1.weight, arc2.weight, delta_)) return false;
    if (!PairState(arc1.nextstate, arc2.nextstate)) return false;
    if (i > 0) {  // Checks for non-determinism.
      const auto &arc0 = arcs1_[i - 1];
      if (arc1.ilabel == arc0.ilabel && arc1.olabel == arc0.olabel &&
          ApproxEqual(arc1.weight, arc0.weight, delta_)) {
        VLOG(1) << "Isomorphic: Non-determinism as an unweighted automaton";
        error_ = true;
        return false;
      }
    }
  }
  return true;
}

}  // namespace internal

// Tests if two FSTs have the same states and arcs up to a reordering.
// Inputs should be deterministic when viewed as unweighted automata;
// non-determinism is reported as an error.
template <class Arc>
bool Isomorphic(const Fst<Arc> &fst1, const Fst<Arc> &fst2,
                float delta = kDelta) {
  internal::Isomorphism<Arc> iso(fst1, fst2, delta);
  bool result = iso.IsIsomorphic();
  if (iso.Error()) {
    FSTERROR() << "Isomorphic: Cannot determine if inputs are isomorphic";
    return false;
  } else {
    return result;
  }
}

}  // namespace fst

#endif  // FST_ISOMORPHIC_H_
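// A minimal usage sketch of Isomorphic(): two one-arc acceptors that differ
// only in their state numbering compare as isomorphic. The label and the
// 0.5 weight are arbitrary illustrative values.
#include <fst/fstlib.h>

inline bool IsomorphicSketch() {
  fst::StdVectorFst a;
  a.AddState();
  a.AddState();
  a.SetStart(0);
  a.SetFinal(1, fst::TropicalWeight::One());
  a.AddArc(0, fst::StdArc(1, 1, 0.5, 1));

  fst::StdVectorFst b;  // The same machine with the state IDs swapped.
  b.AddState();
  b.AddState();
  b.SetStart(1);
  b.SetFinal(0, fst::TropicalWeight::One());
  b.AddArc(1, fst::StdArc(1, 1, 0.5, 0));

  return fst::Isomorphic(a, b);  // Expected: true.
}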
0
coqui_public_repos/TTS/recipes/bel-alex73
coqui_public_repos/TTS/recipes/bel-alex73/docker-prepare/runtime.sh
#!/bin/bash cd /a/TTS pip install -e .[all,dev,notebooks] LANG=C.utf8 bash
0
coqui_public_repos/STT
coqui_public_repos/STT/taskcluster/test-python_35_16k-linux-amd64-prod_pbmodel-opt.yml
build: template_file: test-linux-opt-base.tyml dependencies: - "linux-amd64-cpu-opt" args: tests_cmdline: "${system.homedir.linux}/DeepSpeech/ds/taskcluster/tc-python-tests-prod.sh 3.5.8:m 16k" workerType: "${docker.dsTests}" metadata: name: "DeepSpeech Linux AMD64 CPU Python v3.5 prod tests (16kHz)" description: "Testing DeepSpeech for Linux/AMD64 on Python v3.5 on prod model, CPU only, optimized version (16kHz)"
0
coqui_public_repos/STT
coqui_public_repos/STT/taskcluster/test-electronjs_v11.0_multiarchpkg-win-tflite-opt.yml
build: template_file: test-win-opt-base.tyml dependencies: - "node-package-tflite" - "test-training_16k-linux-amd64-py36m-opt" test_model_task: "test-training_16k-linux-amd64-py36m-opt" system_setup: > ${system.sox_win} && ${nodejs.win.prep_12} args: tests_cmdline: "${system.homedir.win}/DeepSpeech/ds/taskcluster/tc-electron_tflite-tests.sh 12.x 11.0.0 16k" metadata: name: "DeepSpeech Windows AMD64 TFLite ElectronJS MultiArch Package v11.0 tests" description: "Testing DeepSpeech for Windows/AMD64 on ElectronJS MultiArch Package v11.0, TFLite only, optimized version"
0
coqui_public_repos/STT-examples/android_mic_streaming
coqui_public_repos/STT-examples/android_mic_streaming/app/proguard-rules.pro
# Add project specific ProGuard rules here. # You can control the set of applied configuration files using the # proguardFiles setting in build.gradle. # # For more details, see # http://developer.android.com/guide/developing/tools/proguard.html # If your project uses WebView with JS, uncomment the following # and specify the fully qualified class name to the JavaScript interface # class: #-keepclassmembers class fqcn.of.javascript.interface.for.webview { # public *; #} # Uncomment this to preserve the line number information for # debugging stack traces. #-keepattributes SourceFile,LineNumberTable # If you keep the line number information, uncomment this to # hide the original source file name. #-renamesourcefileattribute SourceFile
0
coqui_public_repos/STT-models/portuguese/itml
coqui_public_repos/STT-models/portuguese/itml/v0.1.1/MODEL_CARD.md
# Model card for Portuguese STT

Jump to section:

- [Model details](#model-details)
- [Intended use](#intended-use)
- [Performance Factors](#performance-factors)
- [Metrics](#metrics)
- [Training data](#training-data)
- [Evaluation data](#evaluation-data)
- [Ethical considerations](#ethical-considerations)
- [Caveats and recommendations](#caveats-and-recommendations)

## Model details

- Person or organization developing model: Originally trained by [Francis Tyers](https://scholar.google.fr/citations?user=o5HSM6cAAAAJ) and the [Inclusive Technology for Marginalised Languages](https://itml.cl.indiana.edu/) group.
- Model language: Portuguese / Português / `pt`
- Model date: April 26, 2021
- Model type: `Speech-to-Text`
- Model version: `v0.1.1`
- Compatible with 🐸 STT version: `v0.9.3`
- License: AGPL
- Citation details: `@techreport{portuguese-stt, author = {Tyers,Francis}, title = {Portuguese STT 0.1}, institution = {Coqui}, address = {\url{https://github.com/coqui-ai/STT-models}}, year = {2021}, month = {April}, number = {STT-CV6.1-PT-0.1} }`
- Where to send questions or comments about the model: You can leave an issue on [`STT-model` issues](https://github.com/coqui-ai/STT-models/issues), open a new discussion on [`STT-model` discussions](https://github.com/coqui-ai/STT-models/discussions), or chat with us on [Gitter](https://gitter.im/coqui-ai/).

## Intended use

Speech-to-Text for the [Portuguese Language](https://en.wikipedia.org/wiki/Portuguese_language) on 16kHz, mono-channel audio.

## Performance Factors

Factors relevant to Speech-to-Text performance include but are not limited to speaker demographics, recording quality, and background noise. Read more about STT performance factors [here](https://stt.readthedocs.io/en/latest/DEPLOYMENT.html#how-will-a-model-perform-on-my-data).

## Metrics

STT models are usually evaluated in terms of their transcription accuracy, deployment Real-Time Factor, and model size on disk.

#### Transcription Accuracy

The following Word Error Rates and Character Error Rates are reported on [omnilingo](https://tepozcatl.omnilingo.cc/pt/).

|Test Corpus|WER|CER|
|-----------|---|---|
|Common Voice|73.2\%|26.7\%|

#### Real-Time Factor

Real-Time Factor (RTF) is defined as `processing-time / length-of-audio`. The exact real-time factor of an STT model will depend on the hardware setup, so you may experience a different RTF.

Recorded average RTF on laptop CPU: ``

#### Model Size

`model.pbmm`: 181M
`model.tflite`: 46M

### Approaches to uncertainty and variability

Confidence scores and multiple paths from the decoding beam can be used to measure model uncertainty and provide multiple, variable transcripts for any processed audio.

## Training data

This model was trained on Common Voice 6.1 train.

## Evaluation data

The model was evaluated on Common Voice 6.1 test.

## Ethical considerations

Deploying a Speech-to-Text model into any production setting has ethical implications. You should consider these implications before use.

### Demographic Bias

You should assume every machine learning model has demographic bias unless proven otherwise. For STT models, it is often the case that transcription accuracy is better for men than it is for women. If you are using this model in production, you should acknowledge this as a potential issue.

### Surveillance

Speech-to-Text may be misused to invade the privacy of others by recording and mining information from private conversations. This kind of individual privacy is protected by law in many countries.
You should not assume consent to record and analyze private speech. ## Caveats and recommendations Machine learning models (like this STT model) perform best on data that is similar to the data on which they were trained. Read about what to expect from an STT model with regard to your data [here](https://stt.readthedocs.io/en/latest/DEPLOYMENT.html#how-will-a-model-perform-on-my-data). In most applications, it is recommended that you [train your own language model](https://stt.readthedocs.io/en/latest/LANGUAGE_MODEL.html) to improve transcription accuracy on your speech data.
0
coqui_public_repos/STT-examples/django_api_streaming
coqui_public_repos/STT-examples/django_api_streaming/stt_app/apps.py
from django.apps import AppConfig from stt import Model from stt_app.config import config # These constants control the beam search decoder # Beam width used in the CTC decoder when building candidate transcriptions BEAM_WIDTH = 500 # The alpha hyperparameter of the CTC decoder. Language Model weight LM_ALPHA = 0.75 # The beta hyperparameter of the CTC decoder. Word insertion bonus. LM_BETA = 1.85 # These constants are tied to the shape of the graph used (changing them changes # the geometry of the first layer), so make sure you use the same constants that # were used during training # Number of MFCC features to use # N_FEATURES = 26 # Size of the context window used for producing timesteps in the input vector # N_CONTEXT = 9 class STT(AppConfig): name = 'stt_app' conf = config.ConfigSTT() model = conf.get_config('model') scorer = conf.get_config('lm') stt_model = Model(model) stt_model.setBeamWidth(BEAM_WIDTH) if scorer: stt_model.enableExternalScorer(scorer) stt_model.setScorerAlphaBeta(LM_ALPHA, LM_BETA) def ready(self): print("STT Server Initialization")
0
coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src
coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src/test/rand-fst.h
#ifndef FST_TEST_RAND_FST_H_ #define FST_TEST_RAND_FST_H_ #include <fst/log.h> #include <fst/mutable-fst.h> #include <fst/verify.h> namespace fst { // Generates a random FST. template <class Arc, class WeightGenerator> void RandFst(const int num_random_states, const int num_random_arcs, const int num_random_labels, const float acyclic_prob, WeightGenerator *weight_generator, MutableFst<Arc> *fst) { typedef typename Arc::Label Label; typedef typename Arc::StateId StateId; typedef typename Arc::Weight Weight; // Determines direction of the arcs wrt state numbering. This way we // can force acyclicity when desired. enum ArcDirection { ANY_DIRECTION = 0, FORWARD_DIRECTION = 1, REVERSE_DIRECTION = 2, NUM_DIRECTIONS = 3 }; ArcDirection arc_direction = ANY_DIRECTION; if (rand() / (RAND_MAX + 1.0) < acyclic_prob) arc_direction = rand() % 2 ? FORWARD_DIRECTION : REVERSE_DIRECTION; fst->DeleteStates(); StateId ns = rand() % num_random_states; if (ns == 0) return; for (StateId s = 0; s < ns; ++s) fst->AddState(); StateId start = rand() % ns; fst->SetStart(start); size_t na = rand() % num_random_arcs; for (size_t n = 0; n < na; ++n) { StateId s = rand() % ns; Arc arc; arc.ilabel = rand() % num_random_labels; arc.olabel = rand() % num_random_labels; arc.weight = (*weight_generator)(); arc.nextstate = rand() % ns; if ((arc_direction == FORWARD_DIRECTION || arc_direction == REVERSE_DIRECTION) && s == arc.nextstate) { continue; // skips self-loops } if ((arc_direction == FORWARD_DIRECTION && s > arc.nextstate) || (arc_direction == REVERSE_DIRECTION && s < arc.nextstate)) { StateId t = s; // reverses arcs s = arc.nextstate; arc.nextstate = t; } fst->AddArc(s, arc); } StateId nf = rand() % (ns + 1); for (StateId n = 0; n < nf; ++n) { StateId s = rand() % ns; Weight final = (*weight_generator)(); fst->SetFinal(s, final); } VLOG(1) << "Check FST for sanity (including property bits)."; CHECK(Verify(*fst)); // Get/compute all properties. uint64 props = fst->Properties(kFstProperties, true); // Select random set of properties to be unknown. uint64 mask = 0; for (int n = 0; n < 8; ++n) { mask |= rand() & 0xff; mask <<= 8; } mask &= ~kTrinaryProperties; fst->SetProperties(props & ~mask, mask); } } // namespace fst #endif // FST_TEST_RAND_FST_H_
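// A minimal usage sketch of RandFst(). The weight generator can be any
// functor returning a Weight; the constant-One generator below is the
// simplest possible choice and is purely illustrative, as are the size
// parameters.
#include <fst/fstlib.h>

struct OneWeightGenerator {
  fst::TropicalWeight operator()() const { return fst::TropicalWeight::One(); }
};

inline void RandFstSketch() {
  fst::StdVectorFst result;
  OneWeightGenerator generator;
  fst::RandFst<fst::StdArc, OneWeightGenerator>(
      /*num_random_states=*/10, /*num_random_arcs=*/20,
      /*num_random_labels=*/5, /*acyclic_prob=*/0.5f, &generator, &result);
}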
0
coqui_public_repos/inference-engine/third_party/openfst-1.6.9-win/src
coqui_public_repos/inference-engine/third_party/openfst-1.6.9-win/src/bin/fstprune-main.cc
// See www.openfst.org for extensive documentation on this weighted // finite-state transducer library. // // Prunes states and arcs of an FST w.r.t. the shortest path weight. #include <cstring> #include <memory> #include <string> #include <fst/flags.h> #include <fst/script/prune.h> DECLARE_double(delta); DECLARE_int64(nstate); DECLARE_string(weight); int fstprune_main(int argc, char **argv) { namespace s = fst::script; using fst::script::MutableFstClass; using fst::script::WeightClass; string usage = "Prunes states and arcs of an FST.\n\n Usage: "; usage += argv[0]; usage += " [in.fst [out.fst]]\n"; std::set_new_handler(FailedNewHandler); SET_FLAGS(usage.c_str(), &argc, &argv, true); if (argc > 3) { ShowUsage(); return 1; } const string in_name = (argc > 1 && strcmp(argv[1], "-") != 0) ? argv[1] : ""; const string out_name = argc > 2 ? argv[2] : ""; std::unique_ptr<MutableFstClass> fst(MutableFstClass::Read(in_name, true)); if (!fst) return 1; const auto weight_threshold = FLAGS_weight.empty() ? WeightClass::Zero(fst->WeightType()) : WeightClass(fst->WeightType(), FLAGS_weight); s::Prune(fst.get(), weight_threshold, FLAGS_nstate, FLAGS_delta); return !fst->Write(out_name); }
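// A minimal sketch of the same pruning done programmatically through the
// script-level API that this main wraps. The "3.0" threshold string is an
// illustrative placeholder; the call mirrors the one above, with --nstate
// and --delta left at their defaults.
#include <memory>
#include <string>
#include <fst/script/prune.h>

inline bool PruneSketch(const std::string &in_name,
                        const std::string &out_name) {
  namespace s = fst::script;
  std::unique_ptr<s::MutableFstClass> fst(
      s::MutableFstClass::Read(in_name, true));
  if (!fst) return false;
  const s::WeightClass threshold(fst->WeightType(), "3.0");
  s::Prune(fst.get(), threshold, fst::kNoStateId, fst::kDelta);
  return fst->Write(out_name);
}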
0
coqui_public_repos/STT/native_client/kenlm/lm
coqui_public_repos/STT/native_client/kenlm/lm/interpolate/split_worker.hh
#ifndef KENLM_INTERPOLATE_SPLIT_WORKER_H_ #define KENLM_INTERPOLATE_SPLIT_WORKER_H_ #include "../../util/stream/chain.hh" #include "../../util/stream/stream.hh" namespace lm { namespace interpolate { class SplitWorker { public: /** * Constructs a split worker for a particular order. It writes the * split-off backoff values to the backoff chain and the ngram id and * probability to the sort chain for each ngram in the input. */ SplitWorker(std::size_t order, util::stream::Chain &backoff_chain, util::stream::Chain &sort_chain); /** * The callback invoked to handle the input from the ngram intermediate * files. */ void Run(const util::stream::ChainPosition& position); private: /** * The ngram order we are reading/writing for. */ std::size_t order_; /** * The stream to write to for the backoff values. */ util::stream::Stream backoff_input_; /** * The stream to write to for the ngram id + probability values. */ util::stream::Stream sort_input_; }; } } #endif
0
coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src/extensions
coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src/extensions/far/script-impl.cc
// See www.openfst.org for extensive documentation on this weighted // finite-state transducer library. // // Definitions and functions for invoking and using Far main functions that // support multiple and extensible arc types. #include <fst/extensions/far/script-impl.h> #include <string> #include <fst/extensions/far/far.h> #include <fstream> namespace fst { namespace script { string LoadArcTypeFromFar(const string &far_fname) { FarHeader hdr; if (!hdr.Read(far_fname)) { LOG(ERROR) << "Error reading FAR: " << far_fname; return ""; } string atype = hdr.ArcType(); if (atype == "unknown") { LOG(ERROR) << "Empty FST archive: " << far_fname; return ""; } return atype; } string LoadArcTypeFromFst(const string &fst_fname) { FstHeader hdr; std::ifstream in(fst_fname, std::ios_base::in | std::ios_base::binary); if (!hdr.Read(in, fst_fname)) { LOG(ERROR) << "Error reading FST: " << fst_fname; return ""; } return hdr.ArcType(); } } // namespace script } // namespace fst
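// A minimal usage sketch of LoadArcTypeFromFar(): checking the arc type
// stored in a FAR header before dispatching to typed readers. The
// "standard" string mirrors StdArc::Type(); an empty result signals a read
// error (already logged by the function).
#include <string>
#include <fst/extensions/far/script-impl.h>

inline bool FarHoldsStdArcs(const std::string &far_fname) {
  const std::string atype = fst::script::LoadArcTypeFromFar(far_fname);
  return atype == "standard";
}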
0
coqui_public_repos/open-bible-scripts
coqui_public_repos/open-bible-scripts/extra-preprocess/kikuyu.sh
#!/bin/bash

echo "I: extra pre-processing for Kikuyu to fix filenames."

mv 01_Genesis GEN; rename 's/01_Genesis/GEN/' GEN/*.wav;
mv 02_Exodus EXO; rename 's/02_Exodus/EXO/' EXO/*.wav;
mv 03_Leviticus LEV; rename 's/03_Leviticus/LEV/' LEV/*.wav;
mv 04_Numbers NUM; rename 's/04_Numbers/NUM/' NUM/*.wav;
mv 05_Deuteronomy DEU; rename 's/05_Deuteronomy/DEU/' DEU/*.wav;
mv 06_Joshua JOS; rename 's/06_Joshua/JOS/' JOS/*.wav;
mv 07_Judges JDG; rename 's/07_Judges/JDG/' JDG/*.wav;
mv 08_Ruth RUT; rename 's/08_Ruth/RUT/' RUT/*.wav;
mv 09_1Samuel 1SA; rename 's/09_1Samuel/1SA/' 1SA/*.wav;
mv 10_2Samuel 2SA; rename 's/10_2Samuel/2SA/' 2SA/*.wav;
mv 11_1Kings 1KI; rename 's/11_1Kings/1KI/' 1KI/*.wav;
mv 12_2Kings 2KI; rename 's/12_2Kings/2KI/' 2KI/*.wav;
mv 13_1Chronicles 1CH; rename 's/13_1Chronicles/1CH/' 1CH/*.wav;
mv 14_2Chronicles 2CH; rename 's/14_2Chronicles/2CH/' 2CH/*.wav;
mv 15_Ezra EZR; rename 's/15_Ezra/EZR/' EZR/*.wav;
mv 16_Nehemiah NEH; rename 's/16_Nehemiah/NEH/' NEH/*.wav;
mv 17_Esther EST; rename 's/17_Esther/EST/' EST/*.wav;
mv 18_Job JOB; rename 's/18_Job/JOB/' JOB/*.wav;
mv 19_Psalms PSA; rename 's/19_Psalms/PSA/' PSA/*.wav;
mv 20_Proverbs PRO; rename 's/20_Proverbs/PRO/' PRO/*.wav;
mv 21_Ecclesiastes ECC; rename 's/21_Ecclesiastes/ECC/' ECC/*.wav;
mv 22_Song\ of\ Songs SOS; rename 's/22_Song\ of\ Songs/SOS/' SOS/*.wav;
mv 23_Isaiah ISA; rename 's/23_Isaiah/ISA/' ISA/*.wav;
mv 24_Jeremiah JER; rename 's/24_Jeremiah/JER/' JER/*.wav;
mv 25_Lamentations LAM; rename 's/25_Lamentations/LAM/' LAM/*.wav;
mv 26_Ezekiel EZK; rename 's/26_Ezekiel/EZK/' EZK/*.wav;
mv 27_Daniel DAN; rename 's/27_Daniel/DAN/' DAN/*.wav;
mv 28_Hosea HOS; rename 's/28_Hosea/HOS/' HOS/*.wav;
mv 29_Joel JOL; rename 's/29_Joel/JOL/' JOL/*.wav;
mv 30_Amos AMO; rename 's/30_Amos/AMO/' AMO/*.wav;
mv 31_Obadiah OBA; rename 's/31_Obadiah/OBA/' OBA/*.wav;
mv 32_Jonah JON; rename 's/32_Jonah/JON/' JON/*.wav;
mv 33_Micah MIC; rename 's/33_Micah/MIC/' MIC/*.wav;
mv 34_Nahum NAH; rename 's/34_Nahum/NAH/' NAH/*.wav;
mv 35_Habakkuk HAB; rename 's/35_Habakkuk/HAB/' HAB/*.wav;
mv 36_Zephaniah ZEP; rename 's/36_Zephaniah/ZEP/' ZEP/*.wav;
mv 37_Haggai HAG; rename 's/37_Haggai/HAG/' HAG/*.wav;
mv 38_Zechariah ZEC; rename 's/38_Zechariah/ZEC/' ZEC/*.wav;
mv 39_Malachi MAL; rename 's/39_Malachi/MAL/' MAL/*.wav;
mv 40_Matthew MAT; rename 's/40_Matthew/MAT/' MAT/*.wav;
mv 41_Mark MRK; rename 's/41_Mark/MRK/' MRK/*.wav;
mv 42_Luke LUK; rename 's/42_Luke/LUK/' LUK/*.wav;
mv 43_John JHN; rename 's/43_John/JHN/' JHN/*.wav;
mv 44_Acts ACT; rename 's/44_Acts/ACT/' ACT/*.wav;
mv 45_Romans ROM; rename 's/45_Romans/ROM/' ROM/*.wav;
mv 46_1Corinthians 1CO; rename 's/46_1Corinthians/1CO/' 1CO/*.wav;
mv 47_2Corinthians 2CO; rename 's/47_2Corinthians/2CO/' 2CO/*.wav;
mv 48_Galatians GAL; rename 's/48_Galatians/GAL/' GAL/*.wav;
mv 49_Ephesians EPH; rename 's/49_Ephesians/EPH/' EPH/*.wav;
mv 50_Philippians PHP; rename 's/50_Philippians/PHP/' PHP/*.wav;
mv 51_Colossians COL; rename 's/51_Colossians/COL/' COL/*.wav;
mv 52_1Thessalonians 1TH; rename 's/52_1Thessalonians/1TH/' 1TH/*.wav;
mv 53_2Thessalonians 2TH; rename 's/53_2Thessalonians/2TH/' 2TH/*.wav;
mv 54_1Timothy 1TI; rename 's/54_1Timothy/1TI/' 1TI/*.wav;
mv 55_2Timothy 2TI; rename 's/55_2Timothy/2TI/' 2TI/*.wav;
mv 56_Titus TIT; rename 's/56_Titus/TIT/' TIT/*.wav;
mv 57_Philemon PHM; rename 's/57_Philemon/PHM/' PHM/*.wav;
mv 58_Hebrews HEB; rename 's/58_Hebrews/HEB/' HEB/*.wav;
mv 59_James JAS; rename 's/59_James/JAS/' JAS/*.wav;
mv 60_1Peter 1PE; rename 's/60_1Peter/1PE/' 1PE/*.wav;
mv
61_2Peter 2PE; rename 's/61_2Peter/2PE/' 2PE/*.wav; mv 62_1John 1JN; rename 's/62_1John/1JN/' 1JN/*.wav; mv 63_2John 2JN; rename 's/63_2John/2JN/' 2JN/*.wav; mv 64_3John 3JN; rename 's/64_3John/3JN/' 3JN/*.wav; mv 65_Jude JUD; rename 's/65_Jude/JUD/' JUD/*.wav; mv 66_Revelation REV; rename 's/66_Revelation/REV/' REV/*.wav; rename 's/ /_/' */*.wav; rename 's/_V[0-9]//' */*.wav; rename 's/ //' PSA/PSA_121.wav rename 's/_0+/_/' */*.wav rename 's/__/_/' */*.wav
0
coqui_public_repos/TTS/docs
coqui_public_repos/TTS/docs/source/implementing_a_new_language_frontend.md
# Implementing a New Language Frontend

- Language frontends are located under `TTS.tts.utils.text`
- Each special language has a separate folder.
- Each folder contains all the utilities for processing the text input.
- `TTS.tts.utils.text.phonemizers` contains the main phonemizer for a language. This is the class that uses the utilities from the previous step and is used to convert the text to phonemes or graphemes for the model.
- After you implement your phonemizer, you need to add it to `TTS/tts/utils/text/phonemizers/__init__.py` so that the language code in the model config - `config.phoneme_language` - can be mapped to the phonemizer class and the phonemizer can be instantiated automatically.
- You should also add tests to `tests/text_tests` if you want to make a PR.

We suggest you check the available implementations for reference.

Good luck!
0
coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src/include
coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src/include/fst/fst-decl.h
// See www.openfst.org for extensive documentation on this weighted // finite-state transducer library. // // This file contains declarations of classes in the Fst template library. #ifndef FST_FST_DECL_H_ #define FST_FST_DECL_H_ #include <sys/types.h> #include <memory> // for allocator<> #include <fst/types.h> namespace fst { // Symbol table and iterator. class SymbolTable; class SymbolTableIterator; // Weight templates and weights. template <class T> class FloatWeightTpl; template <class T> class TropicalWeightTpl; template <class T> class LogWeightTpl; template <class T> class MinMaxWeightTpl; using FloatWeight = FloatWeightTpl<float>; using TropicalWeight = TropicalWeightTpl<float>; using LogWeight = LogWeightTpl<float>; using MinMaxWeight = MinMaxWeightTpl<float>; // Arc templates and arcs. template <class Weight> struct ArcTpl; using StdArc = ArcTpl<TropicalWeight>; using LogArc = ArcTpl<LogWeight>; // Stores. template <class Element, class U> class DefaultCompactStore; template <class Arc> class DefaultCacheStore; // FST templates. template <class Arc, class Compactor, class U = uint32, class CompactStore = DefaultCompactStore<typename Compactor::Element, U>, class CacheStore = DefaultCacheStore<Arc>> class CompactFst; template <class Arc, class U = uint32> class ConstFst; template <class Arc, class Weight, class Matcher> class EditFst; template <class Arc> class ExpandedFst; template <class Arc> class Fst; template <class Arc> class MutableFst; template <class Arc, class Allocator = std::allocator<Arc>> class VectorState; template <class Arc, class State = VectorState<Arc>> class VectorFst; template <class Arc, class U = ssize_t> class DefaultReplaceStateTable; // On-the-fly operations. template <class Arc, class Compare> class ArcSortFst; template <class Arc> class ClosureFst; template <class Arc, class Store = DefaultCacheStore<Arc>> class ComposeFst; template <class Arc> class ConcatFst; template <class Arc> class DeterminizeFst; template <class Arc> class DifferenceFst; template <class Arc> class IntersectFst; template <class Arc> class InvertFst; template <class AArc, class BArc, class Mapper> class ArcMapFst; template <class Arc> class ProjectFst; template <class AArc, class BArc, class Selector> class RandGenFst; template <class Arc> class RelabelFst; template <class Arc, class StateTable = DefaultReplaceStateTable<Arc>, class Store = DefaultCacheStore<Arc>> class ReplaceFst; template <class Arc> class RmEpsilonFst; template <class Arc> class UnionFst; // Heap. template <class T, class Compare> class Heap; // Compactors. template <class Arc> class AcceptorCompactor; template <class Arc> class StringCompactor; template <class Arc> class UnweightedAcceptorCompactor; template <class Arc> class UnweightedCompactor; template <class Arc> class WeightedStringCompactor; // Compact FSTs. template <class Arc, class U = uint32> using CompactStringFst = CompactFst<Arc, StringCompactor<Arc>, U>; template <class Arc, class U = uint32> using CompactWeightedStringFst = CompactFst<Arc, WeightedStringCompactor<Arc>, U>; template <class Arc, class U = uint32> using CompactAcceptorFst = CompactFst<Arc, AcceptorCompactor<Arc>, U>; template <class Arc, class U = uint32> using CompactUnweightedFst = CompactFst<Arc, UnweightedCompactor<Arc>, U>; template <class Arc, class U = uint32> using CompactUnweightedAcceptorFst = CompactFst<Arc, UnweightedAcceptorCompactor<Arc>, U>; // StdArc aliases for FSTs. 
using StdConstFst = ConstFst<StdArc>; using StdExpandedFst = ExpandedFst<StdArc>; using StdFst = Fst<StdArc>; using StdMutableFst = MutableFst<StdArc>; using StdVectorFst = VectorFst<StdArc>; // StdArc aliases for on-the-fly operations. template <class Compare> using StdArcSortFst = ArcSortFst<StdArc, Compare>; using StdClosureFst = ClosureFst<StdArc>; using StdComposeFst = ComposeFst<StdArc>; using StdConcatFst = ConcatFst<StdArc>; using StdDeterminizeFst = DeterminizeFst<StdArc>; using StdDifferenceFst = DifferenceFst<StdArc>; using StdIntersectFst = IntersectFst<StdArc>; using StdInvertFst = InvertFst<StdArc>; using StdProjectFst = ProjectFst<StdArc>; using StdRelabelFst = RelabelFst<StdArc>; using StdReplaceFst = ReplaceFst<StdArc>; using StdRmEpsilonFst = RmEpsilonFst<StdArc>; using StdUnionFst = UnionFst<StdArc>; // Filter states. template <class T> class IntegerFilterState; using CharFilterState = IntegerFilterState<signed char>; using ShortFilterState = IntegerFilterState<short>; // NOLINT using IntFilterState = IntegerFilterState<int>; // Matchers and filters. template <class FST> class Matcher; template <class Matcher1, class Matcher2 = Matcher1> class NullComposeFilter; template <class Matcher1, class Matcher2 = Matcher1> class TrivialComposeFilter; template <class Matcher1, class Matcher2 = Matcher1> class SequenceComposeFilter; template <class Matcher1, class Matcher2 = Matcher1> class AltSequenceComposeFilter; template <class Matcher1, class Matcher2 = Matcher1> class MatchComposeFilter; } // namespace fst #endif // FST_FST_DECL_H_
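// A minimal sketch showing the intent of these forward declarations: client
// headers can name common instantiations like StdVectorFst without pulling
// in their full definitions, which are only required at the point of use
// (here supplied by fst/fstlib.h).
#include <fst/fstlib.h>

inline fst::StdVectorFst OneStateAcceptor() {
  fst::StdVectorFst result;  // VectorFst<ArcTpl<TropicalWeightTpl<float>>>.
  const auto s = result.AddState();
  result.SetStart(s);
  result.SetFinal(s, fst::TropicalWeight::One());
  return result;
}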
0
coqui_public_repos/inference-engine/third_party/kenlm/lm
coqui_public_repos/inference-engine/third_party/kenlm/lm/common/model_buffer_test.cc
#include "lm/common/model_buffer.hh" #include "lm/model.hh" #include "lm/state.hh" #define BOOST_TEST_MODULE ModelBufferTest #include <boost/test/unit_test.hpp> namespace lm { namespace { BOOST_AUTO_TEST_CASE(Query) { std::string dir("test_data/"); if (boost::unit_test::framework::master_test_suite().argc == 2) { dir = boost::unit_test::framework::master_test_suite().argv[1]; } ngram::Model ref((dir + "/toy0.arpa").c_str()); ModelBuffer test(dir + "/toy0"); ngram::State ref_state, test_state; WordIndex a = ref.GetVocabulary().Index("a"); BOOST_CHECK_CLOSE( ref.FullScore(ref.BeginSentenceState(), a, ref_state).prob, test.SlowQuery(ref.BeginSentenceState(), a, test_state), 0.001); BOOST_CHECK_EQUAL((unsigned)ref_state.length, (unsigned)test_state.length); BOOST_CHECK_EQUAL(ref_state.words[0], test_state.words[0]); BOOST_CHECK_EQUAL(ref_state.backoff[0], test_state.backoff[0]); BOOST_CHECK(ref_state == test_state); ngram::State ref_state2, test_state2; WordIndex b = ref.GetVocabulary().Index("b"); BOOST_CHECK_CLOSE( ref.FullScore(ref_state, b, ref_state2).prob, test.SlowQuery(test_state, b, test_state2), 0.001); BOOST_CHECK(ref_state2 == test_state2); BOOST_CHECK_EQUAL(ref_state2.backoff[0], test_state2.backoff[0]); BOOST_CHECK_CLOSE( ref.FullScore(ref_state2, 0, ref_state).prob, test.SlowQuery(test_state2, 0, test_state), 0.001); // The reference does state minimization but this doesn't. } }} // namespaces
0
coqui_public_repos/inference-engine/third_party/openfst-1.6.9-win/src/include/fst/extensions
coqui_public_repos/inference-engine/third_party/openfst-1.6.9-win/src/include/fst/extensions/far/far-class.h
// See www.openfst.org for extensive documentation on this weighted // finite-state transducer library. // // Scripting API support for FarReader and FarWriter. #ifndef FST_EXTENSIONS_FAR_FAR_CLASS_H_ #define FST_EXTENSIONS_FAR_FAR_CLASS_H_ #include <memory> #include <string> #include <vector> #include <fst/extensions/far/far.h> #include <fst/script/arg-packs.h> #include <fst/script/fstscript.h> namespace fst { namespace script { // FarReader API. // Virtual interface implemented by each concrete FarReaderImpl<A>. // See the FarReader interface in far.h for the exact semantics. class FarReaderImplBase { public: virtual const string &ArcType() const = 0; virtual bool Done() const = 0; virtual bool Error() const = 0; virtual const string &GetKey() const = 0; virtual const FstClass *GetFstClass() const = 0; virtual bool Find(const string &key) = 0; virtual void Next() = 0; virtual void Reset() = 0; virtual FarType Type() const = 0; virtual ~FarReaderImplBase() {} }; // Templated implementation. template <class Arc> class FarReaderClassImpl : public FarReaderImplBase { public: explicit FarReaderClassImpl(const string &filename) : impl_(FarReader<Arc>::Open(filename)) {} explicit FarReaderClassImpl(const std::vector<string> &filenames) : impl_(FarReader<Arc>::Open(filenames)) {} const string &ArcType() const final { return Arc::Type(); } bool Done() const final { return impl_->Done(); } bool Error() const final { return impl_->Error(); } bool Find(const string &key) final { return impl_->Find(key); } const FstClass *GetFstClass() const final { fstc_.reset(new FstClass(*impl_->GetFst())); return fstc_.get(); } const string &GetKey() const final { return impl_->GetKey(); } void Next() final { return impl_->Next(); } void Reset() final { impl_->Reset(); } FarType Type() const final { return impl_->Type(); } const FarReader<Arc> *GetImpl() const { return impl_.get(); } FarReader<Arc> *GetImpl() { return impl_.get(); } private: std::unique_ptr<FarReader<Arc>> impl_; mutable std::unique_ptr<FstClass> fstc_; }; class FarReaderClass; using OpenFarReaderClassArgs1 = WithReturnValue<FarReaderClass *, const string &>; using OpenFarReaderClassArgs2 = WithReturnValue<FarReaderClass *, const std::vector<string> &>; // Untemplated user-facing class holding a templated pimpl. class FarReaderClass { public: const string &ArcType() const { return impl_->ArcType(); } bool Done() const { return impl_->Done(); } // Returns True if the impl is null (i.e., due to read failure). // Attempting to call any other function will result in null dereference. bool Error() const { return (impl_) ? 
impl_->Error() : true; } bool Find(const string &key) { return impl_->Find(key); } const FstClass *GetFstClass() const { return impl_->GetFstClass(); } const string &GetKey() const { return impl_->GetKey(); } void Next() { impl_->Next(); } void Reset() { impl_->Reset(); } FarType Type() const { return impl_->Type(); } template <class Arc> const FarReader<Arc> *GetFarReader() const { if (Arc::Type() != ArcType()) return nullptr; const FarReaderClassImpl<Arc> *typed_impl = static_cast<FarReaderClassImpl<Arc> *>(impl_.get()); return typed_impl->GetImpl(); } template <class Arc> FarReader<Arc> *GetFarReader() { if (Arc::Type() != ArcType()) return nullptr; FarReaderClassImpl<Arc> *typed_impl = static_cast<FarReaderClassImpl<Arc> *>(impl_.get()); return typed_impl->GetImpl(); } template <class Arc> friend void OpenFarReaderClass(OpenFarReaderClassArgs1 *args); template <class Arc> friend void OpenFarReaderClass(OpenFarReaderClassArgs2 *args); // Defined in the CC. static FarReaderClass *Open(const string &filename); static FarReaderClass *Open(const std::vector<string> &filenames); private: template <class Arc> explicit FarReaderClass(FarReaderClassImpl<Arc> *impl) : impl_(impl) {} std::unique_ptr<FarReaderImplBase> impl_; }; // These exist solely for registration purposes; users should call the // static method FarReaderClass::Open instead. template <class Arc> void OpenFarReaderClass(OpenFarReaderClassArgs1 *args) { args->retval = new FarReaderClass(new FarReaderClassImpl<Arc>(args->args)); } template <class Arc> void OpenFarReaderClass(OpenFarReaderClassArgs2 *args) { args->retval = new FarReaderClass(new FarReaderClassImpl<Arc>(args->args)); } // FarWriter API. // Virtual interface implemented by each concrete FarWriterImpl<A>. class FarWriterImplBase { public: // Unlike the lower-level library, this returns a boolean to signal failure // due to non-conformant arc types. virtual bool Add(const string &key, const FstClass &fst) = 0; virtual const string &ArcType() const = 0; virtual bool Error() const = 0; virtual FarType Type() const = 0; virtual ~FarWriterImplBase() {} }; // Templated implementation. template <class Arc> class FarWriterClassImpl : public FarWriterImplBase { public: explicit FarWriterClassImpl(const string &filename, FarType type = FAR_DEFAULT) : impl_(FarWriter<Arc>::Create(filename, type)) {} bool Add(const string &key, const FstClass &fst) final { if (ArcType() != fst.ArcType()) { FSTERROR() << "Cannot write FST with " << fst.ArcType() << " arcs to " << "FAR with " << ArcType() << " arcs"; return false; } impl_->Add(key, *(fst.GetFst<Arc>())); return true; } const string &ArcType() const final { return Arc::Type(); } bool Error() const final { return impl_->Error(); } FarType Type() const final { return impl_->Type(); } const FarWriter<Arc> *GetImpl() const { return impl_.get(); } FarWriter<Arc> *GetImpl() { return impl_.get(); } private: std::unique_ptr<FarWriter<Arc>> impl_; }; class FarWriterClass; using CreateFarWriterClassInnerArgs = std::pair<const string &, FarType>; using CreateFarWriterClassArgs = WithReturnValue<FarWriterClass *, CreateFarWriterClassInnerArgs>; // Untemplated user-facing class holding a templated pimpl. class FarWriterClass { public: static FarWriterClass *Create(const string &filename, const string &arc_type, FarType type = FAR_DEFAULT); bool Add(const string &key, const FstClass &fst) { return impl_->Add(key, fst); } // Returns True if the impl is null (i.e., due to construction failure). 
// Attempting to call any other function will result in null dereference. bool Error() const { return (impl_) ? impl_->Error() : true; } const string &ArcType() const { return impl_->ArcType(); } FarType Type() const { return impl_->Type(); } template <class Arc> const FarWriter<Arc> *GetFarWriter() const { if (Arc::Type() != ArcType()) return nullptr; const FarWriterClassImpl<Arc> *typed_impl = static_cast<FarWriterClassImpl<Arc> *>(impl_.get()); return typed_impl->GetImpl(); } template <class Arc> FarWriter<Arc> *GetFarWriter() { if (Arc::Type() != ArcType()) return nullptr; FarWriterClassImpl<Arc> *typed_impl = static_cast<FarWriterClassImpl<Arc> *>(impl_.get()); return typed_impl->GetImpl(); } template <class Arc> friend void CreateFarWriterClass(CreateFarWriterClassArgs *args); private: template <class Arc> explicit FarWriterClass(FarWriterClassImpl<Arc> *impl) : impl_(impl) {} std::unique_ptr<FarWriterImplBase> impl_; }; // This exists solely for registration purposes; users should call the // static method FarWriterClass::Create instead. template <class Arc> void CreateFarWriterClass(CreateFarWriterClassArgs *args) { args->retval = new FarWriterClass(new FarWriterClassImpl<Arc>( std::get<0>(args->args), std::get<1>(args->args))); } } // namespace script } // namespace fst #endif // FST_EXTENSIONS_FAR_FAR_CLASS_H_
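// A minimal usage sketch of the scripting-level FAR classes declared above:
// write one FST under a key, then reopen the archive and look the key up.
// The key "key1" and the file name are illustrative placeholders.
#include <memory>
#include <string>

inline bool FarRoundTripSketch(const fst::script::FstClass &fst,
                               const std::string &filename) {
  namespace s = fst::script;
  std::unique_ptr<s::FarWriterClass> writer(
      s::FarWriterClass::Create(filename, fst.ArcType()));
  if (!writer || writer->Error()) return false;
  if (!writer->Add("key1", fst)) return false;
  writer.reset();  // Destruction finalizes the archive on disk.
  std::unique_ptr<s::FarReaderClass> reader(
      s::FarReaderClass::Open(filename));
  return reader && !reader->Error() && reader->Find("key1");
}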
0
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.9-win/src/include/fst/extensions
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.9-win/src/include/fst/extensions/pdt/paren.h
// See www.openfst.org for extensive documentation on this weighted // finite-state transducer library. // // Common classes for PDT parentheses. #ifndef FST_EXTENSIONS_PDT_PAREN_H_ #define FST_EXTENSIONS_PDT_PAREN_H_ #include <algorithm> #include <set> #include <unordered_map> #include <unordered_set> #include <fst/log.h> #include <fst/extensions/pdt/collection.h> #include <fst/extensions/pdt/pdt.h> #include <fst/dfs-visit.h> #include <fst/fst.h> namespace fst { namespace internal { // ParenState: Pair of an open (close) parenthesis and its destination (source) // state. template <class Arc> struct ParenState { using Label = typename Arc::Label; using StateId = typename Arc::StateId; Label paren_id; // ID of open (close) paren. StateId state_id; // Destination (source) state of open (close) paren. explicit ParenState(Label paren_id = kNoLabel, StateId state_id = kNoStateId) : paren_id(paren_id), state_id(state_id) {} bool operator==(const ParenState<Arc> &other) const { if (&other == this) return true; return other.paren_id == paren_id && other.state_id == state_id; } bool operator!=(const ParenState<Arc> &other) const { return !(other == *this); } struct Hash { size_t operator()(const ParenState<Arc> &pstate) const { static constexpr auto prime = 7853; return pstate.paren_id + pstate.state_id * prime; } }; }; // Creates an FST-style const iterator from an STL-style map. template <class Map> class MapIterator { public: using StlIterator = typename Map::const_iterator; using ValueType = typename Map::mapped_type; MapIterator(const Map &map, StlIterator it) : begin_(it), end_(map.end()), it_(it) {} bool Done() const { return it_ == end_ || it_->first != begin_->first; } ValueType Value() const { return it_->second; } void Next() { ++it_; } void Reset() { it_ = begin_; } private: const StlIterator begin_; const StlIterator end_; StlIterator it_; }; // PdtParenReachable: Provides various parenthesis reachability information. template <class Arc> class PdtParenReachable { public: using Label = typename Arc::Label; using StateId = typename Arc::StateId; using State = ParenState<Arc>; using StateHash = typename State::Hash; // Maps from state ID to reachable paren IDs from (to) that state. using ParenMultimap = std::unordered_multimap<StateId, Label>; // Maps from paren ID and state ID to reachable state set ID. using StateSetMap = std::unordered_map<State, std::ptrdiff_t, StateHash>; // Maps from paren ID and state ID to arcs exiting that state with that // Label. using ParenArcMultimap = std::unordered_map<State, Arc, StateHash>; using ParenIterator = MapIterator<ParenMultimap>; using ParenArcIterator = MapIterator<ParenArcMultimap>; using SetIterator = typename Collection<std::ptrdiff_t, StateId>::SetIterator; // Computes close (open) parenthesis reachability information for a PDT with // bounded stack. 
  PdtParenReachable(const Fst<Arc> &fst,
                    const std::vector<std::pair<Label, Label>> &parens,
                    bool close)
      : fst_(fst), parens_(parens), close_(close), error_(false) {
    paren_map_.reserve(2 * parens.size());
    for (size_t i = 0; i < parens.size(); ++i) {
      const auto &pair = parens[i];
      paren_map_[pair.first] = i;
      paren_map_[pair.second] = i;
    }
    if (close_) {
      const auto start = fst.Start();
      if (start == kNoStateId) return;
      if (!DFSearch(start)) {
        FSTERROR() << "PdtReachable: Underlying cyclicity not supported";
        error_ = true;
      }
    } else {
      FSTERROR() << "PdtParenReachable: Open paren info not implemented";
      error_ = true;
    }
  }

  bool Error() const { return error_; }

  // Given a state ID, returns an iterator over paren IDs for close (open)
  // parens reachable from that state along balanced paths.
  ParenIterator FindParens(StateId s) const {
    return ParenIterator(paren_multimap_, paren_multimap_.find(s));
  }

  // Given a paren ID and a state ID s, returns an iterator over states that
  // can be reached along balanced paths from (to) s that have close (open)
  // parentheses matching the paren ID exiting (entering) those states.
  SetIterator FindStates(Label paren_id, StateId s) const {
    const State paren_state(paren_id, s);
    const auto it = set_map_.find(paren_state);
    if (it == set_map_.end()) {
      return state_sets_.FindSet(-1);
    } else {
      return state_sets_.FindSet(it->second);
    }
  }

  // Given a paren ID and a state ID s, returns an iterator over arcs that
  // exit (enter) s and are labeled with a close (open) parenthesis matching
  // the paren ID.
  ParenArcIterator FindParenArcs(Label paren_id, StateId s) const {
    const State paren_state(paren_id, s);
    return ParenArcIterator(paren_arc_multimap_,
                            paren_arc_multimap_.find(paren_state));
  }

 private:
  // Returns false when a cycle is detected during the DFS that gathers paren
  // and state set information.
  bool DFSearch(StateId s);

  // Unions state sets together gathered by the DFS.
  void ComputeStateSet(StateId s);

  // Gathers state set(s) from state.
  void UpdateStateSet(StateId nextstate, std::set<Label> *paren_set,
                      std::vector<std::set<StateId>> *state_sets) const;

  const Fst<Arc> &fst_;
  // Paren IDs to labels.
  const std::vector<std::pair<Label, Label>> &parens_;
  // Close/open paren info?
  const bool close_;
  // Labels to paren IDs.
  std::unordered_map<Label, Label> paren_map_;
  // Paren reachability.
  ParenMultimap paren_multimap_;
  // Paren arcs.
  ParenArcMultimap paren_arc_multimap_;
  // DFS states.
  std::vector<uint8_t> state_color_;
  // Reachable states to IDs.
  mutable Collection<std::ptrdiff_t, StateId> state_sets_;
  // IDs to reachable states.
  StateSetMap set_map_;
  bool error_;

  PdtParenReachable(const PdtParenReachable &) = delete;
  PdtParenReachable &operator=(const PdtParenReachable &) = delete;
};

// Gathers paren and state set information.
template <class Arc>
bool PdtParenReachable<Arc>::DFSearch(StateId s) {
  static constexpr uint8_t kWhiteState = 0x01;  // Undiscovered.
  static constexpr uint8_t kGreyState = 0x02;   // Discovered & unfinished.
  static constexpr uint8_t kBlackState = 0x04;  // Finished.
  if (s >= state_color_.size()) state_color_.resize(s + 1, kWhiteState);
  if (state_color_[s] == kBlackState) return true;
  if (state_color_[s] == kGreyState) return false;
  state_color_[s] = kGreyState;
  for (ArcIterator<Fst<Arc>> aiter(fst_, s); !aiter.Done(); aiter.Next()) {
    const auto &arc = aiter.Value();
    const auto it = paren_map_.find(arc.ilabel);
    if (it != paren_map_.end()) {  // Paren?
      const auto paren_id = it->second;
      if (arc.ilabel == parens_[paren_id].first) {  // Open paren?
if (!DFSearch(arc.nextstate)) return false; for (auto set_iter = FindStates(paren_id, arc.nextstate); !set_iter.Done(); set_iter.Next()) { for (auto paren_arc_iter = FindParenArcs(paren_id, set_iter.Element()); !paren_arc_iter.Done(); paren_arc_iter.Next()) { const auto &cparc = paren_arc_iter.Value(); if (!DFSearch(cparc.nextstate)) return false; } } } } else if (!DFSearch(arc.nextstate)) { // Non-paren. return false; } } ComputeStateSet(s); state_color_[s] = kBlackState; return true; } // Unions state sets. template <class Arc> void PdtParenReachable<Arc>::ComputeStateSet(StateId s) { std::set<Label> paren_set; std::vector<std::set<StateId>> state_sets(parens_.size()); for (ArcIterator<Fst<Arc>> aiter(fst_, s); !aiter.Done(); aiter.Next()) { const auto &arc = aiter.Value(); const auto it = paren_map_.find(arc.ilabel); if (it != paren_map_.end()) { // Paren? const auto paren_id = it->second; if (arc.ilabel == parens_[paren_id].first) { // Open paren? for (auto set_iter = FindStates(paren_id, arc.nextstate); !set_iter.Done(); set_iter.Next()) { for (auto paren_arc_iter = FindParenArcs(paren_id, set_iter.Element()); !paren_arc_iter.Done(); paren_arc_iter.Next()) { const auto &cparc = paren_arc_iter.Value(); UpdateStateSet(cparc.nextstate, &paren_set, &state_sets); } } } else { // Close paren. paren_set.insert(paren_id); state_sets[paren_id].insert(s); const State paren_state(paren_id, s); paren_arc_multimap_.insert(std::make_pair(paren_state, arc)); } } else { // Non-paren. UpdateStateSet(arc.nextstate, &paren_set, &state_sets); } } std::vector<StateId> state_set; for (auto paren_iter = paren_set.begin(); paren_iter != paren_set.end(); ++paren_iter) { state_set.clear(); const auto paren_id = *paren_iter; paren_multimap_.insert(std::make_pair(s, paren_id)); for (auto state_iter = state_sets[paren_id].begin(); state_iter != state_sets[paren_id].end(); ++state_iter) { state_set.push_back(*state_iter); } const State paren_state(paren_id, s); set_map_[paren_state] = state_sets_.FindId(state_set); } } // Gathers state sets. template <class Arc> void PdtParenReachable<Arc>::UpdateStateSet( StateId nextstate, std::set<Label> *paren_set, std::vector<std::set<StateId>> *state_sets) const { for (auto paren_iter = FindParens(nextstate); !paren_iter.Done(); paren_iter.Next()) { const auto paren_id = paren_iter.Value(); paren_set->insert(paren_id); for (auto set_iter = FindStates(paren_id, nextstate); !set_iter.Done(); set_iter.Next()) { (*state_sets)[paren_id].insert(set_iter.Element()); } } } // Stores balancing parenthesis data for a PDT. Unlike PdtParenReachable above // this allows on-the-fly construction (e.g., in PdtShortestPath). template <class Arc> class PdtBalanceData { public: using Label = typename Arc::Label; using StateId = typename Arc::StateId; using State = ParenState<Arc>; using StateHash = typename State::Hash; // Set for open parens. using OpenParenSet = std::unordered_set<State, StateHash>; // Maps from open paren destination state to parenthesis ID. using OpenParenMap = std::unordered_multimap<StateId, Label>; // Maps from open paren state to source states of matching close parens using CloseParenMap = std::unordered_multimap<State, StateId, StateHash>; // Maps from open paren state to close source set ID. 
  using CloseSourceMap = std::unordered_map<State, std::ptrdiff_t, StateHash>;

  using SetIterator =
      typename Collection<std::ptrdiff_t, StateId>::SetIterator;

  PdtBalanceData() {}

  void Clear() {
    open_paren_map_.clear();
    close_paren_map_.clear();
  }

  // Adds an open parenthesis with destination state open_dest.
  void OpenInsert(Label paren_id, StateId open_dest) {
    const State key(paren_id, open_dest);
    if (!open_paren_set_.count(key)) {
      open_paren_set_.insert(key);
      open_paren_map_.emplace(open_dest, paren_id);
    }
  }

  // Adds a matching closing parenthesis with source state close_source,
  // balancing an open parenthesis with destination state open_dest, if
  // OpenInsert() was previously called.
  void CloseInsert(Label paren_id, StateId open_dest, StateId close_source) {
    const State key(paren_id, open_dest);
    if (open_paren_set_.count(key)) {
      close_paren_map_.emplace(key, close_source);
    }
  }

  // Finds close paren source states matching an open parenthesis. The
  // following methods are then used to iterate through those matching states.
  // Should be called only after FinishInsert(open_dest).
  SetIterator Find(Label paren_id, StateId open_dest) {
    const State key(paren_id, open_dest);
    const auto it = close_source_map_.find(key);
    if (it == close_source_map_.end()) {
      return close_source_sets_.FindSet(-1);
    } else {
      return close_source_sets_.FindSet(it->second);
    }
  }

  // Called when all open and close parenthesis insertions (w.r.t. open
  // parentheses entering state open_dest) are finished. Must be called before
  // Find(open_dest).
  void FinishInsert(StateId open_dest) {
    std::vector<StateId> close_sources;
    for (auto oit = open_paren_map_.find(open_dest);
         oit != open_paren_map_.end() && oit->first == open_dest;) {
      const auto paren_id = oit->second;
      close_sources.clear();
      const State key(paren_id, open_dest);
      open_paren_set_.erase(open_paren_set_.find(key));
      for (auto cit = close_paren_map_.find(key);
           cit != close_paren_map_.end() && cit->first == key;) {
        close_sources.push_back(cit->second);
        close_paren_map_.erase(cit++);
      }
      std::sort(close_sources.begin(), close_sources.end());
      auto unique_end =
          std::unique(close_sources.begin(), close_sources.end());
      close_sources.resize(unique_end - close_sources.begin());
      if (!close_sources.empty()) {
        close_source_map_[key] = close_source_sets_.FindId(close_sources);
      }
      open_paren_map_.erase(oit++);
    }
  }

  // Returns a new balance data object representing the reversed balance
  // information.
  PdtBalanceData<Arc> *Reverse(StateId num_states, StateId num_split,
                               StateId state_id_shift) const;

 private:
  // Open paren at destination state?
  OpenParenSet open_paren_set_;
  // Open parens per state.
  OpenParenMap open_paren_map_;
  // Current open destination state.
  State open_dest_;
  // Current open paren/state.
  typename OpenParenMap::const_iterator open_iter_;
  // Close states to (open paren, state).
  CloseParenMap close_paren_map_;
  // (Paren, state) to set ID.
  CloseSourceMap close_source_map_;
  mutable Collection<std::ptrdiff_t, StateId> close_source_sets_;
};

// Returns a new balance data object representing the reversed balance
// information.
template <class Arc> PdtBalanceData<Arc> *PdtBalanceData<Arc>::Reverse( StateId num_states, StateId num_split, StateId state_id_shift) const { auto *bd = new PdtBalanceData<Arc>; std::unordered_set<StateId> close_sources; const auto split_size = num_states / num_split; for (StateId i = 0; i < num_states; i += split_size) { close_sources.clear(); for (auto it = close_source_map_.begin(); it != close_source_map_.end(); ++it) { const auto &okey = it->first; const auto open_dest = okey.state_id; const auto paren_id = okey.paren_id; for (auto set_iter = close_source_sets_.FindSet(it->second); !set_iter.Done(); set_iter.Next()) { const auto close_source = set_iter.Element(); if ((close_source < i) || (close_source >= i + split_size)) continue; close_sources.insert(close_source + state_id_shift); bd->OpenInsert(paren_id, close_source + state_id_shift); bd->CloseInsert(paren_id, close_source + state_id_shift, open_dest + state_id_shift); } } for (auto it = close_sources.begin(); it != close_sources.end(); ++it) { bd->FinishInsert(*it); } } return bd; } } // namespace internal } // namespace fst #endif // FST_EXTENSIONS_PDT_PAREN_H_
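// A minimal usage sketch of PdtBalanceData (illustrative only; the paren ID
// and state numbers below are hypothetical). Balance data is built by
// registering open parens, then their matching close-paren source states,
// then finalizing per destination state before querying:
//
//   fst::internal::PdtBalanceData<fst::StdArc> bd;
//   bd.OpenInsert(0, 5);        // Open paren 0 enters state 5.
//   bd.CloseInsert(0, 5, 9);    // A matching close paren leaves state 9.
//   bd.FinishInsert(5);         // Freeze insertions for destination 5.
//   for (auto it = bd.Find(0, 5); !it.Done(); it.Next()) {
//     std::cout << it.Element() << std::endl;  // Prints 9.
//   }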
0
coqui_public_repos
coqui_public_repos/open-bible-scripts/clean.sh
#!/bin/bash INFILE=$1 LANGUAGE_ISO=$2 # INFILE == a text file from Biblica with "readaloud" in the name. # The punctuation removal was a manual process; not sure if there's a better way. # I got the list of punctuation by looking at all chars in the texts with: # cat *.txt | grep -o . | sort | uniq -c | sort -n # covo takes care of some punctuation, but not all, and we need all verses to be # processed through covo. If covo can't process some text, it returns None, # which means issues for our alignments downstream. # Also, removing numbers is not ideal at all; they should be replaced with words. FILENAME=$(echo $INFILE|rev |cut -d'/' -f1|rev) cat $INFILE | \ sed -E 's/[0-9]+\./ /g' | \ awk '$1=$1' > /tmp/$FILENAME.org cat $INFILE | dos2unix | \ sed 's/[\-\:\-\—\!\;\‘\’\(\)\?\-\”\“\,\.]/ /g' | \ tr '[0-9]' ' ' | \ tr -s ' ' | \ awk '$1=$1' | \ covo validate $LANGUAGE_ISO > /tmp/$FILENAME.clean if [[ "$(wc -l < /tmp/$FILENAME.org)" -eq "$(wc -l < /tmp/$FILENAME.clean)" ]]; then paste <(cat /tmp/$FILENAME.org) <(cat /tmp/$FILENAME.clean) | sed "s/^/$FILENAME /g" else >&2 echo "ERROR: $INFILE" fi
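# Example invocation (a sketch; the file paths and ISO code are hypothetical).
# Each successfully validated verse is emitted as "<file> <original> <clean>";
# redirecting stderr keeps length-mismatch errors out of the alignment data:
#
#   ./clean.sh texts/GEN_readaloud.txt yo > alignments/GEN.tsv 2> errors.log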
0
coqui_public_repos/inference-engine/third_party/openfst-1.6.9-win/src/include/fst/extensions
coqui_public_repos/inference-engine/third_party/openfst-1.6.9-win/src/include/fst/extensions/pdt/info.h
// See www.openfst.org for extensive documentation on this weighted // finite-state transducer library. // // Prints information about a PDT. #ifndef FST_EXTENSIONS_PDT_INFO_H_ #define FST_EXTENSIONS_PDT_INFO_H_ #include <unordered_map> #include <unordered_set> #include <vector> #include <fst/extensions/pdt/pdt.h> #include <fst/fst.h> namespace fst { // Compute various information about PDTs. template <class Arc> class PdtInfo { public: using Label = typename Arc::Label; using StateId = typename Arc::StateId; using Weight = typename Arc::Weight; PdtInfo(const Fst<Arc> &fst, const std::vector<std::pair<Label, Label>> &parens); const string &FstType() const { return fst_type_; } const string &ArcType() const { return Arc::Type(); } int64_t NumStates() const { return nstates_; } int64_t NumArcs() const { return narcs_; } int64_t NumOpenParens() const { return nopen_parens_; } int64_t NumCloseParens() const { return nclose_parens_; } int64_t NumUniqueOpenParens() const { return nuniq_open_parens_; } int64_t NumUniqueCloseParens() const { return nuniq_close_parens_; } int64_t NumOpenParenStates() const { return nopen_paren_states_; } int64_t NumCloseParenStates() const { return nclose_paren_states_; } private: string fst_type_; int64_t nstates_; int64_t narcs_; int64_t nopen_parens_; int64_t nclose_parens_; int64_t nuniq_open_parens_; int64_t nuniq_close_parens_; int64_t nopen_paren_states_; int64_t nclose_paren_states_; }; template <class Arc> PdtInfo<Arc>::PdtInfo( const Fst<Arc> &fst, const std::vector<std::pair<typename Arc::Label, typename Arc::Label>> &parens) : fst_type_(fst.Type()), nstates_(0), narcs_(0), nopen_parens_(0), nclose_parens_(0), nuniq_open_parens_(0), nuniq_close_parens_(0), nopen_paren_states_(0), nclose_paren_states_(0) { std::unordered_map<Label, size_t> paren_map; std::unordered_set<Label> paren_set; std::unordered_set<StateId> open_paren_state_set; std::unordered_set<StateId> close_paren_state_set; for (size_t i = 0; i < parens.size(); ++i) { const auto &pair = parens[i]; paren_map[pair.first] = i; paren_map[pair.second] = i; } for (StateIterator<Fst<Arc>> siter(fst); !siter.Done(); siter.Next()) { ++nstates_; const auto s = siter.Value(); for (ArcIterator<Fst<Arc>> aiter(fst, s); !aiter.Done(); aiter.Next()) { const auto &arc = aiter.Value(); ++narcs_; const auto it = paren_map.find(arc.ilabel); if (it != paren_map.end()) { const auto open_paren = parens[it->second].first; const auto close_paren = parens[it->second].second; if (arc.ilabel == open_paren) { ++nopen_parens_; if (!paren_set.count(open_paren)) { ++nuniq_open_parens_; paren_set.insert(open_paren); } if (!open_paren_state_set.count(arc.nextstate)) { ++nopen_paren_states_; open_paren_state_set.insert(arc.nextstate); } } else { ++nclose_parens_; if (!paren_set.count(close_paren)) { ++nuniq_close_parens_; paren_set.insert(close_paren); } if (!close_paren_state_set.count(s)) { ++nclose_paren_states_; close_paren_state_set.insert(s); } } } } } } template <class Arc> void PrintPdtInfo(const PdtInfo<Arc> &info) { const auto old = std::cout.setf(std::ios::left); std::cout.width(50); std::cout << "fst type" << info.FstType() << std::endl; std::cout.width(50); std::cout << "arc type" << info.ArcType() << std::endl; std::cout.width(50); std::cout << "# of states" << info.NumStates() << std::endl; std::cout.width(50); std::cout << "# of arcs" << info.NumArcs() << std::endl; std::cout.width(50); std::cout << "# of open parentheses" << info.NumOpenParens() << std::endl; std::cout.width(50); std::cout << "# of close 
parentheses" << info.NumCloseParens() << std::endl; std::cout.width(50); std::cout << "# of unique open parentheses" << info.NumUniqueOpenParens() << std::endl; std::cout.width(50); std::cout << "# of unique close parentheses" << info.NumUniqueCloseParens() << std::endl; std::cout.width(50); std::cout << "# of open parenthesis dest. states" << info.NumOpenParenStates() << std::endl; std::cout.width(50); std::cout << "# of close parenthesis source states" << info.NumCloseParenStates() << std::endl; std::cout.setf(old); } } // namespace fst #endif // FST_EXTENSIONS_PDT_INFO_H_
0
coqui_public_repos/STT
coqui_public_repos/STT/taskcluster/tc-transfer-tests.sh
#!/bin/bash set -xe source $(dirname "$0")/tc-tests-utils.sh extract_python_versions "$1" "pyver" "pyver_pkg" "py_unicode_type" "pyconf" "pyalias" mkdir -p ${TASKCLUSTER_ARTIFACTS} || true mkdir -p /tmp/train || true mkdir -p /tmp/train_tflite || true virtualenv_activate "${pyalias}" "deepspeech" set -o pipefail pip install --upgrade pip==19.3.1 setuptools==45.0.0 wheel==0.33.6 | cat pushd ${HOME}/DeepSpeech/ds DS_NODECODER=1 pip install --upgrade . | cat popd set +o pipefail pushd ${HOME}/DeepSpeech/ds/ time ./bin/run-tc-transfer.sh popd virtualenv_deactivate "${pyalias}" "deepspeech"
0
coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src/include/fst/extensions
coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src/include/fst/extensions/ngram/ngram-fst.h
// See www.openfst.org for extensive documentation on this weighted // finite-state transducer library. // // NgramFst implements a n-gram language model based upon the LOUDS data // structure. Please refer to "Unary Data Structures for Language Models" // http://research.google.com/pubs/archive/37218.pdf #ifndef FST_EXTENSIONS_NGRAM_NGRAM_FST_H_ #define FST_EXTENSIONS_NGRAM_NGRAM_FST_H_ #include <stddef.h> #include <string.h> #include <algorithm> #include <iostream> #include <string> #include <utility> #include <vector> #include <fst/compat.h> #include <fst/log.h> #include <fstream> #include <fst/extensions/ngram/bitmap-index.h> #include <fst/fstlib.h> #include <fst/mapped-file.h> namespace fst { template <class A> class NGramFst; template <class A> class NGramFstMatcher; // Instance data containing mutable state for bookkeeping repeated access to // the same state. template <class A> struct NGramFstInst { typedef typename A::Label Label; typedef typename A::StateId StateId; typedef typename A::Weight Weight; StateId state_; size_t num_futures_; size_t offset_; size_t node_; StateId node_state_; std::vector<Label> context_; StateId context_state_; NGramFstInst() : state_(kNoStateId), node_state_(kNoStateId), context_state_(kNoStateId) {} }; namespace internal { // Implementation class for LOUDS based NgramFst interface. template <class A> class NGramFstImpl : public FstImpl<A> { using FstImpl<A>::SetInputSymbols; using FstImpl<A>::SetOutputSymbols; using FstImpl<A>::SetType; using FstImpl<A>::WriteHeader; friend class ArcIterator<NGramFst<A>>; friend class NGramFstMatcher<A>; public: using FstImpl<A>::InputSymbols; using FstImpl<A>::SetProperties; using FstImpl<A>::Properties; typedef A Arc; typedef typename A::Label Label; typedef typename A::StateId StateId; typedef typename A::Weight Weight; NGramFstImpl() { SetType("ngram"); SetInputSymbols(nullptr); SetOutputSymbols(nullptr); SetProperties(kStaticProperties); } NGramFstImpl(const Fst<A> &fst, std::vector<StateId> *order_out); explicit NGramFstImpl(const Fst<A> &fst) : NGramFstImpl(fst, nullptr) {} NGramFstImpl(const NGramFstImpl &other) { FSTERROR() << "Copying NGramFst Impls is not supported, use safe = false."; SetProperties(kError, kError); } ~NGramFstImpl() override { if (owned_) { delete[] data_; } } static NGramFstImpl<A> *Read(std::istream &strm, // NOLINT const FstReadOptions &opts) { NGramFstImpl<A> *impl = new NGramFstImpl(); FstHeader hdr; if (!impl->ReadHeader(strm, opts, kMinFileVersion, &hdr)) return 0; uint64 num_states, num_futures, num_final; const size_t offset = sizeof(num_states) + sizeof(num_futures) + sizeof(num_final); // Peek at num_states and num_futures to see how much more needs to be read. strm.read(reinterpret_cast<char *>(&num_states), sizeof(num_states)); strm.read(reinterpret_cast<char *>(&num_futures), sizeof(num_futures)); strm.read(reinterpret_cast<char *>(&num_final), sizeof(num_final)); size_t size = Storage(num_states, num_futures, num_final); MappedFile *data_region = MappedFile::Allocate(size); char *data = reinterpret_cast<char *>(data_region->mutable_data()); // Copy num_states, num_futures and num_final back into data. 
memcpy(data, reinterpret_cast<char *>(&num_states), sizeof(num_states)); memcpy(data + sizeof(num_states), reinterpret_cast<char *>(&num_futures), sizeof(num_futures)); memcpy(data + sizeof(num_states) + sizeof(num_futures), reinterpret_cast<char *>(&num_final), sizeof(num_final)); strm.read(data + offset, size - offset); if (strm.fail()) { delete impl; return nullptr; } impl->Init(data, false, data_region); return impl; } bool Write(std::ostream &strm, // NOLINT const FstWriteOptions &opts) const { FstHeader hdr; hdr.SetStart(Start()); hdr.SetNumStates(num_states_); WriteHeader(strm, opts, kFileVersion, &hdr); strm.write(data_, StorageSize()); return !strm.fail(); } StateId Start() const { return start_; } Weight Final(StateId state) const { if (final_index_.Get(state)) { return final_probs_[final_index_.Rank1(state)]; } else { return Weight::Zero(); } } size_t NumArcs(StateId state, NGramFstInst<A> *inst = nullptr) const { if (inst == nullptr) { const std::pair<size_t, size_t> zeros = (state == 0) ? select_root_ : future_index_.Select0s(state); return zeros.second - zeros.first - 1; } SetInstFuture(state, inst); return inst->num_futures_ + ((state == 0) ? 0 : 1); } size_t NumInputEpsilons(StateId state) const { // State 0 has no parent, thus no backoff. if (state == 0) return 0; return 1; } size_t NumOutputEpsilons(StateId state) const { return NumInputEpsilons(state); } StateId NumStates() const { return num_states_; } void InitStateIterator(StateIteratorData<A> *data) const { data->base = 0; data->nstates = num_states_; } static size_t Storage(uint64 num_states, uint64 num_futures, uint64 num_final) { uint64 b64; Weight weight; Label label; size_t offset = sizeof(num_states) + sizeof(num_futures) + sizeof(num_final); offset += sizeof(b64) * (BitmapIndex::StorageSize(num_states * 2 + 1) + BitmapIndex::StorageSize(num_futures + num_states + 1) + BitmapIndex::StorageSize(num_states)); offset += (num_states + 1) * sizeof(label) + num_futures * sizeof(label); // Pad for alignemnt, see // http://en.wikipedia.org/wiki/Data_structure_alignment#Computing_padding offset = (offset + sizeof(weight) - 1) & ~(sizeof(weight) - 1); offset += (num_states + 1) * sizeof(weight) + num_final * sizeof(weight) + (num_futures + 1) * sizeof(weight); return offset; } void SetInstFuture(StateId state, NGramFstInst<A> *inst) const { if (inst->state_ != state) { inst->state_ = state; const std::pair<size_t, size_t> zeros = future_index_.Select0s(state); inst->num_futures_ = zeros.second - zeros.first - 1; inst->offset_ = future_index_.Rank1(zeros.first + 1); } } void SetInstNode(NGramFstInst<A> *inst) const { if (inst->node_state_ != inst->state_) { inst->node_state_ = inst->state_; inst->node_ = context_index_.Select1(inst->state_); } } void SetInstContext(NGramFstInst<A> *inst) const { SetInstNode(inst); if (inst->context_state_ != inst->state_) { inst->context_state_ = inst->state_; inst->context_.clear(); size_t node = inst->node_; while (node != 0) { inst->context_.push_back(context_words_[context_index_.Rank1(node)]); node = context_index_.Select1(context_index_.Rank0(node) - 1); } } } // Access to the underlying representation const char *GetData(size_t *data_size) const { *data_size = StorageSize(); return data_; } void Init(const char *data, bool owned, MappedFile *file = nullptr); const std::vector<Label> &GetContext(StateId s, NGramFstInst<A> *inst) const { SetInstFuture(s, inst); SetInstContext(inst); return inst->context_; } size_t StorageSize() const { return Storage(num_states_, num_futures_, 
num_final_); } void GetStates(const std::vector<Label> &context, std::vector<StateId> *states) const; private: StateId Transition(const std::vector<Label> &context, Label future) const; // Properties always true for this Fst class. static const uint64 kStaticProperties = kAcceptor | kIDeterministic | kODeterministic | kEpsilons | kIEpsilons | kOEpsilons | kILabelSorted | kOLabelSorted | kWeighted | kCyclic | kInitialAcyclic | kNotTopSorted | kAccessible | kCoAccessible | kNotString | kExpanded; // Current file format version. static const int kFileVersion = 4; // Minimum file format version supported. static const int kMinFileVersion = 4; std::unique_ptr<MappedFile> data_region_; const char *data_ = nullptr; bool owned_ = false; // True if we own data_ StateId start_ = fst::kNoStateId; uint64 num_states_ = 0; uint64 num_futures_ = 0; uint64 num_final_ = 0; std::pair<size_t, size_t> select_root_; const Label *root_children_ = nullptr; // borrowed references const uint64 *context_ = nullptr; const uint64 *future_ = nullptr; const uint64 *final_ = nullptr; const Label *context_words_ = nullptr; const Label *future_words_ = nullptr; const Weight *backoff_ = nullptr; const Weight *final_probs_ = nullptr; const Weight *future_probs_ = nullptr; BitmapIndex context_index_; BitmapIndex future_index_; BitmapIndex final_index_; }; template <typename A> inline void NGramFstImpl<A>::GetStates( const std::vector<Label> &context, std::vector<typename A::StateId> *states) const { states->clear(); states->push_back(0); typename std::vector<Label>::const_reverse_iterator cit = context.rbegin(); const Label *children = root_children_; size_t num_children = select_root_.second - 2; const Label *loc = std::lower_bound(children, children + num_children, *cit); if (loc == children + num_children || *loc != *cit) return; size_t node = 2 + loc - children; states->push_back(context_index_.Rank1(node)); if (context.size() == 1) return; size_t node_rank = context_index_.Rank1(node); std::pair<size_t, size_t> zeros = node_rank == 0 ? select_root_ : context_index_.Select0s(node_rank); size_t first_child = zeros.first + 1; ++cit; if (context_index_.Get(first_child) != false) { size_t last_child = zeros.second - 1; while (cit != context.rend()) { children = context_words_ + context_index_.Rank1(first_child); loc = std::lower_bound(children, children + last_child - first_child + 1, *cit); if (loc == children + last_child - first_child + 1 || *loc != *cit) { break; } ++cit; node = first_child + loc - children; states->push_back(context_index_.Rank1(node)); node_rank = context_index_.Rank1(node); zeros = node_rank == 0 ? 
select_root_ : context_index_.Select0s(node_rank); first_child = zeros.first + 1; if (context_index_.Get(first_child) == false) break; last_child = zeros.second - 1; } } } } // namespace internal /*****************************************************************************/ template <class A> class NGramFst : public ImplToExpandedFst<internal::NGramFstImpl<A>> { friend class ArcIterator<NGramFst<A>>; friend class NGramFstMatcher<A>; public: typedef A Arc; typedef typename A::StateId StateId; typedef typename A::Label Label; typedef typename A::Weight Weight; typedef internal::NGramFstImpl<A> Impl; explicit NGramFst(const Fst<A> &fst) : ImplToExpandedFst<Impl>(std::make_shared<Impl>(fst, nullptr)) {} NGramFst(const Fst<A> &fst, std::vector<StateId> *order_out) : ImplToExpandedFst<Impl>(std::make_shared<Impl>(fst, order_out)) {} // Because the NGramFstImpl is a const stateless data structure, there // is never a need to do anything besides copying the reference. NGramFst(const NGramFst<A> &fst, bool safe = false) : ImplToExpandedFst<Impl>(fst, false) {} NGramFst() : ImplToExpandedFst<Impl>(std::make_shared<Impl>()) {} // Non-standard constructor to initialize NGramFst directly from data. NGramFst(const char *data, bool owned) : ImplToExpandedFst<Impl>(std::make_shared<Impl>()) { GetMutableImpl()->Init(data, owned, nullptr); } // Returns the data associated with Init(). const char *GetData(size_t *data_size) const { return GetImpl()->GetData(data_size); } const std::vector<Label> GetContext(StateId s) const { return GetImpl()->GetContext(s, &inst_); } // Consumes as much of the context as possible, from right to left, returning // the states corresponding to the increasingly conditioned input sequence. void GetStates(const std::vector<Label> &context, std::vector<StateId> *state) const { return GetImpl()->GetStates(context, state); } size_t NumArcs(StateId s) const override { return GetImpl()->NumArcs(s, &inst_); } NGramFst<A> *Copy(bool safe = false) const override { return new NGramFst(*this, safe); } static NGramFst<A> *Read(std::istream &strm, const FstReadOptions &opts) { Impl *impl = Impl::Read(strm, opts); return impl ?
new NGramFst<A>(std::shared_ptr<Impl>(impl)) : nullptr; } static NGramFst<A> *Read(const string &filename) { if (!filename.empty()) { std::ifstream strm(filename, std::ios_base::in | std::ios_base::binary); if (!strm.good()) { LOG(ERROR) << "NGramFst::Read: Can't open file: " << filename; return nullptr; } return Read(strm, FstReadOptions(filename)); } else { return Read(std::cin, FstReadOptions("standard input")); } } bool Write(std::ostream &strm, const FstWriteOptions &opts) const override { return GetImpl()->Write(strm, opts); } bool Write(const string &filename) const override { return Fst<A>::WriteFile(filename); } inline void InitStateIterator(StateIteratorData<A> *data) const override { GetImpl()->InitStateIterator(data); } inline void InitArcIterator(StateId s, ArcIteratorData<A> *data) const override; MatcherBase<A> *InitMatcher(MatchType match_type) const override { return new NGramFstMatcher<A>(this, match_type); } size_t StorageSize() const { return GetImpl()->StorageSize(); } static bool HasRequiredProps(const Fst<A> &fst) { int64 props = kAcceptor | kIDeterministic | kILabelSorted | kIEpsilons | kAccessible; return fst.Properties(props, true) == props; } static bool HasRequiredStructure(const Fst<A> &fst) { if (!HasRequiredProps(fst)) { return false; } typename A::StateId unigram = fst.Start(); while (true) { // Follows epsilon arc chain to find unigram state. if (unigram == fst::kNoStateId) return false; // No unigram state. typename fst::ArcIterator<Fst<A>> aiter(fst, unigram); if (aiter.Done() || aiter.Value().ilabel != 0) break; unigram = aiter.Value().nextstate; aiter.Next(); } // Other requirement: every state other than the unigram state has exactly // one epsilon arc, and it must be the first arc. for (fst::StateIterator<Fst<A>> siter(fst); !siter.Done(); siter.Next()) { const typename A::StateId &state = siter.Value(); fst::ArcIterator<Fst<A>> aiter(fst, state); if (state != unigram) { if (aiter.Done()) return false; if (aiter.Value().ilabel != 0) return false; aiter.Next(); if (!aiter.Done() && aiter.Value().ilabel == 0) return false; } } return true; } private: using ImplToExpandedFst<Impl, ExpandedFst<A>>::GetImpl; using ImplToExpandedFst<Impl, ExpandedFst<A>>::GetMutableImpl; explicit NGramFst(std::shared_ptr<Impl> impl) : ImplToExpandedFst<Impl>(impl) {} mutable NGramFstInst<A> inst_; }; template <class A> inline void NGramFst<A>::InitArcIterator(StateId s, ArcIteratorData<A> *data) const { GetImpl()->SetInstFuture(s, &inst_); GetImpl()->SetInstNode(&inst_); data->base = new ArcIterator<NGramFst<A>>(*this, s); } namespace internal { template <typename A> NGramFstImpl<A>::NGramFstImpl(const Fst<A> &fst, std::vector<StateId> *order_out) { typedef A Arc; typedef typename Arc::Label Label; typedef typename Arc::Weight Weight; typedef typename Arc::StateId StateId; SetType("ngram"); SetInputSymbols(fst.InputSymbols()); SetOutputSymbols(fst.OutputSymbols()); SetProperties(kStaticProperties); // Check basic requirements for an OpenGrm language model Fst. if (!NGramFst<A>::HasRequiredProps(fst)) { FSTERROR() << "NGramFst only accepts OpenGrm language models as input"; SetProperties(kError, kError); return; } int64 num_states = CountStates(fst); Label *context = new Label[num_states]; // Find the unigram state by starting from the start state, following // epsilons.
StateId unigram = fst.Start(); while (1) { if (unigram == kNoStateId) { FSTERROR() << "Could not identify unigram state"; SetProperties(kError, kError); return; } ArcIterator<Fst<A>> aiter(fst, unigram); if (aiter.Done()) { LOG(WARNING) << "Unigram state " << unigram << " has no arcs."; break; } if (aiter.Value().ilabel != 0) break; unigram = aiter.Value().nextstate; } // Each state's context is determined by the subtree it is under from the // unigram state. std::queue<std::pair<StateId, Label>> label_queue; std::vector<bool> visited(num_states); // Force an epsilon link to the start state. label_queue.push(std::make_pair(fst.Start(), 0)); for (ArcIterator<Fst<A>> aiter(fst, unigram); !aiter.Done(); aiter.Next()) { label_queue.push( std::make_pair(aiter.Value().nextstate, aiter.Value().ilabel)); } // investigate states in breadth first fashion to assign context words. while (!label_queue.empty()) { std::pair<StateId, Label> &now = label_queue.front(); if (!visited[now.first]) { context[now.first] = now.second; visited[now.first] = true; for (ArcIterator<Fst<A>> aiter(fst, now.first); !aiter.Done(); aiter.Next()) { const Arc &arc = aiter.Value(); if (arc.ilabel != 0) { label_queue.push(std::make_pair(arc.nextstate, now.second)); } } } label_queue.pop(); } visited.clear(); // The arc from the start state should be assigned an epsilon to put it // in front of the all other labels (which makes Start state 1 after // unigram which is state 0). context[fst.Start()] = 0; // Build the tree of contexts fst by reversing the epsilon arcs from fst. VectorFst<Arc> context_fst; uint64 num_final = 0; for (int i = 0; i < num_states; ++i) { if (fst.Final(i) != Weight::Zero()) { ++num_final; } context_fst.SetFinal(context_fst.AddState(), fst.Final(i)); } context_fst.SetStart(unigram); context_fst.SetInputSymbols(fst.InputSymbols()); context_fst.SetOutputSymbols(fst.OutputSymbols()); int64 num_context_arcs = 0; int64 num_futures = 0; for (StateIterator<Fst<A>> siter(fst); !siter.Done(); siter.Next()) { const StateId &state = siter.Value(); num_futures += fst.NumArcs(state) - fst.NumInputEpsilons(state); ArcIterator<Fst<A>> aiter(fst, state); if (!aiter.Done()) { const Arc &arc = aiter.Value(); // this arc goes from state to arc.nextstate, so create an arc from // arc.nextstate to state to reverse it. 
if (arc.ilabel == 0) { context_fst.AddArc(arc.nextstate, Arc(context[state], context[state], arc.weight, state)); num_context_arcs++; } } } if (num_context_arcs != context_fst.NumStates() - 1) { FSTERROR() << "Number of contexts arcs != number of states - 1"; SetProperties(kError, kError); return; } if (context_fst.NumStates() != num_states) { FSTERROR() << "Number of contexts != number of states"; SetProperties(kError, kError); return; } int64 context_props = context_fst.Properties(kIDeterministic | kILabelSorted, true); if (!(context_props & kIDeterministic)) { FSTERROR() << "Input Fst is not structured properly"; SetProperties(kError, kError); return; } if (!(context_props & kILabelSorted)) { ArcSort(&context_fst, ILabelCompare<Arc>()); } delete[] context; uint64 b64; Weight weight; Label label = kNoLabel; const size_t storage = Storage(num_states, num_futures, num_final); MappedFile *data_region = MappedFile::Allocate(storage); char *data = reinterpret_cast<char *>(data_region->mutable_data()); memset(data, 0, storage); size_t offset = 0; memcpy(data + offset, reinterpret_cast<char *>(&num_states), sizeof(num_states)); offset += sizeof(num_states); memcpy(data + offset, reinterpret_cast<char *>(&num_futures), sizeof(num_futures)); offset += sizeof(num_futures); memcpy(data + offset, reinterpret_cast<char *>(&num_final), sizeof(num_final)); offset += sizeof(num_final); uint64 *context_bits = reinterpret_cast<uint64 *>(data + offset); offset += BitmapIndex::StorageSize(num_states * 2 + 1) * sizeof(b64); uint64 *future_bits = reinterpret_cast<uint64 *>(data + offset); offset += BitmapIndex::StorageSize(num_futures + num_states + 1) * sizeof(b64); uint64 *final_bits = reinterpret_cast<uint64 *>(data + offset); offset += BitmapIndex::StorageSize(num_states) * sizeof(b64); Label *context_words = reinterpret_cast<Label *>(data + offset); offset += (num_states + 1) * sizeof(label); Label *future_words = reinterpret_cast<Label *>(data + offset); offset += num_futures * sizeof(label); offset = (offset + sizeof(weight) - 1) & ~(sizeof(weight) - 1); Weight *backoff = reinterpret_cast<Weight *>(data + offset); offset += (num_states + 1) * sizeof(weight); Weight *final_probs = reinterpret_cast<Weight *>(data + offset); offset += num_final * sizeof(weight); Weight *future_probs = reinterpret_cast<Weight *>(data + offset); int64 context_arc = 0, future_arc = 0, context_bit = 0, future_bit = 0, final_bit = 0; // pseudo-root bits BitmapIndex::Set(context_bits, context_bit++); ++context_bit; context_words[context_arc] = label; backoff[context_arc] = Weight::Zero(); context_arc++; ++future_bit; if (order_out) { order_out->clear(); order_out->resize(num_states); } std::queue<StateId> context_q; context_q.push(context_fst.Start()); StateId state_number = 0; while (!context_q.empty()) { const StateId &state = context_q.front(); if (order_out) { (*order_out)[state] = state_number; } const Weight final_weight = context_fst.Final(state); if (final_weight != Weight::Zero()) { BitmapIndex::Set(final_bits, state_number); final_probs[final_bit] = final_weight; ++final_bit; } for (ArcIterator<VectorFst<A>> aiter(context_fst, state); !aiter.Done(); aiter.Next()) { const Arc &arc = aiter.Value(); context_words[context_arc] = arc.ilabel; backoff[context_arc] = arc.weight; ++context_arc; BitmapIndex::Set(context_bits, context_bit++); context_q.push(arc.nextstate); } ++context_bit; for (ArcIterator<Fst<A>> aiter(fst, state); !aiter.Done(); aiter.Next()) { const Arc &arc = aiter.Value(); if (arc.ilabel != 0) { 
future_words[future_arc] = arc.ilabel; future_probs[future_arc] = arc.weight; ++future_arc; BitmapIndex::Set(future_bits, future_bit++); } } ++future_bit; ++state_number; context_q.pop(); } if ((state_number != num_states) || (context_bit != num_states * 2 + 1) || (context_arc != num_states) || (future_arc != num_futures) || (future_bit != num_futures + num_states + 1) || (final_bit != num_final)) { FSTERROR() << "Structure problems detected during construction"; SetProperties(kError, kError); return; } Init(data, false, data_region); } template <typename A> inline void NGramFstImpl<A>::Init(const char *data, bool owned, MappedFile *data_region) { if (owned_) { delete[] data_; } data_region_.reset(data_region); owned_ = owned; data_ = data; size_t offset = 0; num_states_ = *(reinterpret_cast<const uint64 *>(data_ + offset)); offset += sizeof(num_states_); num_futures_ = *(reinterpret_cast<const uint64 *>(data_ + offset)); offset += sizeof(num_futures_); num_final_ = *(reinterpret_cast<const uint64 *>(data_ + offset)); offset += sizeof(num_final_); uint64 bits; size_t context_bits = num_states_ * 2 + 1; size_t future_bits = num_futures_ + num_states_ + 1; context_ = reinterpret_cast<const uint64 *>(data_ + offset); offset += BitmapIndex::StorageSize(context_bits) * sizeof(bits); future_ = reinterpret_cast<const uint64 *>(data_ + offset); offset += BitmapIndex::StorageSize(future_bits) * sizeof(bits); final_ = reinterpret_cast<const uint64 *>(data_ + offset); offset += BitmapIndex::StorageSize(num_states_) * sizeof(bits); context_words_ = reinterpret_cast<const Label *>(data_ + offset); offset += (num_states_ + 1) * sizeof(*context_words_); future_words_ = reinterpret_cast<const Label *>(data_ + offset); offset += num_futures_ * sizeof(*future_words_); offset = (offset + sizeof(*backoff_) - 1) & ~(sizeof(*backoff_) - 1); backoff_ = reinterpret_cast<const Weight *>(data_ + offset); offset += (num_states_ + 1) * sizeof(*backoff_); final_probs_ = reinterpret_cast<const Weight *>(data_ + offset); offset += num_final_ * sizeof(*final_probs_); future_probs_ = reinterpret_cast<const Weight *>(data_ + offset); context_index_.BuildIndex(context_, context_bits); future_index_.BuildIndex(future_, future_bits); final_index_.BuildIndex(final_, num_states_); select_root_ = context_index_.Select0s(0); if (context_index_.Rank1(0) != 0 || select_root_.first != 1 || context_index_.Get(2) == false) { FSTERROR() << "Malformed file"; SetProperties(kError, kError); return; } root_children_ = context_words_ + context_index_.Rank1(2); start_ = 1; } template <typename A> inline typename A::StateId NGramFstImpl<A>::Transition( const std::vector<Label> &context, Label future) const { const Label *children = root_children_; size_t num_children = select_root_.second - 2; const Label *loc = std::lower_bound(children, children + num_children, future); if (loc == children + num_children || *loc != future) { return context_index_.Rank1(0); } size_t node = 2 + loc - children; size_t node_rank = context_index_.Rank1(node); std::pair<size_t, size_t> zeros = (node_rank == 0) ? 
select_root_ : context_index_.Select0s(node_rank); size_t first_child = zeros.first + 1; if (context_index_.Get(first_child) == false) { return context_index_.Rank1(node); } size_t last_child = zeros.second - 1; for (int word = context.size() - 1; word >= 0; --word) { children = context_words_ + context_index_.Rank1(first_child); loc = std::lower_bound(children, children + last_child - first_child + 1, context[word]); if (loc == children + last_child - first_child + 1 || *loc != context[word]) { break; } node = first_child + loc - children; node_rank = context_index_.Rank1(node); zeros = (node_rank == 0) ? select_root_ : context_index_.Select0s(node_rank); first_child = zeros.first + 1; if (context_index_.Get(first_child) == false) break; last_child = zeros.second - 1; } return context_index_.Rank1(node); } } // namespace internal /*****************************************************************************/ template <class A> class NGramFstMatcher : public MatcherBase<A> { public: typedef A Arc; typedef typename A::Label Label; typedef typename A::StateId StateId; typedef typename A::Weight Weight; // This makes a copy of the FST. NGramFstMatcher(const NGramFst<A> &fst, MatchType match_type) : owned_fst_(fst.Copy()), fst_(*owned_fst_), inst_(fst_.inst_), match_type_(match_type), current_loop_(false), loop_(kNoLabel, 0, A::Weight::One(), kNoStateId) { if (match_type_ == MATCH_OUTPUT) { std::swap(loop_.ilabel, loop_.olabel); } } // This doesn't copy the FST. NGramFstMatcher(const NGramFst<A> *fst, MatchType match_type) : fst_(*fst), inst_(fst_.inst_), match_type_(match_type), current_loop_(false), loop_(kNoLabel, 0, A::Weight::One(), kNoStateId) { if (match_type_ == MATCH_OUTPUT) { std::swap(loop_.ilabel, loop_.olabel); } } // This makes a copy of the FST. NGramFstMatcher(const NGramFstMatcher<A> &matcher, bool safe = false) : owned_fst_(matcher.fst_.Copy(safe)), fst_(*owned_fst_), inst_(matcher.inst_), match_type_(matcher.match_type_), current_loop_(false), loop_(kNoLabel, 0, A::Weight::One(), kNoStateId) { if (match_type_ == MATCH_OUTPUT) { std::swap(loop_.ilabel, loop_.olabel); } } NGramFstMatcher<A> *Copy(bool safe = false) const override { return new NGramFstMatcher<A>(*this, safe); } MatchType Type(bool test) const override { return match_type_; } const Fst<A> &GetFst() const override { return fst_; } uint64 Properties(uint64 props) const override { return props; } void SetState(StateId s) final { fst_.GetImpl()->SetInstFuture(s, &inst_); current_loop_ = false; } bool Find(Label label) final { const Label nolabel = kNoLabel; done_ = true; if (label == 0 || label == nolabel) { if (label == 0) { current_loop_ = true; loop_.nextstate = inst_.state_; } // The unigram state has no epsilon arc. 
if (inst_.state_ != 0) { arc_.ilabel = arc_.olabel = 0; fst_.GetImpl()->SetInstNode(&inst_); arc_.nextstate = fst_.GetImpl()->context_index_.Rank1( fst_.GetImpl()->context_index_.Select1( fst_.GetImpl()->context_index_.Rank0(inst_.node_) - 1)); arc_.weight = fst_.GetImpl()->backoff_[inst_.state_]; done_ = false; } } else { current_loop_ = false; const Label *start = fst_.GetImpl()->future_words_ + inst_.offset_; const Label *end = start + inst_.num_futures_; const Label *search = std::lower_bound(start, end, label); if (search != end && *search == label) { size_t state = search - start; arc_.ilabel = arc_.olabel = label; arc_.weight = fst_.GetImpl()->future_probs_[inst_.offset_ + state]; fst_.GetImpl()->SetInstContext(&inst_); arc_.nextstate = fst_.GetImpl()->Transition(inst_.context_, label); done_ = false; } } return !Done(); } bool Done() const final { return !current_loop_ && done_; } const Arc &Value() const final { return (current_loop_) ? loop_ : arc_; } void Next() final { if (current_loop_) { current_loop_ = false; } else { done_ = true; } } ssize_t Priority(StateId s) final { return fst_.NumArcs(s); } private: std::unique_ptr<NGramFst<A>> owned_fst_; const NGramFst<A> &fst_; NGramFstInst<A> inst_; MatchType match_type_; // Supplied by caller bool done_; Arc arc_; bool current_loop_; // Current arc is the implicit loop Arc loop_; }; /*****************************************************************************/ // Specialization for NGramFst; see generic version in fst.h // for sample usage (but use the ProdLmFst type!). This version // should inline. template <class A> class StateIterator<NGramFst<A>> : public StateIteratorBase<A> { public: typedef typename A::StateId StateId; explicit StateIterator(const NGramFst<A> &fst) : s_(0), num_states_(fst.NumStates()) {} bool Done() const final { return s_ >= num_states_; } StateId Value() const final { return s_; } void Next() final { ++s_; } void Reset() final { s_ = 0; } private: StateId s_; StateId num_states_; }; /*****************************************************************************/ template <class A> class ArcIterator<NGramFst<A>> : public ArcIteratorBase<A> { public: typedef A Arc; typedef typename A::Label Label; typedef typename A::StateId StateId; typedef typename A::Weight Weight; ArcIterator(const NGramFst<A> &fst, StateId state) : lazy_(~0), impl_(fst.GetImpl()), i_(0), flags_(kArcValueFlags) { inst_ = fst.inst_; impl_->SetInstFuture(state, &inst_); impl_->SetInstNode(&inst_); } bool Done() const final { return i_ >= ((inst_.node_ == 0) ? inst_.num_futures_ : inst_.num_futures_ + 1); } const Arc &Value() const final { bool eps = (inst_.node_ != 0 && i_ == 0); StateId state = (inst_.node_ == 0) ? i_ : i_ - 1; if (flags_ & lazy_ & (kArcILabelValue | kArcOLabelValue)) { arc_.ilabel = arc_.olabel = eps ? 0 : impl_->future_words_[inst_.offset_ + state]; lazy_ &= ~(kArcILabelValue | kArcOLabelValue); } if (flags_ & lazy_ & kArcNextStateValue) { if (eps) { arc_.nextstate = impl_->context_index_.Rank1(impl_->context_index_.Select1( impl_->context_index_.Rank0(inst_.node_) - 1)); } else { if (lazy_ & kArcNextStateValue) { impl_->SetInstContext(&inst_); // first time only. } arc_.nextstate = impl_->Transition( inst_.context_, impl_->future_words_[inst_.offset_ + state]); } lazy_ &= ~kArcNextStateValue; } if (flags_ & lazy_ & kArcWeightValue) { arc_.weight = eps ? 
impl_->backoff_[inst_.state_] : impl_->future_probs_[inst_.offset_ + state]; lazy_ &= ~kArcWeightValue; } return arc_; } void Next() final { ++i_; lazy_ = ~0; } size_t Position() const final { return i_; } void Reset() final { i_ = 0; lazy_ = ~0; } void Seek(size_t a) final { if (i_ != a) { i_ = a; lazy_ = ~0; } } uint32 Flags() const final { return flags_; } void SetFlags(uint32 flags, uint32 mask) final { flags_ &= ~mask; flags_ |= (flags & kArcValueFlags); } private: mutable Arc arc_; mutable uint32 lazy_; const internal::NGramFstImpl<A> *impl_; // Borrowed reference. mutable NGramFstInst<A> inst_; size_t i_; uint32 flags_; }; } // namespace fst #endif // FST_EXTENSIONS_NGRAM_NGRAM_FST_H_
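// A minimal usage sketch (the file name and word label are hypothetical; the
// input must be an FST previously written in NGramFst format):
//
//   std::unique_ptr<fst::NGramFst<fst::StdArc>> lm(
//       fst::NGramFst<fst::StdArc>::Read("lm.ngram.fst"));
//   if (lm != nullptr) {
//     fst::NGramFstMatcher<fst::StdArc> matcher(lm.get(), fst::MATCH_INPUT);
//     matcher.SetState(lm->Start());
//     if (matcher.Find(42)) {                        // Hypothetical label.
//       std::cout << matcher.Value().weight << std::endl;
//     }
//   }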
0
coqui_public_repos/inference-engine/third_party/openfst-1.6.9-win/src/extensions
coqui_public_repos/inference-engine/third_party/openfst-1.6.9-win/src/extensions/compact/compact64_weighted_string-fst.cc
// See www.openfst.org for extensive documentation on this weighted // finite-state transducer library. #include <fst/fst.h> #include <fst/compact-fst.h> namespace fst { static FstRegisterer< CompactWeightedStringFst<StdArc, uint64>> CompactWeightedStringFst_StdArc_uint64_registerer; static FstRegisterer< CompactWeightedStringFst<LogArc, uint64>> CompactWeightedStringFst_LogArc_uint64_registerer; } // namespace fst
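// Registration is what lets the generic factory construct these compact types
// by name at read time. A minimal sketch (the file name is hypothetical):
//
//   // Reads an FST whose stored type is, e.g., "compact64_weighted_string";
//   // the registry dispatches to the matching registered reader.
//   std::unique_ptr<fst::Fst<fst::StdArc>> f(
//       fst::Fst<fst::StdArc>::Read("strings.fst"));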
0
coqui_public_repos
coqui_public_repos/STT/lm_optimizer.py
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import, division, print_function if __name__ == "__main__": print( "Using the top level lm_optimizer.py script is deprecated and will be removed " "in a future release. Instead use: python -m coqui_stt_training.util.lm_optimize" ) try: from coqui_stt_training.util import lm_optimize except ImportError: print("Training package is not installed. See training documentation.") raise lm_optimize.main()
0
coqui_public_repos/STT
coqui_public_repos/STT/taskcluster/test-python_37m-armbian-arm64-prod-opt.yml
build: template_file: test-armbian-opt-base.tyml dependencies: - "linux-arm64-cpu-opt" - "test-training_16k-linux-amd64-py36m-opt" test_model_task: "test-training_16k-linux-amd64-py36m-opt" system_setup: > apt-get -qq -y install ${python.packages_buster.apt} args: tests_cmdline: "${system.homedir.linux}/DeepSpeech/ds/taskcluster/tc-python_tflite-tests-prod.sh 3.7.6:m 16k" metadata: name: "DeepSpeech ARMbian ARM64 Cortex-A53 CPU Python v3.7 prod tests" description: "Testing DeepSpeech for ARMbian ARM64 Cortex-A53 on prod model Python v3.7, CPU only, optimized version"
0
coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src/include/fst
coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src/include/fst/script/invert.h
// See www.openfst.org for extensive documentation on this weighted // finite-state transducer library. #ifndef FST_SCRIPT_INVERT_H_ #define FST_SCRIPT_INVERT_H_ #include <fst/invert.h> #include <fst/script/fst-class.h> namespace fst { namespace script { template <class Arc> void Invert(MutableFstClass *fst) { Invert(fst->GetMutableFst<Arc>()); } void Invert(MutableFstClass *fst); } // namespace script } // namespace fst #endif // FST_SCRIPT_INVERT_H_
0
coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src/include
coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src/include/fst/matcher-fst.h
// See www.openfst.org for extensive documentation on this weighted // finite-state transducer library. // // Class to add a matcher to an FST. #ifndef FST_MATCHER_FST_H_ #define FST_MATCHER_FST_H_ #include <memory> #include <string> #include <fst/add-on.h> #include <fst/const-fst.h> #include <fst/lookahead-matcher.h> namespace fst { // Writeable matchers have the same interface as Matchers (as defined in // matcher.h) along with the following additional methods: // // template <class F> // class Matcher { // public: // using FST = F; // ... // using MatcherData = ...; // Initialization data. // // // Constructor with additional argument for external initialization data; // // matcher increments its reference count on construction and decrements // // the reference count, and deletes once the reference count has reached // // zero. // Matcher(const FST &fst, MatchType type, MatcherData *data); // // // Returns pointer to initialization data that can be passed to a Matcher // // constructor. // MatcherData *GetData() const; // }; // The matcher initialization data class must also provide the following // interface: // // class MatcherData { // public: // // Required copy constructor. // MatcherData(const MatcherData &); // // // Required I/O methods. // static MatcherData *Read(std::istream &istrm, const FstReadOptions &opts); // bool Write(std::ostream &ostrm, const FstWriteOptions &opts) const; // }; // Trivial (no-op) MatcherFst initializer functor. template <class M> class NullMatcherFstInit { public: using MatcherData = typename M::MatcherData; using Data = AddOnPair<MatcherData, MatcherData>; using Impl = internal::AddOnImpl<typename M::FST, Data>; explicit NullMatcherFstInit(std::shared_ptr<Impl> *) {} }; // Class adding a matcher to an FST type. Creates a new FST whose name is given // by N. An optional functor Init can be used to initialize the FST. The Data // template parameter allows the user to select the type of the add-on. template < class F, class M, const char *Name, class Init = NullMatcherFstInit<M>, class Data = AddOnPair<typename M::MatcherData, typename M::MatcherData>> class MatcherFst : public ImplToExpandedFst<internal::AddOnImpl<F, Data>> { public: using FST = F; using Arc = typename FST::Arc; using StateId = typename Arc::StateId; using FstMatcher = M; using MatcherData = typename FstMatcher::MatcherData; using Impl = internal::AddOnImpl<FST, Data>; using D = Data; friend class StateIterator<MatcherFst<FST, FstMatcher, Name, Init, Data>>; friend class ArcIterator<MatcherFst<FST, FstMatcher, Name, Init, Data>>; MatcherFst() : ImplToExpandedFst<Impl>(std::make_shared<Impl>(FST(), Name)) {} explicit MatcherFst(const FST &fst, std::shared_ptr<Data> data = nullptr) : ImplToExpandedFst<Impl>(data ? CreateImpl(fst, Name, data) : CreateDataAndImpl(fst, Name)) {} explicit MatcherFst(const Fst<Arc> &fst) : ImplToExpandedFst<Impl>(CreateDataAndImpl(fst, Name)) {} // See Fst<>::Copy() for doc. MatcherFst(const MatcherFst<FST, FstMatcher, Name, Init, Data> &fst, bool safe = false) : ImplToExpandedFst<Impl>(fst, safe) {} // Get a copy of this MatcherFst. See Fst<>::Copy() for further doc. MatcherFst<FST, FstMatcher, Name, Init, Data> *Copy( bool safe = false) const override { return new MatcherFst<FST, FstMatcher, Name, Init, Data>(*this, safe); } // Read a MatcherFst from an input stream; return nullptr on error static MatcherFst<FST, M, Name, Init, Data> *Read( std::istream &strm, const FstReadOptions &opts) { auto *impl = Impl::Read(strm, opts); return impl ? 
new MatcherFst<FST, FstMatcher, Name, Init, Data>( std::shared_ptr<Impl>(impl)) : nullptr; } // Read a MatcherFst from a file; return nullptr on error // Empty filename reads from standard input static MatcherFst<FST, FstMatcher, Name, Init, Data> *Read( const string &filename) { auto *impl = ImplToExpandedFst<Impl>::Read(filename); return impl ? new MatcherFst<FST, FstMatcher, Name, Init, Data>( std::shared_ptr<Impl>(impl)) : nullptr; } bool Write(std::ostream &strm, const FstWriteOptions &opts) const override { return GetImpl()->Write(strm, opts); } bool Write(const string &filename) const override { return Fst<Arc>::WriteFile(filename); } void InitStateIterator(StateIteratorData<Arc> *data) const override { return GetImpl()->InitStateIterator(data); } void InitArcIterator(StateId s, ArcIteratorData<Arc> *data) const override { return GetImpl()->InitArcIterator(s, data); } FstMatcher *InitMatcher(MatchType match_type) const override { return new FstMatcher(&GetFst(), match_type, GetSharedData(match_type)); } const FST &GetFst() const { return GetImpl()->GetFst(); } const Data *GetAddOn() const { return GetImpl()->GetAddOn(); } std::shared_ptr<Data> GetSharedAddOn() const { return GetImpl()->GetSharedAddOn(); } const MatcherData *GetData(MatchType match_type) const { const auto *data = GetAddOn(); return match_type == MATCH_INPUT ? data->First() : data->Second(); } std::shared_ptr<MatcherData> GetSharedData(MatchType match_type) const { const auto *data = GetAddOn(); return match_type == MATCH_INPUT ? data->SharedFirst() : data->SharedSecond(); } protected: using ImplToFst<Impl, ExpandedFst<Arc>>::GetImpl; static std::shared_ptr<Impl> CreateDataAndImpl(const FST &fst, const string &name) { FstMatcher imatcher(fst, MATCH_INPUT); FstMatcher omatcher(fst, MATCH_OUTPUT); return CreateImpl(fst, name, std::make_shared<Data>(imatcher.GetSharedData(), omatcher.GetSharedData())); } static std::shared_ptr<Impl> CreateDataAndImpl(const Fst<Arc> &fst, const string &name) { FST result(fst); return CreateDataAndImpl(result, name); } static std::shared_ptr<Impl> CreateImpl(const FST &fst, const string &name, std::shared_ptr<Data> data) { auto impl = std::make_shared<Impl>(fst, name); impl->SetAddOn(data); Init init(&impl); return impl; } explicit MatcherFst(std::shared_ptr<Impl> impl) : ImplToExpandedFst<Impl>(impl) {} private: MatcherFst &operator=(const MatcherFst &) = delete; }; // Specialization for MatcherFst. template <class FST, class M, const char *Name, class Init> class StateIterator<MatcherFst<FST, M, Name, Init>> : public StateIterator<FST> { public: explicit StateIterator(const MatcherFst<FST, M, Name, Init> &fst) : StateIterator<FST>(fst.GetImpl()->GetFst()) {} }; // Specialization for MatcherFst. template <class FST, class M, const char *Name, class Init> class ArcIterator<MatcherFst<FST, M, Name, Init>> : public ArcIterator<FST> { public: using StateId = typename FST::Arc::StateId; ArcIterator(const MatcherFst<FST, M, Name, Init> &fst, typename FST::Arc::StateId s) : ArcIterator<FST>(fst.GetImpl()->GetFst(), s) {} }; // Specialization for MatcherFst. 
template <class F, class M, const char *Name, class Init> class Matcher<MatcherFst<F, M, Name, Init>> { public: using FST = MatcherFst<F, M, Name, Init>; using Arc = typename F::Arc; using Label = typename Arc::Label; using StateId = typename Arc::StateId; Matcher(const FST &fst, MatchType match_type) : matcher_(fst.InitMatcher(match_type)) {} Matcher(const Matcher<FST> &matcher) : matcher_(matcher.matcher_->Copy()) {} Matcher<FST> *Copy() const { return new Matcher<FST>(*this); } MatchType Type(bool test) const { return matcher_->Type(test); } void SetState(StateId s) { matcher_->SetState(s); } bool Find(Label label) { return matcher_->Find(label); } bool Done() const { return matcher_->Done(); } const Arc &Value() const { return matcher_->Value(); } void Next() { matcher_->Next(); } uint64 Properties(uint64 props) const { return matcher_->Properties(props); } uint32 Flags() const { return matcher_->Flags(); } private: std::unique_ptr<M> matcher_; }; // Specialization for MatcherFst. template <class F, class M, const char *Name, class Init> class LookAheadMatcher<MatcherFst<F, M, Name, Init>> { public: using FST = MatcherFst<F, M, Name, Init>; using Arc = typename F::Arc; using Label = typename Arc::Label; using StateId = typename Arc::StateId; using Weight = typename Arc::Weight; LookAheadMatcher(const FST &fst, MatchType match_type) : matcher_(fst.InitMatcher(match_type)) {} LookAheadMatcher(const LookAheadMatcher<FST> &matcher, bool safe = false) : matcher_(matcher.matcher_->Copy(safe)) {} // General matcher methods. LookAheadMatcher<FST> *Copy(bool safe = false) const { return new LookAheadMatcher<FST>(*this, safe); } MatchType Type(bool test) const { return matcher_->Type(test); } void SetState(StateId s) { matcher_->SetState(s); } bool Find(Label label) { return matcher_->Find(label); } bool Done() const { return matcher_->Done(); } const Arc &Value() const { return matcher_->Value(); } void Next() { matcher_->Next(); } const FST &GetFst() const { return matcher_->GetFst(); } uint64 Properties(uint64 props) const { return matcher_->Properties(props); } uint32 Flags() const { return matcher_->Flags(); } bool LookAheadLabel(Label label) const { return matcher_->LookAheadLabel(label); } bool LookAheadFst(const Fst<Arc> &fst, StateId s) { return matcher_->LookAheadFst(fst, s); } Weight LookAheadWeight() const { return matcher_->LookAheadWeight(); } bool LookAheadPrefix(Arc *arc) const { return matcher_->LookAheadPrefix(arc); } void InitLookAheadFst(const Fst<Arc> &fst, bool copy = false) { matcher_->InitLookAheadFst(fst, copy); } private: std::unique_ptr<M> matcher_; }; // Useful aliases when using StdArc. 
extern const char arc_lookahead_fst_type[]; using StdArcLookAheadFst = MatcherFst<ConstFst<StdArc>, ArcLookAheadMatcher<SortedMatcher<ConstFst<StdArc>>>, arc_lookahead_fst_type>; extern const char ilabel_lookahead_fst_type[]; extern const char olabel_lookahead_fst_type[]; constexpr auto ilabel_lookahead_flags = kInputLookAheadMatcher | kLookAheadWeight | kLookAheadPrefix | kLookAheadEpsilons | kLookAheadNonEpsilonPrefix; constexpr auto olabel_lookahead_flags = kOutputLookAheadMatcher | kLookAheadWeight | kLookAheadPrefix | kLookAheadEpsilons | kLookAheadNonEpsilonPrefix; using StdILabelLookAheadFst = MatcherFst< ConstFst<StdArc>, LabelLookAheadMatcher<SortedMatcher<ConstFst<StdArc>>, ilabel_lookahead_flags, FastLogAccumulator<StdArc>>, ilabel_lookahead_fst_type, LabelLookAheadRelabeler<StdArc>>; using StdOLabelLookAheadFst = MatcherFst< ConstFst<StdArc>, LabelLookAheadMatcher<SortedMatcher<ConstFst<StdArc>>, olabel_lookahead_flags, FastLogAccumulator<StdArc>>, olabel_lookahead_fst_type, LabelLookAheadRelabeler<StdArc>>; } // namespace fst #endif // FST_MATCHER_FST_H_
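// A minimal usage sketch (file names hypothetical): converting to a lookahead
// type computes the matcher add-on once, and Write() serializes it alongside
// the FST so later reads can skip the precomputation.
//
//   std::unique_ptr<fst::StdFst> in(fst::StdFst::Read("plain.fst"));
//   if (in != nullptr) {
//     fst::StdILabelLookAheadFst la(*in);  // Computes the lookahead add-on.
//     la.Write("la.fst");
//   }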
0
coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src
coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src/test/weight-tester.h
// See www.openfst.org for extensive documentation on this weighted // finite-state transducer library. // // Utility class for regression testing of FST weights. #ifndef FST_TEST_WEIGHT_TESTER_H_ #define FST_TEST_WEIGHT_TESTER_H_ #include <iostream> #include <sstream> #include <utility> #include <fst/log.h> #include <fst/weight.h> namespace fst { // This class tests a variety of identities and properties that must // hold for the Weight class to be well-defined. It calls function object // WEIGHT_GENERATOR to select weights that are used in the tests. template <class Weight, class WeightGenerator> class WeightTester { public: WeightTester(WeightGenerator generator) : weight_generator_(std::move(generator)) {} void Test(int iterations, bool test_division = true) { for (int i = 0; i < iterations; ++i) { // Selects the test weights. const Weight w1(weight_generator_()); const Weight w2(weight_generator_()); const Weight w3(weight_generator_()); VLOG(1) << "weight type = " << Weight::Type(); VLOG(1) << "w1 = " << w1; VLOG(1) << "w2 = " << w2; VLOG(1) << "w3 = " << w3; TestSemiring(w1, w2, w3); if (test_division) TestDivision(w1, w2); TestReverse(w1, w2); TestEquality(w1, w2, w3); TestIO(w1); TestCopy(w1); } } private: // Note in the tests below we use ApproxEqual rather than == and add // kDelta to inequalities where the weights might be inexact. // Tests (Plus, Times, Zero, One) defines a commutative semiring. void TestSemiring(Weight w1, Weight w2, Weight w3) { // Checks that the operations are closed. CHECK(Plus(w1, w2).Member()); CHECK(Times(w1, w2).Member()); // Checks that the operations are associative. CHECK(ApproxEqual(Plus(w1, Plus(w2, w3)), Plus(Plus(w1, w2), w3))); CHECK(ApproxEqual(Times(w1, Times(w2, w3)), Times(Times(w1, w2), w3))); // Checks the identity elements. CHECK(Plus(w1, Weight::Zero()) == w1); CHECK(Plus(Weight::Zero(), w1) == w1); CHECK(Times(w1, Weight::One()) == w1); CHECK(Times(Weight::One(), w1) == w1); // Check the no weight element. CHECK(!Weight::NoWeight().Member()); CHECK(!Plus(w1, Weight::NoWeight()).Member()); CHECK(!Plus(Weight::NoWeight(), w1).Member()); CHECK(!Times(w1, Weight::NoWeight()).Member()); CHECK(!Times(Weight::NoWeight(), w1).Member()); // Checks that the operations commute. CHECK(ApproxEqual(Plus(w1, w2), Plus(w2, w1))); if (Weight::Properties() & kCommutative) CHECK(ApproxEqual(Times(w1, w2), Times(w2, w1))); // Checks Zero() is the annihilator. CHECK(Times(w1, Weight::Zero()) == Weight::Zero()); CHECK(Times(Weight::Zero(), w1) == Weight::Zero()); // Check Power(w, 0) is Weight::One() CHECK(Power(w1, 0) == Weight::One()); // Check Power(w, 1) is w CHECK(Power(w1, 1) == w1); // Check Power(w, 3) is Times(w, Times(w, w)) CHECK(Power(w1, 3) == Times(w1, Times(w1, w1))); // Checks distributivity. if (Weight::Properties() & kLeftSemiring) { CHECK(ApproxEqual(Times(w1, Plus(w2, w3)), Plus(Times(w1, w2), Times(w1, w3)))); } if (Weight::Properties() & kRightSemiring) CHECK(ApproxEqual(Times(Plus(w1, w2), w3), Plus(Times(w1, w3), Times(w2, w3)))); if (Weight::Properties() & kIdempotent) CHECK(Plus(w1, w1) == w1); if (Weight::Properties() & kPath) CHECK(Plus(w1, w2) == w1 || Plus(w1, w2) == w2); // Ensure weights form a left or right semiring. CHECK(Weight::Properties() & (kLeftSemiring | kRightSemiring)); // Check when Times() is commutative that it is marked as a semiring. if (Weight::Properties() & kCommutative) CHECK(Weight::Properties() & kSemiring); } // Tests division operation. 
void TestDivision(Weight w1, Weight w2) { Weight p = Times(w1, w2); if (Weight::Properties() & kLeftSemiring) { Weight d = Divide(p, w1, DIVIDE_LEFT); if (d.Member()) CHECK(ApproxEqual(p, Times(w1, d))); CHECK(!Divide(w1, Weight::NoWeight(), DIVIDE_LEFT).Member()); CHECK(!Divide(Weight::NoWeight(), w1, DIVIDE_LEFT).Member()); } if (Weight::Properties() & kRightSemiring) { Weight d = Divide(p, w2, DIVIDE_RIGHT); if (d.Member()) CHECK(ApproxEqual(p, Times(d, w2))); CHECK(!Divide(w1, Weight::NoWeight(), DIVIDE_RIGHT).Member()); CHECK(!Divide(Weight::NoWeight(), w1, DIVIDE_RIGHT).Member()); } if (Weight::Properties() & kCommutative) { Weight d = Divide(p, w1, DIVIDE_RIGHT); if (d.Member()) CHECK(ApproxEqual(p, Times(d, w1))); } } // Tests reverse operation. void TestReverse(Weight w1, Weight w2) { typedef typename Weight::ReverseWeight ReverseWeight; ReverseWeight rw1 = w1.Reverse(); ReverseWeight rw2 = w2.Reverse(); CHECK(rw1.Reverse() == w1); CHECK(Plus(w1, w2).Reverse() == Plus(rw1, rw2)); CHECK(Times(w1, w2).Reverse() == Times(rw2, rw1)); } // Tests == is an equivalence relation. void TestEquality(Weight w1, Weight w2, Weight w3) { // Checks reflexivity. CHECK(w1 == w1); // Checks symmetry. CHECK((w1 == w2) == (w2 == w1)); // Checks transitivity. if (w1 == w2 && w2 == w3) CHECK(w1 == w3); } // Tests binary serialization and textual I/O. void TestIO(Weight w) { // Tests binary I/O { std::ostringstream os; w.Write(os); os.flush(); std::istringstream is(os.str()); Weight v; v.Read(is); CHECK_EQ(w, v); } // Tests textual I/O. { std::ostringstream os; os << w; std::istringstream is(os.str()); Weight v(Weight::One()); is >> v; CHECK(ApproxEqual(w, v)); } } // Tests copy constructor and assignment operator void TestCopy(Weight w) { Weight x = w; CHECK(w == x); x = Weight(w); CHECK(w == x); x.operator=(x); CHECK(w == x); } // Generates weights used in testing. WeightGenerator weight_generator_; }; } // namespace fst #endif // FST_TEST_WEIGHT_TESTER_H_
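// A minimal usage sketch (the generator below is illustrative): exercising the
// semiring identities for TropicalWeight with a simple pseudo-random source.
//
//   #include <cstdlib>
//   #include <fst/float-weight.h>
//
//   auto generate = []() { return fst::TropicalWeight(std::rand() % 8); };
//   fst::WeightTester<fst::TropicalWeight, decltype(generate)>
//       tester(generate);
//   tester.Test(100);  // 100 random (w1, w2, w3) triples.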
0
coqui_public_repos/inference-engine/third_party/openfst-1.6.9-win/src
coqui_public_repos/inference-engine/third_party/openfst-1.6.9-win/src/bin/fstisomorphic.cc
// See www.openfst.org for extensive documentation on this weighted // finite-state transducer library. #include <fst/flags.h> #include <fst/weight.h> DEFINE_double(delta, fst::kDelta, "Comparison/quantization delta"); int fstisomorphic_main(int argc, char **argv); int main(int argc, char **argv) { return fstisomorphic_main(argc, argv); }
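The binary itself only defines flags and delegates to fstisomorphic_main; the comparison is available in the library as fst::Isomorphic (declared in <fst/isomorphic.h>), which user code can call directly. A small sketch; the file names are placeholders:

#include <iostream>
#include <memory>
#include <fst/fstlib.h>

int main() {
  // "a.fst" and "b.fst" are placeholder paths, not files shipped with the tool.
  std::unique_ptr<fst::StdFst> a(fst::StdFst::Read("a.fst"));
  std::unique_ptr<fst::StdFst> b(fst::StdFst::Read("b.fst"));
  if (!a || !b) return 2;
  // Isomorphic() tests equality up to a relabeling of state IDs.
  const bool iso = fst::Isomorphic(*a, *b, fst::kDelta);
  std::cout << (iso ? "isomorphic" : "not isomorphic") << std::endl;
  return iso ? 0 : 1;
}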
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.7/src/include/fst/complement.h
// See www.openfst.org for extensive documentation on this weighted // finite-state transducer library. // // Class to complement an FST. #ifndef FST_COMPLEMENT_H_ #define FST_COMPLEMENT_H_ #include <algorithm> #include <string> #include <vector> #include <fst/log.h> #include <fst/fst.h> #include <fst/test-properties.h> namespace fst { template <class Arc> class ComplementFst; namespace internal { // Implementation of delayed ComplementFst. The algorithm used completes the // (deterministic) FSA and then exchanges final and non-final states. // Completion, i.e. ensuring that all labels can be read from every state, is // accomplished by using ρ-labels, which match all labels that are otherwise // not found leaving a state. The first state in the output is reserved to be a // new state that is the destination of all ρ-labels. Each remaining output // state s corresponds to input state s - 1. The first arc in the output at // these states is the ρ-label, the remaining arcs correspond to the input // arcs. template <class A> class ComplementFstImpl : public FstImpl<A> { public: using Arc = A; using Label = typename Arc::Label; using StateId = typename Arc::StateId; using Weight = typename Arc::Weight; using FstImpl<A>::SetType; using FstImpl<A>::SetProperties; using FstImpl<A>::SetInputSymbols; using FstImpl<A>::SetOutputSymbols; friend class StateIterator<ComplementFst<Arc>>; friend class ArcIterator<ComplementFst<Arc>>; explicit ComplementFstImpl(const Fst<Arc> &fst) : fst_(fst.Copy()) { SetType("complement"); uint64 props = fst.Properties(kILabelSorted, false); SetProperties(ComplementProperties(props), kCopyProperties); SetInputSymbols(fst.InputSymbols()); SetOutputSymbols(fst.OutputSymbols()); } ComplementFstImpl(const ComplementFstImpl<Arc> &impl) : fst_(impl.fst_->Copy()) { SetType("complement"); SetProperties(impl.Properties(), kCopyProperties); SetInputSymbols(impl.InputSymbols()); SetOutputSymbols(impl.OutputSymbols()); } StateId Start() const { if (Properties(kError)) return kNoStateId; auto start = fst_->Start(); return start != kNoStateId ? start + 1 : 0; } // Exchange final and non-final states; makes ρ-destination state final. Weight Final(StateId s) const { if (s == 0 || fst_->Final(s - 1) == Weight::Zero()) { return Weight::One(); } else { return Weight::Zero(); } } size_t NumArcs(StateId s) const { return s == 0 ? 1 : fst_->NumArcs(s - 1) + 1; } size_t NumInputEpsilons(StateId s) const { return s == 0 ? 0 : fst_->NumInputEpsilons(s - 1); } size_t NumOutputEpsilons(StateId s) const { return s == 0 ? 0 : fst_->NumOutputEpsilons(s - 1); } uint64 Properties() const override { return Properties(kFstProperties); } // Sets error if found, and returns other FST impl properties. uint64 Properties(uint64 mask) const override { if ((mask & kError) && fst_->Properties(kError, false)) { SetProperties(kError, kError); } return FstImpl<Arc>::Properties(mask); } private: std::unique_ptr<const Fst<Arc>> fst_; }; } // namespace internal // Complements an automaton. This is a library-internal operation that // introduces a (negative) ρ-label; use Difference/DifferenceFst in user code, // which will not see this label. This version is a delayed FST. // // This class attaches interface to implementation and handles // reference counting, delegating most methods to ImplToFst. 
template <class A> class ComplementFst : public ImplToFst<internal::ComplementFstImpl<A>> { public: using Arc = A; using Label = typename Arc::Label; using StateId = typename Arc::StateId; using Impl = internal::ComplementFstImpl<Arc>; friend class StateIterator<ComplementFst<Arc>>; friend class ArcIterator<ComplementFst<Arc>>; explicit ComplementFst(const Fst<Arc> &fst) : ImplToFst<Impl>(std::make_shared<Impl>(fst)) { static constexpr auto props = kUnweighted | kNoEpsilons | kIDeterministic | kAcceptor; if (fst.Properties(props, true) != props) { FSTERROR() << "ComplementFst: Argument not an unweighted " << "epsilon-free deterministic acceptor"; GetImpl()->SetProperties(kError, kError); } } // See Fst<>::Copy() for doc. ComplementFst(const ComplementFst<Arc> &fst, bool safe = false) : ImplToFst<Impl>(fst, safe) {} // Gets a copy of this FST. See Fst<>::Copy() for further doc. ComplementFst<Arc> *Copy(bool safe = false) const override { return new ComplementFst<Arc>(*this, safe); } inline void InitStateIterator(StateIteratorData<Arc> *data) const override; inline void InitArcIterator(StateId s, ArcIteratorData<Arc> *data) const override; // Label that represents the ρ-transition; we use a negative value private to // the library and which will preserve FST label sort order. static const Label kRhoLabel = -2; private: using ImplToFst<Impl>::GetImpl; ComplementFst &operator=(const ComplementFst &) = delete; }; template <class Arc> const typename Arc::Label ComplementFst<Arc>::kRhoLabel; // Specialization for ComplementFst. template <class Arc> class StateIterator<ComplementFst<Arc>> : public StateIteratorBase<Arc> { public: using StateId = typename Arc::StateId; explicit StateIterator(const ComplementFst<Arc> &fst) : siter_(*fst.GetImpl()->fst_), s_(0) {} bool Done() const final { return s_ > 0 && siter_.Done(); } StateId Value() const final { return s_; } void Next() final { if (s_ != 0) siter_.Next(); ++s_; } void Reset() final { siter_.Reset(); s_ = 0; } private: StateIterator<Fst<Arc>> siter_; StateId s_; }; // Specialization for ComplementFst. template <class Arc> class ArcIterator<ComplementFst<Arc>> : public ArcIteratorBase<Arc> { public: using StateId = typename Arc::StateId; using Weight = typename Arc::Weight; ArcIterator(const ComplementFst<Arc> &fst, StateId s) : s_(s), pos_(0) { if (s_ != 0) { aiter_.reset(new ArcIterator<Fst<Arc>>(*fst.GetImpl()->fst_, s - 1)); } } bool Done() const final { if (s_ != 0) { return pos_ > 0 && aiter_->Done(); } else { return pos_ > 0; } } // Adds the ρ-label to the ρ destination state. 
const Arc &Value() const final { if (pos_ == 0) { arc_.ilabel = arc_.olabel = ComplementFst<Arc>::kRhoLabel; arc_.weight = Weight::One(); arc_.nextstate = 0; } else { arc_ = aiter_->Value(); ++arc_.nextstate; } return arc_; } void Next() final { if (s_ != 0 && pos_ > 0) aiter_->Next(); ++pos_; } size_t Position() const final { return pos_; } void Reset() final { if (s_ != 0) aiter_->Reset(); pos_ = 0; } void Seek(size_t a) final { if (s_ != 0) { if (a == 0) { aiter_->Reset(); } else { aiter_->Seek(a - 1); } } pos_ = a; } uint32 Flags() const final { return kArcValueFlags; } void SetFlags(uint32, uint32) final {} private: std::unique_ptr<ArcIterator<Fst<Arc>>> aiter_; StateId s_; size_t pos_; mutable Arc arc_; }; template <class Arc> inline void ComplementFst<Arc>::InitStateIterator( StateIteratorData<Arc> *data) const { data->base = new StateIterator<ComplementFst<Arc>>(*this); } template <class Arc> inline void ComplementFst<Arc>::InitArcIterator(StateId s, ArcIteratorData<Arc> *data) const { data->base = new ArcIterator<ComplementFst<Arc>>(*this, s); } // Useful alias when using StdArc. using StdComplementFst = ComplementFst<StdArc>; } // namespace fst #endif // FST_COMPLEMENT_H_
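A short sketch of wrapping a qualifying acceptor; the labels and topology are arbitrary, and in user code one would normally reach this through Difference rather than instantiating ComplementFst directly:

#include <iostream>
#include <fst/fstlib.h>

int main() {
  // Acceptor over label 1 that accepts exactly the string "1"; it satisfies
  // ComplementFst's precondition (unweighted, epsilon-free, deterministic).
  fst::StdVectorFst fst;
  fst.AddState();  // state 0
  fst.AddState();  // state 1
  fst.SetStart(0);
  fst.SetFinal(1, fst::TropicalWeight::One());
  fst.AddArc(0, fst::StdArc(1, 1, fst::TropicalWeight::One(), 1));

  // Delayed complement: state 0 of the result is the new rho-destination
  // state, and arcs labeled ComplementFst<...>::kRhoLabel stand for "any
  // label not otherwise leaving the state".
  fst::StdComplementFst comp(fst);
  for (fst::StateIterator<fst::StdComplementFst> siter(comp); !siter.Done();
       siter.Next()) {
    const auto s = siter.Value();
    std::cout << "state " << s
              << (comp.Final(s) != fst::TropicalWeight::Zero() ? " (final)" : "")
              << "\n";
  }
  return 0;
}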
coqui_public_repos/STT/bazel.patch
diff --git a/src/main/java/com/google/devtools/build/lib/analysis/actions/FileWriteAction.java b/src/main/java/com/google/devtools/build/lib/analysis/actions/FileWriteAction.java index c7aa4cb63..e084bc27c 100644 --- a/src/main/java/com/google/devtools/build/lib/analysis/actions/FileWriteAction.java +++ b/src/main/java/com/google/devtools/build/lib/analysis/actions/FileWriteAction.java @@ -28,6 +28,7 @@ import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.OutputStream; +import java.io.PrintWriter; import java.util.zip.GZIPInputStream; import java.util.zip.GZIPOutputStream; @@ -73,6 +74,8 @@ public final class FileWriteAction extends AbstractFileWriteAction { */ private final CharSequence fileContents; + private final Artifact output; + /** Minimum length (in chars) for content to be eligible for compression. */ private static final int COMPRESS_CHARS_THRESHOLD = 256; @@ -90,6 +93,7 @@ public final class FileWriteAction extends AbstractFileWriteAction { fileContents = new CompressedString((String) fileContents); } this.fileContents = fileContents; + this.output = output; } /** @@ -230,11 +234,32 @@ public final class FileWriteAction extends AbstractFileWriteAction { */ @Override protected String computeKey() { + // System.err.println("src/main/java/com/google/devtools/build/lib/analysis/actions/FileWriteAction.java => output: " + output.getExecPath()); + // ".ckd" Compute Key Debug + PrintWriter computeKeyDebugWriter = null; + String computeKeyDebugFile = output.getExecPath() + ".FileWriteAction.ckd"; + try { + computeKeyDebugWriter = new PrintWriter(computeKeyDebugFile, "UTF-8"); + } catch (java.io.FileNotFoundException ex) { + System.err.println("Unable to create " + computeKeyDebugFile); + } catch (java.io.UnsupportedEncodingException ex) { + System.err.println("Unsupported encoding"); + } + Fingerprint f = new Fingerprint(); f.addString(GUID); + computeKeyDebugWriter.println("GUID: " + GUID); + f.addString(String.valueOf(makeExecutable)); + computeKeyDebugWriter.println("MAKEEXECUTABLE: " + String.valueOf(makeExecutable)); + f.addString(getFileContents()); - return f.hexDigestAndReset(); + computeKeyDebugWriter.println("FILECONTENTS: " + getFileContents()); + + String rv = f.hexDigestAndReset(); + computeKeyDebugWriter.println("KEY: " + rv); + computeKeyDebugWriter.close(); + return rv; } /** diff --git a/src/main/java/com/google/devtools/build/lib/analysis/actions/SpawnAction.java b/src/main/java/com/google/devtools/build/lib/analysis/actions/SpawnAction.java index 580788160..26883eb92 100644 --- a/src/main/java/com/google/devtools/build/lib/analysis/actions/SpawnAction.java +++ b/src/main/java/com/google/devtools/build/lib/analysis/actions/SpawnAction.java @@ -60,6 +60,7 @@ import com.google.devtools.build.lib.util.ShellEscaper; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.protobuf.GeneratedMessage.GeneratedExtension; import java.nio.charset.Charset; +import java.io.PrintWriter; import java.util.ArrayList; import java.util.Collections; import java.util.LinkedHashMap; @@ -91,6 +92,9 @@ public class SpawnAction extends AbstractAction implements ExecutionInfoSpecifie private final CommandLine argv; + private final Iterable<Artifact> inputs; + private final Iterable<Artifact> outputs; + private final boolean executeUnconditionally; private final boolean isShellCommand; private final String progressMessage; @@ -197,6 +201,9 @@ public class SpawnAction extends AbstractAction implements 
ExecutionInfoSpecifie this.mnemonic = mnemonic; this.executeUnconditionally = executeUnconditionally; this.extraActionInfoSupplier = extraActionInfoSupplier; + + this.inputs = inputs; + this.outputs = outputs; } @Override @@ -312,23 +319,89 @@ public class SpawnAction extends AbstractAction implements ExecutionInfoSpecifie @Override protected String computeKey() { + boolean genruleSetup = String.valueOf(Iterables.get(inputs, 0).getExecPath()).contains("genrule/genrule-setup.sh"); + boolean validGenrule = genruleSetup && (Iterables.size(inputs) > 1); + + String genruleScript = null; + if (validGenrule) { + genruleScript = String.valueOf(Iterables.get(inputs, 1).getExecPath()); + } + + // ".ckd" Compute Key Debug + PrintWriter computeKeyDebugWriter = null; + if (validGenrule) { + String computeKeyDebugFile = genruleScript + ".SpawnAction.ckd"; + try { + computeKeyDebugWriter = new PrintWriter(computeKeyDebugFile, "UTF-8"); + } catch (java.io.FileNotFoundException ex) { + System.err.println("Unable to create " + computeKeyDebugFile); + } catch (java.io.UnsupportedEncodingException ex) { + System.err.println("Unsupported encoding"); + } + } + + validGenrule = validGenrule && (computeKeyDebugWriter != null); + Fingerprint f = new Fingerprint(); f.addString(GUID); + if (validGenrule) { computeKeyDebugWriter.println("GUID: " + GUID); } + f.addStrings(argv.arguments()); + if (validGenrule) { + for (String input : argv.arguments()) { + computeKeyDebugWriter.println("ARGUMENTS: " + input); + } + } + f.addString(getMnemonic()); + if (validGenrule) { computeKeyDebugWriter.println("MNEMONIC: " + getMnemonic()); } + // We don't need the toolManifests here, because they are a subset of the inputManifests by // definition and the output of an action shouldn't change whether something is considered a // tool or not. 
f.addPaths(getRunfilesSupplier().getRunfilesDirs()); + if (validGenrule) { + for (PathFragment path : getRunfilesSupplier().getRunfilesDirs()) { + computeKeyDebugWriter.println("RUNFILESDIRS: " + path.getPathString()); + } + } + ImmutableList<Artifact> runfilesManifests = getRunfilesSupplier().getManifests(); f.addInt(runfilesManifests.size()); + if (validGenrule) { computeKeyDebugWriter.println("RUNFILESMANIFESTSSIZE: " + runfilesManifests.size()); } + for (Artifact runfilesManifest : runfilesManifests) { f.addPath(runfilesManifest.getExecPath()); + if (validGenrule) { computeKeyDebugWriter.println("RUNFILESMANIFEST: " + runfilesManifest.getExecPath().getPathString()); } } + f.addStringMap(getEnvironment()); + if (validGenrule) { + for (Map.Entry<String, String> entry : getEnvironment().entrySet()) { + computeKeyDebugWriter.println("ENV: " + entry.getKey() + "=" + entry.getValue()); + } + } + f.addStrings(getClientEnvironmentVariables()); + if (validGenrule) { + for (String input : argv.arguments()) { + computeKeyDebugWriter.println("CLIENTENV: " + input); + } + } + f.addStringMap(getExecutionInfo()); - return f.hexDigestAndReset(); + if (validGenrule) { + for (Map.Entry<String, String> entry : executionInfo.entrySet()) { + computeKeyDebugWriter.println("EXECINFO: " + entry.getKey() + "=" + entry.getValue()); + } + } + + String rv = f.hexDigestAndReset(); + if (validGenrule) { + computeKeyDebugWriter.println("KEY: " + rv); + computeKeyDebugWriter.close(); + } + return rv; } @Override diff --git a/src/main/java/com/google/devtools/build/lib/rules/cpp/CppCompileAction.java b/src/main/java/com/google/devtools/build/lib/rules/cpp/CppCompileAction.java index 3559fffde..3ba39617c 100644 --- a/src/main/java/com/google/devtools/build/lib/rules/cpp/CppCompileAction.java +++ b/src/main/java/com/google/devtools/build/lib/rules/cpp/CppCompileAction.java @@ -1111,10 +1111,30 @@ public class CppCompileAction extends AbstractAction @Override public String computeKey() { + // ".ckd" Compute Key Debug + PrintWriter computeKeyDebugWriter = null; + String computeKeyDebugFile = getInternalOutputFile() + ".CppCompileAction.ckd"; + try { + computeKeyDebugWriter = new PrintWriter(computeKeyDebugFile, "UTF-8"); + } catch (java.io.FileNotFoundException ex) { + System.err.println("Unable to create " + computeKeyDebugFile); + } catch (java.io.UnsupportedEncodingException ex) { + System.err.println("Unsupported encoding"); + } + Fingerprint f = new Fingerprint(); f.addUUID(actionClassId); + computeKeyDebugWriter.println("UUID: " + actionClassId); + f.addStringMap(getEnvironment()); + for (Map.Entry<String, String> entry : getEnvironment().entrySet()) { + computeKeyDebugWriter.println("ENV: " + entry.getKey() + "=" + entry.getValue()); + } + f.addStringMap(executionInfo); + for (Map.Entry<String, String> entry : executionInfo.entrySet()) { + computeKeyDebugWriter.println("EXECINFO: " + entry.getKey() + "=" + entry.getValue()); + } // For the argv part of the cache key, ignore all compiler flags that explicitly denote module // file (.pcm) inputs. Depending on input discovery, some of the unused ones are removed from @@ -1124,6 +1144,9 @@ public class CppCompileAction extends AbstractAction // A better long-term solution would be to make the compiler to find them automatically and // never hand in the .pcm files explicitly on the command line in the first place. 
f.addStrings(compileCommandLine.getArgv(getInternalOutputFile(), null)); + for (String input : compileCommandLine.getArgv(getInternalOutputFile(), null)) { + computeKeyDebugWriter.println("COMMAND: " + input); + } /* * getArgv() above captures all changes which affect the compilation @@ -1133,19 +1156,31 @@ public class CppCompileAction extends AbstractAction * have changed, otherwise we might miss some errors. */ f.addPaths(context.getDeclaredIncludeDirs()); + for (PathFragment path : context.getDeclaredIncludeDirs()) { + computeKeyDebugWriter.println("DECLAREDINCLUDEDIRS: " + path.getPathString()); + } f.addPaths(context.getDeclaredIncludeWarnDirs()); + for (PathFragment path : context.getDeclaredIncludeWarnDirs()) { + computeKeyDebugWriter.println("DECLAREDINCLUDEWARNDIRS: " + path.getPathString()); + } for (Artifact declaredIncludeSrc : context.getDeclaredIncludeSrcs()) { f.addPath(declaredIncludeSrc.getExecPath()); + computeKeyDebugWriter.println("DECLAREDINCLUDESRCS: " + declaredIncludeSrc.getExecPath().getPathString()); } f.addInt(0); // mark the boundary between input types for (Artifact input : getMandatoryInputs()) { f.addPath(input.getExecPath()); + computeKeyDebugWriter.println("MANDATORYINPUTS: " + input.getExecPath().getPathString()); } f.addInt(0); for (Artifact input : prunableInputs) { f.addPath(input.getExecPath()); + computeKeyDebugWriter.println("PRUNABLEINPUTS: " + input.getExecPath().getPathString()); } - return f.hexDigestAndReset(); + String rv = f.hexDigestAndReset(); + computeKeyDebugWriter.println("KEY: " + rv); + computeKeyDebugWriter.close(); + return rv; } @Override
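The patch threads a PrintWriter through each computeKey() implementation so that every field folded into the action's Fingerprint is also written, tagged, to a ".ckd" file beside the output; diffing those files between two builds shows exactly which field changed the cache key. Below is a toy C++ analogue of that log-each-field-then-digest pattern; the FNV-1a hash and all field values are illustrative stand-ins, not Bazel's actual digest or data:

#include <cstdint>
#include <iostream>
#include <string>

// Toy stand-in for a fingerprint accumulator: FNV-1a over each field,
// echoing every contribution the way the patched computeKey() methods
// write "TAG: value" lines into the .ckd debug file.
struct ToyFingerprint {
  uint64_t h = 0xcbf29ce484222325ull;  // FNV-1a 64-bit offset basis
  void Add(const std::string &tag, const std::string &value) {
    for (unsigned char c : value) {
      h ^= c;
      h *= 0x100000001b3ull;  // FNV-1a 64-bit prime
    }
    std::cout << tag << ": " << value << "\n";  // mirrors the .ckd log lines
  }
};

int main() {
  ToyFingerprint f;
  f.Add("GUID", "example-guid");  // placeholder values
  f.Add("MNEMONIC", "Genrule");
  f.Add("ARGUMENTS", "echo hello");
  // Two runs print the same KEY unless one of the logged fields differs.
  std::cout << "KEY: " << std::hex << f.h << "\n";
  return 0;
}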
coqui_public_repos/inference-engine/third_party/kenlm/lm/partial.hh
#ifndef LM_PARTIAL_H #define LM_PARTIAL_H #include "lm/return.hh" #include "lm/state.hh" #include <algorithm> #include <cassert> namespace lm { namespace ngram { struct ExtendReturn { float adjust; bool make_full; unsigned char next_use; }; template <class Model> ExtendReturn ExtendLoop( const Model &model, unsigned char seen, const WordIndex *add_rbegin, const WordIndex *add_rend, const float *backoff_start, const uint64_t *pointers, const uint64_t *pointers_end, uint64_t *&pointers_write, float *backoff_write) { unsigned char add_length = add_rend - add_rbegin; float backoff_buf[2][KENLM_MAX_ORDER - 1]; float *backoff_in = backoff_buf[0], *backoff_out = backoff_buf[1]; std::copy(backoff_start, backoff_start + add_length, backoff_in); ExtendReturn value; value.make_full = false; value.adjust = 0.0; value.next_use = add_length; unsigned char i = 0; unsigned char length = pointers_end - pointers; // pointers_write is NULL means that the existing left state is full, so we should use completed probabilities. if (pointers_write) { // Using full context, writing to new left state. for (; i < length; ++i) { FullScoreReturn ret(model.ExtendLeft( add_rbegin, add_rbegin + value.next_use, backoff_in, pointers[i], i + seen + 1, backoff_out, value.next_use)); std::swap(backoff_in, backoff_out); if (ret.independent_left) { value.adjust += ret.prob; value.make_full = true; ++i; break; } value.adjust += ret.rest; *pointers_write++ = ret.extend_left; if (value.next_use != add_length) { value.make_full = true; ++i; break; } } } // Using some of the new context. for (; i < length && value.next_use; ++i) { FullScoreReturn ret(model.ExtendLeft( add_rbegin, add_rbegin + value.next_use, backoff_in, pointers[i], i + seen + 1, backoff_out, value.next_use)); std::swap(backoff_in, backoff_out); value.adjust += ret.prob; } float unrest = model.UnRest(pointers + i, pointers_end, i + seen + 1); // Using none of the new context. value.adjust += unrest; std::copy(backoff_in, backoff_in + value.next_use, backoff_write); return value; } template <class Model> float RevealBefore(const Model &model, const Right &reveal, const unsigned char seen, bool reveal_full, Left &left, Right &right) { assert(seen < reveal.length || reveal_full); uint64_t *pointers_write = reveal_full ? NULL : left.pointers; float backoff_buffer[KENLM_MAX_ORDER - 1]; ExtendReturn value(ExtendLoop( model, seen, reveal.words + seen, reveal.words + reveal.length, reveal.backoff + seen, left.pointers, left.pointers + left.length, pointers_write, left.full ? backoff_buffer : (right.backoff + right.length))); if (reveal_full) { left.length = 0; value.make_full = true; } else { left.length = pointers_write - left.pointers; value.make_full |= (left.length == model.Order() - 1); } if (left.full) { for (unsigned char i = 0; i < value.next_use; ++i) value.adjust += backoff_buffer[i]; } else { // If left wasn't full when it came in, put words into right state. std::copy(reveal.words + seen, reveal.words + seen + value.next_use, right.words + right.length); right.length += value.next_use; left.full = value.make_full || (right.length == model.Order() - 1); } return value.adjust; } template <class Model> float RevealAfter(const Model &model, Left &left, Right &right, const Left &reveal, unsigned char seen) { assert(seen < reveal.length || reveal.full); uint64_t *pointers_write = left.full ? 
NULL : (left.pointers + left.length); ExtendReturn value(ExtendLoop( model, seen, right.words, right.words + right.length, right.backoff, reveal.pointers + seen, reveal.pointers + reveal.length, pointers_write, right.backoff)); if (reveal.full) { for (unsigned char i = 0; i < value.next_use; ++i) value.adjust += right.backoff[i]; right.length = 0; value.make_full = true; } else { right.length = value.next_use; value.make_full |= (right.length == model.Order() - 1); } if (!left.full) { left.length = pointers_write - left.pointers; left.full = value.make_full || (left.length == model.Order() - 1); } return value.adjust; } template <class Model> float Subsume(const Model &model, Left &first_left, const Right &first_right, const Left &second_left, Right &second_right, const unsigned int between_length) { assert(first_right.length < KENLM_MAX_ORDER); assert(second_left.length < KENLM_MAX_ORDER); assert(between_length < KENLM_MAX_ORDER - 1); uint64_t *pointers_write = first_left.full ? NULL : (first_left.pointers + first_left.length); float backoff_buffer[KENLM_MAX_ORDER - 1]; ExtendReturn value(ExtendLoop( model, between_length, first_right.words, first_right.words + first_right.length, first_right.backoff, second_left.pointers, second_left.pointers + second_left.length, pointers_write, second_left.full ? backoff_buffer : (second_right.backoff + second_right.length))); if (second_left.full) { for (unsigned char i = 0; i < value.next_use; ++i) value.adjust += backoff_buffer[i]; } else { std::copy(first_right.words, first_right.words + value.next_use, second_right.words + second_right.length); second_right.length += value.next_use; value.make_full |= (second_right.length == model.Order() - 1); } if (!first_left.full) { first_left.length = pointers_write - first_left.pointers; first_left.full = value.make_full || second_left.full || (first_left.length == model.Order() - 1); } assert(first_left.length < KENLM_MAX_ORDER); assert(second_right.length < KENLM_MAX_ORDER); return value.adjust; } } // namespace ngram } // namespace lm #endif // LM_PARTIAL_H
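The functions above are building blocks for rescoring hypotheses whose context is only partially known. The sketch below is one way they might be driven, assuming the ChartState/RuleScore API from lm/left.hh (also part of KenLM) to produce the Left/Right states, and a placeholder model file "test.arpa". It scores two one-word constituents separately, then charges the adjustment for joining them:

#include <iostream>
#include "lm/left.hh"
#include "lm/model.hh"
#include "lm/partial.hh"

int main() {
  using namespace lm::ngram;
  Model model("test.arpa");  // placeholder model path
  const Model::Vocabulary &vocab = model.GetVocabulary();

  // Score two adjacent one-word constituents independently.
  ChartState first, second;
  {
    RuleScore<Model> scorer(model, first);
    scorer.Terminal(vocab.Index("the"));
    scorer.Finish();
  }
  {
    RuleScore<Model> scorer(model, second);
    scorer.Terminal(vocab.Index("cat"));
    scorer.Finish();
  }

  // Subsume merges the states so that (first.left, second.right) describes
  // the concatenation, returning the log10 probability adjustment to add to
  // the two independent scores.
  float adjust = Subsume(model, first.left, first.right, second.left,
                         second.right, /*between_length=*/0);
  std::cout << "adjustment: " << adjust << "\n";
  return 0;
}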
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.7/src/include/fst/extensions/pdt/pdt.h
// See www.openfst.org for extensive documentation on this weighted // finite-state transducer library. // // Common classes for PDT expansion/traversal. #ifndef FST_EXTENSIONS_PDT_PDT_H_ #define FST_EXTENSIONS_PDT_PDT_H_ #include <map> #include <set> #include <unordered_map> #include <fst/compat.h> #include <fst/log.h> #include <fst/fst.h> #include <fst/state-table.h> namespace fst { // Provides bijection between parenthesis stacks and signed integral stack IDs. // Each stack ID is unique to each distinct stack. The open-close parenthesis // label pairs are passed using the parens argument. template <typename StackId, typename Label> class PdtStack { public: // The stacks are stored in a tree. The nodes are stored in a vector. Each // node represents the top of some stack and is identified by its position in // the vector. Its' parent node represents the stack with the top popped and // its children are stored in child_map_ and accessed by stack_id and label. // The paren_id is // the position in parens of the parenthesis for that node. struct StackNode { StackId parent_id; size_t paren_id; StackNode(StackId p, size_t i) : parent_id(p), paren_id(i) {} }; explicit PdtStack(const std::vector<std::pair<Label, Label>> &parens) : parens_(parens), min_paren_(kNoLabel), max_paren_(kNoLabel) { for (size_t i = 0; i < parens.size(); ++i) { const auto &pair = parens[i]; paren_map_[pair.first] = i; paren_map_[pair.second] = i; if (min_paren_ == kNoLabel || pair.first < min_paren_) { min_paren_ = pair.first; } if (pair.second < min_paren_) min_paren_ = pair.second; if (max_paren_ == kNoLabel || pair.first > max_paren_) { max_paren_ = pair.first; } if (pair.second > max_paren_) max_paren_ = pair.second; } nodes_.push_back(StackNode(-1, -1)); // Tree root. } // Returns stack ID given the current stack ID (0 if empty) and label read. // Pushes onto the stack if the label is an open parenthesis, returning the // new stack ID. Pops the stack if the label is a close parenthesis that // matches the top of the stack, returning the parent stack ID. Returns -1 if // label is an unmatched close parenthesis. Otherwise, returns the current // stack ID. StackId Find(StackId stack_id, Label label) { if (min_paren_ == kNoLabel || label < min_paren_ || label > max_paren_) { return stack_id; // Non-paren. } const auto it = paren_map_.find(label); // Non-paren. if (it == paren_map_.end()) return stack_id; const auto paren_id = it->second; // Open paren. if (label == parens_[paren_id].first) { auto &child_id = child_map_[std::make_pair(stack_id, label)]; if (child_id == 0) { // Child not found; pushes label. child_id = nodes_.size(); nodes_.push_back(StackNode(stack_id, paren_id)); } return child_id; } const auto &node = nodes_[stack_id]; // Matching close paren. if (paren_id == node.paren_id) return node.parent_id; // Non-matching close paren. return -1; } // Returns the stack ID obtained by popping the label at the top of the // current stack ID. StackId Pop(StackId stack_id) const { return nodes_[stack_id].parent_id; } // Returns the paren ID at the top of the stack. ssize_t Top(StackId stack_id) const { return nodes_[stack_id].paren_id; } ssize_t ParenId(Label label) const { const auto it = paren_map_.find(label); if (it == paren_map_.end()) return -1; // Non-paren. 
return it->second; } private: struct ChildHash { size_t operator()(const std::pair<StackId, Label> &pair) const { static constexpr size_t prime = 7853; return static_cast<size_t>(pair.first) + static_cast<size_t>(pair.second) * prime; } }; std::vector<std::pair<Label, Label>> parens_; std::vector<StackNode> nodes_; std::unordered_map<Label, size_t> paren_map_; // Child of stack node w.r.t label std::unordered_map<std::pair<StackId, Label>, StackId, ChildHash> child_map_; Label min_paren_; Label max_paren_; }; // State tuple for PDT expansion. template <typename S, typename K> struct PdtStateTuple { using StateId = S; using StackId = K; StateId state_id; StackId stack_id; PdtStateTuple(StateId state_id = kNoStateId, StackId stack_id = -1) : state_id(state_id), stack_id(stack_id) {} }; // Equality of PDT state tuples. template <typename S, typename K> inline bool operator==(const PdtStateTuple<S, K> &x, const PdtStateTuple<S, K> &y) { if (&x == &y) return true; return x.state_id == y.state_id && x.stack_id == y.stack_id; } // Hash function object for PDT state tuples template <class T> class PdtStateHash { public: size_t operator()(const T &tuple) const { static constexpr auto prime = 7853; return tuple.state_id + tuple.stack_id * prime; } }; // Tuple to PDT state bijection. template <class StateId, class StackId> class PdtStateTable : public CompactHashStateTable< PdtStateTuple<StateId, StackId>, PdtStateHash<PdtStateTuple<StateId, StackId>>> { public: PdtStateTable() {} PdtStateTable(const PdtStateTable &other) {} private: PdtStateTable &operator=(const PdtStateTable &) = delete; }; } // namespace fst #endif // FST_EXTENSIONS_PDT_PDT_H_
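A small sketch of the PdtStack push/pop bijection, using one illustrative parenthesis pair (labels 1 and 2); Find() behaves as documented above, returning the current ID for non-parens and -1 for an unmatched close:

#include <cassert>
#include <iostream>
#include <utility>
#include <vector>
#include <fst/extensions/pdt/pdt.h>

int main() {
  // One parenthesis pair: label 1 opens, label 2 closes (labels arbitrary).
  const std::vector<std::pair<int, int>> parens = {{1, 2}};
  fst::PdtStack<int, int> stack(parens);

  const int empty = 0;                       // stack ID 0 is the empty stack
  const int pushed = stack.Find(empty, 1);   // push '('
  assert(pushed > 0);
  assert(stack.Find(pushed, 3) == pushed);   // label 3 is no paren: unchanged
  assert(stack.Find(pushed, 2) == empty);    // matching ')': pop to parent
  std::cout << "unmatched close: " << stack.Find(empty, 2) << "\n";  // -1
  return 0;
}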
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.9-win/src/include/fst/accumulator.h
// See www.openfst.org for extensive documentation on this weighted // finite-state transducer library. // // Classes to accumulate arc weights. Useful for weight lookahead. #ifndef FST_ACCUMULATOR_H_ #define FST_ACCUMULATOR_H_ #include <algorithm> #include <functional> #include <unordered_map> #include <vector> #include <fst/log.h> #include <fst/arcfilter.h> #include <fst/arcsort.h> #include <fst/dfs-visit.h> #include <fst/expanded-fst.h> #include <fst/replace.h> namespace fst { // This class accumulates arc weights using the semiring Plus(). template <class A> class DefaultAccumulator { public: using Arc = A; using StateId = typename Arc::StateId; using Weight = typename Arc::Weight; DefaultAccumulator() {} DefaultAccumulator(const DefaultAccumulator &acc, bool safe = false) {} void Init(const Fst<Arc> &fst, bool copy = false) {} void SetState(StateId state) {} Weight Sum(Weight w, Weight v) { return Plus(w, v); } template <class ArcIter> Weight Sum(Weight w, ArcIter *aiter, std::ptrdiff_t begin, std::ptrdiff_t end) { Adder<Weight> adder(w); // maintains cumulative sum accurately aiter->Seek(begin); for (auto pos = begin; pos < end; aiter->Next(), ++pos) adder.Add(aiter->Value().weight); return adder.Sum(); } constexpr bool Error() const { return false; } private: DefaultAccumulator &operator=(const DefaultAccumulator &) = delete; }; // This class accumulates arc weights using the log semiring Plus() assuming an // arc weight has a WeightConvert specialization to and from log64 weights. template <class A> class LogAccumulator { public: using Arc = A; using StateId = typename Arc::StateId; using Weight = typename Arc::Weight; LogAccumulator() {} LogAccumulator(const LogAccumulator &acc, bool safe = false) {} void Init(const Fst<Arc> &fst, bool copy = false) {} void SetState(StateId s) {} Weight Sum(Weight w, Weight v) { return LogPlus(w, v); } template <class ArcIter> Weight Sum(Weight w, ArcIter *aiter, std::ptrdiff_t begin, std::ptrdiff_t end) { auto sum = w; aiter->Seek(begin); for (auto pos = begin; pos < end; aiter->Next(), ++pos) { sum = LogPlus(sum, aiter->Value().weight); } return sum; } constexpr bool Error() const { return false; } private: Weight LogPlus(Weight w, Weight v) { if (w == Weight::Zero()) { return v; } const auto f1 = to_log_weight_(w).Value(); const auto f2 = to_log_weight_(v).Value(); if (f1 > f2) { return to_weight_(Log64Weight(f2 - internal::LogPosExp(f1 - f2))); } else { return to_weight_(Log64Weight(f1 - internal::LogPosExp(f2 - f1))); } } WeightConvert<Weight, Log64Weight> to_log_weight_; WeightConvert<Log64Weight, Weight> to_weight_; LogAccumulator &operator=(const LogAccumulator &) = delete; }; // Interface for shareable data for fast log accumulator copies. Holds pointers // to data only, storage is provided by derived classes. class FastLogAccumulatorData { public: FastLogAccumulatorData(int arc_limit, int arc_period) : arc_limit_(arc_limit), arc_period_(arc_period), weights_ptr_(nullptr), num_weights_(0), weight_positions_ptr_(nullptr), num_positions_(0) {} virtual ~FastLogAccumulatorData() {} // Cumulative weight per state for all states s.t. # of arcs > arc_limit_ // with arcs in order. The first element per state is Log64Weight::Zero(). const double *Weights() const { return weights_ptr_; } int NumWeights() const { return num_weights_; } // Maps from state to corresponding beginning weight position in weights_. // Position -1 means no pre-computed weights for that state.
const int *WeightPositions() const { return weight_positions_ptr_; } int NumPositions() const { return num_positions_; } int ArcLimit() const { return arc_limit_; } int ArcPeriod() const { return arc_period_; } // Returns true if the data object is mutable and supports SetData(). virtual bool IsMutable() const = 0; // Does not take ownership but may invalidate the contents of weights and // weight_positions. virtual void SetData(std::vector<double> *weights, std::vector<int> *weight_positions) = 0; protected: void Init(int num_weights, const double *weights, int num_positions, const int *weight_positions) { weights_ptr_ = weights; num_weights_ = num_weights; weight_positions_ptr_ = weight_positions; num_positions_ = num_positions; } private: const int arc_limit_; const int arc_period_; const double *weights_ptr_; int num_weights_; const int *weight_positions_ptr_; int num_positions_; FastLogAccumulatorData(const FastLogAccumulatorData &) = delete; FastLogAccumulatorData &operator=(const FastLogAccumulatorData &) = delete; }; // FastLogAccumulatorData with mutable storage; filled by // FastLogAccumulator::Init. class MutableFastLogAccumulatorData : public FastLogAccumulatorData { public: MutableFastLogAccumulatorData(int arc_limit, int arc_period) : FastLogAccumulatorData(arc_limit, arc_period) {} bool IsMutable() const override { return true; } void SetData(std::vector<double> *weights, std::vector<int> *weight_positions) override { weights_.swap(*weights); weight_positions_.swap(*weight_positions); Init(weights_.size(), weights_.data(), weight_positions_.size(), weight_positions_.data()); } private: std::vector<double> weights_; std::vector<int> weight_positions_; MutableFastLogAccumulatorData(const MutableFastLogAccumulatorData &) = delete; MutableFastLogAccumulatorData &operator=( const MutableFastLogAccumulatorData &) = delete; }; // This class accumulates arc weights using the log semiring Plus() assuming an // arc weight has a WeightConvert specialization to and from log64 weights. The // member function Init(fst) has to be called to setup pre-computed weight // information. 
template <class A> class FastLogAccumulator { public: using Arc = A; using StateId = typename Arc::StateId; using Weight = typename Arc::Weight; explicit FastLogAccumulator(std::ptrdiff_t arc_limit = 20, std::ptrdiff_t arc_period = 10) : to_log_weight_(), to_weight_(), arc_limit_(arc_limit), arc_period_(arc_period), data_(std::make_shared<MutableFastLogAccumulatorData>(arc_limit, arc_period)), state_weights_(nullptr), error_(false) {} explicit FastLogAccumulator(std::shared_ptr<FastLogAccumulatorData> data) : to_log_weight_(), to_weight_(), arc_limit_(data->ArcLimit()), arc_period_(data->ArcPeriod()), data_(data), state_weights_(nullptr), error_(false) {} FastLogAccumulator(const FastLogAccumulator<Arc> &acc, bool safe = false) : to_log_weight_(), to_weight_(), arc_limit_(acc.arc_limit_), arc_period_(acc.arc_period_), data_(acc.data_), state_weights_(nullptr), error_(acc.error_) {} void SetState(StateId s) { const auto *weights = data_->Weights(); const auto *weight_positions = data_->WeightPositions(); state_weights_ = nullptr; if (s < data_->NumPositions()) { const auto pos = weight_positions[s]; if (pos >= 0) state_weights_ = &(weights[pos]); } } Weight Sum(Weight w, Weight v) const { return LogPlus(w, v); } template <class ArcIter> Weight Sum(Weight w, ArcIter *aiter, std::ptrdiff_t begin, std::ptrdiff_t end) const { if (error_) return Weight::NoWeight(); auto sum = w; // Finds begin and end of pre-stored weights. std::ptrdiff_t index_begin = -1; std::ptrdiff_t index_end = -1; std::ptrdiff_t stored_begin = end; std::ptrdiff_t stored_end = end; if (state_weights_) { index_begin = begin > 0 ? (begin - 1) / arc_period_ + 1 : 0; index_end = end / arc_period_; stored_begin = index_begin * arc_period_; stored_end = index_end * arc_period_; } // Computes sum before pre-stored weights. if (begin < stored_begin) { const auto pos_end = std::min(stored_begin, end); aiter->Seek(begin); for (auto pos = begin; pos < pos_end; aiter->Next(), ++pos) { sum = LogPlus(sum, aiter->Value().weight); } } // Computes sum between pre-stored weights. if (stored_begin < stored_end) { const auto f1 = state_weights_[index_end]; const auto f2 = state_weights_[index_begin]; if (f1 < f2) sum = LogPlus(sum, LogMinus(f1, f2)); // Commented out for efficiency; adds Zero(). /* else { // explicitly computes if cumulative sum lacks precision aiter->Seek(stored_begin); for (auto pos = stored_begin; pos < stored_end; aiter->Next(), ++pos) sum = LogPlus(sum, aiter->Value().weight); } */ } // Computes sum after pre-stored weights. 
if (stored_end < end) { const auto pos_start = std::max(stored_begin, stored_end); aiter->Seek(pos_start); for (auto pos = pos_start; pos < end; aiter->Next(), ++pos) { sum = LogPlus(sum, aiter->Value().weight); } } return sum; } template <class FST> void Init(const FST &fst, bool copy = false) { if (copy || !data_->IsMutable()) return; if (data_->NumPositions() != 0 || arc_limit_ < arc_period_) { FSTERROR() << "FastLogAccumulator: Initialization error"; error_ = true; return; } std::vector<double> weights; std::vector<int> weight_positions; weight_positions.reserve(CountStates(fst)); for (StateIterator<FST> siter(fst); !siter.Done(); siter.Next()) { const auto s = siter.Value(); if (fst.NumArcs(s) >= arc_limit_) { auto sum = FloatLimits<double>::PosInfinity(); if (weight_positions.size() <= s) weight_positions.resize(s + 1, -1); weight_positions[s] = weights.size(); weights.push_back(sum); size_t narcs = 0; ArcIterator<FST> aiter(fst, s); aiter.SetFlags(kArcWeightValue | kArcNoCache, kArcFlags); for (; !aiter.Done(); aiter.Next()) { const auto &arc = aiter.Value(); sum = LogPlus(sum, arc.weight); // Stores cumulative weight distribution per arc_period_. if (++narcs % arc_period_ == 0) weights.push_back(sum); } } } data_->SetData(&weights, &weight_positions); } bool Error() const { return error_; } std::shared_ptr<FastLogAccumulatorData> GetData() const { return data_; } private: static double LogPosExp(double x) { return x == FloatLimits<double>::PosInfinity() ? 0.0 : log(1.0F + exp(-x)); } static double LogMinusExp(double x) { return x == FloatLimits<double>::PosInfinity() ? 0.0 : log(1.0F - exp(-x)); } Weight LogPlus(Weight w, Weight v) const { if (w == Weight::Zero()) { return v; } const auto f1 = to_log_weight_(w).Value(); const auto f2 = to_log_weight_(v).Value(); if (f1 > f2) { return to_weight_(Log64Weight(f2 - LogPosExp(f1 - f2))); } else { return to_weight_(Log64Weight(f1 - LogPosExp(f2 - f1))); } } double LogPlus(double f1, Weight v) const { const auto f2 = to_log_weight_(v).Value(); if (f1 == FloatLimits<double>::PosInfinity()) { return f2; } else if (f1 > f2) { return f2 - LogPosExp(f1 - f2); } else { return f1 - LogPosExp(f2 - f1); } } // Assumes f1 < f2. Weight LogMinus(double f1, double f2) const { if (f2 == FloatLimits<double>::PosInfinity()) { return to_weight_(Log64Weight(f1)); } else { return to_weight_(Log64Weight(f1 - LogMinusExp(f2 - f1))); } } const WeightConvert<Weight, Log64Weight> to_log_weight_; const WeightConvert<Log64Weight, Weight> to_weight_; const std::ptrdiff_t arc_limit_; // Minimum number of arcs to pre-compute state. const std::ptrdiff_t arc_period_; // Saves cumulative weights per arc_period_. std::shared_ptr<FastLogAccumulatorData> data_; const double *state_weights_; bool error_; FastLogAccumulator &operator=(const FastLogAccumulator &) = delete; }; // Stores shareable data for cache log accumulator copies. All copies share the // same cache. 
template <class Arc> class CacheLogAccumulatorData { public: using StateId = typename Arc::StateId; using Weight = typename Arc::Weight; CacheLogAccumulatorData(bool gc, size_t gc_limit) : cache_gc_(gc), cache_limit_(gc_limit), cache_size_(0) {} CacheLogAccumulatorData(const CacheLogAccumulatorData<Arc> &data) : cache_gc_(data.cache_gc_), cache_limit_(data.cache_limit_), cache_size_(0) {} bool CacheDisabled() const { return cache_gc_ && cache_limit_ == 0; } std::vector<double> *GetWeights(StateId s) { auto it = cache_.find(s); if (it != cache_.end()) { it->second.recent = true; return it->second.weights.get(); } else { return nullptr; } } void AddWeights(StateId s, std::vector<double> *weights) { if (cache_gc_ && cache_size_ >= cache_limit_) GC(false); cache_.insert(std::make_pair(s, CacheState(weights, true))); if (cache_gc_) cache_size_ += weights->capacity() * sizeof(double); } private: // Cached information for a given state. struct CacheState { std::unique_ptr<std::vector<double>> weights; // Accumulated weights. bool recent; // Has this state been accessed since last GC? CacheState(std::vector<double> *weights, bool recent) : weights(weights), recent(recent) {} }; // Garbage collect: Deletes from cache states that have not been accessed // since the last GC ('free_recent = false') until 'cache_size_' is 2/3 of // 'cache_limit_'. If it does not free enough memory, start deleting // recently accessed states. void GC(bool free_recent) { auto cache_target = (2 * cache_limit_) / 3 + 1; auto it = cache_.begin(); while (it != cache_.end() && cache_size_ > cache_target) { auto &cs = it->second; if (free_recent || !cs.recent) { cache_size_ -= cs.weights->capacity() * sizeof(double); cache_.erase(it++); } else { cs.recent = false; ++it; } } if (!free_recent && cache_size_ > cache_target) GC(true); } std::unordered_map<StateId, CacheState> cache_; // Cache. bool cache_gc_; // Enables garbage collection. size_t cache_limit_; // # of bytes allowed before GC. size_t cache_size_; // # of bytes cached. CacheLogAccumulatorData &operator=(const CacheLogAccumulatorData &) = delete; }; // This class accumulates arc weights using the log semiring Plus() assuming an // arc weight has a WeightConvert specialization to and from log64 weights. It is similar to the // FastLogAccumulator. However, here the accumulated weights are pre-computed and // stored only for the states that are visited. The member function Init(fst) // has to be called to set up this accumulator. template <class Arc> class CacheLogAccumulator { public: using StateId = typename Arc::StateId; using Weight = typename Arc::Weight; explicit CacheLogAccumulator(std::ptrdiff_t arc_limit = 10, bool gc = false, size_t gc_limit = 10 * 1024 * 1024) : arc_limit_(arc_limit), data_(std::make_shared<CacheLogAccumulatorData<Arc>>(gc, gc_limit)), s_(kNoStateId), error_(false) {} CacheLogAccumulator(const CacheLogAccumulator<Arc> &acc, bool safe = false) : arc_limit_(acc.arc_limit_), fst_(acc.fst_ ? acc.fst_->Copy() : nullptr), data_(safe ? std::make_shared<CacheLogAccumulatorData<Arc>>(*acc.data_) : acc.data_), s_(kNoStateId), error_(acc.error_) {} // Argument arc_limit specifies the minimum number of arcs to pre-compute.
void Init(const Fst<Arc> &fst, bool copy = false) { if (!copy && fst_) { FSTERROR() << "CacheLogAccumulator: Initialization error"; error_ = true; return; } fst_.reset(fst.Copy()); } void SetState(StateId s, int depth = 0) { if (s == s_) return; s_ = s; if (data_->CacheDisabled() || error_) { weights_ = nullptr; return; } if (!fst_) { FSTERROR() << "CacheLogAccumulator::SetState: Incorrectly initialized"; error_ = true; weights_ = nullptr; return; } weights_ = data_->GetWeights(s); if ((weights_ == nullptr) && (fst_->NumArcs(s) >= arc_limit_)) { weights_ = new std::vector<double>; weights_->reserve(fst_->NumArcs(s) + 1); weights_->push_back(FloatLimits<double>::PosInfinity()); data_->AddWeights(s, weights_); } } Weight Sum(Weight w, Weight v) { return LogPlus(w, v); } template <class ArcIter> Weight Sum(Weight w, ArcIter *aiter, std::ptrdiff_t begin, std::ptrdiff_t end) { if (weights_ == nullptr) { auto sum = w; aiter->Seek(begin); for (auto pos = begin; pos < end; aiter->Next(), ++pos) { sum = LogPlus(sum, aiter->Value().weight); } return sum; } else { Extend(end, aiter); const auto &f1 = (*weights_)[end]; const auto &f2 = (*weights_)[begin]; if (f1 < f2) { return LogPlus(w, LogMinus(f1, f2)); } else { // Commented out for efficiency; adds Zero(). /* auto sum = w; // Explicitly computes if cumulative sum lacks precision. aiter->Seek(begin); for (auto pos = begin; pos < end; aiter->Next(), ++pos) { sum = LogPlus(sum, aiter->Value().weight); } return sum; */ return w; } } } // Returns first position from aiter->Position() whose accumulated // value is greater or equal to w (w.r.t. Zero() < One()). The // iterator may be repositioned. template <class ArcIter> size_t LowerBound(Weight w, ArcIter *aiter) { const auto f = to_log_weight_(w).Value(); auto pos = aiter->Position(); if (weights_) { Extend(fst_->NumArcs(s_), aiter); return std::lower_bound(weights_->begin() + pos + 1, weights_->end(), f, std::greater<double>()) - weights_->begin() - 1; } else { size_t n = 0; auto x = FloatLimits<double>::PosInfinity(); for (aiter->Reset(); !aiter->Done(); aiter->Next(), ++n) { x = LogPlus(x, aiter->Value().weight); if (n >= pos && x <= f) break; } return n; } } bool Error() const { return error_; } private: double LogPosExp(double x) { return x == FloatLimits<double>::PosInfinity() ? 0.0 : log(1.0F + exp(-x)); } double LogMinusExp(double x) { return x == FloatLimits<double>::PosInfinity() ? 0.0 : log(1.0F - exp(-x)); } Weight LogPlus(Weight w, Weight v) { if (w == Weight::Zero()) { return v; } const auto f1 = to_log_weight_(w).Value(); const auto f2 = to_log_weight_(v).Value(); if (f1 > f2) { return to_weight_(Log64Weight(f2 - LogPosExp(f1 - f2))); } else { return to_weight_(Log64Weight(f1 - LogPosExp(f2 - f1))); } } double LogPlus(double f1, Weight v) { const auto f2 = to_log_weight_(v).Value(); if (f1 == FloatLimits<double>::PosInfinity()) { return f2; } else if (f1 > f2) { return f2 - LogPosExp(f1 - f2); } else { return f1 - LogPosExp(f2 - f1); } } // Assumes f1 < f2. Weight LogMinus(double f1, double f2) { if (f2 == FloatLimits<double>::PosInfinity()) { return to_weight_(Log64Weight(f1)); } else { return to_weight_(Log64Weight(f1 - LogMinusExp(f2 - f1))); } } // Extends weights up to index 'end'. 
template <class ArcIter> void Extend(std::ptrdiff_t end, ArcIter *aiter) { if (weights_->size() <= end) { for (aiter->Seek(weights_->size() - 1); weights_->size() <= end; aiter->Next()) { weights_->push_back(LogPlus(weights_->back(), aiter->Value().weight)); } } } WeightConvert<Weight, Log64Weight> to_log_weight_; WeightConvert<Log64Weight, Weight> to_weight_; std::ptrdiff_t arc_limit_; // Minimum # of arcs to cache a state. std::vector<double> *weights_; // Accumulated weights for cur. state. std::unique_ptr<const Fst<Arc>> fst_; // Input FST. std::shared_ptr<CacheLogAccumulatorData<Arc>> data_; // Cache data. StateId s_; // Current state. bool error_; }; // Stores shareable data for replace accumulator copies. template <class Accumulator, class T> class ReplaceAccumulatorData { public: using Arc = typename Accumulator::Arc; using Label = typename Arc::Label; using StateId = typename Arc::StateId; using StateTable = T; using StateTuple = typename StateTable::StateTuple; ReplaceAccumulatorData() : state_table_(nullptr) {} explicit ReplaceAccumulatorData( const std::vector<Accumulator *> &accumulators) : state_table_(nullptr) { accumulators_.reserve(accumulators.size()); for (const auto accumulator : accumulators) { accumulators_.emplace_back(accumulator); } } void Init(const std::vector<std::pair<Label, const Fst<Arc> *>> &fst_tuples, const StateTable *state_table) { state_table_ = state_table; accumulators_.resize(fst_tuples.size()); for (Label i = 0; i < accumulators_.size(); ++i) { if (!accumulators_[i]) { accumulators_[i].reset(new Accumulator()); accumulators_[i]->Init(*(fst_tuples[i].second)); } fst_array_.emplace_back(fst_tuples[i].second->Copy()); } } const StateTuple &GetTuple(StateId s) const { return state_table_->Tuple(s); } Accumulator *GetAccumulator(size_t i) { return accumulators_[i].get(); } const Fst<Arc> *GetFst(size_t i) const { return fst_array_[i].get(); } private: const StateTable *state_table_; std::vector<std::unique_ptr<Accumulator>> accumulators_; std::vector<std::unique_ptr<const Fst<Arc>>> fst_array_; }; // This class accumulates weights in a ReplaceFst. The 'Init' method takes as // input the argument used to build the ReplaceFst and the ReplaceFst state // table. It uses accumulators of type 'Accumulator' in the underlying FSTs. template <class Accumulator, class T = DefaultReplaceStateTable<typename Accumulator::Arc>> class ReplaceAccumulator { public: using Arc = typename Accumulator::Arc; using Label = typename Arc::Label; using StateId = typename Arc::StateId; using StateTable = T; using StateTuple = typename StateTable::StateTuple; using Weight = typename Arc::Weight; ReplaceAccumulator() : init_(false), data_(std::make_shared< ReplaceAccumulatorData<Accumulator, StateTable>>()), error_(false) {} explicit ReplaceAccumulator(const std::vector<Accumulator *> &accumulators) : init_(false), data_(std::make_shared<ReplaceAccumulatorData<Accumulator, StateTable>>( accumulators)), error_(false) {} ReplaceAccumulator(const ReplaceAccumulator<Accumulator, StateTable> &acc, bool safe = false) : init_(acc.init_), data_(acc.data_), error_(acc.error_) { if (!init_) { FSTERROR() << "ReplaceAccumulator: Can't copy unintialized accumulator"; } if (safe) FSTERROR() << "ReplaceAccumulator: Safe copy not supported"; } // Does not take ownership of the state table, the state table is owned by // the ReplaceFst. 
void Init(const std::vector<std::pair<Label, const Fst<Arc> *>> &fst_tuples, const StateTable *state_table) { init_ = true; data_->Init(fst_tuples, state_table); } // Method required by LookAheadMatcher. However, ReplaceAccumulator needs to // be initialized by calling the Init method above before being passed to // LookAheadMatcher. // // TODO(allauzen): Revisit this. Consider creating a method // Init(const ReplaceFst<A, T, C>&, bool) and using friendship to get access // to the innards of ReplaceFst. void Init(const Fst<Arc> &fst, bool copy = false) { if (!init_) { FSTERROR() << "ReplaceAccumulator::Init: Accumulator needs to be" << " initialized before being passed to LookAheadMatcher"; error_ = true; } } void SetState(StateId s) { if (!init_) { FSTERROR() << "ReplaceAccumulator::SetState: Incorrectly initialized"; error_ = true; return; } auto tuple = data_->GetTuple(s); fst_id_ = tuple.fst_id - 1; // Replace FST ID is 1-based. data_->GetAccumulator(fst_id_)->SetState(tuple.fst_state); if ((tuple.prefix_id != 0) && (data_->GetFst(fst_id_)->Final(tuple.fst_state) != Weight::Zero())) { offset_ = 1; offset_weight_ = data_->GetFst(fst_id_)->Final(tuple.fst_state); } else { offset_ = 0; offset_weight_ = Weight::Zero(); } aiter_.reset( new ArcIterator<Fst<Arc>>(*data_->GetFst(fst_id_), tuple.fst_state)); } Weight Sum(Weight w, Weight v) { if (error_) return Weight::NoWeight(); return data_->GetAccumulator(fst_id_)->Sum(w, v); } template <class ArcIter> Weight Sum(Weight w, ArcIter *aiter, std::ptrdiff_t begin, std::ptrdiff_t end) { if (error_) return Weight::NoWeight(); auto sum = begin == end ? Weight::Zero() : data_->GetAccumulator(fst_id_)->Sum( w, aiter_.get(), begin ? begin - offset_ : 0, end - offset_); if (begin == 0 && end != 0 && offset_ > 0) sum = Sum(offset_weight_, sum); return sum; } bool Error() const { return error_; } private: bool init_; std::shared_ptr<ReplaceAccumulatorData<Accumulator, StateTable>> data_; Label fst_id_; size_t offset_; Weight offset_weight_; std::unique_ptr<ArcIterator<Fst<Arc>>> aiter_; bool error_; }; // SafeReplaceAccumulator accumulates weights in a ReplaceFst and copies of it // are always thread-safe copies. 
template <class Accumulator, class T> class SafeReplaceAccumulator { public: using Arc = typename Accumulator::Arc; using StateId = typename Arc::StateId; using Label = typename Arc::Label; using Weight = typename Arc::Weight; using StateTable = T; using StateTuple = typename StateTable::StateTuple; SafeReplaceAccumulator() {} SafeReplaceAccumulator(const SafeReplaceAccumulator &copy, bool safe) : SafeReplaceAccumulator(copy) {} explicit SafeReplaceAccumulator( const std::vector<Accumulator> &accumulators) { for (const auto &accumulator : accumulators) { accumulators_.emplace_back(accumulator, true); } } void Init(const std::vector<std::pair<Label, const Fst<Arc> *>> &fst_tuples, const StateTable *state_table) { state_table_ = state_table; for (Label i = 0; i < fst_tuples.size(); ++i) { if (i == accumulators_.size()) { accumulators_.resize(accumulators_.size() + 1); accumulators_[i].Init(*(fst_tuples[i].second)); } fst_array_.emplace_back(fst_tuples[i].second->Copy(true)); } init_ = true; } void Init(const Fst<Arc> &fst, bool copy = false) { if (!init_) { FSTERROR() << "SafeReplaceAccumulator::Init: Accumulator needs to be" << " initialized before being passed to LookAheadMatcher"; error_ = true; } } void SetState(StateId s) { auto tuple = state_table_->Tuple(s); fst_id_ = tuple.fst_id - 1; // Replace FST ID is 1-based GetAccumulator(fst_id_)->SetState(tuple.fst_state); offset_ = 0; offset_weight_ = Weight::Zero(); const auto final_weight = GetFst(fst_id_)->Final(tuple.fst_state); if ((tuple.prefix_id != 0) && (final_weight != Weight::Zero())) { offset_ = 1; offset_weight_ = final_weight; } aiter_.Set(*GetFst(fst_id_), tuple.fst_state); } Weight Sum(Weight w, Weight v) { if (error_) return Weight::NoWeight(); return GetAccumulator(fst_id_)->Sum(w, v); } template <class ArcIter> Weight Sum(Weight w, ArcIter *aiter, std::ptrdiff_t begin, std::ptrdiff_t end) { if (error_) return Weight::NoWeight(); if (begin == end) return Weight::Zero(); auto sum = GetAccumulator(fst_id_)->Sum( w, aiter_.get(), begin ? begin - offset_ : 0, end - offset_); if (begin == 0 && end != 0 && offset_ > 0) { sum = Sum(offset_weight_, sum); } return sum; } bool Error() const { return error_; } private: class ArcIteratorPtr { public: ArcIteratorPtr() {} ArcIteratorPtr(const ArcIteratorPtr &copy) {} void Set(const Fst<Arc> &fst, StateId state_id) { ptr_.reset(new ArcIterator<Fst<Arc>>(fst, state_id)); } ArcIterator<Fst<Arc>> *get() { return ptr_.get(); } private: std::unique_ptr<ArcIterator<Fst<Arc>>> ptr_; }; Accumulator *GetAccumulator(size_t i) { return &accumulators_[i]; } const Fst<Arc> *GetFst(size_t i) const { return fst_array_[i].get(); } const StateTable *state_table_; std::vector<Accumulator> accumulators_; std::vector<std::shared_ptr<Fst<Arc>>> fst_array_; ArcIteratorPtr aiter_; bool init_ = false; bool error_ = false; Label fst_id_; size_t offset_; Weight offset_weight_; }; } // namespace fst #endif // FST_ACCUMULATOR_H_
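A minimal sketch of the simplest accumulator in this header, summing the arc weights leaving one state; the FST, labels, and weights are arbitrary:

#include <iostream>
#include <fst/accumulator.h>
#include <fst/vector-fst.h>

int main() {
  // State 0 has two arcs with tropical weights 1.0 and 2.0.
  fst::StdVectorFst fst;
  fst.AddState();
  fst.AddState();
  fst.SetStart(0);
  fst.SetFinal(1, fst::TropicalWeight::One());
  fst.AddArc(0, fst::StdArc(1, 1, fst::TropicalWeight(1.0), 1));
  fst.AddArc(0, fst::StdArc(2, 2, fst::TropicalWeight(2.0), 1));

  fst::DefaultAccumulator<fst::StdArc> acc;
  acc.Init(fst);
  acc.SetState(0);
  fst::ArcIterator<fst::StdVectorFst> aiter(fst, 0);
  // Accumulates Plus() over arcs [0, NumArcs(0)); for tropical this is min.
  const auto sum = acc.Sum(fst::TropicalWeight::Zero(), &aiter, 0,
                           fst.NumArcs(0));
  std::cout << "sum = " << sum << "\n";  // prints 1
  return 0;
}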
coqui_public_repos/inference-engine/third_party/openfst-1.6.9-win/src/extensions/python/fst.pxd
# See www.openfst.org for extensive documentation on this weighted
# finite-state transducer library.

from libc.time cimport time_t
from libc.time cimport time

from libcpp cimport bool
from libcpp.vector cimport vector
from libcpp.utility cimport pair
from libcpp.string cimport string

from basictypes cimport int32
from basictypes cimport int64
from basictypes cimport uint32
from basictypes cimport uint64
from ios cimport istream
from ios cimport ostream


cdef extern from "<fst/util.h>" nogil:

  # Note that this is a copy, so it should be viewed as read-only.
  bool FLAGS_fst_error_fatal


cdef extern from "<fst/fstlib.h>" namespace "fst" nogil:

  # FST properties.
  const uint64 kExpanded
  const uint64 kMutable
  const uint64 kError
  const uint64 kAcceptor
  const uint64 kNotAcceptor
  const uint64 kIDeterministic
  const uint64 kNonIDeterministic
  const uint64 kODeterministic
  const uint64 kNonODeterministic
  const uint64 kEpsilons
  const uint64 kNoEpsilons
  const uint64 kIEpsilons
  const uint64 kNoIEpsilons
  const uint64 kOEpsilons
  const uint64 kNoOEpsilons
  const uint64 kILabelSorted
  const uint64 kNotILabelSorted
  const uint64 kOLabelSorted
  const uint64 kNotOLabelSorted
  const uint64 kWeighted
  const uint64 kUnweighted
  const uint64 kCyclic
  const uint64 kAcyclic
  const uint64 kInitialCyclic
  const uint64 kInitialAcyclic
  const uint64 kTopSorted
  const uint64 kNotTopSorted
  const uint64 kAccessible
  const uint64 kNotAccessible
  const uint64 kCoAccessible
  const uint64 kNotCoAccessible
  const uint64 kString
  const uint64 kNotString
  const uint64 kWeightedCycles
  const uint64 kUnweightedCycles
  const uint64 kNullProperties
  const uint64 kCopyProperties
  const uint64 kIntrinsicProperties
  const uint64 kExtrinsicProperties
  const uint64 kSetStartProperties
  const uint64 kSetFinalProperties
  const uint64 kAddStateProperties
  const uint64 kAddArcProperties
  const uint64 kSetArcProperties
  const uint64 kDeleteStatesProperties
  const uint64 kDeleteArcsProperties
  const uint64 kStateSortProperties
  const uint64 kArcSortProperties
  const uint64 kILabelInvariantProperties
  const uint64 kOLabelInvariantProperties
  const uint64 kWeightInvariantProperties
  const uint64 kAddSuperFinalProperties
  const uint64 kRmSuperFinalProperties
  const uint64 kBinaryProperties
  const uint64 kTrinaryProperties
  const uint64 kPosTrinaryProperties
  const uint64 kNegTrinaryProperties
  const uint64 kFstProperties

  # ArcIterator flags.
  const uint32 kArcILabelValue
  const uint32 kArcOLabelValue
  const uint32 kArcWeightValue
  const uint32 kArcNextStateValue
  const uint32 kArcNoCache
  const uint32 kArcValueFlags
  const uint32 kArcFlags

  # EncodeMapper flags.
  const uint32 kEncodeLabels
  const uint32 kEncodeWeights
  const uint32 kEncodeFlags

  # Default argument constants.
  const float kDelta
  const float kShortestDelta
  const int kNoLabel
  const int kNoStateId
  const int64 kNoSymbol

  enum ClosureType:
    CLOSURE_STAR
    CLOSURE_PLUS

  enum ComposeFilter:
    AUTO_FILTER
    NULL_FILTER
    SEQUENCE_FILTER
    ALT_SEQUENCE_FILTER
    MATCH_FILTER
    TRIVIAL_FILTER

  cdef cppclass ComposeOptions:
    ComposeOptions(bool, ComposeFilter)

  enum DeterminizeType:
    DETERMINIZE_FUNCTIONAL
    DETERMINIZE_NONFUNCTIONAL
    DETERMINIZE_DISAMBIGUATE

  enum EncodeType:
    DECODE
    ENCODE

  enum EpsNormalizeType:
    EPS_NORM_INPUT
    EPS_NORM_OUTPUT

  enum ProjectType:
    PROJECT_INPUT
    PROJECT_OUTPUT

  enum QueueType:
    TRIVIAL_QUEUE
    FIFO_QUEUE
    LIFO_QUEUE
    SHORTEST_FIRST_QUEUE
    TOP_ORDER_QUEUE
    STATE_ORDER_QUEUE
    SCC_QUEUE
    AUTO_QUEUE
    OTHER_QUEUE

  # This is a templated struct at the C++ level, but Cython does not support
  # templated structs unless we pretend they are full-blown classes.
  cdef cppclass RandGenOptions[RandArcSelection]:
    RandGenOptions(const RandArcSelection &, int32, int32, bool, bool)

  enum ReplaceLabelType:
    REPLACE_LABEL_NEITHER
    REPLACE_LABEL_INPUT
    REPLACE_LABEL_OUTPUT
    REPLACE_LABEL_BOTH

  enum ReweightType:
    REWEIGHT_TO_INITIAL
    REWEIGHT_TO_FINAL

  cdef cppclass SymbolTableTextOptions:
    SymbolTableTextOptions(bool)

  # Symbol tables.
  cdef cppclass SymbolTable:
    SymbolTable()
    SymbolTable(const string &)

    @staticmethod
    SymbolTable *Read(const string &)

    @staticmethod
    SymbolTable *ReadText(const string &, const SymbolTableTextOptions &)

    int64 AddSymbol(const string &, int64)
    int64 AddSymbol(const string &)
    SymbolTable *Copy()
    # Aliased for overload.
    string FindSymbol "Find"(int64)
    # Aliased for overload.
    int64 FindIndex "Find"(string)
    # Aliased for overload.
    bool MemberSymbol "Member"(string)
    # Aliased for overload.
    bool MemberIndex "Member"(int64)
    void AddTable(const SymbolTable &)
    int64 GetNthKey(ssize_t)
    const string &Name()
    void SetName(const string &)
    const string &CheckSum()
    const string &LabeledCheckSum()
    bool Write(const string &)
    bool WriteText(const string &)
    int64 AvailableKey()
    size_t NumSymbols()

  SymbolTable *CompactSymbolTable(const SymbolTable &syms)
  SymbolTable *MergeSymbolTable(const SymbolTable &, const SymbolTable &,
                                bool *)
  SymbolTable *FstReadSymbols(const string &, bool)

  cdef cppclass SymbolTableIterator:
    SymbolTableIterator(const SymbolTable &)
    bool Done()
    void Next()
    void Reset()
    string Symbol()
    int64 Value()


cdef extern from "<fst/script/fstscript.h>" namespace "fst::script" nogil:

  # Weights.
  cdef cppclass WeightClass:
    WeightClass()
    WeightClass(const WeightClass &)
    WeightClass(const string &, const string &)
    const string &Type()
    string ToString()

    @staticmethod
    const WeightClass &Zero(const string &)

    @staticmethod
    const WeightClass &One(const string &)

    @staticmethod
    const WeightClass &NoWeight(const string &)

  # Alias.
  cdef bool Eq "operator=="(const WeightClass &, const WeightClass &)

  # Alias.
  cdef bool Ne "operator!="(const WeightClass &, const WeightClass &)

  cdef WeightClass Plus(const WeightClass &, const WeightClass &)

  cdef WeightClass Times(const WeightClass &, const WeightClass &)

  cdef WeightClass Divide(const WeightClass &, const WeightClass &)

  cdef WeightClass Power(const WeightClass &, size_t)

  # Arcs.
  cdef cppclass ArcClass:
    ArcClass(const ArcClass &)
    ArcClass(int64, int64, const WeightClass &, int64)
    int64 ilabel
    int64 olabel
    WeightClass weight
    int64 nextstate

  # FSTs.
  cdef cppclass FstClass:
    FstClass(const FstClass &)

    @staticmethod
    FstClass *Read(const string &)

    # Aliased for overload.
    @staticmethod
    FstClass *ReadFromStream "Read"(istream &, const string &)

    int64 Start()
    WeightClass Final(int64)
    size_t NumArcs(int64)
    size_t NumInputEpsilons(int64)
    size_t NumOutputEpsilons(int64)
    const string &ArcType()
    const string &FstType()
    const SymbolTable *InputSymbols()
    const SymbolTable *OutputSymbols()
    const string &WeightType()
    bool Write(const string &)
    bool Write(ostream &, const string &)
    uint64 Properties(uint64, bool)
    bool ValidStateId(int64)

  cdef cppclass MutableFstClass(FstClass):
    bool AddArc(int64, const ArcClass &)
    int64 AddState()
    bool DeleteArcs(int64, size_t)
    bool DeleteArcs(int64)
    bool DeleteStates(const vector[int64] &)
    void DeleteStates()
    SymbolTable *MutableInputSymbols()
    SymbolTable *MutableOutputSymbols()
    int64 NumStates()
    bool ReserveArcs(int64, size_t)
    void ReserveStates(int64)
    bool SetStart(int64)
    bool SetFinal(int64, const WeightClass &)
    void SetInputSymbols(SymbolTable *)
    void SetOutputSymbols(SymbolTable *)
    void SetProperties(uint64, uint64)

  cdef cppclass VectorFstClass(MutableFstClass):
    VectorFstClass(const FstClass &)
    VectorFstClass(const string &)

  # EncodeMapper.
  cdef cppclass EncodeMapperClass:
    EncodeMapperClass(const string &, uint32, EncodeType)
    # Aliased to __call__ as Cython doesn't have good support for operator().
    ArcClass __call__ "operator()"(const ArcClass &)
    const string &ArcType()
    uint32 Flags()
    uint64 Properties(uint64)
    EncodeType Type()
    const SymbolTable *InputSymbols()
    const SymbolTable *OutputSymbols()
    void SetInputSymbols(const SymbolTable *)
    void SetOutputSymbols(const SymbolTable *)
    const string &WeightType()

  # Iterators.
  cdef cppclass ArcIteratorClass:
    ArcIteratorClass(const FstClass &, int64)
    bool Done()
    ArcClass Value()
    void Next()
    void Reset()
    void Seek(size_t)
    size_t Position()
    uint32 Flags()
    void SetFlags(uint32, uint32)

  cdef cppclass MutableArcIteratorClass:
    MutableArcIteratorClass(MutableFstClass *, int64)
    bool Done()
    ArcClass Value()
    void Next()
    void Reset()
    void Seek(size_t)
    void SetValue(const ArcClass &)
    size_t Position()
    uint32 Flags()
    void SetFlags(uint32, uint32)

  cdef cppclass StateIteratorClass:
    StateIteratorClass(const FstClass &)
    bool Done()
    int64 Value()
    void Next()
    void Reset()


ctypedef pair[int64, const FstClass *] LabelFstClassPair
ctypedef pair[int64, int64] LabelPair


cdef extern from "<fst/script/fstscript.h>" namespace "fst::script" nogil:

  enum ArcFilterType:
    ANY_ARC_FILTER
    EPSILON_ARC_FILTER
    INPUT_EPSILON_ARC_FILTER
    OUTPUT_EPSILON_ARC_FILTER

  enum ArcSortType:
    ILABEL_SORT
    OLABEL_SORT

  cdef void ArcSort(MutableFstClass *, ArcSortType)

  cdef ClosureType GetClosureType(bool)

  cdef void Closure(MutableFstClass *, ClosureType)

  cdef FstClass *CompileFstInternal(istream &, const string &, const string &,
                                    const string &, const SymbolTable *,
                                    const SymbolTable *, const SymbolTable *,
                                    bool, bool, bool, bool, bool)

  cdef void Compose(FstClass &, FstClass &, MutableFstClass *,
                    const ComposeOptions &)

  cdef void Concat(MutableFstClass *, const FstClass &)

  cdef void Connect(MutableFstClass *)

  cdef FstClass *Convert(const FstClass &, const string &)

  cdef void Decode(MutableFstClass *, const EncodeMapperClass &)

  cdef cppclass DeterminizeOptions:
    DeterminizeOptions(float, const WeightClass &, int64, int64,
                       DeterminizeType, bool)

  cdef void Determinize(const FstClass &, MutableFstClass *,
                        const DeterminizeOptions &)

  cdef cppclass DisambiguateOptions:
    DisambiguateOptions(float, const WeightClass &, int64, int64)

  cdef void Disambiguate(const FstClass &, MutableFstClass *,
                         const DisambiguateOptions &)

  cdef void Difference(const FstClass &, const FstClass &, MutableFstClass *,
                       const ComposeOptions &)

  cdef void DrawFst(const FstClass &fst, const SymbolTable *,
                    const SymbolTable *, const SymbolTable *, bool,
                    const string &, float, float, bool, bool, float, float,
                    int, int, const string &, bool, ostream *, const string &)

  cdef void Encode(MutableFstClass *, EncodeMapperClass *)

  cdef EpsNormalizeType GetEpsNormalizeType(bool)

  cdef void EpsNormalize(const FstClass &, MutableFstClass *,
                         EpsNormalizeType)

  cdef bool Equal(const FstClass &, const FstClass &, float)

  cdef bool Equivalent(const FstClass &, const FstClass &, float)

  cdef void Intersect(const FstClass &, const FstClass &, MutableFstClass *,
                      const ComposeOptions &)

  cdef void Invert(MutableFstClass *fst)

  cdef bool Isomorphic(const FstClass &, const FstClass &, float)

  enum MapType:
    ARC_SUM_MAPPER
    IDENTITY_MAPPER
    INPUT_EPSILON_MAPPER
    INVERT_MAPPER
    OUTPUT_EPSILON_MAPPER
    PLUS_MAPPER
    QUANTIZE_MAPPER
    RMWEIGHT_MAPPER
    SUPERFINAL_MAPPER
    TIMES_MAPPER
    TO_LOG_MAPPER
    TO_LOG64_MAPPER
    TO_STD_MAPPER

  cdef FstClass *Map(const FstClass &, MapType, float, double,
                     const WeightClass &)

  cdef void Minimize(MutableFstClass *, MutableFstClass *, float, bool)

  cdef ProjectType GetProjectType(bool)

  cdef void Project(MutableFstClass *, ProjectType)

  cdef void PrintFst(const FstClass &, ostream &, const string &,
                     const SymbolTable *, const SymbolTable *,
                     const SymbolTable *, bool, bool, const string &)

  cdef void Prune(const FstClass &, MutableFstClass *, const WeightClass &,
                  int64, float)

  cdef void Prune(MutableFstClass *, const WeightClass &, int64, float)

  cdef void Push(const FstClass &, MutableFstClass *, uint32 flags,
                 ReweightType, float)

  cdef void Push(MutableFstClass *, ReweightType, float, bool)

  enum RandArcSelection:
    UNIFORM_ARC_SELECTOR
    LOG_PROB_ARC_SELECTOR
    FAST_LOG_PROB_ARC_SELECTOR

  cdef bool RandEquivalent(const FstClass &, const FstClass &, int32, float,
                           time_t, const RandGenOptions[RandArcSelection] &)

  cdef void RandGen(const FstClass &, MutableFstClass *, time_t,
                    const RandGenOptions[RandArcSelection] &)

  cdef void Relabel(MutableFstClass *, const SymbolTable *,
                    const SymbolTable *, const string &, bool,
                    const SymbolTable *, const SymbolTable *, const string &,
                    bool)

  cdef void Relabel(MutableFstClass *, const vector[LabelPair] &,
                    const vector[LabelPair] &)

  cdef cppclass ReplaceOptions:
    ReplaceOptions(int64, ReplaceLabelType, ReplaceLabelType, int64)

  cdef void Replace(const vector[LabelFstClassPair] &, MutableFstClass *,
                    const ReplaceOptions &)

  cdef void Reverse(const FstClass &, MutableFstClass *, bool)

  cdef void Reweight(MutableFstClass *, const vector[WeightClass] &,
                     ReweightType)

  cdef cppclass RmEpsilonOptions:
    RmEpsilonOptions(QueueType, bool, const WeightClass &, int64, float)

  cdef void RmEpsilon(MutableFstClass *, const RmEpsilonOptions &)

  cdef cppclass ShortestDistanceOptions:
    ShortestDistanceOptions(QueueType, ArcFilterType, int64, float)

  cdef void ShortestDistance(const FstClass &, vector[WeightClass] *,
                             const ShortestDistanceOptions &)

  cdef void ShortestDistance(const FstClass &, vector[WeightClass] *, bool,
                             float)

  cdef cppclass ShortestPathOptions:
    ShortestPathOptions(QueueType, int32, bool, float, const WeightClass &,
                        int64)

  cdef void ShortestPath(const FstClass &, MutableFstClass *,
                         const ShortestPathOptions &)

  cdef void Synchronize(const FstClass &, MutableFstClass *)

  cdef bool TopSort(MutableFstClass *)

  cdef void Union(MutableFstClass *, const FstClass &)

  cdef bool Verify(const FstClass &)


cdef extern from "<fst/script/getters.h>" namespace "fst::script" nogil:

  cdef bool GetArcSortType(const string &, ArcSortType *)

  cdef bool GetComposeFilter(const string &, ComposeFilter *)

  cdef bool GetDeterminizeType(const string &, DeterminizeType *)

  cdef uint32 GetEncodeFlags(bool, bool)

  cdef bool GetMapType(const string &, MapType *)

  cdef uint32 GetPushFlags(bool, bool, bool, bool)

  cdef bool GetQueueType(const string &, QueueType *)

  cdef bool GetRandArcSelection(const string &, RandArcSelection *)

  cdef bool GetReplaceLabelType(string, bool, ReplaceLabelType *)

  cdef ReweightType GetReweightType(bool)


cdef extern from "<fst/extensions/far/far.h>" namespace "fst" nogil:

  enum FarType:
    FAR_DEFAULT
    FAR_STTABLE
    FAR_STLIST
    FAR_FST
    FAR_SSTABLE


cdef extern from "<fst/extensions/far/getters.h>" \
    namespace "fst" nogil:

  string GetFarTypeString(FarType)


cdef extern from "<fst/extensions/far/getters.h>" \
    namespace "fst::script" nogil:

  FarType GetFarType(const string &)


cdef extern from "<fst/extensions/far/far-class.h>" \
    namespace "fst::script" nogil:

  cdef cppclass FarReaderClass:
    const string &ArcType()
    bool Done()
    bool Error()
    bool Find(const string &)
    const FstClass *GetFstClass()
    const string &GetKey()
    void Next()
    void Reset()
    FarType Type()
    # For simplicity, we always use the multiple-file one.
    @staticmethod
    FarReaderClass *Open(const vector[string] &)

  cdef cppclass FarWriterClass:
    bool Add(const string &, const FstClass &)
    bool Error()
    const string &ArcType()
    FarType Type()
    @staticmethod
    FarWriterClass *Create(const string &, const string &, FarType)
0
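The long run of kExpanded through kFstProperties declarations above exposes OpenFst's FST property constants. They are bit flags packed into a single 64-bit property word and tested with bitwise AND (compare FstClass.Properties(uint64, bool)). A tiny Python illustration of the idiom follows; the flag values are made-up stand-ins, not OpenFst's real bit assignments.

# Illustrative property bits; OpenFst's actual values differ.
K_ACCEPTOR = 1 << 0
K_I_DETERMINISTIC = 1 << 1
K_CYCLIC = 1 << 2

def describe(props: int) -> list:
    """Decode a property word into human-readable names."""
    names = []
    if props & K_ACCEPTOR:
        names.append("acceptor")
    if props & K_I_DETERMINISTIC:
        names.append("input-deterministic")
    if props & K_CYCLIC:
        names.append("cyclic")
    return names

# A hypothetical FST that is a deterministic acceptor:
word = K_ACCEPTOR | K_I_DETERMINISTIC
print(describe(word))  # ['acceptor', 'input-deterministic']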
coqui_public_repos/STT
coqui_public_repos/STT/bin/run-ci-ldc93s1_checkpoint.sh
#!/bin/sh

set -xe

ldc93s1_dir="./data/smoke_test"
ldc93s1_csv="${ldc93s1_dir}/ldc93s1.csv"

if [ ! -f "${ldc93s1_dir}/ldc93s1.csv" ]; then
    echo "Downloading and preprocessing LDC93S1 example data, saving in ${ldc93s1_dir}."
    python -u bin/import_ldc93s1.py ${ldc93s1_dir}
fi;

# Force only one visible device because we have a single-sample dataset
# and when trying to run on multiple devices (like GPUs), this will break
export CUDA_VISIBLE_DEVICES=0

python -u train.py --alphabet_config_path "data/alphabet.txt" \
  --show_progressbar false --early_stop false \
  --train_files ${ldc93s1_csv} --train_batch_size 1 \
  --dev_files ${ldc93s1_csv} --dev_batch_size 1 \
  --test_files ${ldc93s1_csv} --test_batch_size 1 \
  --n_hidden 100 --epochs 1 \
  --max_to_keep 1 --checkpoint_dir '/tmp/ckpt' \
  --learning_rate 0.001 --dropout_rate 0.05 \
  --scorer_path 'data/smoke_test/pruned_lm.scorer' | tee /tmp/resume.log

if ! grep "Loading best validating checkpoint from" /tmp/resume.log; then
    echo "Did not resume training from checkpoint"
    exit 1
else
    exit 0
fi
0
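The pass/fail condition of the script above is a single grep for the checkpoint-resume message. For a CI harness that inspects several such markers, the same check is easy to express in Python; the log path and marker string below simply mirror the script.

from pathlib import Path

RESUME_MARKER = "Loading best validating checkpoint from"

def resumed_from_checkpoint(log_path: str) -> bool:
    """Return True if the training log shows a checkpoint was loaded."""
    text = Path(log_path).read_text(encoding="utf-8", errors="replace")
    return RESUME_MARKER in text

if __name__ == "__main__":
    import sys
    ok = resumed_from_checkpoint("/tmp/resume.log")
    if not ok:
        print("Did not resume training from checkpoint")
    sys.exit(0 if ok else 1)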
coqui_public_repos/inference-engine/third_party/openfst-1.6.9-win/src
coqui_public_repos/inference-engine/third_party/openfst-1.6.9-win/src/bin/fstinfo.cc
// See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.

#include <fst/flags.h>

DEFINE_string(arc_filter, "any",
              "Arc filter: one of:"
              " \"any\", \"epsilon\", \"iepsilon\", \"oepsilon\"; "
              "this only affects the counts of (co)accessible states, "
              "connected states, and (strongly) connected components");
DEFINE_string(info_type, "auto",
              "Info format: one of: \"auto\", \"long\", \"short\"");
DEFINE_bool(pipe, false, "Send info to stderr, input to stdout");
DEFINE_bool(test_properties, true,
            "Compute property values (if unknown to FST)");
DEFINE_bool(fst_verify, true, "Verify FST sanity");

int fstinfo_main(int argc, char **argv);

int main(int argc, char **argv) { return fstinfo_main(argc, argv); }
0
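The DEFINE_string/DEFINE_bool macros are gflags-style flag definitions; fstinfo_main (defined elsewhere) reads them after flag parsing. For orientation only, here is a rough Python argparse equivalent of the same five flags — an analogy, not OpenFst code (BooleanOptionalAction needs Python 3.9+).

import argparse

parser = argparse.ArgumentParser(description="Print information about an FST.")
parser.add_argument("--arc_filter", default="any",
                    choices=["any", "epsilon", "iepsilon", "oepsilon"],
                    help="Arc filter; only affects (co)accessibility counts.")
parser.add_argument("--info_type", default="auto",
                    choices=["auto", "long", "short"], help="Info format.")
parser.add_argument("--pipe", action=argparse.BooleanOptionalAction,
                    default=False, help="Send info to stderr, input to stdout.")
parser.add_argument("--test_properties", action=argparse.BooleanOptionalAction,
                    default=True, help="Compute property values if unknown.")
parser.add_argument("--fst_verify", action=argparse.BooleanOptionalAction,
                    default=True, help="Verify FST sanity.")

print(parser.parse_args([]))  # all defaults, mirroring the DEFINE_* lines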
coqui_public_repos/TTS/TTS/vocoder
coqui_public_repos/TTS/TTS/vocoder/models/melgan_discriminator.py
import numpy as np
from torch import nn
from torch.nn.utils.parametrizations import weight_norm


class MelganDiscriminator(nn.Module):
    def __init__(
        self,
        in_channels=1,
        out_channels=1,
        kernel_sizes=(5, 3),
        base_channels=16,
        max_channels=1024,
        downsample_factors=(4, 4, 4, 4),
        groups_denominator=4,
    ):
        super().__init__()
        self.layers = nn.ModuleList()

        layer_kernel_size = np.prod(kernel_sizes)
        layer_padding = (layer_kernel_size - 1) // 2

        # initial layer
        self.layers += [
            nn.Sequential(
                nn.ReflectionPad1d(layer_padding),
                weight_norm(nn.Conv1d(in_channels, base_channels, layer_kernel_size, stride=1)),
                nn.LeakyReLU(0.2, inplace=True),
            )
        ]

        # downsampling layers
        layer_in_channels = base_channels
        for downsample_factor in downsample_factors:
            layer_out_channels = min(layer_in_channels * downsample_factor, max_channels)
            layer_kernel_size = downsample_factor * 10 + 1
            layer_padding = (layer_kernel_size - 1) // 2
            layer_groups = layer_in_channels // groups_denominator
            self.layers += [
                nn.Sequential(
                    weight_norm(
                        nn.Conv1d(
                            layer_in_channels,
                            layer_out_channels,
                            kernel_size=layer_kernel_size,
                            stride=downsample_factor,
                            padding=layer_padding,
                            groups=layer_groups,
                        )
                    ),
                    nn.LeakyReLU(0.2, inplace=True),
                )
            ]
            layer_in_channels = layer_out_channels

        # last 2 layers
        layer_padding1 = (kernel_sizes[0] - 1) // 2
        layer_padding2 = (kernel_sizes[1] - 1) // 2
        self.layers += [
            nn.Sequential(
                weight_norm(
                    nn.Conv1d(
                        layer_out_channels,
                        layer_out_channels,
                        kernel_size=kernel_sizes[0],
                        stride=1,
                        padding=layer_padding1,
                    )
                ),
                nn.LeakyReLU(0.2, inplace=True),
            ),
            weight_norm(
                nn.Conv1d(
                    layer_out_channels, out_channels, kernel_size=kernel_sizes[1], stride=1, padding=layer_padding2
                )
            ),
        ]

    def forward(self, x):
        feats = []
        for layer in self.layers:
            x = layer(x)
            feats.append(x)
        return x, feats
0
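A short smoke test for the discriminator above. With the default downsample_factors=(4, 4, 4, 4) the time axis shrinks by 4^4 = 256, and forward() returns both the final score map and every intermediate feature map (useful for feature-matching losses). This is a sketch assuming torch is installed and the class is importable from its path in the TTS tree.

import torch

from TTS.vocoder.models.melgan_discriminator import MelganDiscriminator

disc = MelganDiscriminator()
waveform = torch.randn(2, 1, 16384)  # (batch, channels, samples)
score, feats = disc(waveform)

print(score.shape)                   # torch.Size([2, 1, 64]) with the defaults
print([f.shape[1] for f in feats])   # per-layer channel counts, capped at 1024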
coqui_public_repos/STT
coqui_public_repos/STT/doc/Java-Examples.rst
Java API Usage example
======================

Examples are from `native_client/java/app/src/main/java/ai/coqui/sttexampleapp/STTActivity.java`.

Creating a model instance and loading model
-------------------------------------------

.. literalinclude:: ../native_client/java/app/src/main/java/ai/coqui/sttexampleapp/STTActivity.java
   :language: java
   :linenos:
   :lineno-match:
   :start-after: sphinx-doc: java_ref_model_start
   :end-before: sphinx-doc: java_ref_model_stop

Transcribing audio with the loaded model
----------------------------------------

.. literalinclude:: ../native_client/java/app/src/main/java/ai/coqui/sttexampleapp/STTActivity.java
   :language: java
   :linenos:
   :lineno-match:
   :start-after: sphinx-doc: java_ref_inference_start
   :end-before: sphinx-doc: java_ref_inference_stop

Full source code
----------------

See :download:`Full source code<../native_client/java/app/src/main/java/ai/coqui/sttexampleapp/STTActivity.java>`.
0
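The Java flow referenced above (construct a Model, then call stt() on 16 kHz mono 16-bit PCM) maps one-to-one onto the Python bindings. A hedged Python sketch of the same two steps — the file paths are placeholders, and the method names follow the coqui `stt` package, so verify them against your installed version.

import wave

import numpy as np
from stt import Model

model = Model("model.tflite")               # placeholder model path
model.enableExternalScorer("kenlm.scorer")  # optional; placeholder path

with wave.open("audio.wav") as wav:         # expected: 16 kHz, mono, 16-bit
    pcm = wav.readframes(wav.getnframes())
audio = np.frombuffer(pcm, dtype=np.int16)

print(model.stt(audio))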
coqui_public_repos/TTS/TTS/tts/utils/text
coqui_public_repos/TTS/TTS/tts/utils/text/chinese_mandarin/numbers.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

# Licensed under WTFPL or the Unlicense or CC0.
# This uses Python 3, but it's easy to port to Python 2 by changing
# strings to u'xx'.

import itertools
import re


def _num2chinese(num: str, big=False, simp=True, o=False, twoalt=False) -> str:
    """Convert Arabic numerals (0-9) to Chinese hanzi numbers (〇 -> 九)

    Args:
        num (str): Arabic number to convert
        big (bool, optional): use financial characters. Defaults to False.
        simp (bool, optional): use simplified characters instead of traditional characters. Defaults to True.
        o (bool, optional): use 〇 for 'zero'. Defaults to False.
        twoalt (bool, optional): use 两/兩 for 'two' when appropriate. Defaults to False.

    Raises:
        ValueError: if the number is 1e48 or larger
        ValueError: if the number uses an 'e' exponent (scientific notation)

    Returns:
        str: converted number as hanzi characters
    """
    # check num first
    nd = str(num)
    if abs(float(nd)) >= 1e48:
        raise ValueError("number out of range")
    if "e" in nd:
        raise ValueError("scientific notation is not supported")
    c_symbol = "正负点" if simp else "正負點"
    if o:  # formal
        twoalt = False
    if big:
        c_basic = "零壹贰叁肆伍陆柒捌玖" if simp else "零壹貳參肆伍陸柒捌玖"
        c_unit1 = "拾佰仟"
        c_twoalt = "贰" if simp else "貳"
    else:
        c_basic = "〇一二三四五六七八九" if o else "零一二三四五六七八九"
        c_unit1 = "十百千"
        if twoalt:
            c_twoalt = "两" if simp else "兩"
        else:
            c_twoalt = "二"
    c_unit2 = "万亿兆京垓秭穰沟涧正载" if simp else "萬億兆京垓秭穰溝澗正載"
    revuniq = lambda l: "".join(k for k, g in itertools.groupby(reversed(l)))
    nd = str(num)
    result = []
    if nd[0] == "+":
        result.append(c_symbol[0])
    elif nd[0] == "-":
        result.append(c_symbol[1])
    if "." in nd:
        integer, remainder = nd.lstrip("+-").split(".")
    else:
        integer, remainder = nd.lstrip("+-"), None
    if int(integer):
        splitted = [integer[max(i - 4, 0) : i] for i in range(len(integer), 0, -4)]
        intresult = []
        for nu, unit in enumerate(splitted):
            # special cases
            if int(unit) == 0:  # 0000
                intresult.append(c_basic[0])
                continue
            if nu > 0 and int(unit) == 2:  # 0002
                intresult.append(c_twoalt + c_unit2[nu - 1])
                continue
            ulist = []
            unit = unit.zfill(4)
            for nc, ch in enumerate(reversed(unit)):
                if ch == "0":
                    if ulist:  # ???0
                        ulist.append(c_basic[0])
                elif nc == 0:
                    ulist.append(c_basic[int(ch)])
                elif nc == 1 and ch == "1" and unit[1] == "0":
                    # special case for tens
                    # edit the 'elif' if you don't like
                    # 十四, 三千零十四, 三千三百一十四
                    ulist.append(c_unit1[0])
                elif nc > 1 and ch == "2":
                    ulist.append(c_twoalt + c_unit1[nc - 1])
                else:
                    ulist.append(c_basic[int(ch)] + c_unit1[nc - 1])
            ustr = revuniq(ulist)
            if nu == 0:
                intresult.append(ustr)
            else:
                intresult.append(ustr + c_unit2[nu - 1])
        result.append(revuniq(intresult).strip(c_basic[0]))
    else:
        result.append(c_basic[0])
    if remainder:
        result.append(c_symbol[2])
        result.append("".join(c_basic[int(ch)] for ch in remainder))
    return "".join(result)


def _number_replace(match) -> str:
    """Callback for re.sub: convert the digits captured in a regex match to Chinese characters

    Args:
        match (re.Match): regex match over a run of digits

    Returns:
        str: hanzi replacement for the matched number
    """
    match_str: str = match.group()
    return _num2chinese(match_str)


def replace_numbers_to_characters_in_text(text: str) -> str:
    """Replace all Arabic numbers in a text by their equivalent in Chinese characters (simplified)

    Args:
        text (str): input text to transform

    Returns:
        str: output text
    """
    text = re.sub(r"[0-9]+", _number_replace, text)
    return text
0
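A quick usage sketch for the module above; the outputs in the comments follow from the tens-handling rule in _num2chinese (e.g. 14 becomes 十四 rather than 一十四).

from TTS.tts.utils.text.chinese_mandarin.numbers import (
    replace_numbers_to_characters_in_text,
)

print(replace_numbers_to_characters_in_text("我有2个苹果"))  # -> 我有二个苹果
print(replace_numbers_to_characters_in_text("第14章"))       # -> 第十四章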
coqui_public_repos
coqui_public_repos/data-checker/Dockerfile
FROM ghcr.io/coqui-ai/stt-train:main

RUN python -m pip install pandarallel
RUN python -m pip install stt
RUN python -m pip install numpy
RUN python -m pip install librosa

WORKDIR /home/ubuntu
COPY . /home/ubuntu/

RUN mkdir /home/ubuntu/stt_model
RUN wget https://github.com/coqui-ai/STT-models/releases/download/english%2Fcoqui%2Fv1.0.0-huge-vocab/model.tflite -P /home/ubuntu/stt_model
RUN wget https://github.com/coqui-ai/STT-models/releases/download/english%2Fcoqui%2Fv1.0.0-huge-vocab/huge-vocabulary.scorer -P /home/ubuntu/stt_model

RUN python data_checks.py /code/data/smoke_test/ldc93s1_flac.csv 2 /home/ubuntu/stt_model/model.tflite /home/ubuntu/stt_model/huge-vocabulary.scorer
RUN python data_checks.py /code/data/smoke_test/ldc93s1_opus.csv 2
RUN python data_checks.py /code/data/smoke_test/russian_sample_data/ru.csv 2 /home/ubuntu/stt_model/model.tflite /home/ubuntu/stt_model/huge-vocabulary.scorer
0
coqui_public_repos/STT/training/coqui_stt_training
coqui_public_repos/STT/training/coqui_stt_training/util/check_characters.py
""" Usage: From within the training/ directory, call this script as a module: $ python3 -m coqui_stt_training.util.check_characters "INFILE" e.g. $ python3 -m coqui_stt_training.util.check_characters -csv /home/data/french.csv e.g. $ python3 -m coqui_stt_training.util.check_characters -csv ../train.csv,../test.csv e.g. $ python3 -m coqui_stt_training.util.check_characters -alpha -csv ../train.csv Point this script to your transcripts, and it returns to the terminal the unique set of characters in those files (combined). These files are assumed to be csv, with the transcript being the third field. The script simply reads all the text from all the files, storing a set of unique characters that were seen along the way. """ import argparse import csv import os import sys import unicodedata from .io import open_remote def main(): parser = argparse.ArgumentParser() parser.add_argument( "-csv", "--csv-files", help="Str. Filenames as a comma separated list", required=True, ) parser.add_argument( "-alpha", "--alphabet-format", help="Bool. Print in format for alphabet.txt", action="store_true", ) parser.add_argument( "-unicode", "--disable-unicode-variants", help="Bool. DISABLE check for unicode consistency (use with --alphabet-format)", action="store_true", ) args = parser.parse_args() in_files = args.csv_files.split(",") print("### Reading in the following transcript files: ###") print("### {} ###".format(in_files)) all_text = set() for in_file in in_files: with open_remote(in_file, "r") as csv_file: reader = csv.reader(csv_file) try: next(reader, None) # skip the file header (i.e. "transcript") for row in reader: if not args.disable_unicode_variants: unicode_transcript = unicodedata.normalize("NFKC", row[2]) if row[2] != unicode_transcript: print( "Your input file", in_file, "contains at least one transript with unicode chars on more than one code-point: '{}'. Consider using NFKC normalization: unicodedata.normalize('NFKC', str).".format( row[2] ), ) sys.exit(-1) all_text |= set(row[2]) except IndexError: print( "Your input file", in_file, "is not formatted properly. Check if there are 3 columns with the 3rd containing the transcript", ) sys.exit(-1) finally: csv_file.close() print("### The following unique characters were found in your transcripts: ###") if args.alphabet_format: for char in list(all_text): print(char) print("### ^^^ You can copy-paste these into data/alphabet.txt ###") else: print(list(all_text)) if __name__ == "__main__": main()
0
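At its core the script above is a set union over the third CSV column. Stripped of argument parsing and the unicode-variant check, the logic reduces to a few lines; the file names here are placeholders.

import csv

def unique_transcript_chars(csv_paths):
    """Collect the set of unique characters from the 3rd column of each CSV."""
    chars = set()
    for path in csv_paths:
        with open(path, newline="", encoding="utf-8") as f:
            reader = csv.reader(f)
            next(reader, None)  # skip header
            for row in reader:
                chars |= set(row[2])
    return chars

# print(sorted(unique_transcript_chars(["train.csv", "test.csv"])))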
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.7/src/extensions
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.7/src/extensions/compact/compact16_unweighted-fst.cc
// See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.

#include <fst/fst.h>
#include <fst/compact-fst.h>

namespace fst {

static FstRegisterer<CompactUnweightedFst<StdArc, uint16>>
    CompactUnweightedFst_StdArc_uint16_registerer;
static FstRegisterer<CompactUnweightedFst<LogArc, uint16>>
    CompactUnweightedFst_LogArc_uint16_registerer;

}  // namespace fst
0
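The two static FstRegisterer objects above register these CompactUnweightedFst instantiations by type name during static initialization, so that Fst::Read can later construct them from a string found in a file header. A toy Python rendition of the same register-at-load pattern, with all names illustrative:

_REGISTRY = {}

def register(name):
    """Class decorator: record a class under a string key at import time."""
    def wrap(cls):
        _REGISTRY[name] = cls
        return cls
    return wrap

@register("compact16_unweighted_std")
class CompactUnweightedStd:
    pass

@register("compact16_unweighted_log")
class CompactUnweightedLog:
    pass

# A reader can now instantiate by type name, much as Fst::Read does:
print(_REGISTRY["compact16_unweighted_std"]().__class__.__name__)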
coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src
coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src/script/concat.cc
// See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.

#include <fst/script/fst-class.h>
#include <fst/script/concat.h>
#include <fst/script/script-impl.h>

namespace fst {
namespace script {

// 1
void Concat(MutableFstClass *ofst, const FstClass &ifst) {
  if (!internal::ArcTypesMatch(*ofst, ifst, "Concat")) {
    ofst->SetProperties(kError, kError);
    return;
  }
  ConcatArgs1 args(ofst, ifst);
  Apply<Operation<ConcatArgs1>>("Concat", ofst->ArcType(), &args);
}

// 2
void Concat(const FstClass &ifst, MutableFstClass *ofst) {
  if (!internal::ArcTypesMatch(ifst, *ofst, "Concat")) {
    ofst->SetProperties(kError, kError);
    return;
  }
  ConcatArgs2 args(ifst, ofst);
  Apply<Operation<ConcatArgs2>>("Concat", ofst->ArcType(), &args);
}

REGISTER_FST_OPERATION(Concat, StdArc, ConcatArgs1);
REGISTER_FST_OPERATION(Concat, LogArc, ConcatArgs1);
REGISTER_FST_OPERATION(Concat, Log64Arc, ConcatArgs1);
REGISTER_FST_OPERATION(Concat, StdArc, ConcatArgs2);
REGISTER_FST_OPERATION(Concat, LogArc, ConcatArgs2);
REGISTER_FST_OPERATION(Concat, Log64Arc, ConcatArgs2);

}  // namespace script
}  // namespace fst
0
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.9-win/src/include/fst/extensions
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.9-win/src/include/fst/extensions/far/create.h
// See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.
//
// Creates a finite-state archive from component FSTs.

#ifndef FST_EXTENSIONS_FAR_CREATE_H_
#define FST_EXTENSIONS_FAR_CREATE_H_

#include <sstream>
#include <string>
#include <vector>

#include <fst/extensions/far/far.h>

namespace fst {

template <class Arc>
void FarCreate(const std::vector<string> &in_fnames, const string &out_fname,
               const int32_t generate_keys, const FarType &far_type,
               const string &key_prefix, const string &key_suffix) {
  std::unique_ptr<FarWriter<Arc>> far_writer(
      FarWriter<Arc>::Create(out_fname, far_type));
  if (!far_writer) return;
  for (size_t i = 0; i < in_fnames.size(); ++i) {
    std::unique_ptr<Fst<Arc>> ifst(Fst<Arc>::Read(in_fnames[i]));
    if (!ifst) return;
    string key;
    if (generate_keys > 0) {
      std::ostringstream keybuf;
      keybuf.width(generate_keys);
      keybuf.fill('0');
      keybuf << i + 1;
      key = keybuf.str();
    } else {
      auto *filename = new char[in_fnames[i].size() + 1];
      strcpy(filename, in_fnames[i].c_str());
      key = basename(filename);
      delete[] filename;
    }
    far_writer->Add(key_prefix + key + key_suffix, *ifst);
  }
}

}  // namespace fst

#endif  // FST_EXTENSIONS_FAR_CREATE_H_
0
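The key-generation branch above either zero-pads the 1-based input index to generate_keys digits (via ostringstream width/fill) or falls back to the input file's basename. The same rule in Python, with function and variable names of my own choosing:

import os

def far_key(index: int, filename: str, generate_keys: int = 0) -> str:
    """Mirror FarCreate's key rule: zero-padded index, else file basename."""
    if generate_keys > 0:
        return str(index + 1).zfill(generate_keys)
    return os.path.basename(filename)

print(far_key(0, "a/b/model.fst", generate_keys=4))  # '0001'
print(far_key(0, "a/b/model.fst"))                   # 'model.fst'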
coqui_public_repos/snakepit/src
coqui_public_repos/snakepit/src/models/ProcessGroup-model.js
const Sequelize = require('sequelize')
const sequelize = require('./db.js')
const Process = require('./Process-model.js')

var ProcessGroup = sequelize.define('processgroup', {
    id: {
        type: Sequelize.INTEGER,
        autoIncrement: true,
        primaryKey: true
    },
    index: {
        type: Sequelize.INTEGER,
        allowNull: false
    }
})

ProcessGroup.hasMany(Process, { onDelete: 'cascade' })
Process.belongsTo(ProcessGroup)

module.exports = ProcessGroup
0
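For readers who know Python ORMs better than Sequelize, here is a rough SQLAlchemy analogue of the same one-to-many with cascading deletes. It assumes a Process model with a foreign key back to the group; this is an illustration, not part of snakepit.

from sqlalchemy import Column, ForeignKey, Integer, create_engine
from sqlalchemy.orm import declarative_base, relationship

Base = declarative_base()

class ProcessGroup(Base):
    __tablename__ = "processgroups"
    id = Column(Integer, primary_key=True, autoincrement=True)
    index = Column(Integer, nullable=False)
    # Deleting a group deletes its processes, like onDelete: 'cascade'.
    processes = relationship("Process", cascade="all, delete-orphan",
                             back_populates="group")

class Process(Base):
    __tablename__ = "processes"
    id = Column(Integer, primary_key=True)
    processgroup_id = Column(Integer, ForeignKey("processgroups.id"))
    group = relationship("ProcessGroup", back_populates="processes")

engine = create_engine("sqlite://")   # in-memory DB for the sketch
Base.metadata.create_all(engine)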
coqui_public_repos/STT-models/hungarian/itml
coqui_public_repos/STT-models/hungarian/itml/v0.1.1/LICENSE
GNU AFFERO GENERAL PUBLIC LICENSE Version 3, 19 November 2007 Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/> Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble The GNU Affero General Public License is a free, copyleft license for software and other kinds of works, specifically designed to ensure cooperation with the community in the case of network server software. The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, our General Public Licenses are intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things. Developers that use our General Public Licenses protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License which gives you legal permission to copy, distribute and/or modify the software. A secondary benefit of defending all users' freedom is that improvements made in alternate versions of the program, if they receive widespread use, become available for other developers to incorporate. Many developers of free software are heartened and encouraged by the resulting cooperation. However, in the case of software used on network servers, this result may fail to come about. The GNU General Public License permits making a modified version and letting the public access it on a server without ever releasing its source code to the public. The GNU Affero General Public License is designed specifically to ensure that, in such cases, the modified source code becomes available to the community. It requires the operator of a network server to provide the source code of the modified version running there to the users of that server. Therefore, public use of a modified version, on a publicly accessible server, gives the public access to the source code of the modified version. An older license, called the Affero General Public License and published by Affero, was designed to accomplish similar goals. This is a different license, not a version of the Affero GPL, but Affero has released a new version of the Affero GPL which permits relicensing under this license. The precise terms and conditions for copying, distribution and modification follow. TERMS AND CONDITIONS 0. Definitions. "This License" refers to version 3 of the GNU Affero General Public License. "Copyright" also means copyright-like laws that apply to other kinds of works, such as semiconductor masks. "The Program" refers to any copyrightable work licensed under this License. Each licensee is addressed as "you". "Licensees" and "recipients" may be individuals or organizations. To "modify" a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a "modified version" of the earlier work or a work "based on" the earlier work. A "covered work" means either the unmodified Program or a work based on the Program. 
To "propagate" a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well. To "convey" a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying. An interactive user interface displays "Appropriate Legal Notices" to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion. 1. Source Code. The "source code" for a work means the preferred form of the work for making modifications to it. "Object code" means any non-source form of a work. A "Standard Interface" means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language. The "System Libraries" of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A "Major Component", in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it. The "Corresponding Source" for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work. The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source. The Corresponding Source for a work in source code form is that same work. 2. Basic Permissions. All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. 
The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law. You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you. Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary. 3. Protecting Users' Legal Rights From Anti-Circumvention Law. No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures. When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures. 4. Conveying Verbatim Copies. You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program. You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee. 5. Conveying Modified Source Versions. You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions: a) The work must carry prominent notices stating that you modified it, and giving a relevant date. b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to "keep intact all notices". c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it. 
d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so. A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an "aggregate" if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate. 6. Conveying Non-Source Forms. You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways: a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange. b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge. c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b. d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements. e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d. A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work. 
A "User Product" is either (1) a "consumer product", which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a particular product received by a particular user, "normally used" refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product. "Installation Information" for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made. If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM). The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network. Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying. 7. Additional Terms. "Additional permissions" are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions. When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) 
You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission. Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms: a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or d) Limiting the use for publicity purposes of names of licensors or authors of the material; or e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors. All other non-permissive additional terms are considered "further restrictions" within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying. If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms. Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way. 8. Termination. You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11). However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation. Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice. Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. 
If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10. 9. Acceptance Not Required for Having Copies. You are not required to accept this License in order to receive or run a copy of the Program. Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so. 10. Automatic Licensing of Downstream Recipients. Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License. An "entity transaction" is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts. You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it. 11. Patents. A "contributor" is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. The work thus licensed is called the contributor's "contributor version". A contributor's "essential patent claims" are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, "control" includes the right to grant patent sublicenses in a manner consistent with the requirements of this License. Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version. In the following three paragraphs, a "patent license" is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To "grant" such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party. 
If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. "Knowingly relying" means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid. If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it. A patent license is "discriminatory" if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007. Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law. 12. No Surrender of Others' Freedom. If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. 13. Remote Network Interaction; Use with the GNU General Public License. 
Notwithstanding any other provision of this License, if you modify the Program, your modified version must prominently offer all users interacting with it remotely through a computer network (if your version supports such interaction) an opportunity to receive the Corresponding Source of your version by providing access to the Corresponding Source from a network server at no charge, through some standard or customary means of facilitating copying of software. This Corresponding Source shall include the Corresponding Source for any work covered by version 3 of the GNU General Public License that is incorporated pursuant to the following paragraph. Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU General Public License into a single combined work, and to convey the resulting work. The terms of this License will continue to apply to the part which is the covered work, but the work with which it is combined will remain governed by version 3 of the GNU General Public License. 14. Revised Versions of this License. The Free Software Foundation may publish revised and/or new versions of the GNU Affero General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU Affero General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU Affero General Public License, you may choose any version ever published by the Free Software Foundation. If the Program specifies that a proxy can decide which future versions of the GNU Affero General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version. 15. Disclaimer of Warranty. THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 16. Limitation of Liability. 
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. 17. Interpretation of Sections 15 and 16. If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee. END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Programs If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. <one line to give the program's name and a brief idea of what it does.> Copyright (C) <year> <name of author> This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details. You should have received a copy of the GNU Affero General Public License along with this program. If not, see <https://www.gnu.org/licenses/>. Also add information on how to contact you by electronic and paper mail. If your software can interact with users remotely through a computer network, you should also make sure that it provides a way for users to get its source. For example, if your program is a web application, its interface could display a "Source" link that leads users to an archive of the code. There are many ways you could offer source, and different solutions will be better for different programs; see section 13 for the specific requirements. You should also get your employer (if you work as a programmer) or school, if any, to sign a "copyright disclaimer" for the program, if necessary. For more information on this, and how to apply and follow the GNU AGPL, see <https://www.gnu.org/licenses/>.
0
coqui_public_repos
coqui_public_repos/STT/ds_sox.supp
{
   sox_effect_gain
   Memcheck:Leak
   match-leak-kinds: reachable
   fun:malloc
   fun:realloc
   fun:lsx_realloc
   fun:lsx_usage_lines
   fun:lsx_gain_effect_fn
   fun:sox_find_effect
   fun:_Z14GetAudioBufferPKci
   fun:_Z11ProcessFileP10ModelStatePKcb
   fun:main
}
{
   sox_effect_rate
   Memcheck:Leak
   match-leak-kinds: reachable
   fun:malloc
   fun:realloc
   fun:lsx_realloc
   fun:lsx_usage_lines
   fun:lsx_rate_effect_fn
   fun:sox_find_effect
   fun:_Z14GetAudioBufferPKci
   fun:_Z11ProcessFileP10ModelStatePKcb
   fun:main
}
{
   sox_effect_flanger
   Memcheck:Leak
   match-leak-kinds: reachable
   fun:malloc
   fun:realloc
   fun:lsx_realloc
   fun:lsx_usage_lines
   fun:lsx_flanger_effect_fn
   fun:sox_find_effect
   fun:_Z14GetAudioBufferPKci
   fun:_Z11ProcessFileP10ModelStatePKcb
   fun:main
}
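# The three suppressions above share one shape, read bottom-up: main() ->
# ProcessFile() -> GetAudioBuffer() -> sox_find_effect() registers a sox
# effect whose usage strings (lsx_usage_lines) stay allocated, and reachable,
# for the life of the process, so Memcheck would otherwise report them as
# reachable leaks.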
0
coqui_public_repos/inference-engine/third_party/onnxruntime/include/onnxruntime/core
coqui_public_repos/inference-engine/third_party/onnxruntime/include/onnxruntime/core/framework/run_options.h
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

#pragma once

#include <string>
#include <atomic>
#include "core/session/onnxruntime_c_api.h"
#include "core/framework/config_options.h"

/**
 * Configuration information for a Run call.
 */
struct OrtRunOptions {
  /// Log severity. See https://github.com/microsoft/onnxruntime/blob/master/include/onnxruntime/core/common/logging/severity.h
  /// Default = -1 (use the log severity from the InferenceSession that the Run is for).
  int run_log_severity_level = -1;
  int run_log_verbosity_level = 0;  ///< VLOG level if debug build and run_log_severity_level is 0 (VERBOSE).
  std::string run_tag;              ///< A tag for the Run() calls using this.

  // Set to 'true' to ensure the termination of all the outstanding Run() calls
  // that use this OrtRunOptions instance. Some of the outstanding Run() calls may
  // be forced to terminate with an error status.
  bool terminate = false;

  // Set to 'true' to run only the nodes from feeds to required fetches.
  // So it is possible that only some of the nodes are executed.
  bool only_execute_path_to_fetches = false;

#ifdef ENABLE_TRAINING
  // Set to 'true' to run in training mode.
  bool training_mode = true;
#endif

  // Stores the configurations for this run
  // To add a configuration to this specific run, call OrtApis::AddRunConfigEntry
  // The configuration keys and value formats are defined in
  // /include/onnxruntime/core/session/onnxruntime_run_options_config_keys.h
  onnxruntime::ConfigOptions config_options;

  OrtRunOptions() = default;
  ~OrtRunOptions() = default;
};

namespace onnxruntime {
using RunOptions = OrtRunOptions;
}  // namespace onnxruntime
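// A minimal usage sketch, using only the fields declared above (the
// OrtApis::AddRunConfigEntry entry point mentioned next to config_options is
// not defined in this header):
//
//   OrtRunOptions run_options;
//   run_options.run_tag = "batch-42";  // tags log lines for this Run()
//   run_options.terminate = false;     // set to true from another thread to
//                                      // cancel outstanding Run() calls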
0
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.9-win/src/include/fst
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.9-win/src/include/fst/script/isomorphic.h
// See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.

#ifndef FST_SCRIPT_ISOMORPHIC_H_
#define FST_SCRIPT_ISOMORPHIC_H_

#include <tuple>

#include <fst/isomorphic.h>
#include <fst/script/arg-packs.h>
#include <fst/script/fst-class.h>

namespace fst {
namespace script {

using IsomorphicInnerArgs =
    std::tuple<const FstClass &, const FstClass &, float>;

using IsomorphicArgs = WithReturnValue<bool, IsomorphicInnerArgs>;

template <class Arc>
void Isomorphic(IsomorphicArgs *args) {
  const Fst<Arc> &fst1 = *(std::get<0>(args->args).GetFst<Arc>());
  const Fst<Arc> &fst2 = *(std::get<1>(args->args).GetFst<Arc>());
  args->retval = Isomorphic(fst1, fst2, std::get<2>(args->args));
}

bool Isomorphic(const FstClass &fst1, const FstClass &fst2,
                float delta = kDelta);

}  // namespace script
}  // namespace fst

#endif  // FST_SCRIPT_ISOMORPHIC_H_
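// Usage sketch for the scripting-layer entry point declared above (a and b
// are hypothetical FstClass instances loaded elsewhere):
//
//   bool same = fst::script::Isomorphic(a, b);  // default delta = kDelta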
0
coqui_public_repos/snakepit
coqui_public_repos/snakepit/scripts/clean.sh
set -o pipefail
(
    echo "Cleaning started..."
    set -x
    rm -rf "$JOB_DIR/tmp"
    rm -rf "$JOB_DIR/src"
    echo "Cleaning done."
) 2>&1 | ts '[%Y-%m-%d %H:%M:%S] [clean]' >>"$JOB_DIR/pit.log"
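# Invocation sketch (an assumption, not stated in this file: JOB_DIR is
# exported by the snakepit job runner before this script runs; `ts` is the
# line-timestamping tool from moreutils):
#   JOB_DIR=/data/pits/123 sh clean.sh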
0
coqui_public_repos/TTS/recipes/blizzard2013
coqui_public_repos/TTS/recipes/blizzard2013/tacotron1-Capacitron/train_capacitron_t1.py
import os

from trainer import Trainer, TrainerArgs

from TTS.config.shared_configs import BaseAudioConfig
from TTS.tts.configs.shared_configs import BaseDatasetConfig, CapacitronVAEConfig
from TTS.tts.configs.tacotron_config import TacotronConfig
from TTS.tts.datasets import load_tts_samples
from TTS.tts.models.tacotron import Tacotron
from TTS.tts.utils.text.tokenizer import TTSTokenizer
from TTS.utils.audio import AudioProcessor

output_path = os.path.dirname(os.path.abspath(__file__))

data_path = "/srv/data/"

# Using LJSpeech like dataset processing for the blizzard dataset
dataset_config = BaseDatasetConfig(formatter="ljspeech", meta_file_train="metadata.csv", path=data_path)

audio_config = BaseAudioConfig(
    sample_rate=24000,
    do_trim_silence=True,
    trim_db=60.0,
    signal_norm=True,
    mel_fmin=80.0,
    mel_fmax=12000,
    spec_gain=20.0,
    log_func="np.log10",
    ref_level_db=20,
    preemphasis=0.0,
    min_level_db=-100,
)

# Using the standard Capacitron config
capacitron_config = CapacitronVAEConfig(capacitron_VAE_loss_alpha=1.0)

config = TacotronConfig(
    run_name="Blizzard-Capacitron-T1",
    audio=audio_config,
    capacitron_vae=capacitron_config,
    use_capacitron_vae=True,
    batch_size=128,  # Tune this to your gpu
    max_audio_len=6 * 24000,  # Tune this to your gpu
    min_audio_len=0.5 * 24000,
    eval_batch_size=16,
    num_loader_workers=12,
    num_eval_loader_workers=8,
    precompute_num_workers=24,
    run_eval=True,
    test_delay_epochs=5,
    r=2,
    optimizer="CapacitronOptimizer",
    optimizer_params={"RAdam": {"betas": [0.9, 0.998], "weight_decay": 1e-6}, "SGD": {"lr": 1e-5, "momentum": 0.9}},
    attention_type="graves",
    attention_heads=5,
    epochs=1000,
    text_cleaner="phoneme_cleaners",
    use_phonemes=True,
    phoneme_language="en-us",
    phonemizer="espeak",
    phoneme_cache_path=os.path.join(data_path, "phoneme_cache"),
    stopnet_pos_weight=15,
    print_step=50,
    print_eval=True,
    mixed_precision=False,
    output_path=output_path,
    datasets=[dataset_config],
    lr=1e-3,
    lr_scheduler="StepwiseGradualLR",
    lr_scheduler_params={"gradual_learning_rates": [[0, 1e-3], [2e4, 5e-4], [4e4, 3e-4], [6e4, 1e-4], [8e4, 5e-5]]},
    scheduler_after_epoch=False,  # scheduler doesn't work without this flag
    loss_masking=False,
    decoder_loss_alpha=1.0,
    postnet_loss_alpha=1.0,
    postnet_diff_spec_alpha=1.0,
    decoder_diff_spec_alpha=1.0,
    decoder_ssim_alpha=1.0,
    postnet_ssim_alpha=1.0,
)

ap = AudioProcessor(**config.audio.to_dict())

tokenizer, config = TTSTokenizer.init_from_config(config)

train_samples, eval_samples = load_tts_samples(dataset_config, eval_split=True)

model = Tacotron(config, ap, tokenizer, speaker_manager=None)

trainer = Trainer(
    TrainerArgs(),
    config,
    output_path,
    model=model,
    train_samples=train_samples,
    eval_samples=eval_samples,
)

# 🚀
trainer.fit()
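# Note on the optimizer block above (an interpretation, not documented in this
# file): "CapacitronOptimizer" pairs the two parameter groups listed in
# optimizer_params -- RAdam for the main Tacotron weights and SGD for the
# Capacitron VAE's constrained-optimization parameters -- which is why both
# dicts are supplied when use_capacitron_vae=True.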
0
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.9-win/src/extensions
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.9-win/src/extensions/compact/compact8_unweighted-fst.cc
// See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.

#include <fst/fst.h>
#include <fst/compact-fst.h>

namespace fst {

static FstRegisterer<CompactUnweightedFst<StdArc, uint8>>
    CompactUnweightedFst_StdArc_uint8_registerer;
static FstRegisterer<CompactUnweightedFst<LogArc, uint8>>
    CompactUnweightedFst_LogArc_uint8_registerer;

}  // namespace fst
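// What the two statics above do (a summary, assuming stock OpenFst
// registration semantics): constructing an FstRegisterer<F> at static-init
// time records F's type name and reader in the FST registry, so a generic
// Fst<Arc>::Read() can instantiate this compact format by name at runtime.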
0
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.9-win/src
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.9-win/src/extensions/Makefile.in
# Makefile.in generated by automake 1.15.1 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2017 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = { \ if test -z '$(MAKELEVEL)'; then \ false; \ elif test -n '$(MAKE_HOST)'; then \ true; \ elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \ true; \ else \ false; \ fi; \ } am__make_running_with_option = \ case $${target_option-} in \ ?) ;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) pkgdatadir = $(datadir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ pkglibdir = $(libdir)/@PACKAGE@ pkglibexecdir = $(libexecdir)/@PACKAGE@ am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/extensions ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/m4/ac_python_devel.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) DIST_COMMON = $(srcdir)/Makefile.am $(am__DIST_COMMON) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h \ $(top_builddir)/src/include/fst/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = RECURSIVE_TARGETS = all-recursive check-recursive cscopelist-recursive \ 
ctags-recursive dvi-recursive html-recursive info-recursive \ install-data-recursive install-dvi-recursive \ install-exec-recursive install-html-recursive \ install-info-recursive install-pdf-recursive \ install-ps-recursive install-recursive installcheck-recursive \ installdirs-recursive pdf-recursive ps-recursive \ tags-recursive uninstall-recursive am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \ distclean-recursive maintainer-clean-recursive am__recursive_targets = \ $(RECURSIVE_TARGETS) \ $(RECURSIVE_CLEAN_TARGETS) \ $(am__extra_recursive_targets) AM_RECURSIVE_TARGETS = $(am__recursive_targets:-recursive=) TAGS CTAGS \ distdir am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags DIST_SUBDIRS = compact compress const far linear lookahead pdt mpdt \ ngram python special am__DIST_COMMON = $(srcdir)/Makefile.in DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) am__relativize = \ dir0=`pwd`; \ sed_first='s,^\([^/]*\)/.*$$,\1,'; \ sed_rest='s,^[^/]*/*,,'; \ sed_last='s,^.*/\([^/]*\)$$,\1,'; \ sed_butlast='s,/*[^/]*$$,,'; \ while test -n "$$dir1"; do \ first=`echo "$$dir1" | sed -e "$$sed_first"`; \ if test "$$first" != "."; then \ if test "$$first" = ".."; then \ dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \ dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \ else \ first2=`echo "$$dir2" | sed -e "$$sed_first"`; \ if test "$$first2" = "$$first"; then \ dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \ else \ dir2="../$$dir2"; \ fi; \ dir0="$$dir0"/"$$first"; \ fi; \ fi; \ dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \ done; \ reldir="$$dir2" ACLOCAL = @ACLOCAL@ AMTAR = @AMTAR@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DL_LIBS = @DL_LIBS@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GREP = @GREP@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ LD = @LD@ LDFLAGS = @LDFLAGS@ LIBOBJS = @LIBOBJS@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBOBJS = @LTLIBOBJS@ LT_SYS_LIBRARY_PATH = @LT_SYS_LIBRARY_PATH@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ NM = @NM@ NMEDIT = @NMEDIT@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OTOOL = @OTOOL@ OTOOL64 = 
@OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PYTHON = @PYTHON@ PYTHON_CPPFLAGS = @PYTHON_CPPFLAGS@ PYTHON_EXEC_PREFIX = @PYTHON_EXEC_PREFIX@ PYTHON_EXTRA_LDFLAGS = @PYTHON_EXTRA_LDFLAGS@ PYTHON_EXTRA_LIBS = @PYTHON_EXTRA_LIBS@ PYTHON_LDFLAGS = @PYTHON_LDFLAGS@ PYTHON_PLATFORM = @PYTHON_PLATFORM@ PYTHON_PREFIX = @PYTHON_PREFIX@ PYTHON_SITE_PKG = @PYTHON_SITE_PKG@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ STRIP = @STRIP@ VERSION = @VERSION@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ bindir = @bindir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ datadir = @datadir@ datarootdir = @datarootdir@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libfstdir = @libfstdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgpyexecdir = @pkgpyexecdir@ pkgpythondir = @pkgpythondir@ prefix = @prefix@ program_transform_name = @program_transform_name@ psdir = @psdir@ pyexecdir = @pyexecdir@ pythondir = @pythondir@ runstatedir = @runstatedir@ sbindir = @sbindir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ @HAVE_COMPACT_TRUE@compactdir = compact @HAVE_COMPRESS_TRUE@compressdir = compress @HAVE_CONST_TRUE@constdir = const @HAVE_FAR_TRUE@fardir = far @HAVE_GRM_TRUE@fardir = far @HAVE_PYTHON_TRUE@fardir = far @HAVE_GRM_TRUE@pdtdir = pdt @HAVE_MPDT_TRUE@pdtdir = pdt @HAVE_PDT_TRUE@pdtdir = pdt @HAVE_GRM_TRUE@mpdtdir = mpdt @HAVE_MPDT_TRUE@mpdtdir = mpdt @HAVE_LINEAR_TRUE@lineardir = linear @HAVE_LOOKAHEAD_TRUE@lookaheaddir = lookahead @HAVE_NGRAM_TRUE@ngramdir = ngram @HAVE_PYTHON_TRUE@pywrapfstdir = python @HAVE_SPECIAL_TRUE@specialdir = special SUBDIRS = $(compactdir) $(compressdir) $(constdir) $(fardir) $(lineardir) \ $(lookaheaddir) $(pdtdir) $(mpdtdir) $(ngramdir) $(pywrapfstdir) \ $(specialdir) all: all-recursive .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/extensions/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/extensions/Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' 
in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs # This directory's subdirectories are mostly independent; you can cd # into them and run 'make' without going through this Makefile. # To change the values of 'make' variables: instead of editing Makefiles, # (1) if the variable is set in 'config.status', edit 'config.status' # (which will cause the Makefiles to be regenerated when you run 'make'); # (2) otherwise, pass the desired values on the 'make' command line. $(am__recursive_targets): @fail=; \ if $(am__make_keepgoing); then \ failcom='fail=yes'; \ else \ failcom='exit 1'; \ fi; \ dot_seen=no; \ target=`echo $@ | sed s/-recursive//`; \ case "$@" in \ distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \ *) list='$(SUBDIRS)' ;; \ esac; \ for subdir in $$list; do \ echo "Making $$target in $$subdir"; \ if test "$$subdir" = "."; then \ dot_seen=yes; \ local_target="$$target-am"; \ else \ local_target="$$target"; \ fi; \ ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done; \ if test "$$dot_seen" = "no"; then \ $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \ fi; test -z "$$fail" ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-recursive TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ include_option=--etags-include; \ empty_fix=.; \ else \ include_option=--include; \ empty_fix=; \ fi; \ list='$(SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ test ! 
-f $$subdir/TAGS || \ set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \ fi; \ done; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-recursive CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscopelist: cscopelist-recursive cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ $(am__make_dryrun) \ || test -d "$(distdir)/$$subdir" \ || $(MKDIR_P) "$(distdir)/$$subdir" \ || exit 1; \ dir1=$$subdir; dir2="$(distdir)/$$subdir"; \ $(am__relativize); \ new_distdir=$$reldir; \ dir1=$$subdir; dir2="$(top_distdir)"; \ $(am__relativize); \ new_top_distdir=$$reldir; \ echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \ echo " am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \ ($(am__cd) $$subdir && \ $(MAKE) $(AM_MAKEFLAGS) \ top_distdir="$$new_top_distdir" \ distdir="$$new_distdir" \ am__remove_distdir=: \ am__skip_length_check=: \ am__skip_mode_fix=: \ distdir) \ || exit 1; \ fi; \ done check-am: all-am check: check-recursive all-am: Makefile installdirs: installdirs-recursive installdirs-am: install: install-recursive install-exec: install-exec-recursive install-data: install-data-recursive uninstall: uninstall-recursive install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-recursive install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-recursive clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-recursive -rm -f Makefile distclean-am: clean-am distclean-generic distclean-tags dvi: dvi-recursive dvi-am: html: html-recursive html-am: info: info-recursive info-am: install-data-am: install-dvi: install-dvi-recursive install-dvi-am: install-exec-am: install-html: install-html-recursive install-html-am: install-info: install-info-recursive install-info-am: install-man: install-pdf: install-pdf-recursive install-pdf-am: install-ps: install-ps-recursive install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-recursive -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-recursive mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-recursive pdf-am: ps: ps-recursive ps-am: uninstall-am: .MAKE: $(am__recursive_targets) install-am install-strip .PHONY: $(am__recursive_targets) CTAGS GTAGS TAGS all all-am check \ check-am clean clean-generic clean-libtool cscopelist-am ctags \ ctags-am distclean distclean-generic distclean-libtool \ distclean-tags distdir dvi dvi-am html html-am info info-am \ install install-am install-data install-data-am install-dvi \ install-dvi-am install-exec install-exec-am install-html \ install-html-am install-info install-info-am install-man \ install-pdf install-pdf-am install-ps install-ps-am \ install-strip installcheck installcheck-am installdirs \ installdirs-am maintainer-clean maintainer-clean-generic \ mostlyclean mostlyclean-generic mostlyclean-libtool pdf pdf-am \ ps ps-am tags tags-am uninstall uninstall-am .PRECIOUS: Makefile # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT:
0
coqui_public_repos/TTS/TTS/vocoder
coqui_public_repos/TTS/TTS/vocoder/layers/qmf.dat
0.0000000e+000 -5.5252865e-004 -5.6176926e-004 -4.9475181e-004 -4.8752280e-004 -4.8937912e-004 -5.0407143e-004 -5.2265643e-004 -5.4665656e-004 -5.6778026e-004 -5.8709305e-004 -6.1327474e-004 -6.3124935e-004 -6.5403334e-004 -6.7776908e-004 -6.9416146e-004 -7.1577365e-004 -7.2550431e-004 -7.4409419e-004 -7.4905981e-004 -7.6813719e-004 -7.7248486e-004 -7.8343323e-004 -7.7798695e-004 -7.8036647e-004 -7.8014496e-004 -7.7579773e-004 -7.6307936e-004 -7.5300014e-004 -7.3193572e-004 -7.2153920e-004 -6.9179375e-004 -6.6504151e-004 -6.3415949e-004 -5.9461189e-004 -5.5645764e-004 -5.1455722e-004 -4.6063255e-004 -4.0951215e-004 -3.5011759e-004 -2.8969812e-004 -2.0983373e-004 -1.4463809e-004 -6.1733441e-005 1.3494974e-005 1.0943831e-004 2.0430171e-004 2.9495311e-004 4.0265402e-004 5.1073885e-004 6.2393761e-004 7.4580259e-004 8.6084433e-004 9.8859883e-004 1.1250155e-003 1.2577885e-003 1.3902495e-003 1.5443220e-003 1.6868083e-003 1.8348265e-003 1.9841141e-003 2.1461584e-003 2.3017255e-003 2.4625617e-003 2.6201759e-003 2.7870464e-003 2.9469448e-003 3.1125421e-003 3.2739613e-003 3.4418874e-003 3.6008268e-003 3.7603923e-003 3.9207432e-003 4.0819753e-003 4.2264269e-003 4.3730720e-003 4.5209853e-003 4.6606461e-003 4.7932561e-003 4.9137604e-003 5.0393023e-003 5.1407354e-003 5.2461166e-003 5.3471681e-003 5.4196776e-003 5.4876040e-003 5.5475715e-003 5.5938023e-003 5.6220643e-003 5.6455197e-003 5.6389200e-003 5.6266114e-003 5.5917129e-003 5.5404364e-003 5.4753783e-003 5.3838976e-003 5.2715759e-003 5.1382275e-003 4.9839688e-003 4.8109469e-003 4.6039530e-003 4.3801862e-003 4.1251642e-003 3.8456408e-003 3.5401247e-003 3.2091886e-003 2.8446758e-003 2.4508540e-003 2.0274176e-003 1.5784683e-003 1.0902329e-003 5.8322642e-004 2.7604519e-005 -5.4642809e-004 -1.1568136e-003 -1.8039473e-003 -2.4826724e-003 -3.1933778e-003 -3.9401124e-003 -4.7222596e-003 -5.5337211e-003 -6.3792293e-003 -7.2615817e-003 -8.1798233e-003 -9.1325330e-003 -1.0115022e-002 -1.1131555e-002 -1.2185000e-002 -1.3271822e-002 -1.4390467e-002 -1.5540555e-002 -1.6732471e-002 -1.7943338e-002 -1.9187243e-002 -2.0453179e-002 -2.1746755e-002 -2.3068017e-002 -2.4416099e-002 -2.5787585e-002 -2.7185943e-002 -2.8607217e-002 -3.0050266e-002 -3.1501761e-002 -3.2975408e-002 -3.4462095e-002 -3.5969756e-002 -3.7481285e-002 -3.9005368e-002 -4.0534917e-002 -4.2064909e-002 -4.3609754e-002 -4.5148841e-002 -4.6684303e-002 -4.8216572e-002 -4.9738576e-002 -5.1255616e-002 -5.2763075e-002 -5.4245277e-002 -5.5717365e-002 -5.7161645e-002 -5.8591568e-002 -5.9983748e-002 -6.1345517e-002 -6.2685781e-002 -6.3971590e-002 -6.5224711e-002 -6.6436751e-002 -6.7607599e-002 -6.8704383e-002 -6.9763024e-002 -7.0762871e-002 -7.1700267e-002 -7.2568258e-002 -7.3362026e-002 -7.4100364e-002 -7.4745256e-002 -7.5313734e-002 -7.5800836e-002 -7.6199248e-002 -7.6499217e-002 -7.6709349e-002 -7.6817398e-002 -7.6823001e-002 -7.6720492e-002 -7.6505072e-002 -7.6174832e-002 -7.5730576e-002 -7.5157626e-002 -7.4466439e-002 -7.3640601e-002 -7.2677464e-002 -7.1582636e-002 -7.0353307e-002 -6.8966401e-002 -6.7452502e-002 -6.5769067e-002 -6.3944481e-002 -6.1960278e-002 -5.9816657e-002 -5.7515269e-002 -5.5046003e-002 -5.2409382e-002 -4.9597868e-002 -4.6630331e-002 -4.3476878e-002 -4.0145828e-002 -3.6641812e-002 -3.2958393e-002 -2.9082401e-002 -2.5030756e-002 -2.0799707e-002 -1.6370126e-002 -1.1762383e-002 -6.9636862e-003 -1.9765601e-003 3.2086897e-003 8.5711749e-003 1.4128883e-002 1.9883413e-002 2.5822729e-002 3.1953127e-002 3.8277657e-002 4.4780682e-002 5.1480418e-002 5.8370533e-002 6.5440985e-002 7.2694330e-002 
8.0137293e-002 8.7754754e-002 9.5553335e-002 1.0353295e-001 1.1168269e-001 1.2000780e-001 1.2850029e-001 1.3715518e-001 1.4597665e-001 1.5496071e-001 1.6409589e-001 1.7338082e-001 1.8281725e-001 1.9239667e-001 2.0212502e-001 2.1197359e-001 2.2196527e-001 2.3206909e-001 2.4230169e-001 2.5264803e-001 2.6310533e-001 2.7366340e-001 2.8432142e-001 2.9507167e-001 3.0590986e-001 3.1682789e-001 3.2781137e-001 3.3887227e-001 3.4999141e-001 3.6115899e-001 3.7237955e-001 3.8363500e-001 3.9492118e-001 4.0623177e-001 4.1756969e-001 4.2891199e-001 4.4025538e-001 4.5159965e-001 4.6293081e-001 4.7424532e-001 4.8552531e-001 4.9677083e-001 5.0798175e-001 5.1912350e-001 5.3022409e-001 5.4125534e-001 5.5220513e-001 5.6307891e-001 5.7385241e-001 5.8454032e-001 5.9511231e-001 6.0557835e-001 6.1591099e-001 6.2612427e-001 6.3619801e-001 6.4612697e-001 6.5590163e-001 6.6551399e-001 6.7496632e-001 6.8423533e-001 6.9332824e-001 7.0223887e-001 7.1094104e-001 7.1944626e-001 7.2774489e-001 7.3582118e-001 7.4368279e-001 7.5131375e-001 7.5870808e-001 7.6586749e-001 7.7277809e-001 7.7942875e-001 7.8583531e-001 7.9197358e-001 7.9784664e-001 8.0344858e-001 8.0876950e-001 8.1381913e-001 8.1857760e-001 8.2304199e-001 8.2722753e-001 8.3110385e-001 8.3469374e-001 8.3797173e-001 8.4095414e-001 8.4362383e-001 8.4598185e-001 8.4803158e-001 8.4978052e-001 8.5119715e-001 8.5230470e-001 8.5310209e-001 8.5357206e-001 8.5373856e-001 8.5357206e-001 8.5310209e-001 8.5230470e-001 8.5119715e-001 8.4978052e-001 8.4803158e-001 8.4598185e-001 8.4362383e-001 8.4095414e-001 8.3797173e-001 8.3469374e-001 8.3110385e-001 8.2722753e-001 8.2304199e-001 8.1857760e-001 8.1381913e-001 8.0876950e-001 8.0344858e-001 7.9784664e-001 7.9197358e-001 7.8583531e-001 7.7942875e-001 7.7277809e-001 7.6586749e-001 7.5870808e-001 7.5131375e-001 7.4368279e-001 7.3582118e-001 7.2774489e-001 7.1944626e-001 7.1094104e-001 7.0223887e-001 6.9332824e-001 6.8423533e-001 6.7496632e-001 6.6551399e-001 6.5590163e-001 6.4612697e-001 6.3619801e-001 6.2612427e-001 6.1591099e-001 6.0557835e-001 5.9511231e-001 5.8454032e-001 5.7385241e-001 5.6307891e-001 5.5220513e-001 5.4125534e-001 5.3022409e-001 5.1912350e-001 5.0798175e-001 4.9677083e-001 4.8552531e-001 4.7424532e-001 4.6293081e-001 4.5159965e-001 4.4025538e-001 4.2891199e-001 4.1756969e-001 4.0623177e-001 3.9492118e-001 3.8363500e-001 3.7237955e-001 3.6115899e-001 3.4999141e-001 3.3887227e-001 3.2781137e-001 3.1682789e-001 3.0590986e-001 2.9507167e-001 2.8432142e-001 2.7366340e-001 2.6310533e-001 2.5264803e-001 2.4230169e-001 2.3206909e-001 2.2196527e-001 2.1197359e-001 2.0212502e-001 1.9239667e-001 1.8281725e-001 1.7338082e-001 1.6409589e-001 1.5496071e-001 1.4597665e-001 1.3715518e-001 1.2850029e-001 1.2000780e-001 1.1168269e-001 1.0353295e-001 9.5553335e-002 8.7754754e-002 8.0137293e-002 7.2694330e-002 6.5440985e-002 5.8370533e-002 5.1480418e-002 4.4780682e-002 3.8277657e-002 3.1953127e-002 2.5822729e-002 1.9883413e-002 1.4128883e-002 8.5711749e-003 3.2086897e-003 -1.9765601e-003 -6.9636862e-003 -1.1762383e-002 -1.6370126e-002 -2.0799707e-002 -2.5030756e-002 -2.9082401e-002 -3.2958393e-002 -3.6641812e-002 -4.0145828e-002 -4.3476878e-002 -4.6630331e-002 -4.9597868e-002 -5.2409382e-002 -5.5046003e-002 -5.7515269e-002 -5.9816657e-002 -6.1960278e-002 -6.3944481e-002 -6.5769067e-002 -6.7452502e-002 -6.8966401e-002 -7.0353307e-002 -7.1582636e-002 -7.2677464e-002 -7.3640601e-002 -7.4466439e-002 -7.5157626e-002 -7.5730576e-002 -7.6174832e-002 -7.6505072e-002 -7.6720492e-002 -7.6823001e-002 -7.6817398e-002 -7.6709349e-002 
-7.6499217e-002 -7.6199248e-002 -7.5800836e-002 -7.5313734e-002 -7.4745256e-002 -7.4100364e-002 -7.3362026e-002 -7.2568258e-002 -7.1700267e-002 -7.0762871e-002 -6.9763024e-002 -6.8704383e-002 -6.7607599e-002 -6.6436751e-002 -6.5224711e-002 -6.3971590e-002 -6.2685781e-002 -6.1345517e-002 -5.9983748e-002 -5.8591568e-002 -5.7161645e-002 -5.5717365e-002 -5.4245277e-002 -5.2763075e-002 -5.1255616e-002 -4.9738576e-002 -4.8216572e-002 -4.6684303e-002 -4.5148841e-002 -4.3609754e-002 -4.2064909e-002 -4.0534917e-002 -3.9005368e-002 -3.7481285e-002 -3.5969756e-002 -3.4462095e-002 -3.2975408e-002 -3.1501761e-002 -3.0050266e-002 -2.8607217e-002 -2.7185943e-002 -2.5787585e-002 -2.4416099e-002 -2.3068017e-002 -2.1746755e-002 -2.0453179e-002 -1.9187243e-002 -1.7943338e-002 -1.6732471e-002 -1.5540555e-002 -1.4390467e-002 -1.3271822e-002 -1.2185000e-002 -1.1131555e-002 -1.0115022e-002 -9.1325330e-003 -8.1798233e-003 -7.2615817e-003 -6.3792293e-003 -5.5337211e-003 -4.7222596e-003 -3.9401124e-003 -3.1933778e-003 -2.4826724e-003 -1.8039473e-003 -1.1568136e-003 -5.4642809e-004 2.7604519e-005 5.8322642e-004 1.0902329e-003 1.5784683e-003 2.0274176e-003 2.4508540e-003 2.8446758e-003 3.2091886e-003 3.5401247e-003 3.8456408e-003 4.1251642e-003 4.3801862e-003 4.6039530e-003 4.8109469e-003 4.9839688e-003 5.1382275e-003 5.2715759e-003 5.3838976e-003 5.4753783e-003 5.5404364e-003 5.5917129e-003 5.6266114e-003 5.6389200e-003 5.6455197e-003 5.6220643e-003 5.5938023e-003 5.5475715e-003 5.4876040e-003 5.4196776e-003 5.3471681e-003 5.2461166e-003 5.1407354e-003 5.0393023e-003 4.9137604e-003 4.7932561e-003 4.6606461e-003 4.5209853e-003 4.3730720e-003 4.2264269e-003 4.0819753e-003 3.9207432e-003 3.7603923e-003 3.6008268e-003 3.4418874e-003 3.2739613e-003 3.1125421e-003 2.9469448e-003 2.7870464e-003 2.6201759e-003 2.4625617e-003 2.3017255e-003 2.1461584e-003 1.9841141e-003 1.8348265e-003 1.6868083e-003 1.5443220e-003 1.3902495e-003 1.2577885e-003 1.1250155e-003 9.8859883e-004 8.6084433e-004 7.4580259e-004 6.2393761e-004 5.1073885e-004 4.0265402e-004 2.9495311e-004 2.0430171e-004 1.0943831e-004 1.3494974e-005 -6.1733441e-005 -1.4463809e-004 -2.0983373e-004 -2.8969812e-004 -3.5011759e-004 -4.0951215e-004 -4.6063255e-004 -5.1455722e-004 -5.5645764e-004 -5.9461189e-004 -6.3415949e-004 -6.6504151e-004 -6.9179375e-004 -7.2153920e-004 -7.3193572e-004 -7.5300014e-004 -7.6307936e-004 -7.7579773e-004 -7.8014496e-004 -7.8036647e-004 -7.7798695e-004 -7.8343323e-004 -7.7248486e-004 -7.6813719e-004 -7.4905981e-004 -7.4409419e-004 -7.2550431e-004 -7.1577365e-004 -6.9416146e-004 -6.7776908e-004 -6.5403334e-004 -6.3124935e-004 -6.1327474e-004 -5.8709305e-004 -5.6778026e-004 -5.4665656e-004 -5.2265643e-004 -5.0407143e-004 -4.8937912e-004 -4.8752280e-004 -4.9475181e-004 -5.6176926e-004 -5.5252865e-004
0
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.7/src
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.7/src/test/weight_test.cc
// See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.
//
// Regression test for FST weights.

#include <cstdlib>
#include <ctime>

#include <fst/flags.h>
#include <fst/log.h>
#include <fst/expectation-weight.h>
#include <fst/float-weight.h>
#include <fst/lexicographic-weight.h>
#include <fst/power-weight.h>
#include <fst/product-weight.h>
#include <fst/set-weight.h>
#include <fst/signed-log-weight.h>
#include <fst/sparse-power-weight.h>
#include <fst/string-weight.h>
#include <fst/union-weight.h>
#include "./weight-tester.h"

DEFINE_int32(seed, -1, "random seed");
DEFINE_int32(repeat, 10000, "number of test repetitions");

namespace {

using fst::Adder;
using fst::ExpectationWeight;
using fst::GALLIC;
using fst::GallicWeight;
using fst::LexicographicWeight;
using fst::LogWeight;
using fst::LogWeightTpl;
using fst::MinMaxWeight;
using fst::MinMaxWeightTpl;
using fst::NaturalLess;
using fst::PowerWeight;
using fst::ProductWeight;
using fst::SetWeight;
using fst::SET_INTERSECT_UNION;
using fst::SET_UNION_INTERSECT;
using fst::SET_BOOLEAN;
using fst::SignedLogWeight;
using fst::SignedLogWeightTpl;
using fst::SparsePowerWeight;
using fst::StringWeight;
using fst::STRING_LEFT;
using fst::STRING_RIGHT;
using fst::TropicalWeight;
using fst::TropicalWeightTpl;
using fst::UnionWeight;
using fst::WeightConvert;
using fst::WeightGenerate;
using fst::WeightTester;

template <class T>
void TestTemplatedWeights(int repeat) {
  using TropicalWeightGenerate = WeightGenerate<TropicalWeightTpl<T>>;
  TropicalWeightGenerate tropical_generate;
  WeightTester<TropicalWeightTpl<T>, TropicalWeightGenerate> tropical_tester(
      tropical_generate);
  tropical_tester.Test(repeat);

  using LogWeightGenerate = WeightGenerate<LogWeightTpl<T>>;
  LogWeightGenerate log_generate;
  WeightTester<LogWeightTpl<T>, LogWeightGenerate> log_tester(log_generate);
  log_tester.Test(repeat);

  using MinMaxWeightGenerate = WeightGenerate<MinMaxWeightTpl<T>>;
  MinMaxWeightGenerate minmax_generate(true);
  WeightTester<MinMaxWeightTpl<T>, MinMaxWeightGenerate> minmax_tester(
      minmax_generate);
  minmax_tester.Test(repeat);

  using SignedLogWeightGenerate = WeightGenerate<SignedLogWeightTpl<T>>;
  SignedLogWeightGenerate signedlog_generate;
  WeightTester<SignedLogWeightTpl<T>, SignedLogWeightGenerate>
      signedlog_tester(signedlog_generate);
  signedlog_tester.Test(repeat);
}

template <class Weight>
void TestAdder(int n) {
  Weight sum = Weight::Zero();
  Adder<Weight> adder;
  for (int i = 0; i < n; ++i) {
    sum = Plus(sum, Weight::One());
    adder.Add(Weight::One());
  }
  CHECK(ApproxEqual(sum, adder.Sum()));
}

template <class Weight>
void TestSignedAdder(int n) {
  Weight sum = Weight::Zero();
  Adder<Weight> adder;
  const Weight minus_one = Minus(Weight::Zero(), Weight::One());
  for (int i = 0; i < n; ++i) {
    if (i < n/4 || i > 3*n/4) {
      sum = Plus(sum, Weight::One());
      adder.Add(Weight::One());
    } else {
      sum = Minus(sum, Weight::One());
      adder.Add(minus_one);
    }
  }
  CHECK(ApproxEqual(sum, adder.Sum()));
}

template <typename Weight1, typename Weight2>
void TestWeightConversion(Weight1 w1) {
  // Tests round-trip conversion.
  WeightConvert<Weight2, Weight1> to_w1_;
  WeightConvert<Weight1, Weight2> to_w2_;
  Weight2 w2 = to_w2_(w1);
  Weight1 nw1 = to_w1_(w2);
  CHECK_EQ(w1, nw1);
}

template <typename FromWeight, typename ToWeight>
void TestWeightCopy(FromWeight w) {
  // Test copy constructor.
  const ToWeight to_copied(w);
  const FromWeight roundtrip_copied(to_copied);
  CHECK_EQ(w, roundtrip_copied);

  // Test copy assign.
  ToWeight to_copy_assigned;
  to_copy_assigned = w;
  CHECK_EQ(to_copied, to_copy_assigned);

  FromWeight roundtrip_copy_assigned;
  roundtrip_copy_assigned = to_copy_assigned;
  CHECK_EQ(w, roundtrip_copy_assigned);
}

template <typename FromWeight, typename ToWeight>
void TestWeightMove(FromWeight w) {
  // Assume FromWeight -> FromWeight copy works.
  const FromWeight orig(w);
  ToWeight to_moved(std::move(w));
  const FromWeight roundtrip_moved(std::move(to_moved));
  CHECK_EQ(orig, roundtrip_moved);

  // Test move assign.
  w = orig;
  ToWeight to_move_assigned;
  to_move_assigned = std::move(w);
  FromWeight roundtrip_move_assigned;
  roundtrip_move_assigned = std::move(to_move_assigned);
  CHECK_EQ(orig, roundtrip_move_assigned);
}

template <class Weight>
void TestImplicitConversion() {
  // Only test a few of the operations; assumes they are implemented with the
  // same pattern.
  CHECK(Weight(2.0f) == 2.0f);
  CHECK(Weight(2.0) == 2.0);
  CHECK(2.0f == Weight(2.0f));
  CHECK(2.0 == Weight(2.0));

  CHECK_EQ(Weight::Zero(), Times(Weight::Zero(), 3.0f));
  CHECK_EQ(Weight::Zero(), Times(Weight::Zero(), 3.0));
  CHECK_EQ(Weight::Zero(), Times(3.0, Weight::Zero()));

  CHECK_EQ(Weight(3.0), Plus(Weight::Zero(), 3.0f));
  CHECK_EQ(Weight(3.0), Plus(Weight::Zero(), 3.0));
  CHECK_EQ(Weight(3.0), Plus(3.0, Weight::Zero()));
}

void TestPowerWeightGetSetValue() {
  PowerWeight<LogWeight, 3> w;
  // LogWeight has unspecified initial value, so don't check it.
  w.SetValue(0, LogWeight(2));
  w.SetValue(1, LogWeight(3));
  CHECK_EQ(LogWeight(2), w.Value(0));
  CHECK_EQ(LogWeight(3), w.Value(1));
}

void TestSparsePowerWeightGetSetValue() {
  const LogWeight default_value(17);
  SparsePowerWeight<LogWeight> w;
  w.SetDefaultValue(default_value);

  // All gets should be the default.
  CHECK_EQ(default_value, w.Value(0));
  CHECK_EQ(default_value, w.Value(100));

  // First set should fill first_.
  w.SetValue(10, LogWeight(10));
  CHECK_EQ(LogWeight(10), w.Value(10));
  w.SetValue(10, LogWeight(20));
  CHECK_EQ(LogWeight(20), w.Value(10));

  // Add a smaller index.
  w.SetValue(5, LogWeight(5));
  CHECK_EQ(LogWeight(5), w.Value(5));
  CHECK_EQ(LogWeight(20), w.Value(10));

  // Add some larger indices.
  w.SetValue(30, LogWeight(30));
  CHECK_EQ(LogWeight(5), w.Value(5));
  CHECK_EQ(LogWeight(20), w.Value(10));
  CHECK_EQ(LogWeight(30), w.Value(30));

  w.SetValue(29, LogWeight(29));
  CHECK_EQ(LogWeight(5), w.Value(5));
  CHECK_EQ(LogWeight(20), w.Value(10));
  CHECK_EQ(LogWeight(29), w.Value(29));
  CHECK_EQ(LogWeight(30), w.Value(30));

  w.SetValue(31, LogWeight(31));
  CHECK_EQ(LogWeight(5), w.Value(5));
  CHECK_EQ(LogWeight(20), w.Value(10));
  CHECK_EQ(LogWeight(29), w.Value(29));
  CHECK_EQ(LogWeight(30), w.Value(30));
  CHECK_EQ(LogWeight(31), w.Value(31));

  // Replace a value.
  w.SetValue(30, LogWeight(60));
  CHECK_EQ(LogWeight(60), w.Value(30));

  // Replace a value with the default.
  CHECK_EQ(5, w.Size());
  w.SetValue(30, default_value);
  CHECK_EQ(default_value, w.Value(30));
  CHECK_EQ(4, w.Size());

  // Replace lowest index by the default value.
  w.SetValue(5, default_value);
  CHECK_EQ(default_value, w.Value(5));
  CHECK_EQ(3, w.Size());

  // Clear out everything.
  w.SetValue(31, default_value);
  w.SetValue(29, default_value);
  w.SetValue(10, default_value);
  CHECK_EQ(0, w.Size());

  CHECK_EQ(default_value, w.Value(5));
  CHECK_EQ(default_value, w.Value(10));
  CHECK_EQ(default_value, w.Value(29));
  CHECK_EQ(default_value, w.Value(30));
  CHECK_EQ(default_value, w.Value(31));
}

}  // namespace

int main(int argc, char **argv) {
  std::set_new_handler(FailedNewHandler);
  SET_FLAGS(argv[0], &argc, &argv, true);
  LOG(INFO) << "Seed = " << FLAGS_seed;
  srand(FLAGS_seed);

  TestTemplatedWeights<float>(FLAGS_repeat);
  TestTemplatedWeights<double>(FLAGS_repeat);
  FLAGS_fst_weight_parentheses = "()";
  TestTemplatedWeights<float>(FLAGS_repeat);
  TestTemplatedWeights<double>(FLAGS_repeat);
  FLAGS_fst_weight_parentheses = "";

  // Makes sure type names for templated weights are consistent.
  CHECK(TropicalWeight::Type() == "tropical");
  CHECK(TropicalWeightTpl<double>::Type() != TropicalWeightTpl<float>::Type());
  CHECK(LogWeight::Type() == "log");
  CHECK(LogWeightTpl<double>::Type() != LogWeightTpl<float>::Type());
  TropicalWeightTpl<double> w(2.0);
  TropicalWeight tw(2.0);

  TestAdder<TropicalWeight>(1000);
  TestAdder<LogWeight>(1000);
  TestSignedAdder<SignedLogWeight>(1000);

  TestImplicitConversion<LogWeight>();
  TestImplicitConversion<TropicalWeight>();
  TestImplicitConversion<MinMaxWeight>();

  TestWeightConversion<TropicalWeight, LogWeight>(2.0);

  using LeftStringWeight = StringWeight<int>;
  using LeftStringWeightGenerate = WeightGenerate<LeftStringWeight>;
  LeftStringWeightGenerate left_string_generate;
  WeightTester<LeftStringWeight, LeftStringWeightGenerate> left_string_tester(
      left_string_generate);
  left_string_tester.Test(FLAGS_repeat);

  using RightStringWeight = StringWeight<int, STRING_RIGHT>;
  using RightStringWeightGenerate = WeightGenerate<RightStringWeight>;
  RightStringWeightGenerate right_string_generate;
  WeightTester<RightStringWeight, RightStringWeightGenerate>
      right_string_tester(right_string_generate);
  right_string_tester.Test(FLAGS_repeat);

  // STRING_RESTRICT not tested since it requires equal strings,
  // so would fail.

  using IUSetWeight = SetWeight<int, SET_INTERSECT_UNION>;
  using IUSetWeightGenerate = WeightGenerate<IUSetWeight>;
  IUSetWeightGenerate iu_set_generate;
  WeightTester<IUSetWeight, IUSetWeightGenerate> iu_set_tester(iu_set_generate);
  iu_set_tester.Test(FLAGS_repeat);

  using UISetWeight = SetWeight<int, SET_UNION_INTERSECT>;
  using UISetWeightGenerate = WeightGenerate<UISetWeight>;
  UISetWeightGenerate ui_set_generate;
  WeightTester<UISetWeight, UISetWeightGenerate> ui_set_tester(ui_set_generate);
  ui_set_tester.Test(FLAGS_repeat);

  // SET_INTERSECT_UNION_RESTRICT not tested since it requires equal sets,
  // so would fail.

  using BoolSetWeight = SetWeight<int, SET_BOOLEAN>;
  using BoolSetWeightGenerate = WeightGenerate<BoolSetWeight>;
  BoolSetWeightGenerate bool_set_generate;
  WeightTester<BoolSetWeight, BoolSetWeightGenerate>
      bool_set_tester(bool_set_generate);
  bool_set_tester.Test(FLAGS_repeat);

  TestWeightConversion<IUSetWeight, UISetWeight>(iu_set_generate());

  TestWeightCopy<IUSetWeight, UISetWeight>(iu_set_generate());
  TestWeightCopy<IUSetWeight, BoolSetWeight>(iu_set_generate());
  TestWeightCopy<UISetWeight, IUSetWeight>(ui_set_generate());
  TestWeightCopy<UISetWeight, BoolSetWeight>(ui_set_generate());
  TestWeightCopy<BoolSetWeight, IUSetWeight>(bool_set_generate());
  TestWeightCopy<BoolSetWeight, UISetWeight>(bool_set_generate());

  TestWeightMove<IUSetWeight, UISetWeight>(iu_set_generate());
  TestWeightMove<IUSetWeight, BoolSetWeight>(iu_set_generate());
  TestWeightMove<UISetWeight, IUSetWeight>(ui_set_generate());
  TestWeightMove<UISetWeight, BoolSetWeight>(ui_set_generate());
  TestWeightMove<BoolSetWeight, IUSetWeight>(bool_set_generate());
  TestWeightMove<BoolSetWeight, UISetWeight>(bool_set_generate());

  // COMPOSITE WEIGHTS AND TESTERS - DEFINITIONS

  using TropicalGallicWeight = GallicWeight<int, TropicalWeight>;
  using TropicalGallicWeightGenerate = WeightGenerate<TropicalGallicWeight>;
  TropicalGallicWeightGenerate tropical_gallic_generate(true);
  WeightTester<TropicalGallicWeight, TropicalGallicWeightGenerate>
      tropical_gallic_tester(tropical_gallic_generate);

  using TropicalGenGallicWeight = GallicWeight<int, TropicalWeight, GALLIC>;
  using TropicalGenGallicWeightGenerate =
      WeightGenerate<TropicalGenGallicWeight>;
  TropicalGenGallicWeightGenerate tropical_gen_gallic_generate(false);
  WeightTester<TropicalGenGallicWeight, TropicalGenGallicWeightGenerate>
      tropical_gen_gallic_tester(tropical_gen_gallic_generate);

  using TropicalProductWeight = ProductWeight<TropicalWeight, TropicalWeight>;
  using TropicalProductWeightGenerate = WeightGenerate<TropicalProductWeight>;
  TropicalProductWeightGenerate tropical_product_generate;
  WeightTester<TropicalProductWeight, TropicalProductWeightGenerate>
      tropical_product_tester(tropical_product_generate);

  using TropicalLexicographicWeight =
      LexicographicWeight<TropicalWeight, TropicalWeight>;
  using TropicalLexicographicWeightGenerate =
      WeightGenerate<TropicalLexicographicWeight>;
  TropicalLexicographicWeightGenerate tropical_lexicographic_generate;
  WeightTester<TropicalLexicographicWeight,
               TropicalLexicographicWeightGenerate>
      tropical_lexicographic_tester(tropical_lexicographic_generate);

  using TropicalCubeWeight = PowerWeight<TropicalWeight, 3>;
  using TropicalCubeWeightGenerate = WeightGenerate<TropicalCubeWeight>;
  TropicalCubeWeightGenerate tropical_cube_generate;
  WeightTester<TropicalCubeWeight, TropicalCubeWeightGenerate>
      tropical_cube_tester(tropical_cube_generate);

  using FirstNestedProductWeight =
      ProductWeight<TropicalProductWeight, TropicalWeight>;
  using FirstNestedProductWeightGenerate =
      WeightGenerate<FirstNestedProductWeight>;
  FirstNestedProductWeightGenerate first_nested_product_generate;
  WeightTester<FirstNestedProductWeight, FirstNestedProductWeightGenerate>
      first_nested_product_tester(first_nested_product_generate);

  using SecondNestedProductWeight =
      ProductWeight<TropicalWeight, TropicalProductWeight>;
  using SecondNestedProductWeightGenerate =
      WeightGenerate<SecondNestedProductWeight>;
  SecondNestedProductWeightGenerate second_nested_product_generate;
  WeightTester<SecondNestedProductWeight, SecondNestedProductWeightGenerate>
      second_nested_product_tester(second_nested_product_generate);

  using NestedProductCubeWeight = PowerWeight<FirstNestedProductWeight, 3>;
  using NestedProductCubeWeightGenerate =
      WeightGenerate<NestedProductCubeWeight>;
  NestedProductCubeWeightGenerate nested_product_cube_generate;
  WeightTester<NestedProductCubeWeight, NestedProductCubeWeightGenerate>
      nested_product_cube_tester(nested_product_cube_generate);

  using SparseNestedProductCubeWeight =
      SparsePowerWeight<NestedProductCubeWeight, size_t>;
  using SparseNestedProductCubeWeightGenerate =
      WeightGenerate<SparseNestedProductCubeWeight>;
  SparseNestedProductCubeWeightGenerate sparse_nested_product_cube_generate;
  WeightTester<SparseNestedProductCubeWeight,
               SparseNestedProductCubeWeightGenerate>
      sparse_nested_product_cube_tester(sparse_nested_product_cube_generate);

  using LogSparsePowerWeight = SparsePowerWeight<LogWeight, size_t>;
  using LogSparsePowerWeightGenerate = WeightGenerate<LogSparsePowerWeight>;
  LogSparsePowerWeightGenerate log_sparse_power_generate;
  WeightTester<LogSparsePowerWeight, LogSparsePowerWeightGenerate>
      log_sparse_power_tester(log_sparse_power_generate);

  using LogLogExpectationWeight = ExpectationWeight<LogWeight, LogWeight>;
  using LogLogExpectationWeightGenerate =
      WeightGenerate<LogLogExpectationWeight>;
  LogLogExpectationWeightGenerate log_log_expectation_generate;
  WeightTester<LogLogExpectationWeight, LogLogExpectationWeightGenerate>
      log_log_expectation_tester(log_log_expectation_generate);

  using LogLogSparseExpectationWeight =
      ExpectationWeight<LogWeight, LogSparsePowerWeight>;
  using LogLogSparseExpectationWeightGenerate =
      WeightGenerate<LogLogSparseExpectationWeight>;
  LogLogSparseExpectationWeightGenerate log_log_sparse_expectation_generate;
  WeightTester<LogLogSparseExpectationWeight,
               LogLogSparseExpectationWeightGenerate>
      log_log_sparse_expectation_tester(log_log_sparse_expectation_generate);

  struct UnionWeightOptions {
    using Compare = NaturalLess<TropicalWeight>;

    struct Merge {
      TropicalWeight operator()(const TropicalWeight &w1,
                                const TropicalWeight &w2) const {
        return w1;
      }
    };

    using ReverseOptions = UnionWeightOptions;
  };

  using TropicalUnionWeight = UnionWeight<TropicalWeight, UnionWeightOptions>;
  using TropicalUnionWeightGenerate = WeightGenerate<TropicalUnionWeight>;
  TropicalUnionWeightGenerate tropical_union_generate;
  WeightTester<TropicalUnionWeight, TropicalUnionWeightGenerate>
      tropical_union_tester(tropical_union_generate);

  // COMPOSITE WEIGHTS AND TESTERS - TESTING

  // Tests composite weight I/O with parentheses.
  FLAGS_fst_weight_parentheses = "()";

  // Unnested composite.
  tropical_gallic_tester.Test(FLAGS_repeat);
  tropical_gen_gallic_tester.Test(FLAGS_repeat);
  tropical_product_tester.Test(FLAGS_repeat);
  tropical_lexicographic_tester.Test(FLAGS_repeat);
  tropical_cube_tester.Test(FLAGS_repeat);
  log_sparse_power_tester.Test(FLAGS_repeat);
  log_log_expectation_tester.Test(FLAGS_repeat, false);
  tropical_union_tester.Test(FLAGS_repeat, false);

  // Nested composite.
  first_nested_product_tester.Test(FLAGS_repeat);
  second_nested_product_tester.Test(5);
  nested_product_cube_tester.Test(FLAGS_repeat);
  sparse_nested_product_cube_tester.Test(FLAGS_repeat);
  log_log_sparse_expectation_tester.Test(FLAGS_repeat, false);

  // ... and tests composite weight I/O without parentheses.
  FLAGS_fst_weight_parentheses = "";

  // Unnested composite.
  tropical_gallic_tester.Test(FLAGS_repeat);
  tropical_product_tester.Test(FLAGS_repeat);
  tropical_lexicographic_tester.Test(FLAGS_repeat);
  tropical_cube_tester.Test(FLAGS_repeat);
  log_sparse_power_tester.Test(FLAGS_repeat);
  log_log_expectation_tester.Test(FLAGS_repeat, false);
  tropical_union_tester.Test(FLAGS_repeat, false);

  // Nested composite.
  second_nested_product_tester.Test(FLAGS_repeat);
  log_log_sparse_expectation_tester.Test(FLAGS_repeat, false);

  TestPowerWeightGetSetValue();
  TestSparsePowerWeightGetSetValue();

  std::cout << "PASS" << std::endl;

  return 0;
}
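// Invocation sketch, based only on the two flags defined at the top of this
// file (the binary name depends on the local build and is an assumption):
//   ./weight_test --seed=1234 --repeat=100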
0
coqui_public_repos/STT
coqui_public_repos/STT/taskcluster/test-electronjs_v8.0-win-amd64-opt.yml
build:
  template_file: test-win-opt-base.tyml
  dependencies:
    - "win-amd64-cpu-opt"
    - "test-training_16k-linux-amd64-py36m-opt"
  test_model_task: "test-training_16k-linux-amd64-py36m-opt"
  system_setup:
    >
    ${system.sox_win} && ${nodejs.win.prep_12}
  args:
    tests_cmdline: "${system.homedir.win}/DeepSpeech/ds/taskcluster/tc-electron-tests.sh 12.x 8.0.1 16k"
  metadata:
    name: "DeepSpeech Windows AMD64 CPU ElectronJS v8.0 tests"
    description: "Testing DeepSpeech for Windows/AMD64 on ElectronJS v8.0, CPU only, optimized version"
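# Reading tests_cmdline above (an inference from the values, not documented in
# this file): tc-electron-tests.sh appears to take <nodejs line> <electron
# version> <model sample rate>, i.e. NodeJS 12.x, ElectronJS 8.0.1, 16 kHz.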
coqui_public_repos/inference-engine/third_party/kenlm/util
coqui_public_repos/inference-engine/third_party/kenlm/util/double-conversion/bignum.cc
// Copyright 2010 the V8 project authors. All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following // disclaimer in the documentation and/or other materials provided // with the distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived // from this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. #include "bignum.h" #include "utils.h" namespace kenlm_double_conversion { Bignum::Bignum() : bigits_(bigits_buffer_, kBigitCapacity), used_digits_(0), exponent_(0) { for (int i = 0; i < kBigitCapacity; ++i) { bigits_[i] = 0; } } template<typename S> static int BitSize(S value) { (void) value; // Mark variable as used. return 8 * sizeof(value); } // Guaranteed to lie in one Bigit. void Bignum::AssignUInt16(uint16_t value) { ASSERT(kBigitSize >= BitSize(value)); Zero(); if (value == 0) return; EnsureCapacity(1); bigits_[0] = value; used_digits_ = 1; } void Bignum::AssignUInt64(uint64_t value) { const int kUInt64Size = 64; Zero(); if (value == 0) return; int needed_bigits = kUInt64Size / kBigitSize + 1; EnsureCapacity(needed_bigits); for (int i = 0; i < needed_bigits; ++i) { bigits_[i] = value & kBigitMask; value = value >> kBigitSize; } used_digits_ = needed_bigits; Clamp(); } void Bignum::AssignBignum(const Bignum& other) { exponent_ = other.exponent_; for (int i = 0; i < other.used_digits_; ++i) { bigits_[i] = other.bigits_[i]; } // Clear the excess digits (if there were any). for (int i = other.used_digits_; i < used_digits_; ++i) { bigits_[i] = 0; } used_digits_ = other.used_digits_; } static uint64_t ReadUInt64(Vector<const char> buffer, int from, int digits_to_read) { uint64_t result = 0; for (int i = from; i < from + digits_to_read; ++i) { int digit = buffer[i] - '0'; ASSERT(0 <= digit && digit <= 9); result = result * 10 + digit; } return result; } void Bignum::AssignDecimalString(Vector<const char> value) { // 2^64 = 18446744073709551616 > 10^19 const int kMaxUint64DecimalDigits = 19; Zero(); int length = value.length(); unsigned int pos = 0; // Let's just say that each digit needs 4 bits. 
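  // Worked example (illustrative comment only): for the 23-digit input
  // "12345678901234567890123", the loop below reads the first 19 digits
  // "1234567890123456789" into a uint64_t, multiplies the accumulated
  // bignum by 10^19 and adds them; the remaining four digits "0123" are
  // folded in the same way after the loop.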
while (length >= kMaxUint64DecimalDigits) { uint64_t digits = ReadUInt64(value, pos, kMaxUint64DecimalDigits); pos += kMaxUint64DecimalDigits; length -= kMaxUint64DecimalDigits; MultiplyByPowerOfTen(kMaxUint64DecimalDigits); AddUInt64(digits); } uint64_t digits = ReadUInt64(value, pos, length); MultiplyByPowerOfTen(length); AddUInt64(digits); Clamp(); } static int HexCharValue(char c) { if ('0' <= c && c <= '9') return c - '0'; if ('a' <= c && c <= 'f') return 10 + c - 'a'; ASSERT('A' <= c && c <= 'F'); return 10 + c - 'A'; } void Bignum::AssignHexString(Vector<const char> value) { Zero(); int length = value.length(); int needed_bigits = length * 4 / kBigitSize + 1; EnsureCapacity(needed_bigits); int string_index = length - 1; for (int i = 0; i < needed_bigits - 1; ++i) { // These bigits are guaranteed to be "full". Chunk current_bigit = 0; for (int j = 0; j < kBigitSize / 4; j++) { current_bigit += HexCharValue(value[string_index--]) << (j * 4); } bigits_[i] = current_bigit; } used_digits_ = needed_bigits - 1; Chunk most_significant_bigit = 0; // Could be = 0; for (int j = 0; j <= string_index; ++j) { most_significant_bigit <<= 4; most_significant_bigit += HexCharValue(value[j]); } if (most_significant_bigit != 0) { bigits_[used_digits_] = most_significant_bigit; used_digits_++; } Clamp(); } void Bignum::AddUInt64(uint64_t operand) { if (operand == 0) return; Bignum other; other.AssignUInt64(operand); AddBignum(other); } void Bignum::AddBignum(const Bignum& other) { ASSERT(IsClamped()); ASSERT(other.IsClamped()); // If this has a greater exponent than other append zero-bigits to this. // After this call exponent_ <= other.exponent_. Align(other); // There are two possibilities: // aaaaaaaaaaa 0000 (where the 0s represent a's exponent) // bbbbb 00000000 // ---------------- // ccccccccccc 0000 // or // aaaaaaaaaa 0000 // bbbbbbbbb 0000000 // ----------------- // cccccccccccc 0000 // In both cases we might need a carry bigit. EnsureCapacity(1 + Max(BigitLength(), other.BigitLength()) - exponent_); Chunk carry = 0; int bigit_pos = other.exponent_ - exponent_; ASSERT(bigit_pos >= 0); for (int i = 0; i < other.used_digits_; ++i) { Chunk sum = bigits_[bigit_pos] + other.bigits_[i] + carry; bigits_[bigit_pos] = sum & kBigitMask; carry = sum >> kBigitSize; bigit_pos++; } while (carry != 0) { Chunk sum = bigits_[bigit_pos] + carry; bigits_[bigit_pos] = sum & kBigitMask; carry = sum >> kBigitSize; bigit_pos++; } used_digits_ = Max(bigit_pos, used_digits_); ASSERT(IsClamped()); } void Bignum::SubtractBignum(const Bignum& other) { ASSERT(IsClamped()); ASSERT(other.IsClamped()); // We require this to be bigger than other. 
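  // Borrow sketch (descriptive note): Align() guarantees
  // exponent_ <= other.exponent_, and `offset` below lines the bigits up;
  // the shift by (kChunkSize - 1) then extracts the sign bit of the wrapped
  // unsigned difference, i.e. the borrow carried into the next bigit.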
ASSERT(LessEqual(other, *this)); Align(other); int offset = other.exponent_ - exponent_; Chunk borrow = 0; int i; for (i = 0; i < other.used_digits_; ++i) { ASSERT((borrow == 0) || (borrow == 1)); Chunk difference = bigits_[i + offset] - other.bigits_[i] - borrow; bigits_[i + offset] = difference & kBigitMask; borrow = difference >> (kChunkSize - 1); } while (borrow != 0) { Chunk difference = bigits_[i + offset] - borrow; bigits_[i + offset] = difference & kBigitMask; borrow = difference >> (kChunkSize - 1); ++i; } Clamp(); } void Bignum::ShiftLeft(int shift_amount) { if (used_digits_ == 0) return; exponent_ += shift_amount / kBigitSize; int local_shift = shift_amount % kBigitSize; EnsureCapacity(used_digits_ + 1); BigitsShiftLeft(local_shift); } void Bignum::MultiplyByUInt32(uint32_t factor) { if (factor == 1) return; if (factor == 0) { Zero(); return; } if (used_digits_ == 0) return; // The product of a bigit with the factor is of size kBigitSize + 32. // Assert that this number + 1 (for the carry) fits into double chunk. ASSERT(kDoubleChunkSize >= kBigitSize + 32 + 1); DoubleChunk carry = 0; for (int i = 0; i < used_digits_; ++i) { DoubleChunk product = static_cast<DoubleChunk>(factor) * bigits_[i] + carry; bigits_[i] = static_cast<Chunk>(product & kBigitMask); carry = (product >> kBigitSize); } while (carry != 0) { EnsureCapacity(used_digits_ + 1); bigits_[used_digits_] = carry & kBigitMask; used_digits_++; carry >>= kBigitSize; } } void Bignum::MultiplyByUInt64(uint64_t factor) { if (factor == 1) return; if (factor == 0) { Zero(); return; } ASSERT(kBigitSize < 32); uint64_t carry = 0; uint64_t low = factor & 0xFFFFFFFF; uint64_t high = factor >> 32; for (int i = 0; i < used_digits_; ++i) { uint64_t product_low = low * bigits_[i]; uint64_t product_high = high * bigits_[i]; uint64_t tmp = (carry & kBigitMask) + product_low; bigits_[i] = tmp & kBigitMask; carry = (carry >> kBigitSize) + (tmp >> kBigitSize) + (product_high << (32 - kBigitSize)); } while (carry != 0) { EnsureCapacity(used_digits_ + 1); bigits_[used_digits_] = carry & kBigitMask; used_digits_++; carry >>= kBigitSize; } } void Bignum::MultiplyByPowerOfTen(int exponent) { const uint64_t kFive27 = UINT64_2PART_C(0x6765c793, fa10079d); const uint16_t kFive1 = 5; const uint16_t kFive2 = kFive1 * 5; const uint16_t kFive3 = kFive2 * 5; const uint16_t kFive4 = kFive3 * 5; const uint16_t kFive5 = kFive4 * 5; const uint16_t kFive6 = kFive5 * 5; const uint32_t kFive7 = kFive6 * 5; const uint32_t kFive8 = kFive7 * 5; const uint32_t kFive9 = kFive8 * 5; const uint32_t kFive10 = kFive9 * 5; const uint32_t kFive11 = kFive10 * 5; const uint32_t kFive12 = kFive11 * 5; const uint32_t kFive13 = kFive12 * 5; const uint32_t kFive1_to_12[] = { kFive1, kFive2, kFive3, kFive4, kFive5, kFive6, kFive7, kFive8, kFive9, kFive10, kFive11, kFive12 }; ASSERT(exponent >= 0); if (exponent == 0) return; if (used_digits_ == 0) return; // We shift by exponent at the end just before returning. int remaining_exponent = exponent; while (remaining_exponent >= 27) { MultiplyByUInt64(kFive27); remaining_exponent -= 27; } while (remaining_exponent >= 13) { MultiplyByUInt32(kFive13); remaining_exponent -= 13; } if (remaining_exponent > 0) { MultiplyByUInt32(kFive1_to_12[remaining_exponent - 1]); } ShiftLeft(exponent); } void Bignum::Square() { ASSERT(IsClamped()); int product_length = 2 * used_digits_; EnsureCapacity(product_length); // Comba multiplication: compute each column separately. // Example: r = a2a1a0 * b2b1b0. 
// r = 1 * a0b0 + // 10 * (a1b0 + a0b1) + // 100 * (a2b0 + a1b1 + a0b2) + // 1000 * (a2b1 + a1b2) + // 10000 * a2b2 // // In the worst case we have to accumulate nb-digits products of digit*digit. // // Assert that the additional number of bits in a DoubleChunk are enough to // sum up used_digits of Bigit*Bigit. if ((1 << (2 * (kChunkSize - kBigitSize))) <= used_digits_) { UNIMPLEMENTED(); } DoubleChunk accumulator = 0; // First shift the digits so we don't overwrite them. int copy_offset = used_digits_; for (int i = 0; i < used_digits_; ++i) { bigits_[copy_offset + i] = bigits_[i]; } // We have two loops to avoid some 'if's in the loop. for (int i = 0; i < used_digits_; ++i) { // Process temporary digit i with power i. // The sum of the two indices must be equal to i. int bigit_index1 = i; int bigit_index2 = 0; // Sum all of the sub-products. while (bigit_index1 >= 0) { Chunk chunk1 = bigits_[copy_offset + bigit_index1]; Chunk chunk2 = bigits_[copy_offset + bigit_index2]; accumulator += static_cast<DoubleChunk>(chunk1) * chunk2; bigit_index1--; bigit_index2++; } bigits_[i] = static_cast<Chunk>(accumulator) & kBigitMask; accumulator >>= kBigitSize; } for (int i = used_digits_; i < product_length; ++i) { int bigit_index1 = used_digits_ - 1; int bigit_index2 = i - bigit_index1; // Invariant: sum of both indices is again equal to i. // Inner loop runs 0 times on last iteration, emptying accumulator. while (bigit_index2 < used_digits_) { Chunk chunk1 = bigits_[copy_offset + bigit_index1]; Chunk chunk2 = bigits_[copy_offset + bigit_index2]; accumulator += static_cast<DoubleChunk>(chunk1) * chunk2; bigit_index1--; bigit_index2++; } // The overwritten bigits_[i] will never be read in further loop iterations, // because bigit_index1 and bigit_index2 are always greater // than i - used_digits_. bigits_[i] = static_cast<Chunk>(accumulator) & kBigitMask; accumulator >>= kBigitSize; } // Since the result was guaranteed to lie inside the number the // accumulator must be 0 now. ASSERT(accumulator == 0); // Don't forget to update the used_digits and the exponent. used_digits_ = product_length; exponent_ *= 2; Clamp(); } void Bignum::AssignPowerUInt16(uint16_t base, int power_exponent) { ASSERT(base != 0); ASSERT(power_exponent >= 0); if (power_exponent == 0) { AssignUInt16(1); return; } Zero(); int shifts = 0; // We expect base to be in range 2-32, and most often to be 10. // It does not make much sense to implement different algorithms for counting // the bits. while ((base & 1) == 0) { base >>= 1; shifts++; } int bit_size = 0; int tmp_base = base; while (tmp_base != 0) { tmp_base >>= 1; bit_size++; } int final_size = bit_size * power_exponent; // 1 extra bigit for the shifting, and one for rounded final_size. EnsureCapacity(final_size / kBigitSize + 2); // Left to Right exponentiation. int mask = 1; while (power_exponent >= mask) mask <<= 1; // The mask is now pointing to the bit above the most significant 1-bit of // power_exponent. // Get rid of first 1-bit; mask >>= 2; uint64_t this_value = base; bool delayed_multipliciation = false; const uint64_t max_32bits = 0xFFFFFFFF; while (mask != 0 && this_value <= max_32bits) { this_value = this_value * this_value; // Verify that there is enough space in this_value to perform the // multiplication. The first bit_size bits must be 0. 
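      // (Worked illustration, added comment: for base == 10 the even factor
      // is shifted out first, leaving base == 5 with bit_size == 3; the
      // check below then allows the multiplication by base only while the
      // top 3 bits of this_value are zero, since a value below 2^61 times a
      // base below 2^3 cannot overflow 64 bits.)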
if ((power_exponent & mask) != 0) { uint64_t base_bits_mask = ~((static_cast<uint64_t>(1) << (64 - bit_size)) - 1); bool high_bits_zero = (this_value & base_bits_mask) == 0; if (high_bits_zero) { this_value *= base; } else { delayed_multipliciation = true; } } mask >>= 1; } AssignUInt64(this_value); if (delayed_multipliciation) { MultiplyByUInt32(base); } // Now do the same thing as a bignum. while (mask != 0) { Square(); if ((power_exponent & mask) != 0) { MultiplyByUInt32(base); } mask >>= 1; } // And finally add the saved shifts. ShiftLeft(shifts * power_exponent); } // Precondition: this/other < 16bit. uint16_t Bignum::DivideModuloIntBignum(const Bignum& other) { ASSERT(IsClamped()); ASSERT(other.IsClamped()); ASSERT(other.used_digits_ > 0); // Easy case: if we have less digits than the divisor than the result is 0. // Note: this handles the case where this == 0, too. if (BigitLength() < other.BigitLength()) { return 0; } Align(other); uint16_t result = 0; // Start by removing multiples of 'other' until both numbers have the same // number of digits. while (BigitLength() > other.BigitLength()) { // This naive approach is extremely inefficient if `this` divided by other // is big. This function is implemented for doubleToString where // the result should be small (less than 10). ASSERT(other.bigits_[other.used_digits_ - 1] >= ((1 << kBigitSize) / 16)); ASSERT(bigits_[used_digits_ - 1] < 0x10000); // Remove the multiples of the first digit. // Example this = 23 and other equals 9. -> Remove 2 multiples. result += static_cast<uint16_t>(bigits_[used_digits_ - 1]); SubtractTimes(other, bigits_[used_digits_ - 1]); } ASSERT(BigitLength() == other.BigitLength()); // Both bignums are at the same length now. // Since other has more than 0 digits we know that the access to // bigits_[used_digits_ - 1] is safe. Chunk this_bigit = bigits_[used_digits_ - 1]; Chunk other_bigit = other.bigits_[other.used_digits_ - 1]; if (other.used_digits_ == 1) { // Shortcut for easy (and common) case. int quotient = this_bigit / other_bigit; bigits_[used_digits_ - 1] = this_bigit - other_bigit * quotient; ASSERT(quotient < 0x10000); result += static_cast<uint16_t>(quotient); Clamp(); return result; } int division_estimate = this_bigit / (other_bigit + 1); ASSERT(division_estimate < 0x10000); result += static_cast<uint16_t>(division_estimate); SubtractTimes(other, division_estimate); if (other_bigit * (division_estimate + 1) > this_bigit) { // No need to even try to subtract. Even if other's remaining digits were 0 // another subtraction would be too much. return result; } while (LessEqual(other, *this)) { SubtractBignum(other); result++; } return result; } template<typename S> static int SizeInHexChars(S number) { ASSERT(number > 0); int result = 0; while (number != 0) { number >>= 4; result++; } return result; } static char HexCharOfValue(int value) { ASSERT(0 <= value && value <= 16); if (value < 10) return static_cast<char>(value + '0'); return static_cast<char>(value - 10 + 'A'); } bool Bignum::ToHexString(char* buffer, int buffer_size) const { ASSERT(IsClamped()); // Each bigit must be printable as separate hex-character. ASSERT(kBigitSize % 4 == 0); const int kHexCharsPerBigit = kBigitSize / 4; if (used_digits_ == 0) { if (buffer_size < 2) return false; buffer[0] = '0'; buffer[1] = '\0'; return true; } // We add 1 for the terminating '\0' character. 
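  // Size example (illustrative, assuming kBigitSize == 28 as in the
  // accompanying bignum.h, so kHexCharsPerBigit == 7): a bignum whose only
  // bigit is 0xABC with exponent_ == 1 yields "ABC0000000", i.e.
  // (2 - 1) * 7 + 3 + 1 == 11 chars including the terminating '\0'.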
int needed_chars = (BigitLength() - 1) * kHexCharsPerBigit + SizeInHexChars(bigits_[used_digits_ - 1]) + 1; if (needed_chars > buffer_size) return false; int string_index = needed_chars - 1; buffer[string_index--] = '\0'; for (int i = 0; i < exponent_; ++i) { for (int j = 0; j < kHexCharsPerBigit; ++j) { buffer[string_index--] = '0'; } } for (int i = 0; i < used_digits_ - 1; ++i) { Chunk current_bigit = bigits_[i]; for (int j = 0; j < kHexCharsPerBigit; ++j) { buffer[string_index--] = HexCharOfValue(current_bigit & 0xF); current_bigit >>= 4; } } // And finally the last bigit. Chunk most_significant_bigit = bigits_[used_digits_ - 1]; while (most_significant_bigit != 0) { buffer[string_index--] = HexCharOfValue(most_significant_bigit & 0xF); most_significant_bigit >>= 4; } return true; } Bignum::Chunk Bignum::BigitAt(int index) const { if (index >= BigitLength()) return 0; if (index < exponent_) return 0; return bigits_[index - exponent_]; } int Bignum::Compare(const Bignum& a, const Bignum& b) { ASSERT(a.IsClamped()); ASSERT(b.IsClamped()); int bigit_length_a = a.BigitLength(); int bigit_length_b = b.BigitLength(); if (bigit_length_a < bigit_length_b) return -1; if (bigit_length_a > bigit_length_b) return +1; for (int i = bigit_length_a - 1; i >= Min(a.exponent_, b.exponent_); --i) { Chunk bigit_a = a.BigitAt(i); Chunk bigit_b = b.BigitAt(i); if (bigit_a < bigit_b) return -1; if (bigit_a > bigit_b) return +1; // Otherwise they are equal up to this digit. Try the next digit. } return 0; } int Bignum::PlusCompare(const Bignum& a, const Bignum& b, const Bignum& c) { ASSERT(a.IsClamped()); ASSERT(b.IsClamped()); ASSERT(c.IsClamped()); if (a.BigitLength() < b.BigitLength()) { return PlusCompare(b, a, c); } if (a.BigitLength() + 1 < c.BigitLength()) return -1; if (a.BigitLength() > c.BigitLength()) return +1; // The exponent encodes 0-bigits. So if there are more 0-digits in 'a' than // 'b' has digits, then the bigit-length of 'a'+'b' must be equal to the one // of 'a'. if (a.exponent_ >= b.BigitLength() && a.BigitLength() < c.BigitLength()) { return -1; } Chunk borrow = 0; // Starting at min_exponent all digits are == 0. So no need to compare them. int min_exponent = Min(Min(a.exponent_, b.exponent_), c.exponent_); for (int i = c.BigitLength() - 1; i >= min_exponent; --i) { Chunk chunk_a = a.BigitAt(i); Chunk chunk_b = b.BigitAt(i); Chunk chunk_c = c.BigitAt(i); Chunk sum = chunk_a + chunk_b; if (sum > chunk_c + borrow) { return +1; } else { borrow = chunk_c + borrow - sum; if (borrow > 1) return -1; borrow <<= kBigitSize; } } if (borrow == 0) return 0; return -1; } void Bignum::Clamp() { while (used_digits_ > 0 && bigits_[used_digits_ - 1] == 0) { used_digits_--; } if (used_digits_ == 0) { // Zero. exponent_ = 0; } } bool Bignum::IsClamped() const { return used_digits_ == 0 || bigits_[used_digits_ - 1] != 0; } void Bignum::Zero() { for (int i = 0; i < used_digits_; ++i) { bigits_[i] = 0; } used_digits_ = 0; exponent_ = 0; } void Bignum::Align(const Bignum& other) { if (exponent_ > other.exponent_) { // If "X" represents a "hidden" digit (by the exponent) then we are in the // following case (a == this, b == other): // a: aaaaaaXXXX or a: aaaaaXXX // b: bbbbbbX b: bbbbbbbbXX // We replace some of the hidden digits (X) of a with 0 digits. 
// a: aaaaaa000X or a: aaaaa0XX int zero_digits = exponent_ - other.exponent_; EnsureCapacity(used_digits_ + zero_digits); for (int i = used_digits_ - 1; i >= 0; --i) { bigits_[i + zero_digits] = bigits_[i]; } for (int i = 0; i < zero_digits; ++i) { bigits_[i] = 0; } used_digits_ += zero_digits; exponent_ -= zero_digits; ASSERT(used_digits_ >= 0); ASSERT(exponent_ >= 0); } } void Bignum::BigitsShiftLeft(int shift_amount) { ASSERT(shift_amount < kBigitSize); ASSERT(shift_amount >= 0); Chunk carry = 0; for (int i = 0; i < used_digits_; ++i) { Chunk new_carry = bigits_[i] >> (kBigitSize - shift_amount); bigits_[i] = ((bigits_[i] << shift_amount) + carry) & kBigitMask; carry = new_carry; } if (carry != 0) { bigits_[used_digits_] = carry; used_digits_++; } } void Bignum::SubtractTimes(const Bignum& other, int factor) { ASSERT(exponent_ <= other.exponent_); if (factor < 3) { for (int i = 0; i < factor; ++i) { SubtractBignum(other); } return; } Chunk borrow = 0; int exponent_diff = other.exponent_ - exponent_; for (int i = 0; i < other.used_digits_; ++i) { DoubleChunk product = static_cast<DoubleChunk>(factor) * other.bigits_[i]; DoubleChunk remove = borrow + product; Chunk difference = bigits_[i + exponent_diff] - (remove & kBigitMask); bigits_[i + exponent_diff] = difference & kBigitMask; borrow = static_cast<Chunk>((difference >> (kChunkSize - 1)) + (remove >> kBigitSize)); } for (int i = other.used_digits_ + exponent_diff; i < used_digits_; ++i) { if (borrow == 0) return; Chunk difference = bigits_[i] - borrow; bigits_[i] = difference & kBigitMask; borrow = difference >> (kChunkSize - 1); } Clamp(); } } // namespace kenlm_double_conversion
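// Worked division example (illustrative, appended comment): for a
// single-bigit divisor, DivideModuloIntBignum's shortcut computes e.g.
// 23 / 9 == 2 directly, leaving remainder 5 in place; for multi-bigit
// divisors the estimate this_bigit / (other_bigit + 1) may undershoot, and
// the trailing LessEqual loop subtracts `other` until the quotient is exact.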
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.9-win/src
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.9-win/src/bin/fstreverse.cc
// See www.openfst.org for extensive documentation on this weighted // finite-state transducer library. #include <fst/flags.h> DEFINE_bool(require_superinitial, true, "Always create a superinitial state"); int fstreverse_main(int argc, char **argv); int main(int argc, char **argv) { return fstreverse_main(argc, argv); }
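// Usage sketch (assumed; the actual flag parsing lives in the companion
// fstreverse-main.cc, following the usual OpenFst bin layout):
//
//   fstreverse [--require_superinitial=false] in.fst out.fst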
coqui_public_repos/STT
coqui_public_repos/STT/taskcluster/tf_android-armv7-dbg.yml
build: template_file: generic_tc_caching-linux-opt-base.tyml cache: artifact_url: ${system.tensorflow_dbg.android_armv7.url} artifact_namespace: ${system.tensorflow_dbg.android_armv7.namespace} system_config: > ${tensorflow.packages_xenial.apt} && ${java.packages_xenial.apt} scripts: setup: "taskcluster/tf_tc-setup.sh --android-armv7" build: "taskcluster/tf_tc-build.sh --android-armv7 dbg" package: "taskcluster/tf_tc-package.sh" maxRunTime: 14400 workerType: "${docker.tfBuild}" metadata: name: "TensorFlow Android ARMv7 debug" description: "Building TensorFlow for Android ARMv7, debug version"
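# Note (assumed semantics of the shared caching template): the cache block
# lets CI reuse a previously uploaded debug TensorFlow build for Android
# ARMv7 from the given artifact namespace instead of rebuilding; the
# maxRunTime of 14400 s caps a cold build at four hours.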
coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src/extensions
coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src/extensions/compact/Makefile.in
# Makefile.in generated by automake 1.14.1 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) ;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) pkgdatadir = $(datadir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ pkglibdir = $(libdir)/@PACKAGE@ pkglibexecdir = $(libexecdir)/@PACKAGE@ am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/extensions/compact DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ $(top_srcdir)/depcomp ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/m4/ac_python_devel.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h \ $(top_builddir)/src/include/fst/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; 
done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__uninstall_files_from_dir = { \ test -z "$$files" \ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } am__installdirs = "$(DESTDIR)$(libdir)" "$(DESTDIR)$(libfstdir)" LTLIBRARIES = $(lib_LTLIBRARIES) $(libfst_LTLIBRARIES) compact16_acceptor_fst_la_LIBADD = am_compact16_acceptor_fst_la_OBJECTS = compact16_acceptor-fst.lo compact16_acceptor_fst_la_OBJECTS = \ $(am_compact16_acceptor_fst_la_OBJECTS) AM_V_lt = $(am__v_lt_@AM_V@) am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@) am__v_lt_0 = --silent am__v_lt_1 = compact16_acceptor_fst_la_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX \ $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=link $(CXXLD) \ $(AM_CXXFLAGS) $(CXXFLAGS) \ $(compact16_acceptor_fst_la_LDFLAGS) $(LDFLAGS) -o $@ compact16_string_fst_la_LIBADD = am_compact16_string_fst_la_OBJECTS = compact16_string-fst.lo compact16_string_fst_la_OBJECTS = \ $(am_compact16_string_fst_la_OBJECTS) compact16_string_fst_la_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX \ $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=link $(CXXLD) \ $(AM_CXXFLAGS) $(CXXFLAGS) $(compact16_string_fst_la_LDFLAGS) \ $(LDFLAGS) -o $@ compact16_unweighted_fst_la_LIBADD = am_compact16_unweighted_fst_la_OBJECTS = compact16_unweighted-fst.lo compact16_unweighted_fst_la_OBJECTS = \ $(am_compact16_unweighted_fst_la_OBJECTS) compact16_unweighted_fst_la_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX \ $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=link $(CXXLD) \ $(AM_CXXFLAGS) $(CXXFLAGS) \ $(compact16_unweighted_fst_la_LDFLAGS) $(LDFLAGS) -o $@ compact16_unweighted_acceptor_fst_la_LIBADD = am_compact16_unweighted_acceptor_fst_la_OBJECTS = \ compact16_unweighted_acceptor-fst.lo compact16_unweighted_acceptor_fst_la_OBJECTS = \ $(am_compact16_unweighted_acceptor_fst_la_OBJECTS) compact16_unweighted_acceptor_fst_la_LINK = $(LIBTOOL) $(AM_V_lt) \ --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=link \ $(CXXLD) $(AM_CXXFLAGS) $(CXXFLAGS) \ $(compact16_unweighted_acceptor_fst_la_LDFLAGS) $(LDFLAGS) -o \ $@ compact16_weighted_string_fst_la_LIBADD = am_compact16_weighted_string_fst_la_OBJECTS = \ compact16_weighted_string-fst.lo compact16_weighted_string_fst_la_OBJECTS = \ $(am_compact16_weighted_string_fst_la_OBJECTS) compact16_weighted_string_fst_la_LINK = $(LIBTOOL) $(AM_V_lt) \ --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=link \ $(CXXLD) $(AM_CXXFLAGS) $(CXXFLAGS) \ $(compact16_weighted_string_fst_la_LDFLAGS) $(LDFLAGS) -o $@ compact64_acceptor_fst_la_LIBADD = am_compact64_acceptor_fst_la_OBJECTS = compact64_acceptor-fst.lo compact64_acceptor_fst_la_OBJECTS = \ $(am_compact64_acceptor_fst_la_OBJECTS) compact64_acceptor_fst_la_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX \ $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=link $(CXXLD) \ $(AM_CXXFLAGS) $(CXXFLAGS) \ $(compact64_acceptor_fst_la_LDFLAGS) $(LDFLAGS) -o $@ compact64_string_fst_la_LIBADD = am_compact64_string_fst_la_OBJECTS = compact64_string-fst.lo compact64_string_fst_la_OBJECTS = \ $(am_compact64_string_fst_la_OBJECTS) compact64_string_fst_la_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX \ $(AM_LIBTOOLFLAGS) 
$(LIBTOOLFLAGS) --mode=link $(CXXLD) \ $(AM_CXXFLAGS) $(CXXFLAGS) $(compact64_string_fst_la_LDFLAGS) \ $(LDFLAGS) -o $@ compact64_unweighted_fst_la_LIBADD = am_compact64_unweighted_fst_la_OBJECTS = compact64_unweighted-fst.lo compact64_unweighted_fst_la_OBJECTS = \ $(am_compact64_unweighted_fst_la_OBJECTS) compact64_unweighted_fst_la_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX \ $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=link $(CXXLD) \ $(AM_CXXFLAGS) $(CXXFLAGS) \ $(compact64_unweighted_fst_la_LDFLAGS) $(LDFLAGS) -o $@ compact64_unweighted_acceptor_fst_la_LIBADD = am_compact64_unweighted_acceptor_fst_la_OBJECTS = \ compact64_unweighted_acceptor-fst.lo compact64_unweighted_acceptor_fst_la_OBJECTS = \ $(am_compact64_unweighted_acceptor_fst_la_OBJECTS) compact64_unweighted_acceptor_fst_la_LINK = $(LIBTOOL) $(AM_V_lt) \ --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=link \ $(CXXLD) $(AM_CXXFLAGS) $(CXXFLAGS) \ $(compact64_unweighted_acceptor_fst_la_LDFLAGS) $(LDFLAGS) -o \ $@ compact64_weighted_string_fst_la_LIBADD = am_compact64_weighted_string_fst_la_OBJECTS = \ compact64_weighted_string-fst.lo compact64_weighted_string_fst_la_OBJECTS = \ $(am_compact64_weighted_string_fst_la_OBJECTS) compact64_weighted_string_fst_la_LINK = $(LIBTOOL) $(AM_V_lt) \ --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=link \ $(CXXLD) $(AM_CXXFLAGS) $(CXXFLAGS) \ $(compact64_weighted_string_fst_la_LDFLAGS) $(LDFLAGS) -o $@ compact8_acceptor_fst_la_LIBADD = am_compact8_acceptor_fst_la_OBJECTS = compact8_acceptor-fst.lo compact8_acceptor_fst_la_OBJECTS = \ $(am_compact8_acceptor_fst_la_OBJECTS) compact8_acceptor_fst_la_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX \ $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=link $(CXXLD) \ $(AM_CXXFLAGS) $(CXXFLAGS) $(compact8_acceptor_fst_la_LDFLAGS) \ $(LDFLAGS) -o $@ compact8_string_fst_la_LIBADD = am_compact8_string_fst_la_OBJECTS = compact8_string-fst.lo compact8_string_fst_la_OBJECTS = $(am_compact8_string_fst_la_OBJECTS) compact8_string_fst_la_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX \ $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=link $(CXXLD) \ $(AM_CXXFLAGS) $(CXXFLAGS) $(compact8_string_fst_la_LDFLAGS) \ $(LDFLAGS) -o $@ compact8_unweighted_fst_la_LIBADD = am_compact8_unweighted_fst_la_OBJECTS = compact8_unweighted-fst.lo compact8_unweighted_fst_la_OBJECTS = \ $(am_compact8_unweighted_fst_la_OBJECTS) compact8_unweighted_fst_la_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX \ $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=link $(CXXLD) \ $(AM_CXXFLAGS) $(CXXFLAGS) \ $(compact8_unweighted_fst_la_LDFLAGS) $(LDFLAGS) -o $@ compact8_unweighted_acceptor_fst_la_LIBADD = am_compact8_unweighted_acceptor_fst_la_OBJECTS = \ compact8_unweighted_acceptor-fst.lo compact8_unweighted_acceptor_fst_la_OBJECTS = \ $(am_compact8_unweighted_acceptor_fst_la_OBJECTS) compact8_unweighted_acceptor_fst_la_LINK = $(LIBTOOL) $(AM_V_lt) \ --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=link \ $(CXXLD) $(AM_CXXFLAGS) $(CXXFLAGS) \ $(compact8_unweighted_acceptor_fst_la_LDFLAGS) $(LDFLAGS) -o \ $@ compact8_weighted_string_fst_la_LIBADD = am_compact8_weighted_string_fst_la_OBJECTS = \ compact8_weighted_string-fst.lo compact8_weighted_string_fst_la_OBJECTS = \ $(am_compact8_weighted_string_fst_la_OBJECTS) compact8_weighted_string_fst_la_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX \ $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=link $(CXXLD) \ $(AM_CXXFLAGS) $(CXXFLAGS) \ $(compact8_weighted_string_fst_la_LDFLAGS) $(LDFLAGS) -o $@ am__DEPENDENCIES_1 = libfstcompact_la_DEPENDENCIES = ../../lib/libfst.la \ $(am__DEPENDENCIES_1) 
am_libfstcompact_la_OBJECTS = compact8_acceptor-fst.lo \ compact8_string-fst.lo compact8_unweighted-fst.lo \ compact8_unweighted_acceptor-fst.lo \ compact8_weighted_string-fst.lo compact16_acceptor-fst.lo \ compact16_string-fst.lo compact16_unweighted-fst.lo \ compact16_unweighted_acceptor-fst.lo \ compact16_weighted_string-fst.lo compact64_acceptor-fst.lo \ compact64_string-fst.lo compact64_unweighted-fst.lo \ compact64_unweighted_acceptor-fst.lo \ compact64_weighted_string-fst.lo libfstcompact_la_OBJECTS = $(am_libfstcompact_la_OBJECTS) libfstcompact_la_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX \ $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=link $(CXXLD) \ $(AM_CXXFLAGS) $(CXXFLAGS) $(libfstcompact_la_LDFLAGS) \ $(LDFLAGS) -o $@ AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = DEFAULT_INCLUDES = depcomp = $(SHELL) $(top_srcdir)/depcomp am__depfiles_maybe = depfiles am__mv = mv -f CXXCOMPILE = $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) LTCXXCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ $(AM_CXXFLAGS) $(CXXFLAGS) AM_V_CXX = $(am__v_CXX_@AM_V@) am__v_CXX_ = $(am__v_CXX_@AM_DEFAULT_V@) am__v_CXX_0 = @echo " CXX " $@; am__v_CXX_1 = CXXLD = $(CXX) CXXLINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(AM_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CXXLD = $(am__v_CXXLD_@AM_V@) am__v_CXXLD_ = $(am__v_CXXLD_@AM_DEFAULT_V@) am__v_CXXLD_0 = @echo " CXXLD " $@; am__v_CXXLD_1 = SOURCES = $(compact16_acceptor_fst_la_SOURCES) \ $(compact16_string_fst_la_SOURCES) \ $(compact16_unweighted_fst_la_SOURCES) \ $(compact16_unweighted_acceptor_fst_la_SOURCES) \ $(compact16_weighted_string_fst_la_SOURCES) \ $(compact64_acceptor_fst_la_SOURCES) \ $(compact64_string_fst_la_SOURCES) \ $(compact64_unweighted_fst_la_SOURCES) \ $(compact64_unweighted_acceptor_fst_la_SOURCES) \ $(compact64_weighted_string_fst_la_SOURCES) \ $(compact8_acceptor_fst_la_SOURCES) \ $(compact8_string_fst_la_SOURCES) \ $(compact8_unweighted_fst_la_SOURCES) \ $(compact8_unweighted_acceptor_fst_la_SOURCES) \ $(compact8_weighted_string_fst_la_SOURCES) \ $(libfstcompact_la_SOURCES) DIST_SOURCES = $(compact16_acceptor_fst_la_SOURCES) \ $(compact16_string_fst_la_SOURCES) \ $(compact16_unweighted_fst_la_SOURCES) \ $(compact16_unweighted_acceptor_fst_la_SOURCES) \ $(compact16_weighted_string_fst_la_SOURCES) \ $(compact64_acceptor_fst_la_SOURCES) \ $(compact64_string_fst_la_SOURCES) \ $(compact64_unweighted_fst_la_SOURCES) \ $(compact64_unweighted_acceptor_fst_la_SOURCES) \ $(compact64_weighted_string_fst_la_SOURCES) \ $(compact8_acceptor_fst_la_SOURCES) \ $(compact8_string_fst_la_SOURCES) \ $(compact8_unweighted_fst_la_SOURCES) \ $(compact8_unweighted_acceptor_fst_la_SOURCES) \ $(compact8_weighted_string_fst_la_SOURCES) \ $(libfstcompact_la_SOURCES) am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without 
duplicates. Input order is # *not* preserved. am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) ACLOCAL = @ACLOCAL@ AMTAR = @AMTAR@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DL_LIBS = @DL_LIBS@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GREP = @GREP@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ LD = @LD@ LDFLAGS = @LDFLAGS@ LIBOBJS = @LIBOBJS@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ NM = @NM@ NMEDIT = @NMEDIT@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PYTHON = @PYTHON@ PYTHON_CPPFLAGS = @PYTHON_CPPFLAGS@ PYTHON_EXEC_PREFIX = @PYTHON_EXEC_PREFIX@ PYTHON_EXTRA_LDFLAGS = @PYTHON_EXTRA_LDFLAGS@ PYTHON_EXTRA_LIBS = @PYTHON_EXTRA_LIBS@ PYTHON_LDFLAGS = @PYTHON_LDFLAGS@ PYTHON_PLATFORM = @PYTHON_PLATFORM@ PYTHON_PREFIX = @PYTHON_PREFIX@ PYTHON_SITE_PKG = @PYTHON_SITE_PKG@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ STRIP = @STRIP@ VERSION = @VERSION@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ bindir = @bindir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ datadir = @datadir@ datarootdir = @datarootdir@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libfstdir = @libfstdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgpyexecdir = @pkgpyexecdir@ pkgpythondir = @pkgpythondir@ 
prefix = @prefix@ program_transform_name = @program_transform_name@ psdir = @psdir@ pyexecdir = @pyexecdir@ pythondir = @pythondir@ sbindir = @sbindir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ AM_CPPFLAGS = -I$(srcdir)/../../include $(ICU_CPPFLAGS) libfst_LTLIBRARIES = compact8_acceptor-fst.la compact8_string-fst.la compact8_unweighted-fst.la compact8_unweighted_acceptor-fst.la compact8_weighted_string-fst.la compact16_acceptor-fst.la compact16_string-fst.la compact16_unweighted-fst.la compact16_unweighted_acceptor-fst.la compact16_weighted_string-fst.la compact64_acceptor-fst.la compact64_string-fst.la compact64_unweighted-fst.la compact64_unweighted_acceptor-fst.la compact64_weighted_string-fst.la lib_LTLIBRARIES = libfstcompact.la libfstcompact_la_SOURCES = compact8_acceptor-fst.cc compact8_string-fst.cc compact8_unweighted-fst.cc compact8_unweighted_acceptor-fst.cc compact8_weighted_string-fst.cc compact16_acceptor-fst.cc compact16_string-fst.cc compact16_unweighted-fst.cc compact16_unweighted_acceptor-fst.cc compact16_weighted_string-fst.cc compact64_acceptor-fst.cc compact64_string-fst.cc compact64_unweighted-fst.cc compact64_unweighted_acceptor-fst.cc compact64_weighted_string-fst.cc libfstcompact_la_LDFLAGS = -version-info 10:0:0 libfstcompact_la_LIBADD = ../../lib/libfst.la -lm $(DL_LIBS) compact8_acceptor_fst_la_SOURCES = compact8_acceptor-fst.cc compact8_acceptor_fst_la_LDFLAGS = -module compact8_string_fst_la_SOURCES = compact8_string-fst.cc compact8_string_fst_la_LDFLAGS = -module compact8_unweighted_fst_la_SOURCES = compact8_unweighted-fst.cc compact8_unweighted_fst_la_LDFLAGS = -module compact8_unweighted_acceptor_fst_la_SOURCES = compact8_unweighted_acceptor-fst.cc compact8_unweighted_acceptor_fst_la_LDFLAGS = -module compact8_weighted_string_fst_la_SOURCES = compact8_weighted_string-fst.cc compact8_weighted_string_fst_la_LDFLAGS = -module compact16_acceptor_fst_la_SOURCES = compact16_acceptor-fst.cc compact16_acceptor_fst_la_LDFLAGS = -module compact16_string_fst_la_SOURCES = compact16_string-fst.cc compact16_string_fst_la_LDFLAGS = -module compact16_unweighted_fst_la_SOURCES = compact16_unweighted-fst.cc compact16_unweighted_fst_la_LDFLAGS = -module compact16_unweighted_acceptor_fst_la_SOURCES = compact16_unweighted_acceptor-fst.cc compact16_unweighted_acceptor_fst_la_LDFLAGS = -module compact16_weighted_string_fst_la_SOURCES = compact16_weighted_string-fst.cc compact16_weighted_string_fst_la_LDFLAGS = -module compact64_acceptor_fst_la_SOURCES = compact64_acceptor-fst.cc compact64_acceptor_fst_la_LDFLAGS = -module compact64_string_fst_la_SOURCES = compact64_string-fst.cc compact64_string_fst_la_LDFLAGS = -module compact64_unweighted_fst_la_SOURCES = compact64_unweighted-fst.cc compact64_unweighted_fst_la_LDFLAGS = -module compact64_unweighted_acceptor_fst_la_SOURCES = compact64_unweighted_acceptor-fst.cc compact64_unweighted_acceptor_fst_la_LDFLAGS = -module compact64_weighted_string_fst_la_SOURCES = compact64_weighted_string-fst.cc compact64_weighted_string_fst_la_LDFLAGS = -module all: all-am .SUFFIXES: .SUFFIXES: .cc .lo .o .obj $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd 
$(top_srcdir) && $(AUTOMAKE) --foreign src/extensions/compact/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/extensions/compact/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): install-libLTLIBRARIES: $(lib_LTLIBRARIES) @$(NORMAL_INSTALL) @list='$(lib_LTLIBRARIES)'; test -n "$(libdir)" || list=; \ list2=; for p in $$list; do \ if test -f $$p; then \ list2="$$list2 $$p"; \ else :; fi; \ done; \ test -z "$$list2" || { \ echo " $(MKDIR_P) '$(DESTDIR)$(libdir)'"; \ $(MKDIR_P) "$(DESTDIR)$(libdir)" || exit 1; \ echo " $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL) $(INSTALL_STRIP_FLAG) $$list2 '$(DESTDIR)$(libdir)'"; \ $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL) $(INSTALL_STRIP_FLAG) $$list2 "$(DESTDIR)$(libdir)"; \ } uninstall-libLTLIBRARIES: @$(NORMAL_UNINSTALL) @list='$(lib_LTLIBRARIES)'; test -n "$(libdir)" || list=; \ for p in $$list; do \ $(am__strip_dir) \ echo " $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=uninstall rm -f '$(DESTDIR)$(libdir)/$$f'"; \ $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=uninstall rm -f "$(DESTDIR)$(libdir)/$$f"; \ done clean-libLTLIBRARIES: -test -z "$(lib_LTLIBRARIES)" || rm -f $(lib_LTLIBRARIES) @list='$(lib_LTLIBRARIES)'; \ locs=`for p in $$list; do echo $$p; done | \ sed 's|^[^/]*$$|.|; s|/[^/]*$$||; s|$$|/so_locations|' | \ sort -u`; \ test -z "$$locs" || { \ echo rm -f $${locs}; \ rm -f $${locs}; \ } install-libfstLTLIBRARIES: $(libfst_LTLIBRARIES) @$(NORMAL_INSTALL) @list='$(libfst_LTLIBRARIES)'; test -n "$(libfstdir)" || list=; \ list2=; for p in $$list; do \ if test -f $$p; then \ list2="$$list2 $$p"; \ else :; fi; \ done; \ test -z "$$list2" || { \ echo " $(MKDIR_P) '$(DESTDIR)$(libfstdir)'"; \ $(MKDIR_P) "$(DESTDIR)$(libfstdir)" || exit 1; \ echo " $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL) $(INSTALL_STRIP_FLAG) $$list2 '$(DESTDIR)$(libfstdir)'"; \ $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL) $(INSTALL_STRIP_FLAG) $$list2 "$(DESTDIR)$(libfstdir)"; \ } uninstall-libfstLTLIBRARIES: @$(NORMAL_UNINSTALL) @list='$(libfst_LTLIBRARIES)'; test -n "$(libfstdir)" || list=; \ for p in $$list; do \ $(am__strip_dir) \ echo " $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=uninstall rm -f '$(DESTDIR)$(libfstdir)/$$f'"; \ $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=uninstall rm -f "$(DESTDIR)$(libfstdir)/$$f"; \ done clean-libfstLTLIBRARIES: -test -z "$(libfst_LTLIBRARIES)" || rm -f $(libfst_LTLIBRARIES) @list='$(libfst_LTLIBRARIES)'; \ locs=`for p in $$list; do echo $$p; done | \ sed 's|^[^/]*$$|.|; s|/[^/]*$$||; s|$$|/so_locations|' | \ sort -u`; \ test -z "$$locs" || { \ echo rm -f $${locs}; \ rm -f $${locs}; \ } compact16_acceptor-fst.la: $(compact16_acceptor_fst_la_OBJECTS) $(compact16_acceptor_fst_la_DEPENDENCIES) 
$(EXTRA_compact16_acceptor_fst_la_DEPENDENCIES) $(AM_V_CXXLD)$(compact16_acceptor_fst_la_LINK) -rpath $(libfstdir) $(compact16_acceptor_fst_la_OBJECTS) $(compact16_acceptor_fst_la_LIBADD) $(LIBS) compact16_string-fst.la: $(compact16_string_fst_la_OBJECTS) $(compact16_string_fst_la_DEPENDENCIES) $(EXTRA_compact16_string_fst_la_DEPENDENCIES) $(AM_V_CXXLD)$(compact16_string_fst_la_LINK) -rpath $(libfstdir) $(compact16_string_fst_la_OBJECTS) $(compact16_string_fst_la_LIBADD) $(LIBS) compact16_unweighted-fst.la: $(compact16_unweighted_fst_la_OBJECTS) $(compact16_unweighted_fst_la_DEPENDENCIES) $(EXTRA_compact16_unweighted_fst_la_DEPENDENCIES) $(AM_V_CXXLD)$(compact16_unweighted_fst_la_LINK) -rpath $(libfstdir) $(compact16_unweighted_fst_la_OBJECTS) $(compact16_unweighted_fst_la_LIBADD) $(LIBS) compact16_unweighted_acceptor-fst.la: $(compact16_unweighted_acceptor_fst_la_OBJECTS) $(compact16_unweighted_acceptor_fst_la_DEPENDENCIES) $(EXTRA_compact16_unweighted_acceptor_fst_la_DEPENDENCIES) $(AM_V_CXXLD)$(compact16_unweighted_acceptor_fst_la_LINK) -rpath $(libfstdir) $(compact16_unweighted_acceptor_fst_la_OBJECTS) $(compact16_unweighted_acceptor_fst_la_LIBADD) $(LIBS) compact16_weighted_string-fst.la: $(compact16_weighted_string_fst_la_OBJECTS) $(compact16_weighted_string_fst_la_DEPENDENCIES) $(EXTRA_compact16_weighted_string_fst_la_DEPENDENCIES) $(AM_V_CXXLD)$(compact16_weighted_string_fst_la_LINK) -rpath $(libfstdir) $(compact16_weighted_string_fst_la_OBJECTS) $(compact16_weighted_string_fst_la_LIBADD) $(LIBS) compact64_acceptor-fst.la: $(compact64_acceptor_fst_la_OBJECTS) $(compact64_acceptor_fst_la_DEPENDENCIES) $(EXTRA_compact64_acceptor_fst_la_DEPENDENCIES) $(AM_V_CXXLD)$(compact64_acceptor_fst_la_LINK) -rpath $(libfstdir) $(compact64_acceptor_fst_la_OBJECTS) $(compact64_acceptor_fst_la_LIBADD) $(LIBS) compact64_string-fst.la: $(compact64_string_fst_la_OBJECTS) $(compact64_string_fst_la_DEPENDENCIES) $(EXTRA_compact64_string_fst_la_DEPENDENCIES) $(AM_V_CXXLD)$(compact64_string_fst_la_LINK) -rpath $(libfstdir) $(compact64_string_fst_la_OBJECTS) $(compact64_string_fst_la_LIBADD) $(LIBS) compact64_unweighted-fst.la: $(compact64_unweighted_fst_la_OBJECTS) $(compact64_unweighted_fst_la_DEPENDENCIES) $(EXTRA_compact64_unweighted_fst_la_DEPENDENCIES) $(AM_V_CXXLD)$(compact64_unweighted_fst_la_LINK) -rpath $(libfstdir) $(compact64_unweighted_fst_la_OBJECTS) $(compact64_unweighted_fst_la_LIBADD) $(LIBS) compact64_unweighted_acceptor-fst.la: $(compact64_unweighted_acceptor_fst_la_OBJECTS) $(compact64_unweighted_acceptor_fst_la_DEPENDENCIES) $(EXTRA_compact64_unweighted_acceptor_fst_la_DEPENDENCIES) $(AM_V_CXXLD)$(compact64_unweighted_acceptor_fst_la_LINK) -rpath $(libfstdir) $(compact64_unweighted_acceptor_fst_la_OBJECTS) $(compact64_unweighted_acceptor_fst_la_LIBADD) $(LIBS) compact64_weighted_string-fst.la: $(compact64_weighted_string_fst_la_OBJECTS) $(compact64_weighted_string_fst_la_DEPENDENCIES) $(EXTRA_compact64_weighted_string_fst_la_DEPENDENCIES) $(AM_V_CXXLD)$(compact64_weighted_string_fst_la_LINK) -rpath $(libfstdir) $(compact64_weighted_string_fst_la_OBJECTS) $(compact64_weighted_string_fst_la_LIBADD) $(LIBS) compact8_acceptor-fst.la: $(compact8_acceptor_fst_la_OBJECTS) $(compact8_acceptor_fst_la_DEPENDENCIES) $(EXTRA_compact8_acceptor_fst_la_DEPENDENCIES) $(AM_V_CXXLD)$(compact8_acceptor_fst_la_LINK) -rpath $(libfstdir) $(compact8_acceptor_fst_la_OBJECTS) $(compact8_acceptor_fst_la_LIBADD) $(LIBS) compact8_string-fst.la: $(compact8_string_fst_la_OBJECTS) 
$(compact8_string_fst_la_DEPENDENCIES) $(EXTRA_compact8_string_fst_la_DEPENDENCIES) $(AM_V_CXXLD)$(compact8_string_fst_la_LINK) -rpath $(libfstdir) $(compact8_string_fst_la_OBJECTS) $(compact8_string_fst_la_LIBADD) $(LIBS) compact8_unweighted-fst.la: $(compact8_unweighted_fst_la_OBJECTS) $(compact8_unweighted_fst_la_DEPENDENCIES) $(EXTRA_compact8_unweighted_fst_la_DEPENDENCIES) $(AM_V_CXXLD)$(compact8_unweighted_fst_la_LINK) -rpath $(libfstdir) $(compact8_unweighted_fst_la_OBJECTS) $(compact8_unweighted_fst_la_LIBADD) $(LIBS) compact8_unweighted_acceptor-fst.la: $(compact8_unweighted_acceptor_fst_la_OBJECTS) $(compact8_unweighted_acceptor_fst_la_DEPENDENCIES) $(EXTRA_compact8_unweighted_acceptor_fst_la_DEPENDENCIES) $(AM_V_CXXLD)$(compact8_unweighted_acceptor_fst_la_LINK) -rpath $(libfstdir) $(compact8_unweighted_acceptor_fst_la_OBJECTS) $(compact8_unweighted_acceptor_fst_la_LIBADD) $(LIBS) compact8_weighted_string-fst.la: $(compact8_weighted_string_fst_la_OBJECTS) $(compact8_weighted_string_fst_la_DEPENDENCIES) $(EXTRA_compact8_weighted_string_fst_la_DEPENDENCIES) $(AM_V_CXXLD)$(compact8_weighted_string_fst_la_LINK) -rpath $(libfstdir) $(compact8_weighted_string_fst_la_OBJECTS) $(compact8_weighted_string_fst_la_LIBADD) $(LIBS) libfstcompact.la: $(libfstcompact_la_OBJECTS) $(libfstcompact_la_DEPENDENCIES) $(EXTRA_libfstcompact_la_DEPENDENCIES) $(AM_V_CXXLD)$(libfstcompact_la_LINK) -rpath $(libdir) $(libfstcompact_la_OBJECTS) $(libfstcompact_la_LIBADD) $(LIBS) mostlyclean-compile: -rm -f *.$(OBJEXT) distclean-compile: -rm -f *.tab.c @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/compact16_acceptor-fst.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/compact16_string-fst.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/compact16_unweighted-fst.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/compact16_unweighted_acceptor-fst.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/compact16_weighted_string-fst.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/compact64_acceptor-fst.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/compact64_string-fst.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/compact64_unweighted-fst.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/compact64_unweighted_acceptor-fst.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/compact64_weighted_string-fst.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/compact8_acceptor-fst.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/compact8_string-fst.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/compact8_unweighted-fst.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/compact8_unweighted_acceptor-fst.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/compact8_weighted_string-fst.Plo@am__quote@ .cc.o: @am__fastdepCXX_TRUE@ $(AM_V_CXX)depbase=`echo $@ | sed 's|[^/]*$$|$(DEPDIR)/&|;s|\.o$$||'`;\ @am__fastdepCXX_TRUE@ $(CXXCOMPILE) -MT $@ -MD -MP -MF $$depbase.Tpo -c -o $@ $< &&\ @am__fastdepCXX_TRUE@ $(am__mv) $$depbase.Tpo $$depbase.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ $< .cc.obj: @am__fastdepCXX_TRUE@ $(AM_V_CXX)depbase=`echo $@ | sed 's|[^/]*$$|$(DEPDIR)/&|;s|\.obj$$||'`;\ @am__fastdepCXX_TRUE@ 
$(CXXCOMPILE) -MT $@ -MD -MP -MF $$depbase.Tpo -c -o $@ `$(CYGPATH_W) '$<'` &&\ @am__fastdepCXX_TRUE@ $(am__mv) $$depbase.Tpo $$depbase.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'` .cc.lo: @am__fastdepCXX_TRUE@ $(AM_V_CXX)depbase=`echo $@ | sed 's|[^/]*$$|$(DEPDIR)/&|;s|\.lo$$||'`;\ @am__fastdepCXX_TRUE@ $(LTCXXCOMPILE) -MT $@ -MD -MP -MF $$depbase.Tpo -c -o $@ $< &&\ @am__fastdepCXX_TRUE@ $(am__mv) $$depbase.Tpo $$depbase.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LTCXXCOMPILE) -c -o $@ $< mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-am TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-am CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscopelist: cscopelist-am cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(LTLIBRARIES) installdirs: for dir in "$(DESTDIR)$(libdir)" "$(DESTDIR)$(libfstdir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." clean: clean-am clean-am: clean-generic clean-libLTLIBRARIES clean-libfstLTLIBRARIES \ clean-libtool mostlyclean-am distclean: distclean-am -rm -rf ./$(DEPDIR) -rm -f Makefile distclean-am: clean-am distclean-compile distclean-generic \ distclean-tags dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-libfstLTLIBRARIES install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-libLTLIBRARIES install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -rf ./$(DEPDIR) -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-compile mostlyclean-generic \ mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: uninstall-libLTLIBRARIES uninstall-libfstLTLIBRARIES .MAKE: install-am install-strip .PHONY: CTAGS GTAGS TAGS all all-am check check-am clean clean-generic \ clean-libLTLIBRARIES clean-libfstLTLIBRARIES clean-libtool \ cscopelist-am ctags ctags-am distclean distclean-compile \ distclean-generic distclean-libtool distclean-tags distdir dvi \ dvi-am html html-am info info-am install install-am \ install-data install-data-am install-dvi install-dvi-am \ install-exec install-exec-am install-html install-html-am \ install-info install-info-am install-libLTLIBRARIES \ install-libfstLTLIBRARIES install-man install-pdf \ install-pdf-am install-ps install-ps-am install-strip \ installcheck installcheck-am installdirs maintainer-clean \ maintainer-clean-generic mostlyclean mostlyclean-compile \ mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \ tags tags-am uninstall uninstall-am uninstall-libLTLIBRARIES \ uninstall-libfstLTLIBRARIES # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT:
0
coqui_public_repos
coqui_public_repos/stt-model-manager/CODE_OF_CONDUCT.md
# Contributor Covenant Code of Conduct ## Our Pledge We as members, contributors, and leaders pledge to make participation in our community a harassment-free experience for everyone, regardless of age, body size, visible or invisible disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, caste, color, religion, or sexual identity and orientation. We pledge to act and interact in ways that contribute to an open, welcoming, diverse, inclusive, and healthy community. ## Our Standards Examples of behavior that contributes to a positive environment for our community include: * Demonstrating empathy and kindness toward other people * Being respectful of differing opinions, viewpoints, and experiences * Giving and gracefully accepting constructive feedback * Accepting responsibility and apologizing to those affected by our mistakes, and learning from the experience * Focusing on what is best not just for us as individuals, but for the overall community Examples of unacceptable behavior include: * The use of sexualized language or imagery, and sexual attention or advances of any kind * Trolling, insulting or derogatory comments, and personal or political attacks * Public or private harassment * Publishing others' private information, such as a physical or email address, without their explicit permission * Other conduct which could reasonably be considered inappropriate in a professional setting ## Enforcement Responsibilities Community leaders are responsible for clarifying and enforcing our standards of acceptable behavior and will take appropriate and fair corrective action in response to any behavior that they deem inappropriate, threatening, offensive, or harmful. Community leaders have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, and will communicate reasons for moderation decisions when appropriate. ## Scope This Code of Conduct applies within all community spaces, and also applies when an individual is officially representing the community in public spaces. Examples of representing our community include using an official e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. ## Enforcement Instances of abusive, harassing, or otherwise unacceptable behavior may be reported to the community leaders responsible for enforcement by emailing [coc-report@coqui.ai](mailto:coc-report@coqui.ai). All complaints will be reviewed and investigated promptly and fairly. All community leaders are obligated to respect the privacy and security of the reporter of any incident. ## Enforcement Guidelines Community leaders will follow these Community Impact Guidelines in determining the consequences for any action they deem in violation of this Code of Conduct: ### 1. Correction **Community Impact**: Use of inappropriate language or other behavior deemed unprofessional or unwelcome in the community. **Consequence**: A private, written warning from community leaders, providing clarity around the nature of the violation and an explanation of why the behavior was inappropriate. A public apology may be requested. ### 2. Warning **Community Impact**: A violation through a single incident or series of actions. **Consequence**: A warning with consequences for continued behavior. 
No interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, for a specified period of time. This includes avoiding interactions in community spaces as well as external channels like social media. Violating these terms may lead to a temporary or permanent ban. ### 3. Temporary Ban **Community Impact**: A serious violation of community standards, including sustained inappropriate behavior. **Consequence**: A temporary ban from any sort of interaction or public communication with the community for a specified period of time. No public or private interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, is allowed during this period. Violating these terms may lead to a permanent ban. ### 4. Permanent Ban **Community Impact**: Demonstrating a pattern of violation of community standards, including sustained inappropriate behavior, harassment of an individual, or aggression toward or disparagement of classes of individuals. **Consequence**: A permanent ban from any sort of public interaction within the community. ## Attribution This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 2.0, available at [https://www.contributor-covenant.org/version/2/0/code_of_conduct.html][v2.0]. Community Impact Guidelines were inspired by [Mozilla's code of conduct enforcement ladder][Mozilla CoC]. For answers to common questions about this code of conduct, see the FAQ at [https://www.contributor-covenant.org/faq][FAQ]. Translations are available at [https://www.contributor-covenant.org/translations][translations]. [homepage]: https://www.contributor-covenant.org [v2.0]: https://www.contributor-covenant.org/version/2/0/code_of_conduct.html [Mozilla CoC]: https://github.com/mozilla/diversity [FAQ]: https://www.contributor-covenant.org/faq [translations]: https://www.contributor-covenant.org/translations
0
coqui_public_repos/STT
coqui_public_repos/STT/taskcluster/test-electronjs_v6.0_16k-linux-amd64-opt.yml
build: template_file: test-linux-opt-base.tyml docker_image: "ubuntu:16.04" dependencies: - "linux-amd64-cpu-opt" - "test-training_16k-linux-amd64-py36m-opt" test_model_task: "test-training_16k-linux-amd64-py36m-opt" system_setup: > ${nodejs.packages_xenial.prep_12} && ${nodejs.packages_xenial.apt_pinning} && apt-get -qq update && apt-get -qq -y install ${nodejs.packages_xenial.apt} ${electronjs.packages_xenial.apt} args: tests_cmdline: "${system.homedir.linux}/DeepSpeech/ds/taskcluster/tc-electron-tests.sh 12.x 6.0.12 16k" workerType: "${docker.dsTests}" metadata: name: "DeepSpeech Linux AMD64 CPU ElectronJS v6.0 tests (16kHz)" description: "Testing DeepSpeech for Linux/AMD64 on ElectronJS v6.0, CPU only, optimized version (16kHz)"
0
coqui_public_repos
coqui_public_repos/open-speech-corpora/README.md
# 💎 Open Speech Corpora

A list of open speech corpora for Speech Technology research and development. This list has a preference for free (i.e. no $ cost) and truly open corpora (e.g. released under a [Creative Commons license](https://en.wikipedia.org/wiki/Creative_Commons_license) or a [Community Data License Agreement](https://en.wikipedia.org/wiki/Linux_Foundation#Community_Data_License_Agreement_%28CDLA%29)). Not all these corpora may meet those criteria, but all the following corpora are accessible and usable for research and/or commercial use.

Feel free to propose additions to the list!

*There's a long backlog of corpora to be added in the [Issues](https://github.com/coqui-ai/open-speech-corpora/issues), and Pull Requests are very welcome :)*

## 📜 [CC-0](https://creativecommons.org/share-your-work/public-domain/cc0/)

| CORPUS | LANGUAGES | # HOURS | # SPEAKERS | DOWNLOAD | LICENSE |
| --- | --- | --- | --- | --- | --- |
| Common Voice | Multilingual | >15,000 hours (validated); >20,000 hours (total) | Multi-speaker | <https://voice.mozilla.org/en/datasets> | [CC-0](https://creativecommons.org/share-your-work/public-domain/cc0/) |
| Yesno | Hebrew | 6 mins | one male | <http://www.openslr.org/1/> | [CC-0](https://creativecommons.org/share-your-work/public-domain/cc0/) |
| LJ Speech Corpus | English | ~24 hours | [one female](https://librivox.org/reader/11049) | <https://data.keithito.com/data/speech/LJSpeech-1.1.tar.bz2> | [CC-0](https://creativecommons.org/share-your-work/public-domain/cc0/) |
| NST Danish ASR Database | Danish | 229,992 utterances | 616 speakers | original: <https://www.nb.no/sprakbanken/en/resource-catalogue/oai-nb-no-sbr-19/>, reorganized: <https://www.nb.no/sprakbanken/en/resource-catalogue/oai-nb-no-sbr-55/> | [CC-0](https://creativecommons.org/publicdomain/zero/1.0/) |
| NST Danish Dictation | Danish | 34,955 utterances | 151 speakers | <https://www.nb.no/sprakbanken/en/resource-catalogue/oai-nb-no-sbr-20/> | [CC-0](https://creativecommons.org/publicdomain/zero/1.0/) |
| NST Danish Speech Synthesis | Danish | 4,108 utterances | 1 male speaker | <https://www.nb.no/sprakbanken/en/resource-catalogue/oai-nb-no-sbr-21/> | [CC-0](https://creativecommons.org/publicdomain/zero/1.0/) |
| NST Swedish ASR Database | Swedish | 366,000 utterances | 1,000 speakers | original: <https://www.nb.no/sprakbanken/en/resource-catalogue/oai-nb-no-sbr-16/>, reorganized: <https://www.nb.no/sprakbanken/en/resource-catalogue/oai-nb-no-sbr-56/> | [CC-0](https://creativecommons.org/publicdomain/zero/1.0/) |
| NST Swedish Dictation | Swedish | 45,620 utterances | 195 speakers | <https://www.nb.no/sprakbanken/en/resource-catalogue/oai-nb-no-sbr-17/> | [CC-0](https://creativecommons.org/publicdomain/zero/1.0/) |
| NST Swedish Speech Synthesis | Swedish | 5,279 utterances | 1 male speaker | <https://www.nb.no/sprakbanken/en/resource-catalogue/oai-nb-no-sbr-18/> | [CC-0](https://creativecommons.org/publicdomain/zero/1.0/) |
| NST Norwegian ASR Database | Norwegian | 359,760 utterances | 980 speakers | original: <https://www.nb.no/sprakbanken/en/resource-catalogue/oai-nb-no-sbr-13/>, reorganized: <https://www.nb.no/sprakbanken/en/resource-catalogue/oai-nb-no-sbr-54/> | [CC-0](https://creativecommons.org/publicdomain/zero/1.0/) |
| NST Norwegian Dictation | Norwegian | 33,360 utterances | 144 speakers | <https://www.nb.no/sprakbanken/en/resource-catalogue/oai-nb-no-sbr-14/> | [CC-0](https://creativecommons.org/publicdomain/zero/1.0/) |
| NST Norwegian Speech Synthesis | Norwegian | 5,363 utterances | 1 male speaker | <https://www.nb.no/sprakbanken/en/resource-catalogue/oai-nb-no-sbr-15/> | [CC-0](https://creativecommons.org/publicdomain/zero/1.0/) |
| NB Tale – Speech Database for Norwegian | Norwegian | 7,600 utterances + ~12 hours | 380 speakers | <https://www.nb.no/sprakbanken/en/resource-catalogue/oai-nb-no-sbr-31/> | [CC-0](https://creativecommons.org/publicdomain/zero/1.0/) |
| Norwegian Parliamentary Speech Corpus (v0.1) | Norwegian | ~59 hours | 203 speakers | <https://www.nb.no/sprakbanken/en/resource-catalogue/oai-nb-no-sbr-58/> | [CC-0](https://creativecommons.org/publicdomain/zero/1.0/) |
| Wikimedia Commons Odia | Odia | ~8 hours | ~20 speakers | <https://commons.wikimedia.org/wiki/Category:Odia_pronunciation> | mostly(?) [CC-0](https://creativecommons.org/share-your-work/public-domain/cc0/) |
| Thorsten-21.02-neutral | German | ~24 hours | 1 male speaker | <https://www.Thorsten-Voice.de> | [CC-0](https://creativecommons.org/share-your-work/public-domain/cc0/) |
| Thorsten-21.06-emotional | German | 2,400 utterances (8 emotions) | 1 male speaker | <https://www.Thorsten-Voice.de> | [CC-0](https://creativecommons.org/share-your-work/public-domain/cc0/) |

## 📜 [CC-BY](https://creativecommons.org/licenses/by/4.0/)

| CORPUS | LANGUAGES | # HOURS | # SPEAKERS | DOWNLOAD | LICENSE |
| --- | --- | --- | --- | --- | --- |
| ARU Speech Corpus | English (UK) | 720 utterances / speaker | 12 (6 female; 6 male) | <http://datacat.liverpool.ac.uk/681/1/ARU_Speech_Corpus_v1_0.zip> | [CC-BY 3.0](https://creativecommons.org/licenses/by/3.0/) |
| Althingi Parliamentary Speech Corpus | Icelandic | 542 hours and 25 minutes | 196 speakers | <http://www.malfong.is/index.php?dlid=73&lang=en> | [CC-BY 4.0](https://creativecommons.org/licenses/by/4.0/) |
| Alþingisumræður Parliamentary Speech Corpus | Icelandic | ~21 hours | | <http://www.malfong.is/index.php?dlid=8&lang=en> | [CC-BY 3.0](https://creativecommons.org/licenses/by/3.0/) |
| Hjal Corpus | Icelandic | ~41,000 recordings | 883 speakers | <http://www.malfong.is/index.php?dlid=5&lang=en> | [CC-BY 3.0](https://creativecommons.org/licenses/by/3.0/) |
| The Malromur Corpus | Icelandic | 152 hours | 563 speakers | <http://www.malfong.is/index.php?dlid=65&lang=en> | [CC-BY 4.0](https://creativecommons.org/licenses/by/4.0/) |
| Telecooperation German Corpus for Kinect | German | ~35 hours | ~180 speakers | <http://www.repository.voxforge1.org/downloads/de/german-speechdata-TUDa-2015.tar.gz> | [CC-BY 2.0](https://creativecommons.org/licenses/by/2.0/) |
| African Speech Technology English-English Speech Corpus | English | ~21 hours | | <https://repo.sadilar.org/handle/20.500.12185/283> | [CC-BY 2.5 South Africa](https://creativecommons.org/licenses/by/2.5/za/legalcode) |
| African Speech Technology isiXhosa Speech Corpus | isiXhosa | ~26 hours | | <https://repo.sadilar.org/handle/20.500.12185/305> | [CC-BY 2.5 South Africa](https://creativecommons.org/licenses/by/2.5/za/legalcode) |
| NCHLT Afrikaans | Afrikaans | 56 hours | 210 speakers (98 female / 112 male) | <https://repo.sadilar.org/handle/20.500.12185/280> | CC-BY 3.0 |
| NCHLT English | English | 56 hours | 210 speakers (100 female / 110 male) | <https://repo.sadilar.org/handle/20.500.12185/274> | CC-BY 3.0 |
| NCHLT isiNdebele | isiNdebele | 56 hours | 148 speakers (78 female / 70 male) | <https://repo.sadilar.org/handle/20.500.12185/272> | CC-BY 3.0 |
| NCHLT isiXhosa | isiXhosa | 56 hours | 209 speakers (106 female / 103 male) |
<https://repo.sadilar.org/handle/20.500.12185/279> | CC-BY 3.0 | | NCHLT isiZulu | isiZulu | 56 hours | 210 speakers (98 female / 112 male) | <https://repo.sadilar.org/handle/20.500.12185/275> | CC-BY 3.0 | | NCHLT Sepedi | Sepedi | 56 hours | 210 speakers (100 female / 110 male) | <https://repo.sadilar.org/handle/20.500.12185/270> | CC-BY 3.0 | | NCHLT Sesotho | Sesotho | 56 hours | 210 speakers (113 female / 97 male) | <https://repo.sadilar.org/handle/20.500.12185/278> | CC-BY 3.0 | | NCHLT Setswana | Setswana | 56 hours | 210 speakers (109 female / 101 male) | <https://repo.sadilar.org/handle/20.500.12185/281> | CC-BY 3.0 | | NCHLT Siswati | Siswati | 56 hours | 197 speakers (96 female / 101 male) | <https://repo.sadilar.org/handle/20.500.12185/271> | CC-BY 3.0 | | NCHLT Tshivenda | Tshivenda | 56 hours | 208 speakers (83 female / 125 male) | <https://repo.sadilar.org/handle/20.500.12185/276> | CC-BY 3.0 | | NCHLT Xitsonga | Xitsonga | 56 hours | 198 speakers (95 female/103 male) | <https://repo.sadilar.org/handle/20.500.12185/277> | CC-BY 3.0 | | Lwazi II Cross-lingual Proper Name Corpus | Afrikaans; English; isiZulu; Sesotho | 2 hours 5 mins| 20 speakers | <https://repo.sadilar.org/handle/20.500.12185/445> | CC-BY 3.0 | | Lwazi II Proper Name Call Routing Telephone Corpus | English | 2 hours 7 mins | | <https://repo.sadilar.org/handle/20.500.12185/448> | CC-BY 3.0 | | Lwazi II Afrikaans Trajectory Tracking Corpus | Afrikaans | 4 hours | one male | <https://repo.sadilar.org/handle/20.500.12185/442> | CC-BY 3.0 | | LibriSpeech | English | ~1000 hours | 2484 speakers (1201 female / 1283 male) | <http://www.openslr.org/12/> | CC-BY 4.0 | | Zeroth-Korean | Korean | 52.8 hours | 115 speakers | <http://www.openslr.org/40/> | CC-BY 4.0 | | Speech Commands | English | 17.8 hours | >1,000 speakers | <https://ai.googleblog.com/2017/08/launching-speech-commands-dataset.html> | CC-BY 4.0 | | ParlamentParla | Catalan | 320 hours | | <https://www.openslr.org/59/> | CC-BY 4.0 | | SIWIS | French | ~10 hours | one female | <http://datashare.is.ed.ac.uk/download/DS_10283_2353.zip> | [CC-BY 4.0](https://creativecommons.org/licenses/by/4.0/legalcode) | | VCTK | English | 44 hours | 109 speakers | <http://datashare.is.ed.ac.uk/download/DS_10283_3443.zip> | [CC-BY 4.0](https://creativecommons.org/licenses/by/4.0/legalcode) | | LibriTTS | English | 586 hours | 2,456 speakers (1,185 female / 1,271 male) | <http://www.openslr.org/60/> | [CC-BY 4.0](https://creativecommons.org/licenses/by/4.0/legalcode) | | Augmented LibriSpeech | Audio (English); Text (English, French) | 236 hours | | <https://persyval-platform.univ-grenoble-alpes.fr/datasets/DS91> | [CC-BY 4.0](https://creativecommons.org/licenses/by/4.0/legalcode) | | Helsinki Prosody Corpus | English | 262.5 hours | 1,230 speakers | <https://github.com/Helsinki-NLP/prosody> | [CC-BY 4.0](https://creativecommons.org/licenses/by/4.0/legalcode) | |Tuva Speech Database | Norwegian | 24 hours | 40 speakers | https://www.nb.no/sprakbanken/show?serial=oai:nb.no:sbr-44&lang= | [CC-BY 4.0](https://creativecommons.org/licenses/by/4.0/legalcode) | | COERLL Kʼicheʼ corpus | Kʼicheʼ | 34 minutes | ? 
speakers | https://cl.indiana.edu/~ftyers/resources/utexas-kiche-audio.tar.gz | [CC-BY 4.0](https://creativecommons.org/licenses/by/4.0/legalcode) | | Timers and Such v0.1 | English (synthetic: US, real: various nationalities) | synthetic: 172 hours, real: 0.29 hours | 21 synthetic, 11 real | https://zenodo.org/record/4110812#.X9j0RmBOkYM | [CC-BY 4.0](https://creativecommons.org/licenses/by/4.0/legalcode) | | Large Corpus of Czech Parliament Plenary Hearings | Czech | 444 hours | | <https://lindat.mff.cuni.cz/repository/xmlui/handle/11234/1-3126> | [CC-BY 4.0](https://creativecommons.org/licenses/by/4.0/legalcode) | ## 📜 [CC-BY-SA](https://creativecommons.org/licenses/by-sa/4.0/) | CORPUS | LANGUAGES | # HOURS | # SPEAKERS | DOWNLOAD | LICENSE | | --- | --- | --- | --- | --- | --- | | Iban | Iban | 8 hours | | <http://www.openslr.org/24/> <https://github.com/sarahjuan/iban> | CC-BY-SA 2.0 | | Vystadial 2013 | English; Czech | 41 hours; 15 hours | | <http://www.openslr.org/6/> | CC-BY-SA 3.0 US | | Vystadial 2016 Czech | Czech | 77 hours; includes Vystadial 2013 Czech | | <https://lindat.cz/repository/xmlui/handle/11234/1-1740> | CC-BY-SA 4.0 | | Free Spoken Digit Dataset | English | 2,000 isolated digits | 4 speakers | <https://github.com/Jakobovski/free-spoken-digit-dataset> | CC-BY-SA 4.0 | | Google Javanese | Javanese | 296 hours| 1019 speakers| <http://www.openslr.org/35/> | CC-BY-SA 4.0 | | Google Nepali | Nepali | 165 hours| 527 speakers| <http://www.openslr.org/54/> | CC-BY-SA 4.0 | | Google Bengali | Bengali | 229 hours| 508 speakers| <http://www.openslr.org/53/> | CC-BY-SA 4.0 | | Google Sinhala | Sinhala | 224 hours| 478 speakers| <http://www.openslr.org/52/> | CC-BY-SA 4.0 | | Google Sundanese | Sundanese | 333 hours| 542 speakers| <http://www.openslr.org/36/> | CC-BY-SA 4.0 | | Spoken Wikipedia Corpus (SWC-2017) | English; German; Dutch | 182 hours; 249 hours; 79 hours | 395 speakers; 339 speakers; 145 speakers | <https://nats.gitlab.io/swc/> | CC-BY-SA 4.0 | | Chuvash TTS | Chuvash | 4 hours | 1 speaker | <https://github.com/ftyers/Turkic_TTS> | CC-BY-SA 4.0 | | Forschergeist | German | 2 hours | 2 speakers (1 female; 1 male) | female speaker: <https://goofy.zamia.org/zamia-speech/corpora/forschergeist/annettevogt-20180320-rec.tgz>; male speaker: <https://goofy.zamia.org/zamia-speech/corpora/forschergeist/timpritlove-20180320-rec.tgz> | CC-BY-SA 4.0 | | Malayalam Speech Corpus by [SMC](https://blog.smc.org.in/malayalam-speech-corpus/) | Malayalam | 1:36 hours | 75 speakers (3 female, 12 male, 60 unidentified) | https://releases.smc.org.in/msc-reviewed-speech/ | CC-BY-SA 4.0 | | Google Malayalam | Malayalam | 3.02 hours| 24 speakers| <http://www.openslr.org/63/> | CC-BY-SA 4.0 | ## 📜 [CC-BY-ND](https://creativecommons.org/licenses/by-nd/4.0/) | CORPUS | LANGUAGES | # HOURS | # SPEAKERS | DOWNLOAD | LICENSE | | --- | --- | --- | --- | --- | --- | | IBM Recorded Debates v1 | English | 5 hours | 10 speakers | <https://www.research.ibm.com/haifa/dept/vst/debating_data.shtml#Debate%20Speech%20Analysis> | CC-BY-ND | | IBM Recorded Debates v2 | English | ~14 hours | 14 speakers | <https://www.research.ibm.com/haifa/dept/vst/debating_data.shtml#Debate%20Speech%20Analysis> | CC-BY-ND | ## 📜 [CC-BY-NC](https://creativecommons.org/licenses/by-nc/4.0/) | CORPUS | LANGUAGES | # HOURS | # SPEAKERS | DOWNLOAD | LICENSE | | --- | --- | --- | --- | --- | --- | | TV3Parla | Catalan | 240 hours | | <http://laklak.eu/share/tv3_0.3.tar.gz> | [CC-BY-NC 
4.0](https://creativecommons.org/licenses/by-nc/4.0/) |
| Russian Open STT Corpus | Russian | ~10,000 hours public, ~10,000 more upon request | | <https://github.com/snakers4/open_stt/#links> | [CC-BY-NC 4.0](https://creativecommons.org/licenses/by-nc/4.0/) with some [exceptions](https://github.com/snakers4/open_stt/blob/master/LICENSE)|
| Russian Open TTS Corpus | Russian | 145 hours | 3 males | <https://github.com/snakers4/open_tts/#links> | [CC-BY-NC 4.0](https://creativecommons.org/licenses/by-nc/4.0/) with some [exceptions](https://github.com/snakers4/open_tts/blob/master/LICENSE)|
| OVM – Otázky Václava Moravce | Czech | 35 hours | | <https://lindat.mff.cuni.cz/repository/xmlui/handle/11858/00-097C-0000-000D-EC98-3> | [CC-BY-NC 3.0](https://creativecommons.org/licenses/by-nc/3.0/) |

## 📜 [CC-BY-NC-SA](https://creativecommons.org/licenses/by-nc-sa/4.0/)

| CORPUS | LANGUAGES | # HOURS | # SPEAKERS | DOWNLOAD | LICENSE |
| --- | --- | --- | --- | --- | --- |
| CHiME-Home | English | 6.8 hours | | <https://archive.org/details/chime-home> | [CC-BY-NC-SA 3.0](https://creativecommons.org/licenses/by-nc-sa/3.0/) |
| Cameroon Pidgin English Corpus | Cameroon Pidgin English | ~17 hours | | <http://ota.ox.ac.uk/text/2563.zip> | [CC-BY-NC-SA 3.0](https://creativecommons.org/licenses/by-nc-sa/3.0/) |

## 📜 [CC-BY-NC-ND](https://creativecommons.org/licenses/by-nc-nd/4.0/)

| CORPUS | LANGUAGES | # HOURS | # SPEAKERS | DOWNLOAD | LICENSE |
| --- | --- | --- | --- | --- | --- |
| Tatoeba-Eng | English | ~250 hours (rough estimate) | 6 speakers | <https://voice.mozilla.org/en/datasets> | [CC-BY-NC 4.0](https://creativecommons.org/licenses/by-nc/4.0/) (some audio) / [CC-BY-NC-ND 3.0](https://creativecommons.org/licenses/by-nc-nd/3.0/) (most audio) / [CC-BY 2.0](https://creativecommons.org/licenses/by/2.0/) (all text) |
| TED-LIUM | English | 118 hours | 685 speakers (36h female / 81h male) | <http://www.openslr.org/7/> | [CC-BY-NC-ND 3.0](https://creativecommons.org/licenses/by-nc-nd/3.0/) |
| TED-LIUM-2 | English | 207 hours | 1242 speakers (66h female / 141h male) | <http://www.openslr.org/19/> | [CC-BY-NC-ND 3.0](https://creativecommons.org/licenses/by-nc-nd/3.0/) |
| TED-LIUM-3 | English | 452 hours | 2028 speakers (134h female / 316h male) | <http://www.openslr.org/51/> | [CC-BY-NC-ND 3.0](https://creativecommons.org/licenses/by-nc-nd/3.0/) |
| Pansori TEDxKR | Korean | 3 hours | 41 speakers | <http://www.openslr.org/58/> | [CC-BY-NC-ND 4.0](https://creativecommons.org/licenses/by-nc-nd/4.0/) |
| Primewords Mandarin | Mandarin | 100 hours | 296 speakers | <http://www.openslr.org/47/> | [CC-BY-NC-ND 4.0](https://creativecommons.org/licenses/by-nc-nd/4.0/)|
| MuST-C v1.0 | Audio (English); Text (Dutch, French, German, Italian, Portuguese, Romanian, Russian, Spanish) | 408, 504, 492, 465, 442, 385, 432, 489 hours per language pair | | <https://ict.fbk.eu/must-c-release-v1-0/> | [CC-BY-NC-ND 4.0](https://creativecommons.org/licenses/by-nc-nd/4.0/) |
| Czech Parliament Meetings | Czech | 88 hours | | <https://lindat.mff.cuni.cz/repository/xmlui/handle/11858/00-097C-0000-0005-CF9C-4> | [CC-BY-NC-ND 3.0](https://creativecommons.org/licenses/by-nc-nd/3.0/) |
| BembaSpeech | Bemba | 24 hours | 17 speakers (9 male / 8 female) | <https://github.com/csikasote/BembaSpeech> | [CC-BY-NC-ND 4.0](https://creativecommons.org/licenses/by-nc-nd/4.0/) |

## 📜 [CDLA-Permissive](https://cdla.io/permissive-1-0/)

| CORPUS | LANGUAGES | # HOURS | # SPEAKERS | DOWNLOAD | LICENSE |
| --- | --- | --- | --- | --- | --- |
| DiPCo
| English | ~5 hours | 32 speakers (13 female; 19 male) | <https://s3.amazonaws.com/dipco/DiPCo.tgz> | [CDLA-Permissive-1.0](https://cdla.io/permissive-1-0/) | ## 📜 [GNU General Public License](https://www.gnu.org/licenses/gpl.html) | CORPUS | LANGUAGES | # HOURS | # SPEAKERS | DOWNLOAD | LICENSE | | --- | --- | --- | --- | --- | --- | | VoxForge | English | ~120 hours | ~2966 speakers | <http://www.repository.voxforge1.org/downloads/en/Trunk/Audio/Main/16kHz_16bit/> <https://voice.mozilla.org/en/datasets> | GNU-GPL 3.0 | | VoxForge | Russian | | | <http://www.repository.voxforge1.org/downloads/ru/Trunk/Audio/Main/16kHz_16bit/> <http://www.repository.voxforge1.org/downloads/Russian/Trunk/Audio/Main/16kHz_16bit/>| GNU-GPL 3.0 | | VoxForge | German | | | <http://www.repository.voxforge1.org/downloads/de/Trunk/Audio/Main/16kHz_16bit/> | GNU-GPL 3.0 | ## 📜 [Apache License](https://www.apache.org/licenses/LICENSE-2.0) | CORPUS | LANGUAGES | # HOURS | # SPEAKERS | DOWNLOAD | LICENSE | | --- | --- | --- | --- | --- | --- | | AISHELL-1 | Mandarin | 170 hours | 400 speakers | <http://www.openslr.org/33/> | Apache 2.0 | | Tunisian_MSA | Modern Standard Arabic (Tunisia) | 11.2 hours | 118 speakers | <http://www.openslr.org/46/> | Apache 2.0 | | African Accented French | French | 22 hours | 232 speakers | <http://www.openslr.org/57/> | Apache 2.0 | | THCHS-30 | Mandarin Chinese | 33.57 hours (13,389 utterances) | 40 speakers (31 female; 9 male) | <http://www.openslr.org/18/> | Apache 2.0 | | Living Audio Dataset - Dutch | Dutch | 57:49 min | 1 speaker | <https://github.com/Idlak/Living-Audio-Dataset> | Apache 2.0 | | Living Audio Dataset - English | English | 50:50 min | 1 speaker | <https://github.com/Idlak/Living-Audio-Dataset> | Apache 2.0 | | Living Audio Dataset - Irish | Irish | 61:56 min | 1 speaker | <https://github.com/Idlak/Living-Audio-Dataset> | Apache 2.0 | | Living Audio Dataset - Russian | Russian | 34:58 min | 1 speaker | <https://github.com/Idlak/Living-Audio-Dataset> | Apache 2.0 | ## 📜 [MIT License](https://opensource.org/licenses/MIT) | CORPUS | LANGUAGES | # HOURS | # SPEAKERS | DOWNLOAD | LICENSE | | --- | --- | --- | --- | --- | --- | | ALFFA | Amharic;Hausa (paid); Swahili; Wolof | | | <http://www.openslr.org/25/> <https://github.com/besacier/ALFFA_PUBLIC> | MIT | ## 📜 [BSD 3-Clause License](https://opensource.org/licenses/BSD-3-Clause) | CORPUS | LANGUAGES | # HOURS | # SPEAKERS | DOWNLOAD | LICENSE | | --- | --- | --- | --- | --- | --- | | M-AILABS German Corpus | German | 237 hours and 22 minutes | | <http://www.caito.de/data/Training/stt_tts/de_DE.tgz> | [M-AILABS LICENSE](https://www.caito.de/2019/01/the-m-ailabs-speech-dataset/) (a data-specific [BSD 3-Clause License](https://opensource.org/licenses/BSD-3-Clause))| | M-AILABS Queen's English Corpus | Queen's English | 45 hours and 35 minutes | | <http://www.caito.de/data/Training/stt_tts/en_UK.tgz> | [M-AILABS LICENSE](https://www.caito.de/2019/01/the-m-ailabs-speech-dataset/) (a data-specific [BSD 3-Clause License](https://opensource.org/licenses/BSD-3-Clause))| | M-AILABS US English Corpus | American English | 102 hours and 7 minutes | | <http://www.caito.de/data/Training/stt_tts/en_US.tgz> | [M-AILABS LICENSE](https://www.caito.de/2019/01/the-m-ailabs-speech-dataset/) (a data-specific [BSD 3-Clause License](https://opensource.org/licenses/BSD-3-Clause))| | M-AILABS Spanish Corpus | Spanish Spanish | 108 hours and 34 minutes | | <http://www.caito.de/data/Training/stt_tts/es_ES.tgz> | [M-AILABS 
LICENSE](https://www.caito.de/2019/01/the-m-ailabs-speech-dataset/) (a data-specific [BSD 3-Clause License](https://opensource.org/licenses/BSD-3-Clause))| | M-AILABS Italian Corpus | Italian | 127 hours and 40 minutes | | <http://www.caito.de/data/Training/stt_tts/it_IT.tgz> | [M-AILABS LICENSE](https://www.caito.de/2019/01/the-m-ailabs-speech-dataset/) (a data-specific [BSD 3-Clause License](https://opensource.org/licenses/BSD-3-Clause))| | M-AILABS Ukrainian Corpus | Ukrainian | 87 hours and 8 minutes | | <http://www.caito.de/data/Training/stt_tts/uk_UK.tgz> | [M-AILABS LICENSE](https://www.caito.de/2019/01/the-m-ailabs-speech-dataset/) (a data-specific [BSD 3-Clause License](https://opensource.org/licenses/BSD-3-Clause))| | M-AILABS Russian Corpus | Russian | 46 hours and 47 minutes | | <http://www.caito.de/data/Training/stt_tts/ru_RU.tgz> | [M-AILABS LICENSE](https://www.caito.de/2019/01/the-m-ailabs-speech-dataset/) (a data-specific [BSD 3-Clause License](https://opensource.org/licenses/BSD-3-Clause))| | M-AILABS French-v0.9 Corpus | French | 190 hours and 30 minutes | | <http://www.caito.de/data/Training/stt_tts/fr_FR.tgz> | [M-AILABS LICENSE](https://www.caito.de/2019/01/the-m-ailabs-speech-dataset/) (a data-specific [BSD 3-Clause License](https://opensource.org/licenses/BSD-3-Clause))| | M-AILABS Polish Corpus | Polish | 53 hours and 50 minutes | | <http://www.caito.de/data/Training/stt_tts/pl_PL.tgz> | [M-AILABS LICENSE](https://www.caito.de/2019/01/the-m-ailabs-speech-dataset/) (a data-specific [BSD 3-Clause License](https://opensource.org/licenses/BSD-3-Clause))| ## 📜 [Custom License](https://en.wikipedia.org/wiki/Copyright) | CORPUS | LANGUAGES | # HOURS | # SPEAKERS | DOWNLOAD | LICENSE | | --- | --- | --- | --- | --- | --- | | Fluent Speech Commands Corpus | English | 19 hours (30,043 utterances) | 97 speakers | <http://fluent.ai:2052/jf8398hf30f0381738rucj3828chfdnchs.tar.gz> | [Fluent Speech Commands Public License](https://groups.google.com/a/fluent.ai/forum/#!msg/fluent-speech-commands/MXh_7Y-3QC8/9i2pHPW9AwAJ) | | CMU Wilderness | 700 Langs | Alignments distributed without audio or text total:~14,000 hours; per lang: ~20 hours | | <https://github.com/festvox/datasets-CMU_Wilderness> | <https://live.bible.is/terms> | | CHiME-5 | English | 50 hours | 48 speakers | <http://spandh.dcs.shef.ac.uk/chime_challenge/data.html> | [CHiME-5 License](http://spandh.dcs.shef.ac.uk/chime_challenge/download.html) | | Fearless Steps Corpus | English | 19,000 hours (20 hours transcribed) | ~450 speakers | <https://fearless-steps.github.io/ChallengePhase3/#19k_Corpus_Access> | [NASA Media Usage Guidelines](https://www.nasa.gov/multimedia/guidelines/index.html) | | Microsoft Speech Corpus (Indian languages) | Telugu; Tamil; Gujarati | | | <https://msropendata.com/datasets/7230b4b1-912d-400e-be58-f84e0512985e> | [Microsoft Speech Corpus (Indian Languages) License](https://msropendata.com/datasets/7230b4b1-912d-400e-be58-f84e0512985e) | | Microsoft Speech Language Translation Corpus | English; Chinese; Japanese| | | <https://msropendata.com/datasets/54813518-4ea6-4c39-9bb2-b0d1e5f0c187> | [Microsoft Research Data License Agreement](https://msrodr-api.azurewebsites.net//licenses/2f933be3-284d-500b-7ea3-2aa2fd0f1bb2/file) | | Hey Snips Corpus | English | 11K positive "Hey Snips" (~4.4 hours) and 87K negative (~89 hours) utterances | 2215 speakers (positive & negative) and 4028 speakers (negative only) | <https://research.snips.ai/datasets/keyword-spotting> | [Snips Data 
License](https://github.com/snipsco/keyword-spotting-research-datasets/blob/master/LICENSE) | | Snips SLU Corpus | English; French | 1660 "Smart Lights EN" (~1.3 hours), 1286 "Smart Speaker EN" (~55 minutes), 1138 "Smart Speaker FR" (~50 minutes) utterances | English: 69 speakers; French: 30 speakers | <https://research.snips.ai/datasets/spoken-language-understanding> | [Snips Data License](https://github.com/snipsco/keyword-spotting-research-datasets/blob/master/LICENSE) | | CMU Sphinx Group - AN4 | English | "an4_clstk"(~50 minutes) "an4test_clstk" (~6 minutes) | "an4_clstk": 21 female, 53 male "an4test_clstk": 3 female, 7 male | http://www.speech.cs.cmu.edu/databases/an4/an4_raw.bigendian.tar.gz | [AN4](http://www.speech.cs.cmu.edu/databases/an4/LICENSE.html) | | FT Speech | Danish | ~1,857 hours (1,017,244 utterances) | 434 speakers (176 female, 258 male) | <https://ftspeech.dk> | [FT Speech License](https://ftspeech.dk/LICENSE.html) | | FalaBrasil-LAPS-Constituicao | Brazilian-Portuguese | 9 hours | 1 speaker | <https://drive.google.com/uc?export=download&confirm=SrvW&id=1Nf849u-27CYRzJqedLaI-FaZfMRO7FT> | ["Bases de áudio transcrito e bases de texto normalizadas (sem pontuação, com números escritos por extenso, etc.) disponibilizadas de forma gratuita* pelo Grupo FalaBrasil. [disponibilizadas de forma gratuita*] / Portanto, apenas as bases livres estão sendo disponibilizadas."](http://labvis.ufpa.br/falabrasil/downloads/) | | FalaBrasil-LaPSMail | Brazilian-Portuguese | 1 hour | 25 speakers | <https://drive.google.com/uc?export=download&confirm=PecV&id=1B_Vq8MDSE4fBQefVxqCGSl-EcKAcjJLb> | ["Bases de áudio transcrito e bases de texto normalizadas (sem pontuação, com números escritos por extenso, etc.) disponibilizadas de forma gratuita* pelo Grupo FalaBrasil. [disponibilizadas de forma gratuita*] / Portanto, apenas as bases livres estão sendo disponibilizadas."](http://labvis.ufpa.br/falabrasil/downloads/) | | FalaBrasil-LaPS Benchmark | Brazilian-Portuguese | 1 hour | 1 speaker | <https://drive.google.com/uc?export=download&confirm=XFfF&id=1nZ8L9nJTt4blFC0RGT9Y7XRu02aAvDIo> | ["Bases de áudio transcrito e bases de texto normalizadas (sem pontuação, com números escritos por extenso, etc.) disponibilizadas de forma gratuita* pelo Grupo FalaBrasil. [disponibilizadas de forma gratuita*] / Portanto, apenas as bases livres estão sendo disponibilizadas."](http://labvis.ufpa.br/falabrasil/downloads/) |
0
coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src/include
coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src/include/fst/prune.h
// See www.openfst.org for extensive documentation on this weighted // finite-state transducer library. // // Functions implementing pruning. #ifndef FST_PRUNE_H_ #define FST_PRUNE_H_ #include <type_traits> #include <utility> #include <vector> #include <fst/log.h> #include <fst/arcfilter.h> #include <fst/heap.h> #include <fst/shortest-distance.h> namespace fst { namespace internal { template <class StateId, class Weight> class PruneCompare { public: PruneCompare(const std::vector<Weight> &idistance, const std::vector<Weight> &fdistance) : idistance_(idistance), fdistance_(fdistance) {} bool operator()(const StateId x, const StateId y) const { const auto wx = Times(IDistance(x), FDistance(x)); const auto wy = Times(IDistance(y), FDistance(y)); return less_(wx, wy); } private: Weight IDistance(const StateId s) const { return s < idistance_.size() ? idistance_[s] : Weight::Zero(); } Weight FDistance(const StateId s) const { return s < fdistance_.size() ? fdistance_[s] : Weight::Zero(); } const std::vector<Weight> &idistance_; const std::vector<Weight> &fdistance_; NaturalLess<Weight> less_; }; } // namespace internal template <class Arc, class ArcFilter> struct PruneOptions { using StateId = typename Arc::StateId; using Weight = typename Arc::Weight; PruneOptions(const Weight &weight_threshold, StateId state_threshold, ArcFilter filter, std::vector<Weight> *distance = nullptr, float delta = kDelta, bool threshold_initial = false) : weight_threshold(std::move(weight_threshold)), state_threshold(state_threshold), filter(std::move(filter)), distance(distance), delta(delta), threshold_initial(threshold_initial) {} // Pruning weight threshold. Weight weight_threshold; // Pruning state threshold. StateId state_threshold; // Arc filter. ArcFilter filter; // If non-zero, passes in pre-computed shortest distance to final states. const std::vector<Weight> *distance; // Determines the degree of convergence required when computing shortest // distances. float delta; // Determines if the shortest path weight is left (true) or right // (false) multiplied by the threshold to get the limit for // keeping a state or arc (matters if the semiring is not // commutative). bool threshold_initial; }; // Pruning algorithm: this version modifies its input and it takes an options // class as an argument. After pruning the FST contains states and arcs that // belong to a successful path in the FST whose weight is no more than the // weight of the shortest path Times() the provided weight threshold. When the // state threshold is not kNoStateId, the output FST is further restricted to // have no more than the number of states in opts.state_threshold. Weights must // have the path property. The weight of any cycle needs to be bounded; i.e., // // Plus(weight, Weight::One()) == Weight::One() template <class Arc, class ArcFilter, typename std::enable_if< (Arc::Weight::Properties() & kPath) == kPath>::type * = nullptr> void Prune(MutableFst<Arc> *fst, const PruneOptions<Arc, ArcFilter> &opts) { using StateId = typename Arc::StateId; using Weight = typename Arc::Weight; using StateHeap = Heap<StateId, internal::PruneCompare<StateId, Weight>>; auto ns = fst->NumStates(); if (ns < 1) return; std::vector<Weight> idistance(ns, Weight::Zero()); std::vector<Weight> tmp; if (!opts.distance) { tmp.reserve(ns); ShortestDistance(*fst, &tmp, true, opts.delta); } const auto *fdistance = opts.distance ? 
opts.distance : &tmp;
  if ((opts.state_threshold == 0) || (fdistance->size() <= fst->Start()) ||
      ((*fdistance)[fst->Start()] == Weight::Zero())) {
    fst->DeleteStates();
    return;
  }
  internal::PruneCompare<StateId, Weight> compare(idistance, *fdistance);
  StateHeap heap(compare);
  std::vector<bool> visited(ns, false);
  std::vector<size_t> enqueued(ns, StateHeap::kNoKey);
  std::vector<StateId> dead;
  dead.push_back(fst->AddState());
  NaturalLess<Weight> less;
  auto s = fst->Start();
  const auto limit = opts.threshold_initial
                         ? Times(opts.weight_threshold, (*fdistance)[s])
                         : Times((*fdistance)[s], opts.weight_threshold);
  StateId num_visited = 0;
  if (!less(limit, (*fdistance)[s])) {
    idistance[s] = Weight::One();
    enqueued[s] = heap.Insert(s);
    ++num_visited;
  }
  while (!heap.Empty()) {
    s = heap.Top();
    heap.Pop();
    enqueued[s] = StateHeap::kNoKey;
    visited[s] = true;
    if (less(limit, Times(idistance[s], fst->Final(s)))) {
      fst->SetFinal(s, Weight::Zero());
    }
    for (MutableArcIterator<MutableFst<Arc>> aiter(fst, s); !aiter.Done();
         aiter.Next()) {
      auto arc = aiter.Value();  // Copy intended.
      if (!opts.filter(arc)) continue;
      const auto weight =
          Times(Times(idistance[s], arc.weight),
                arc.nextstate < fdistance->size() ? (*fdistance)[arc.nextstate]
                                                  : Weight::Zero());
      if (less(limit, weight)) {
        arc.nextstate = dead[0];
        aiter.SetValue(arc);
        continue;
      }
      if (less(Times(idistance[s], arc.weight), idistance[arc.nextstate])) {
        idistance[arc.nextstate] = Times(idistance[s], arc.weight);
      }
      if (visited[arc.nextstate]) continue;
      if ((opts.state_threshold != kNoStateId) &&
          (num_visited >= opts.state_threshold)) {
        continue;
      }
      if (enqueued[arc.nextstate] == StateHeap::kNoKey) {
        enqueued[arc.nextstate] = heap.Insert(arc.nextstate);
        ++num_visited;
      } else {
        heap.Update(enqueued[arc.nextstate], arc.nextstate);
      }
    }
  }
  for (StateId i = 0; i < visited.size(); ++i) {
    if (!visited[i]) dead.push_back(i);
  }
  fst->DeleteStates(dead);
}

template <class Arc, class ArcFilter,
          typename std::enable_if<(Arc::Weight::Properties() & kPath) !=
                                  kPath>::type * = nullptr>
void Prune(MutableFst<Arc> *fst, const PruneOptions<Arc, ArcFilter> &) {
  FSTERROR() << "Prune: Weight needs to have the path property: "
             << Arc::Weight::Type();
  fst->SetProperties(kError, kError);
}

// Pruning algorithm: this version modifies its input and takes the
// pruning threshold as an argument. It deletes states and arcs in the
// FST that do not belong to a successful path whose weight is no more
// than the weight of the shortest path Times() the provided weight
// threshold. When the state threshold is not kNoStateId, the output
// FST is further restricted to have no more than the number of states
// in opts.state_threshold. Weights must have the path property. The
// weight of any cycle needs to be bounded; i.e.,
//
// Plus(weight, Weight::One()) == Weight::One()
template <class Arc>
void Prune(MutableFst<Arc> *fst, typename Arc::Weight weight_threshold,
           typename Arc::StateId state_threshold = kNoStateId,
           float delta = kDelta) {
  const PruneOptions<Arc, AnyArcFilter<Arc>> opts(
      weight_threshold, state_threshold, AnyArcFilter<Arc>(), nullptr, delta);
  Prune(fst, opts);
}

// Pruning algorithm: this version writes the pruned input FST to an
// output MutableFst and it takes an options class as an argument. The
// output FST contains states and arcs that belong to a successful
// path in the input FST whose weight is no more than the weight of the
// shortest path Times() the provided weight threshold. When the state
// threshold is not kNoStateId, the output FST is further restricted
// to have no more than the number of states in
// opts.state_threshold. Weights must have the path property. The weight
// of any cycle needs to be bounded; i.e.,
//
// Plus(weight, Weight::One()) == Weight::One()
template <class Arc, class ArcFilter,
          typename std::enable_if<IsPath<typename Arc::Weight>::value>::type
              * = nullptr>
void Prune(const Fst<Arc> &ifst, MutableFst<Arc> *ofst,
           const PruneOptions<Arc, ArcFilter> &opts) {
  using StateId = typename Arc::StateId;
  using Weight = typename Arc::Weight;
  using StateHeap = Heap<StateId, internal::PruneCompare<StateId, Weight>>;
  ofst->DeleteStates();
  ofst->SetInputSymbols(ifst.InputSymbols());
  ofst->SetOutputSymbols(ifst.OutputSymbols());
  if (ifst.Start() == kNoStateId) return;
  NaturalLess<Weight> less;
  if (less(opts.weight_threshold, Weight::One()) ||
      (opts.state_threshold == 0)) {
    return;
  }
  std::vector<Weight> idistance;
  std::vector<Weight> tmp;
  if (!opts.distance) ShortestDistance(ifst, &tmp, true, opts.delta);
  const auto *fdistance = opts.distance ? opts.distance : &tmp;
  if ((fdistance->size() <= ifst.Start()) ||
      ((*fdistance)[ifst.Start()] == Weight::Zero())) {
    return;
  }
  internal::PruneCompare<StateId, Weight> compare(idistance, *fdistance);
  StateHeap heap(compare);
  std::vector<StateId> copy;
  std::vector<size_t> enqueued;
  std::vector<bool> visited;
  auto s = ifst.Start();
  const auto limit = opts.threshold_initial
                         ? Times(opts.weight_threshold, (*fdistance)[s])
                         : Times((*fdistance)[s], opts.weight_threshold);
  while (copy.size() <= s) copy.push_back(kNoStateId);
  copy[s] = ofst->AddState();
  ofst->SetStart(copy[s]);
  while (idistance.size() <= s) idistance.push_back(Weight::Zero());
  idistance[s] = Weight::One();
  while (enqueued.size() <= s) {
    enqueued.push_back(StateHeap::kNoKey);
    visited.push_back(false);
  }
  enqueued[s] = heap.Insert(s);
  while (!heap.Empty()) {
    s = heap.Top();
    heap.Pop();
    enqueued[s] = StateHeap::kNoKey;
    visited[s] = true;
    if (!less(limit, Times(idistance[s], ifst.Final(s)))) {
      ofst->SetFinal(copy[s], ifst.Final(s));
    }
    for (ArcIterator<Fst<Arc>> aiter(ifst, s); !aiter.Done(); aiter.Next()) {
      const auto &arc = aiter.Value();
      if (!opts.filter(arc)) continue;
      const auto weight =
          Times(Times(idistance[s], arc.weight),
                arc.nextstate < fdistance->size() ? (*fdistance)[arc.nextstate]
                                                  : Weight::Zero());
      if (less(limit, weight)) continue;
      if ((opts.state_threshold != kNoStateId) &&
          (ofst->NumStates() >= opts.state_threshold)) {
        continue;
      }
      while (idistance.size() <= arc.nextstate) {
        idistance.push_back(Weight::Zero());
      }
      if (less(Times(idistance[s], arc.weight), idistance[arc.nextstate])) {
        idistance[arc.nextstate] = Times(idistance[s], arc.weight);
      }
      while (copy.size() <= arc.nextstate) copy.push_back(kNoStateId);
      if (copy[arc.nextstate] == kNoStateId) {
        copy[arc.nextstate] = ofst->AddState();
      }
      ofst->AddArc(copy[s], Arc(arc.ilabel, arc.olabel, arc.weight,
                                copy[arc.nextstate]));
      while (enqueued.size() <= arc.nextstate) {
        enqueued.push_back(StateHeap::kNoKey);
        visited.push_back(false);
      }
      if (visited[arc.nextstate]) continue;
      if (enqueued[arc.nextstate] == StateHeap::kNoKey) {
        enqueued[arc.nextstate] = heap.Insert(arc.nextstate);
      } else {
        heap.Update(enqueued[arc.nextstate], arc.nextstate);
      }
    }
  }
}

template <class Arc, class ArcFilter,
          typename std::enable_if<!IsPath<typename Arc::Weight>::value>::type
              * = nullptr>
void Prune(const Fst<Arc> &, MutableFst<Arc> *ofst,
           const PruneOptions<Arc, ArcFilter> &) {
  FSTERROR() << "Prune: Weight needs to have the path property: "
             << Arc::Weight::Type();
  ofst->SetProperties(kError, kError);
}

// Pruning algorithm: this version writes the pruned input FST to an
// output MutableFst and simply takes the pruning threshold as an
// argument. The output FST contains states and arcs that belong to a
// successful path in the input FST whose weight is no more than the
// weight of the shortest path Times() the provided weight
// threshold. When the state threshold is not kNoStateId, the output
// FST is further restricted to have no more than the number of states
// in opts.state_threshold. Weights must have the path property. The
// weight of any cycle needs to be bounded; i.e.,
//
// Plus(weight, Weight::One()) == Weight::One()
template <class Arc>
void Prune(const Fst<Arc> &ifst, MutableFst<Arc> *ofst,
           typename Arc::Weight weight_threshold,
           typename Arc::StateId state_threshold = kNoStateId,
           float delta = kDelta) {
  const PruneOptions<Arc, AnyArcFilter<Arc>> opts(
      weight_threshold, state_threshold, AnyArcFilter<Arc>(), nullptr, delta);
  Prune(ifst, ofst, opts);
}

}  // namespace fst

#endif  // FST_PRUNE_H_
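// A minimal usage sketch (the FST contents and variable names here are
// illustrative, not from this header). With StdArc, whose TropicalWeight has
// the required path property, the in-place overload removes every state and
// arc not on a path within weight 2.0 of the shortest path:
//
//   fst::StdVectorFst f = ...;  // populated elsewhere
//   fst::Prune(&f, fst::TropicalWeight(2.0));
//
// The copying overload leaves the input untouched instead:
//
//   fst::StdVectorFst pruned;
//   fst::Prune(f, &pruned, fst::TropicalWeight(2.0));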
0
coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src
coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src/bin/fstconnect-main.cc
// See www.openfst.org for extensive documentation on this weighted // finite-state transducer library. // // Removes useless (inaccessible or non-coaccessible) states and arcs from an // FST. #include <cstring> #include <memory> #include <string> #include <fst/flags.h> #include <fst/script/connect.h> int fstconnect_main(int argc, char **argv) { namespace s = fst::script; using fst::script::FstClass; using fst::script::MutableFstClass; string usage = "Removes useless states and arcs from an FST.\n\n Usage: "; usage += argv[0]; usage += " [in.fst [out.fst]]\n"; std::set_new_handler(FailedNewHandler); SET_FLAGS(usage.c_str(), &argc, &argv, true); if (argc > 3) { ShowUsage(); return 1; } const string in_name = (argc > 1 && strcmp(argv[1], "-") != 0) ? argv[1] : ""; const string out_name = argc > 2 ? argv[2] : ""; std::unique_ptr<MutableFstClass> fst(MutableFstClass::Read(in_name, true)); if (!fst) return 1; s::Connect(fst.get()); return !fst->Write(out_name); }
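// Example invocation (a sketch; file names are hypothetical):
//
//   fstconnect in.fst out.fst
//
// Per the argument handling above, passing "-" (or omitting the input
// argument) reads from standard input, and omitting the output argument
// writes to standard output.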
0
coqui_public_repos/inference-engine/src
coqui_public_repos/inference-engine/src/ctcdecode/setup.cfg
# temp_build is two deep because SWIG does not clean relative paths when # building, so ../kenlm pollutes the source directory. [build_ext] build-lib=temp_build/temp_build build-temp=temp_build/temp_build [build_py] build-lib=temp_build/temp_build [bdist_wheel] bdist-dir=temp_build/temp_build [install_lib] build-dir=temp_build/temp_build
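# With the overrides above, a standard in-tree build such as
#
#   python setup.py build_ext
#
# keeps all build intermediates under temp_build/temp_build instead of
# polluting the source directory, per the note at the top of this file.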
0
coqui_public_repos/STT
coqui_public_repos/STT/bin/import_aishell.py
#!/usr/bin/env python
import glob
import os
import tarfile

import pandas

from coqui_stt_training.util.importers import get_importers_parser

COLUMNNAMES = ["wav_filename", "wav_filesize", "transcript"]


def extract(archive_path, target_dir):
    print("Extracting {} into {}...".format(archive_path, target_dir))
    with tarfile.open(archive_path) as tar:
        tar.extractall(target_dir)


def preprocess_data(tgz_file, target_dir):
    # First extract main archive and sub-archives
    extract(tgz_file, target_dir)
    main_folder = os.path.join(target_dir, "data_aishell")
    wav_archives_folder = os.path.join(main_folder, "wav")
    for targz in glob.glob(os.path.join(wav_archives_folder, "*.tar.gz")):
        extract(targz, main_folder)

    # Folder structure is now:
    # - data_aishell/
    #   - train/S****/*.wav
    #   - dev/S****/*.wav
    #   - test/S****/*.wav
    #   - wav/S****.tar.gz
    #   - transcript/aishell_transcript_v0.8.txt

    # Transcripts file has one line per WAV file, where each line consists of
    # the WAV file name without extension followed by a single space followed
    # by the transcript.

    # Since the transcripts themselves can contain spaces, we split on space but
    # only once, then build a mapping from file name to transcript
    transcripts_path = os.path.join(
        main_folder, "transcript", "aishell_transcript_v0.8.txt"
    )
    with open(transcripts_path) as fin:
        transcripts = dict((line.split(" ", maxsplit=1) for line in fin))

    def load_set(glob_path):
        set_files = []
        for wav in glob.glob(glob_path):
            try:
                wav_filename = wav
                wav_filesize = os.path.getsize(wav)
                transcript_key = os.path.splitext(os.path.basename(wav))[0]
                transcript = transcripts[transcript_key].strip("\n")
                set_files.append((wav_filename, wav_filesize, transcript))
            except KeyError:
                print("Warning: Missing transcript for WAV file {}.".format(wav))
        return set_files

    for subset in ("train", "dev", "test"):
        print("Loading {} set samples...".format(subset))
        subset_files = load_set(os.path.join(main_folder, subset, "S*", "*.wav"))
        df = pandas.DataFrame(data=subset_files, columns=COLUMNNAMES)

        # Trim train set to under 10s by dropping over-long samples. Duration is
        # estimated from file size: 16 kHz, 16-bit mono WAV with a 44-byte header.
        if subset == "train":
            durations = (df["wav_filesize"] - 44) / 16000 / 2
            df = df[durations <= 10.0]
            print("Trimming {} samples > 10 seconds".format((durations > 10.0).sum()))

        dest_csv = os.path.join(target_dir, "aishell_{}.csv".format(subset))
        print("Saving {} set into {}...".format(subset, dest_csv))
        df.to_csv(dest_csv, index=False)


def main():
    # http://www.openslr.org/33/
    parser = get_importers_parser(description="Import AISHELL corpus")
    parser.add_argument("aishell_tgz_file", help="Path to data_aishell.tgz")
    parser.add_argument(
        "--target_dir",
        default="",
        help="Target folder to extract files into and put the resulting CSVs. Defaults to same folder as the main archive.",
    )
    params = parser.parse_args()
    if not params.target_dir:
        params.target_dir = os.path.dirname(params.aishell_tgz_file)
    preprocess_data(params.aishell_tgz_file, params.target_dir)


if __name__ == "__main__":
    main()
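# Example invocation (a sketch; the archive path is hypothetical):
#
#   python bin/import_aishell.py /data/data_aishell.tgz --target_dir /data/aishell
#
# This extracts the archive and writes aishell_train.csv, aishell_dev.csv and
# aishell_test.csv into the target directory.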
0
coqui_public_repos/STT
coqui_public_repos/STT/native_client/bazel_workspace_status_cmd.sh
#!/bin/bash set -ex # This script will be run bazel when building process starts to # generate key-value information that represents the status of the # workspace. The output should be like # # KEY1 VALUE1 # KEY2 VALUE2 # # Keys starting with STABLE_ cause dependent rules to be re-run when their value # changes. # # If the script exits with non-zero code, it's considered as a failure # and the output will be discarded. # The code below presents an implementation that works for git repository tf_git_rev=$(git describe --long --tags) echo "STABLE_TF_GIT_VERSION ${tf_git_rev}" # use this trick to be able to use the script from anywhere pushd $(dirname "$0") ds_git_rev=$(git describe --long --tags) echo "STABLE_DS_GIT_VERSION ${ds_git_rev}" ds_version=$(cat ../training/coqui_stt_training/VERSION) echo "STABLE_DS_VERSION ${ds_version}" ds_graph_version=$(cat ../training/coqui_stt_training/GRAPH_VERSION) echo "STABLE_DS_GRAPH_VERSION ${ds_graph_version}" popd
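# Example wiring (a sketch; the build target name is hypothetical):
#
#   bazel build --workspace_status_command=native_client/bazel_workspace_status_cmd.sh //native_client:libstt.so
#
# Bazel runs this script at the start of the build and writes the STABLE_*
# keys above into stable-status.txt, where stamping rules can pick them up;
# a change in any STABLE_* value re-runs the dependent actions.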
0
coqui_public_repos/TTS/TTS/tts/layers
coqui_public_repos/TTS/TTS/tts/layers/feed_forward/duration_predictor.py
from torch import nn from TTS.tts.layers.generic.res_conv_bn import Conv1dBN class DurationPredictor(nn.Module): """Speedy Speech duration predictor model. Predicts phoneme durations from encoder outputs. Note: Outputs interpreted as log(durations) To get actual durations, do exp transformation conv_BN_4x1 -> conv_BN_3x1 -> conv_BN_1x1 -> conv_1x1 Args: hidden_channels (int): number of channels in the inner layers. """ def __init__(self, hidden_channels): super().__init__() self.layers = nn.ModuleList( [ Conv1dBN(hidden_channels, hidden_channels, 4, 1), Conv1dBN(hidden_channels, hidden_channels, 3, 1), Conv1dBN(hidden_channels, hidden_channels, 1, 1), nn.Conv1d(hidden_channels, 1, 1), ] ) def forward(self, x, x_mask): """ Shapes: x: [B, C, T] x_mask: [B, 1, T] """ o = x for layer in self.layers: o = layer(o) * x_mask return o
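if __name__ == "__main__":
    # Minimal smoke test (a sketch; the hidden size and tensor shapes are
    # illustrative and follow the docstrings above). The network predicts
    # log-durations, so exp() recovers durations in frames.
    import torch

    model = DurationPredictor(hidden_channels=128)
    x = torch.randn(2, 128, 50)  # [B, C, T] dummy encoder outputs
    x_mask = torch.ones(2, 1, 50)  # [B, 1, T] non-padding mask
    log_durations = model(x, x_mask)  # [B, 1, T]
    durations = torch.exp(log_durations) * x_mask
    print(durations.shape)  # expected: torch.Size([2, 1, 50])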
0
coqui_public_repos/STT-examples/django_api_streaming/stt_app/static
coqui_public_repos/STT-examples/django_api_streaming/stt_app/static/stt/script.js
//(function(){
'use strict'

var constraints = {
    audio : true,
};
var recorder = null;
var audioStream = null;
var audioData = null;
var audioContext = null;
var csrftoken = getCookie('csrftoken');
var socket = null;
var interval;

function getCookie(name) {
    var cookieValue = null;
    if (document.cookie && document.cookie != '') {
        var cookies = document.cookie.split(';');
        for (var i = 0; i < cookies.length; i++) {
            var cookie = cookies[i].trim();
            // Does this cookie string begin with the name we want?
            if (cookie.substring(0, name.length + 1) == (name + '=')) {
                cookieValue = decodeURIComponent(cookie.substring(name.length + 1));
                break;
            }
        }
    }
    return cookieValue;
}

function protocolHandler(){
    if($('#ws-radio').prop('checked')){
        $('#file').prop('disabled', true);
        $('#submitAudio').prop('disabled', true);
    } else {
        $('#file').prop('disabled', false);
        $('#submitAudio').prop('disabled', false);
    }
}

function initWebSocket(){
    if(!socket){
        socket = new WebSocket('ws://127.0.0.1:8000/stt/');
        socket.onopen = function(){
            interval = setInterval(function(){
                recorder.exportWAV(function(blob){
                    audioData = blob;
                    if(socket && socket.readyState == WebSocket.OPEN){
                        socket.send(audioData);
                    }
                }, false);
            }, 2000);
        }
        socket.onmessage = function(res){
            $('#result').text(res.data);
        }
        socket.onerror = function(error){
            alert('web socket error: ' + error);
        }
        socket.onclose = function(e){
            clearInterval(interval);
            console.log('websocket closed');
        }
    }
}

function closeWebSocket(){
    if(socket && socket.readyState != WebSocket.CLOSED){
        socket.close();
    }
    socket = null;
}

function startRecording(){
    $("#file").val("");
    // Bail out early on browsers without getUserMedia support; otherwise the
    // call below would throw.
    if (!navigator.mediaDevices || navigator.mediaDevices.getUserMedia === undefined) {
        displayError("This browser doesn't support getUserMedia.");
        return;
    }
    navigator.mediaDevices.getUserMedia(constraints)
    .then(function(stream){
        audioStream = stream;
        if(!audioContext){
            audioContext = new AudioContext();
        }
        var source = audioContext.createMediaStreamSource(stream);
        recorder = audioRecorder.fromSource(source);
        recorder.record();
        if($('#ws-radio').prop('checked') && !socket){
            initWebSocket();
        } else if(socket){
            closeWebSocket();
        }
    })
    .catch(function(err){
        displayError("Error occurred while getting audio stream: " + err);
    })
}

function stopRecording(){
    recorder.stop();
    clearInterval(interval);
    recorder.exportWAV(function(blob){
        audioStream.getTracks()[0].stop();
        audioStream = null;
        audioData = blob;
        var url = URL.createObjectURL(blob);
        var mt = document.createElement('audio');
        mt.controls = true;
        mt.src = url;
        $('#player')[0].innerHTML = "";
        $('#player').append(mt);
        if(socket && socket.readyState == WebSocket.OPEN){
            socket.send(audioData);
            closeWebSocket();
        }
    }, true);
    recorder.clear();
}

function submitToServer(){
    if(audioData == null) {
        displayError("There is no audio data here!");
        return;
    }
    $('#error-panel').hide();
    $('#progress-panel').show();
    $('.progress-bar').css('width', '0%').attr('aria-valuenow', 0);
    $('.progress-bar').animate({ width: "100%" }, 1500);
    $.ajax({
        url: "/stt/handleaudio/",
        type: "POST",
        contentType: 'application/octet-stream',
        data: audioData,
        processData: false,
        headers: { 'X-CSRFTOKEN': csrftoken },
        success: function(response){
            $('#result').text(response);
            $('#progress-panel').hide();
        },
        error: function(response){
            $('#result').text(response.responseText);
            $('#progress-panel').hide();
        }
    });
}

var openFile = function(event) {
    var input = event.target;
    var isValid = checkValidity(input.files[0]);
    if(!isValid){
        displayError("Only wav file type allowed.");
        return;
    }
    var url = URL.createObjectURL(input.files[0]);
    var mt =
document.createElement('audio'); audioData = input.files[0]; mt.controls = true; mt.src = url; $('#player')[0].innerHTML = ""; $('#player').append(mt); }; function checkValidity(file){ var isValid = false; var allowedFileTypes = ['audio/x-wav', 'audio/wav']; isValid = allowedFileTypes.includes(file.type); return isValid; } function displayError(errorMsg){ $('#error-panel').addClass('alert-danger'); $('#error-message').text(errorMsg); $('#error-panel').show(); } $(window).on('load',function(){ $("#file").val(""); $("#file").change(openFile); }); //})())
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.7/src/extensions/pdt/pdtcompose.cc
// See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.
//
// Composes a PDT and an FST.

#include <cstring>

#include <memory>
#include <string>
#include <vector>

#include <fst/flags.h>
#include <fst/log.h>

#include <fst/extensions/pdt/getters.h>
#include <fst/extensions/pdt/pdtscript.h>
#include <fst/util.h>

DEFINE_string(pdt_parentheses, "", "PDT parenthesis label pairs");
DEFINE_bool(left_pdt, true, "Is the first argument the PDT?");
DEFINE_bool(connect, true, "Trim output?");
DEFINE_string(compose_filter, "paren",
              "Composition filter, one of: \"expand\", \"expand_paren\", "
              "\"paren\"");

int main(int argc, char **argv) {
  namespace s = fst::script;
  using fst::ReadLabelPairs;
  using fst::PdtComposeFilter;
  using fst::PdtComposeOptions;
  using fst::script::FstClass;
  using fst::script::VectorFstClass;

  string usage = "Compose a PDT and an FST.\n\n  Usage: ";
  usage += argv[0];
  usage += " in.pdt in.fst [out.pdt]\n";
  usage += " in.fst in.pdt [out.pdt]\n";

  std::set_new_handler(FailedNewHandler);
  SET_FLAGS(usage.c_str(), &argc, &argv, true);
  if (argc < 3 || argc > 4) {
    ShowUsage();
    return 1;
  }

  const string in1_name = strcmp(argv[1], "-") == 0 ? "" : argv[1];
  const string in2_name = strcmp(argv[2], "-") == 0 ? "" : argv[2];
  const string out_name = argc > 3 ? argv[3] : "";

  if (in1_name.empty() && in2_name.empty()) {
    LOG(ERROR) << argv[0] << ": Can't take both inputs from standard input.";
    return 1;
  }

  std::unique_ptr<FstClass> ifst1(FstClass::Read(in1_name));
  if (!ifst1) return 1;

  std::unique_ptr<FstClass> ifst2(FstClass::Read(in2_name));
  if (!ifst2) return 1;

  if (FLAGS_pdt_parentheses.empty()) {
    LOG(ERROR) << argv[0] << ": No PDT parenthesis label pairs provided";
    return 1;
  }

  std::vector<s::LabelPair> parens;
  if (!ReadLabelPairs(FLAGS_pdt_parentheses, &parens, false)) return 1;

  VectorFstClass ofst(ifst1->ArcType());

  PdtComposeFilter compose_filter;
  if (!s::GetPdtComposeFilter(FLAGS_compose_filter, &compose_filter)) {
    LOG(ERROR) << argv[0] << ": Unknown or unsupported compose filter type: "
               << FLAGS_compose_filter;
    return 1;
  }

  const PdtComposeOptions copts(FLAGS_connect, compose_filter);

  s::PdtCompose(*ifst1, *ifst2, parens, &ofst, copts, FLAGS_left_pdt);

  ofst.Write(out_name);

  return 0;
}
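The same composition can be driven from C++ through the script API this binary itself uses. The sketch below is illustrative only: the input and output paths and the single (1, 2) parenthesis pair are assumptions, not values the tool prescribes; every call mirrors one made in the source above.

// Hypothetical driver mirroring pdtcompose's script-level call. The file
// names and the (1, 2) open/close parenthesis labels are made-up examples.
#include <memory>
#include <vector>
#include <fst/extensions/pdt/getters.h>
#include <fst/extensions/pdt/pdtscript.h>

int main() {
  namespace s = fst::script;
  std::unique_ptr<s::FstClass> pdt(s::FstClass::Read("rule.pdt"));    // assumed path
  std::unique_ptr<s::FstClass> fst(s::FstClass::Read("input.fst"));   // assumed path
  if (!pdt || !fst) return 1;
  const std::vector<s::LabelPair> parens = {{1, 2}};  // assumed label pair
  s::VectorFstClass ofst(pdt->ArcType());
  fst::PdtComposeFilter filter;
  if (!s::GetPdtComposeFilter("paren", &filter)) return 1;  // default filter
  const fst::PdtComposeOptions opts(/*connect=*/true, filter);
  s::PdtCompose(*pdt, *fst, parens, &ofst, opts, /*left_pdt=*/true);
  ofst.Write("composed.pdt");  // assumed output path
  return 0;
}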
coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src/include/fst/extensions/far/info.h
// See www.openfst.org for extensive documentation on this weighted // finite-state transducer library. #ifndef FST_EXTENSIONS_FAR_INFO_H_ #define FST_EXTENSIONS_FAR_INFO_H_ #include <iomanip> #include <memory> #include <set> #include <string> #include <utility> #include <vector> #include <fst/extensions/far/far.h> #include <fst/extensions/far/getters.h> namespace fst { template <class Arc> void AccumulateStatesAndArcs(const Fst<Arc> &fst, size_t *nstate, size_t *narc, size_t *nfinal) { for (StateIterator<Fst<Arc>> siter(fst); !siter.Done(); siter.Next(), ++(*nstate)) { ArcIterator<Fst<Arc>> aiter(fst, siter.Value()); for (; !aiter.Done(); aiter.Next(), ++(*narc)) { } if (fst.Final(siter.Value()) != Arc::Weight::Zero()) ++(*nfinal); } } struct KeyInfo { string key; string type; size_t nstate = 0; size_t narc = 0; size_t nfinal = 0; }; struct FarInfoData { std::vector<KeyInfo> key_infos; string far_type; string arc_type; size_t nfst = 0; size_t nstate = 0; size_t narc = 0; size_t nfinal = 0; std::set<string> fst_types; }; template <class Arc> void GetFarInfo(const std::vector<string> &filenames, const string &begin_key, const string &end_key, const bool list_fsts, FarInfoData *far_info) { *far_info = FarInfoData(); std::unique_ptr<FarReader<Arc>> reader(FarReader<Arc>::Open(filenames)); if (!reader) { LOG(ERROR) << "GetFarInfo: failed to create far reader."; return; } if (!begin_key.empty()) reader->Find(begin_key); for (; !reader->Done(); reader->Next()) { const auto &key = reader->GetKey(); if (!end_key.empty() && end_key < key) break; ++far_info->nfst; const auto *fst = reader->GetFst(); far_info->fst_types.insert(fst->Type()); if (list_fsts) { KeyInfo info; info.key = key; info.type = fst->Type(); AccumulateStatesAndArcs(*fst, &info.nstate, &info.narc, &info.nfinal); far_info->nstate += info.nstate; far_info->narc += info.narc; far_info->nfinal += info.nfinal; far_info->key_infos.push_back(info); } else { AccumulateStatesAndArcs(*fst, &far_info->nstate, &far_info->narc, &far_info->nfinal); } } far_info->far_type = GetFarTypeString(reader->Type()); far_info->arc_type = Arc::Type(); } template <class Arc> void FarInfo(const std::vector<string> &filenames, const string &begin_key, const string &end_key, const bool list_fsts) { FarInfoData info; GetFarInfo<Arc>(filenames, begin_key, end_key, list_fsts, &info); if (!list_fsts) { std::cout << std::left << std::setw(50) << "far type" << info.far_type << std::endl; std::cout << std::left << std::setw(50) << "arc type" << Arc::Type() << std::endl; std::cout << std::left << std::setw(50) << "fst type"; for (auto iter = info.fst_types.begin(); iter != info.fst_types.end(); ++iter) { if (iter != info.fst_types.begin()) std::cout << ","; std::cout << *iter; } std::cout << std::endl; std::cout << std::left << std::setw(50) << "# of FSTs" << info.nfst << std::endl; std::cout << std::left << std::setw(50) << "total # of states" << info.nstate << std::endl; std::cout << std::left << std::setw(50) << "total # of arcs" << info.narc << std::endl; std::cout << std::left << std::setw(50) << "total # of final states" << info.nfinal << std::endl; } else { // FIXME(kbg): Grok, then document this. 
int wkey = 10; int wtype = 10; int wnstate = 14; int wnarc = 12; int wnfinal = 20; for (const auto &key_info : info.key_infos) { if (key_info.key.size() + 2 > wkey) wkey = key_info.key.size() + 2; if (key_info.type.size() + 2 > wtype) wtype = key_info.type.size() + 2; if (ceil(log10(key_info.nstate)) + 2 > wnstate) { wnstate = ceil(log10(key_info.nstate)) + 2; } if (ceil(log10(key_info.narc)) + 2 > wnarc) { wnarc = ceil(log10(key_info.narc)) + 2; } if (ceil(log10(key_info.nfinal)) + 2 > wnfinal) { wnfinal = ceil(log10(key_info.nfinal)) + 2; } } std::cout << std::left << std::setw(wkey) << "key" << std::setw(wtype) << "type" << std::right << std::setw(wnstate) << "# of states" << std::setw(wnarc) << "# of arcs" << std::setw(wnfinal) << "# of final states" << std::endl; for (const auto &key_info : info.key_infos) { std::cout << std::left << std::setw(wkey) << key_info.key << std::setw(wtype) << key_info.type << std::right << std::setw(wnstate) << key_info.nstate << std::setw(wnarc) << key_info.narc << std::setw(wnfinal) << key_info.nfinal << std::endl; } } } } // namespace fst #endif // FST_EXTENSIONS_FAR_INFO_H_
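As a usage note, GetFarInfo can be called directly rather than through the FarInfo printer. The following sketch prints the aggregate counters itself; the archive name "archive.far" is an assumption for illustration, and the signature is taken verbatim from the header above.

// Minimal sketch: summarize one FAR archive with GetFarInfo over StdArc.
#include <iostream>
#include <string>
#include <vector>
#include <fst/fstlib.h>
#include <fst/extensions/far/info.h>

int main() {
  const std::vector<std::string> filenames = {"archive.far"};  // assumed input
  fst::FarInfoData info;
  // Empty begin/end keys scan the whole archive; list_fsts=false aggregates.
  fst::GetFarInfo<fst::StdArc>(filenames, "", "", /*list_fsts=*/false, &info);
  std::cout << "far type: " << info.far_type << "\n"
            << "# of FSTs: " << info.nfst << "\n"
            << "total # of states: " << info.nstate << "\n"
            << "total # of arcs: " << info.narc << std::endl;
  return 0;
}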
coqui_public_repos/coqpit/.pylintrc
[MAIN] # Analyse import fallback blocks. This can be used to support both Python 2 and # 3 compatible code, which means that the block might have code that exists # only in one or another interpreter, leading to false positives when analysed. analyse-fallback-blocks=no # Load and enable all available extensions. Use --list-extensions to see a list # all available extensions. #enable-all-extensions= # In error mode, messages with a category besides ERROR or FATAL are # suppressed, and no reports are done by default. Error mode is compatible with # disabling specific errors. #errors-only= # Always return a 0 (non-error) status code, even if lint errors are found. # This is primarily useful in continuous integration scripts. #exit-zero= # A comma-separated list of package or module names from where C extensions may # be loaded. Extensions are loading into the active Python interpreter and may # run arbitrary code. extension-pkg-allow-list= # A comma-separated list of package or module names from where C extensions may # be loaded. Extensions are loading into the active Python interpreter and may # run arbitrary code. (This is an alternative name to extension-pkg-allow-list # for backward compatibility.) extension-pkg-whitelist= # Return non-zero exit code if any of these messages/categories are detected, # even if score is above --fail-under value. Syntax same as enable. Messages # specified are enabled, while categories only check already-enabled messages. fail-on= # Specify a score threshold under which the program will exit with error. fail-under=10 # Interpret the stdin as a python script, whose filename needs to be passed as # the module_or_package argument. #from-stdin= # Files or directories to be skipped. They should be base names, not paths. ignore=CVS # Add files or directories matching the regular expressions patterns to the # ignore-list. The regex matches against paths and can be in Posix or Windows # format. Because '\' represents the directory delimiter on Windows systems, it # can't be used as an escape character. ignore-paths= # Files or directories matching the regular expression patterns are skipped. # The regex matches against base names, not paths. The default value ignores # Emacs file locks ignore-patterns=^\.# # List of module names for which member attributes should not be checked # (useful for modules/projects where namespaces are manipulated during runtime # and thus existing member attributes cannot be deduced by static analysis). It # supports qualified module names, as well as Unix pattern matching. ignored-modules= # Python code to execute, usually for sys.path manipulation such as # pygtk.require(). #init-hook= # Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the # number of processors available to use, and will cap the count on Windows to # avoid hangs. jobs=1 # Control the amount of potential inferred values when inferring a single # object. This can help the performance when dealing with large functions or # complex, nested conditions. limit-inference-results=100 # List of plugins (as comma separated values of python module names) to load, # usually to register additional checkers. load-plugins= # Pickle collected data for later comparisons. persistent=yes # Minimum Python version to use for version dependent checks. Will default to # the version used to run pylint. py-version=3.9 # Discover python modules and packages in the file system subtree. 
recursive=no # When enabled, pylint would attempt to guess common misconfiguration and emit # user-friendly hints instead of false-positive error messages. suggestion-mode=yes # Allow loading of arbitrary C extensions. Extensions are imported into the # active Python interpreter and may run arbitrary code. unsafe-load-any-extension=no # In verbose mode, extra non-checker-related info will be displayed. #verbose= [BASIC] # Naming style matching correct argument names. argument-naming-style=snake_case # Regular expression matching correct argument names. Overrides argument- # naming-style. If left empty, argument names will be checked with the set # naming style. #argument-rgx= # Naming style matching correct attribute names. attr-naming-style=snake_case # Regular expression matching correct attribute names. Overrides attr-naming- # style. If left empty, attribute names will be checked with the set naming # style. #attr-rgx= # Bad variable names which should always be refused, separated by a comma. bad-names=foo, bar, baz, toto, tutu, tata # Bad variable names regexes, separated by a comma. If names match any regex, # they will always be refused bad-names-rgxs= # Naming style matching correct class attribute names. class-attribute-naming-style=any # Regular expression matching correct class attribute names. Overrides class- # attribute-naming-style. If left empty, class attribute names will be checked # with the set naming style. #class-attribute-rgx= # Naming style matching correct class constant names. class-const-naming-style=UPPER_CASE # Regular expression matching correct class constant names. Overrides class- # const-naming-style. If left empty, class constant names will be checked with # the set naming style. #class-const-rgx= # Naming style matching correct class names. class-naming-style=PascalCase # Regular expression matching correct class names. Overrides class-naming- # style. If left empty, class names will be checked with the set naming style. #class-rgx= # Naming style matching correct constant names. const-naming-style=UPPER_CASE # Regular expression matching correct constant names. Overrides const-naming- # style. If left empty, constant names will be checked with the set naming # style. #const-rgx= # Minimum line length for functions/classes that require docstrings, shorter # ones are exempt. docstring-min-length=-1 # Naming style matching correct function names. function-naming-style=snake_case # Regular expression matching correct function names. Overrides function- # naming-style. If left empty, function names will be checked with the set # naming style. #function-rgx= # Good variable names which should always be accepted, separated by a comma. good-names=i, j, k, ex, Run, _ # Good variable names regexes, separated by a comma. If names match any regex, # they will always be accepted good-names-rgxs= # Include a hint for the correct naming format with invalid-name. include-naming-hint=no # Naming style matching correct inline iteration names. inlinevar-naming-style=any # Regular expression matching correct inline iteration names. Overrides # inlinevar-naming-style. If left empty, inline iteration names will be checked # with the set naming style. #inlinevar-rgx= # Naming style matching correct method names. method-naming-style=snake_case # Regular expression matching correct method names. Overrides method-naming- # style. If left empty, method names will be checked with the set naming style. #method-rgx= # Naming style matching correct module names. 
module-naming-style=snake_case # Regular expression matching correct module names. Overrides module-naming- # style. If left empty, module names will be checked with the set naming style. #module-rgx= # Colon-delimited sets of names that determine each other's naming style when # the name regexes allow several styles. name-group= # Regular expression which should only match function or class names that do # not require a docstring. no-docstring-rgx=^_ # List of decorators that produce properties, such as abc.abstractproperty. Add # to this list to register other decorators that produce valid properties. # These decorators are taken in consideration only for invalid-name. property-classes=abc.abstractproperty # Regular expression matching correct type variable names. If left empty, type # variable names will be checked with the set naming style. #typevar-rgx= # Naming style matching correct variable names. variable-naming-style=snake_case # Regular expression matching correct variable names. Overrides variable- # naming-style. If left empty, variable names will be checked with the set # naming style. #variable-rgx= [CLASSES] # Warn about protected attribute access inside special methods check-protected-access-in-special-methods=no # List of method names used to declare (i.e. assign) instance attributes. defining-attr-methods=__init__, __new__, setUp, __post_init__ # List of member names, which should be excluded from the protected access # warning. exclude-protected=_asdict, _fields, _replace, _source, _make # List of valid names for the first argument in a class method. valid-classmethod-first-arg=cls # List of valid names for the first argument in a metaclass class method. valid-metaclass-classmethod-first-arg=cls [DESIGN] # List of regular expressions of class ancestor names to ignore when counting # public methods (see R0903) exclude-too-few-public-methods= # List of qualified class names to ignore when counting class parents (see # R0901) ignored-parents= # Maximum number of arguments for function / method. max-args=5 # Maximum number of attributes for a class (see R0902). max-attributes=7 # Maximum number of boolean expressions in an if statement (see R0916). max-bool-expr=5 # Maximum number of branch for function / method body. max-branches=12 # Maximum number of locals for function / method body. max-locals=15 # Maximum number of parents for a class (see R0901). max-parents=7 # Maximum number of public methods for a class (see R0904). max-public-methods=20 # Maximum number of return / yield for function / method body. max-returns=6 # Maximum number of statements in function / method body. max-statements=50 # Minimum number of public methods for a class (see R0903). min-public-methods=2 [EXCEPTIONS] # Exceptions that will emit a warning when caught. overgeneral-exceptions=BaseException, Exception [FORMAT] # Expected format of line ending, e.g. empty (any line ending), LF or CRLF. expected-line-ending-format= # Regexp for a line that is allowed to be longer than the limit. ignore-long-lines=^\s*(# )?<?https?://\S+>?$ # Number of spaces of indent required inside a hanging or continued line. indent-after-paren=4 # String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 # tab). indent-string=' ' # Maximum number of characters on a single line. max-line-length=100 # Maximum number of lines in a module. max-module-lines=1000 # Allow the body of a class to be on the same line as the declaration if body # contains single statement. 
single-line-class-stmt=no # Allow the body of an if to be on the same line as the test if there is no # else. single-line-if-stmt=no [IMPORTS] # List of modules that can be imported at any level, not just the top level # one. allow-any-import-level= # Allow wildcard imports from modules that define __all__. allow-wildcard-with-all=no # Deprecated modules which should not be used, separated by a comma. deprecated-modules= # Output a graph (.gv or any supported image format) of external dependencies # to the given file (report RP0402 must not be disabled). ext-import-graph= # Output a graph (.gv or any supported image format) of all (i.e. internal and # external) dependencies to the given file (report RP0402 must not be # disabled). import-graph= # Output a graph (.gv or any supported image format) of internal dependencies # to the given file (report RP0402 must not be disabled). int-import-graph= # Force import order to recognize a module as part of the standard # compatibility libraries. known-standard-library= # Force import order to recognize a module as part of a third party library. known-third-party=enchant # Couples of modules and preferred modules, separated by a comma. preferred-modules= [LOGGING] # The type of string formatting that logging methods do. `old` means using % # formatting, `new` is for `{}` formatting. logging-format-style=old # Logging modules to check that the string format arguments are in logging # function parameter format. logging-modules=logging [MESSAGES CONTROL] # Only show warnings with the listed confidence levels. Leave empty to show # all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, # UNDEFINED. confidence=HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, UNDEFINED # Disable the message, report, category or checker with the given id(s). You # can either give multiple identifiers separated by comma (,) or put this # option multiple times (only on the command line, not in the configuration # file where it should appear only once). You can also use "--disable=all" to # disable everything first and then re-enable specific checks. For example, if # you want to run only the similarities checker, you can use "--disable=all # --enable=similarities". If you want to run only the classes checker, but have # no Warning level messages displayed, use "--disable=all --enable=classes # --disable=W". disable=raw-checker-failed, bad-inline-option, locally-disabled, file-ignored, suppressed-message, useless-suppression, deprecated-pragma, use-symbolic-message-instead, line-too-long, missing-function-docstring, missing-class-docstring, missing-module-docstring, too-many-arguments, too-many-instance-attributes, useless-option-value, fixme, no-member, too-few-public-methods, arguments-differ, too-many-branches, duplicate-code, invalid-name, # Enable the message, report, category or checker with the given id(s). You can # either give multiple identifier separated by comma (,) or put this option # multiple time (only on the command line, not in the configuration file where # it should appear only once). See also the "--disable" option for examples. enable=c-extension-no-member [METHOD_ARGS] # List of qualified names (i.e., library.method) which require a timeout # parameter e.g. 
'requests.api.get,requests.api.post' timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.api.options,requests.api.patch,requests.api.post,requests.api.put,requests.api.request [MISCELLANEOUS] # List of note tags to take in consideration, separated by a comma. notes=FIXME, XXX, TODO # Regular expression of note tags to take in consideration. notes-rgx= [REFACTORING] # Maximum number of nested blocks for function / method body max-nested-blocks=5 # Complete name of functions that never returns. When checking for # inconsistent-return-statements if a never returning function is called then # it will be considered as an explicit return statement and no message will be # printed. never-returning-functions=sys.exit,argparse.parse_error [REPORTS] # Python expression which should return a score less than or equal to 10. You # have access to the variables 'fatal', 'error', 'warning', 'refactor', # 'convention', and 'info' which contain the number of messages in each # category, as well as 'statement' which is the total number of statements # analyzed. This score is used by the global evaluation report (RP0004). evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)) # Template used to display messages. This is a python new-style format string # used to format the message information. See doc for all details. msg-template= # Set the output format. Available formats are text, parseable, colorized, json # and msvs (visual studio). You can also give a reporter class, e.g. # mypackage.mymodule.MyReporterClass. #output-format= # Tells whether to display a full report or only the messages. reports=no # Activate the evaluation score. score=yes [SIMILARITIES] # Comments are removed from the similarity computation ignore-comments=yes # Docstrings are removed from the similarity computation ignore-docstrings=yes # Imports are removed from the similarity computation ignore-imports=yes # Signatures are removed from the similarity computation ignore-signatures=yes # Minimum lines number of a similarity. min-similarity-lines=4 [SPELLING] # Limits count of emitted suggestions for spelling mistakes. max-spelling-suggestions=4 # Spelling dictionary name. Available dictionaries: none. To make it work, # install the 'python-enchant' package. spelling-dict= # List of comma separated words that should be considered directives if they # appear at the beginning of a comment and should not be checked. spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy: # List of comma separated words that should not be checked. spelling-ignore-words= # A path to a file that contains the private dictionary; one word per line. spelling-private-dict-file= # Tells whether to store unknown words to the private dictionary (see the # --spelling-private-dict-file option) instead of raising a message. spelling-store-unknown-words=no [STRING] # This flag controls whether inconsistent-quotes generates a warning when the # character used as a quote delimiter is used inconsistently within a module. check-quote-consistency=no # This flag controls whether the implicit-str-concat should generate a warning # on implicit string concatenation in sequences defined over several lines. check-str-concat-over-line-jumps=no [TYPECHECK] # List of decorators that produce context managers, such as # contextlib.contextmanager. Add to this list to register other decorators that # produce valid context managers. 
contextmanager-decorators=contextlib.contextmanager # List of members which are set dynamically and missed by pylint inference # system, and so shouldn't trigger E1101 when accessed. Python regular # expressions are accepted. generated-members= # Tells whether to warn about missing members when the owner of the attribute # is inferred to be None. ignore-none=yes # This flag controls whether pylint should warn about no-member and similar # checks whenever an opaque object is returned when inferring. The inference # can return multiple potential results while evaluating a Python object, but # some branches might not be evaluated, which results in partial inference. In # that case, it might be useful to still emit no-member and other checks for # the rest of the inferred objects. ignore-on-opaque-inference=yes # List of symbolic message names to ignore for Mixin members. ignored-checks-for-mixins=no-member, not-async-context-manager, not-context-manager, attribute-defined-outside-init # List of class names for which member attributes should not be checked (useful # for classes with dynamically set attributes). This supports the use of # qualified names. ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace # Show a hint with possible names when a member name was not found. The aspect # of finding the hint is based on edit distance. missing-member-hint=yes # The minimum edit distance a name should have in order to be considered a # similar match for a missing member name. missing-member-hint-distance=1 # The total number of similar names that should be taken in consideration when # showing a hint for a missing member. missing-member-max-choices=1 # Regex pattern to define which classes are considered mixins. mixin-class-rgx=.*[Mm]ixin # List of decorators that change the signature of a decorated function. signature-mutators= [VARIABLES] # List of additional names supposed to be defined in builtins. Remember that # you should avoid defining new builtins when possible. additional-builtins= # Tells whether unused global variables should be treated as a violation. allow-global-unused-variables=yes # List of names allowed to shadow builtins allowed-redefined-builtins= # List of strings which can identify a callback function by name. A callback # name must start or end with one of those strings. callbacks=cb_, _cb # A regular expression matching the name of dummy variables (i.e. expected to # not be used). dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ # Argument names that match this expression will be ignored. ignored-argument-names=_.*|^ignored_|^unused_ # Tells whether we should check for unused import in __init__ files. init-import=no # List of qualified module names which can have objects that can redefine # builtins. redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
coqui_public_repos/inference-engine/third_party/openfst-1.6.9-win/src/include/fst/synchronize.h
// See www.openfst.org for extensive documentation on this weighted // finite-state transducer library. // // Synchronize an FST with bounded delay. #ifndef FST_SYNCHRONIZE_H_ #define FST_SYNCHRONIZE_H_ #include <algorithm> #include <string> #include <unordered_map> #include <unordered_set> #include <utility> #include <vector> #include <fst/cache.h> #include <fst/test-properties.h> namespace fst { using SynchronizeFstOptions = CacheOptions; namespace internal { // Implementation class for SynchronizeFst. // TODO(kbg,sorenj): Refactor to guarantee thread-safety. template <class Arc> class SynchronizeFstImpl : public CacheImpl<Arc> { public: using Label = typename Arc::Label; using StateId = typename Arc::StateId; using Weight = typename Arc::Weight; using FstImpl<Arc>::SetType; using FstImpl<Arc>::SetProperties; using FstImpl<Arc>::SetInputSymbols; using FstImpl<Arc>::SetOutputSymbols; using CacheBaseImpl<CacheState<Arc>>::PushArc; using CacheBaseImpl<CacheState<Arc>>::HasArcs; using CacheBaseImpl<CacheState<Arc>>::HasFinal; using CacheBaseImpl<CacheState<Arc>>::HasStart; using CacheBaseImpl<CacheState<Arc>>::SetArcs; using CacheBaseImpl<CacheState<Arc>>::SetFinal; using CacheBaseImpl<CacheState<Arc>>::SetStart; using String = basic_string<Label>; struct Element { Element() {} Element(StateId state_, const String *i, const String *o) : state(state_), istring(i), ostring(o) {} StateId state; // Input state ID. const String *istring; // Residual input labels. const String *ostring; // Residual output labels. // Residual strings are represented by const pointers to // basic_string<Label> and are stored in a hash_set. The pointed // memory is owned by the hash_set string_set_. }; SynchronizeFstImpl(const Fst<Arc> &fst, const SynchronizeFstOptions &opts) : CacheImpl<Arc>(opts), fst_(fst.Copy()) { SetType("synchronize"); const auto props = fst.Properties(kFstProperties, false); SetProperties(SynchronizeProperties(props), kCopyProperties); SetInputSymbols(fst.InputSymbols()); SetOutputSymbols(fst.OutputSymbols()); } SynchronizeFstImpl(const SynchronizeFstImpl &impl) : CacheImpl<Arc>(impl), fst_(impl.fst_->Copy(true)) { SetType("synchronize"); SetProperties(impl.Properties(), kCopyProperties); SetInputSymbols(impl.InputSymbols()); SetOutputSymbols(impl.OutputSymbols()); } ~SynchronizeFstImpl() override { for (const auto *ptr : string_set_) delete ptr; } StateId Start() { if (!HasStart()) { auto start = fst_->Start(); if (start == kNoStateId) return kNoStateId; const auto *empty = FindString(new String()); start = FindState(Element(fst_->Start(), empty, empty)); SetStart(start); } return CacheImpl<Arc>::Start(); } Weight Final(StateId s) { if (!HasFinal(s)) { const auto &element = elements_[s]; const auto weight = element.state == kNoStateId ? Weight::One() : fst_->Final(element.state); if ((weight != Weight::Zero()) && (element.istring)->empty() && (element.ostring)->empty()) { SetFinal(s, weight); } else { SetFinal(s, Weight::Zero()); } } return CacheImpl<Arc>::Final(s); } size_t NumArcs(StateId s) { if (!HasArcs(s)) Expand(s); return CacheImpl<Arc>::NumArcs(s); } size_t NumInputEpsilons(StateId s) { if (!HasArcs(s)) Expand(s); return CacheImpl<Arc>::NumInputEpsilons(s); } size_t NumOutputEpsilons(StateId s) { if (!HasArcs(s)) Expand(s); return CacheImpl<Arc>::NumOutputEpsilons(s); } uint64_t Properties() const override { return Properties(kFstProperties); } // Sets error if found, returning other FST impl properties. 
uint64_t Properties(uint64_t mask) const override { if ((mask & kError) && fst_->Properties(kError, false)) { SetProperties(kError, kError); } return FstImpl<Arc>::Properties(mask); } void InitArcIterator(StateId s, ArcIteratorData<Arc> *data) { if (!HasArcs(s)) Expand(s); CacheImpl<Arc>::InitArcIterator(s, data); } // Returns the first character of the string obtained by concatenating the // string and the label. Label Car(const String *str, Label label = 0) const { if (!str->empty()) { return (*str)[0]; } else { return label; } } // Computes the residual string obtained by removing the first // character in the concatenation of the string and the label. const String *Cdr(const String *str, Label label = 0) { auto *r = new String(); for (size_t i = 1; i < str->size(); ++i) r->push_back((*str)[i]); if (label && !(str->empty())) r->push_back(label); return FindString(r); } // Computes the concatenation of the string and the label. const String *Concat(const String *str, Label label = 0) { auto *r = new String(); for (size_t i = 0; i < str->size(); ++i) r->push_back((*str)[i]); if (label) r->push_back(label); return FindString(r); } // Tests if the concatenation of the string and label is empty. bool Empty(const String *str, Label label = 0) const { if (str->empty()) { return label == 0; } else { return false; } } // Finds the string pointed by s in the hash set. Transfers the pointer // ownership to the hash set. const String *FindString(const String *str) { const auto insert_result = string_set_.insert(str); if (!insert_result.second) { delete str; } return *insert_result.first; } // Finds state corresponding to an element. Creates new state if element // is not found. StateId FindState(const Element &element) { const auto insert_result = element_map_.insert(std::make_pair(element, elements_.size())); if (insert_result.second) { elements_.push_back(element); } return insert_result.first->second; } // Computes the outgoing transitions from a state, creating new destination // states as needed. void Expand(StateId s) { const auto element = elements_[s]; if (element.state != kNoStateId) { for (ArcIterator<Fst<Arc>> aiter(*fst_, element.state); !aiter.Done(); aiter.Next()) { const auto &arc = aiter.Value(); if (!Empty(element.istring, arc.ilabel) && !Empty(element.ostring, arc.olabel)) { const auto *istring = Cdr(element.istring, arc.ilabel); const auto *ostring = Cdr(element.ostring, arc.olabel); PushArc(s, Arc(Car(element.istring, arc.ilabel), Car(element.ostring, arc.olabel), arc.weight, FindState(Element(arc.nextstate, istring, ostring)))); } else { const auto *istring = Concat(element.istring, arc.ilabel); const auto *ostring = Concat(element.ostring, arc.olabel); PushArc(s, Arc(0, 0, arc.weight, FindState(Element(arc.nextstate, istring, ostring)))); } } } const auto weight = element.state == kNoStateId ? Weight::One() : fst_->Final(element.state); if ((weight != Weight::Zero()) && ((element.istring)->size() + (element.ostring)->size() > 0)) { const auto *istring = Cdr(element.istring); const auto *ostring = Cdr(element.ostring); PushArc(s, Arc(Car(element.istring), Car(element.ostring), weight, FindState(Element(kNoStateId, istring, ostring)))); } SetArcs(s); } private: // Equality function for Elements; assumes strings have been hashed. class ElementEqual { public: bool operator()(const Element &x, const Element &y) const { return x.state == y.state && x.istring == y.istring && x.ostring == y.ostring; } }; // Hash function for Elements to FST states. 
class ElementKey { public: size_t operator()(const Element &x) const { size_t key = x.state; key = (key << 1) ^ (x.istring)->size(); for (size_t i = 0; i < (x.istring)->size(); ++i) { key = (key << 1) ^ (*x.istring)[i]; } key = (key << 1) ^ (x.ostring)->size(); for (size_t i = 0; i < (x.ostring)->size(); ++i) { key = (key << 1) ^ (*x.ostring)[i]; } return key; } }; // Equality function for strings. class StringEqual { public: bool operator()(const String *const &x, const String *const &y) const { if (x->size() != y->size()) return false; for (size_t i = 0; i < x->size(); ++i) { if ((*x)[i] != (*y)[i]) return false; } return true; } }; // Hash function for set of strings class StringKey { public: size_t operator()(const String *const &x) const { size_t key = x->size(); for (size_t i = 0; i < x->size(); ++i) key = (key << 1) ^ (*x)[i]; return key; } }; using ElementMap = std::unordered_map<Element, StateId, ElementKey, ElementEqual>; using StringSet = std::unordered_set<const String *, StringKey, StringEqual>; std::unique_ptr<const Fst<Arc>> fst_; std::vector<Element> elements_; // Maps FST state to Elements. ElementMap element_map_; // Maps Elements to FST state. StringSet string_set_; }; } // namespace internal // Synchronizes a transducer. This version is a delayed FST. The result is an // equivalent FST that has the property that during the traversal of a path, // the delay is either zero or strictly increasing, where the delay is the // difference between the number of non-epsilon output labels and input labels // along the path. // // For the algorithm to terminate, the input transducer must have bounded // delay, i.e., the delay of every cycle must be zero. // // Complexity: // // - A has bounded delay: exponential. // - A does not have bounded delay: does not terminate. // // For more information, see: // // Mohri, M. 2003. Edit-distance of weighted automata: General definitions and // algorithms. International Journal of Computer Science 14(6): 957-982. // // This class attaches interface to implementation and handles reference // counting, delegating most methods to ImplToFst. template <class A> class SynchronizeFst : public ImplToFst<internal::SynchronizeFstImpl<A>> { public: using Arc = A; using StateId = typename Arc::StateId; using Weight = typename Arc::Weight; using Store = DefaultCacheStore<Arc>; using State = typename Store::State; using Impl = internal::SynchronizeFstImpl<A>; friend class ArcIterator<SynchronizeFst<A>>; friend class StateIterator<SynchronizeFst<A>>; explicit SynchronizeFst( const Fst<A> &fst, const SynchronizeFstOptions &opts = SynchronizeFstOptions()) : ImplToFst<Impl>(std::make_shared<Impl>(fst, opts)) {} // See Fst<>::Copy() for doc. SynchronizeFst(const SynchronizeFst<Arc> &fst, bool safe = false) : ImplToFst<Impl>(fst, safe) {} // Gets a copy of this SynchronizeFst. See Fst<>::Copy() for further doc. SynchronizeFst<Arc> *Copy(bool safe = false) const override { return new SynchronizeFst<Arc>(*this, safe); } inline void InitStateIterator(StateIteratorData<Arc> *data) const override; void InitArcIterator(StateId s, ArcIteratorData<Arc> *data) const override { GetMutableImpl()->InitArcIterator(s, data); } private: using ImplToFst<Impl>::GetImpl; using ImplToFst<Impl>::GetMutableImpl; SynchronizeFst &operator=(const SynchronizeFst &) = delete; }; // Specialization for SynchronizeFst. 
template <class Arc> class StateIterator<SynchronizeFst<Arc>> : public CacheStateIterator<SynchronizeFst<Arc>> { public: explicit StateIterator(const SynchronizeFst<Arc> &fst) : CacheStateIterator<SynchronizeFst<Arc>>(fst, fst.GetMutableImpl()) {} }; // Specialization for SynchronizeFst. template <class Arc> class ArcIterator<SynchronizeFst<Arc>> : public CacheArcIterator<SynchronizeFst<Arc>> { public: using StateId = typename Arc::StateId; ArcIterator(const SynchronizeFst<Arc> &fst, StateId s) : CacheArcIterator<SynchronizeFst<Arc>>(fst.GetMutableImpl(), s) { if (!fst.GetImpl()->HasArcs(s)) fst.GetMutableImpl()->Expand(s); } }; template <class Arc> inline void SynchronizeFst<Arc>::InitStateIterator( StateIteratorData<Arc> *data) const { data->base = new StateIterator<SynchronizeFst<Arc>>(*this); } // Synchronizes a transducer. This version writes the synchronized result to a // MutableFst. The result will be an equivalent FST that has the property that // during the traversal of a path, the delay is either zero or strictly // increasing, where the delay is the difference between the number of // non-epsilon output labels and input labels along the path. // // For the algorithm to terminate, the input transducer must have bounded // delay, i.e., the delay of every cycle must be zero. // // Complexity: // // - A has bounded delay: exponential. // - A does not have bounded delay: does not terminate. // // For more information, see: // // Mohri, M. 2003. Edit-distance of weighted automata: General definitions and // algorithms. International Journal of Computer Science 14(6): 957-982. template <class Arc> void Synchronize(const Fst<Arc> &ifst, MutableFst<Arc> *ofst) { // Caches only the last state for fastest copy. const SynchronizeFstOptions opts(FLAGS_fst_default_cache_gc, 0); *ofst = SynchronizeFst<Arc>(ifst, opts); } } // namespace fst #endif // FST_SYNCHRONIZE_H_
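To make the two entry points above concrete, here is a small sketch; the input and output file names are assumptions, and, per the complexity notes above, the input transducer must have bounded delay for either call to terminate.

// Sketch of both APIs: the delayed SynchronizeFst and the eager Synchronize().
// "in.fst" and "out.fst" are assumed paths.
#include <memory>
#include <fst/fstlib.h>
#include <fst/synchronize.h>

int main() {
  std::unique_ptr<fst::StdFst> input(fst::StdFst::Read("in.fst"));  // assumed
  if (!input) return 1;
  // Delayed (on-demand) synchronization; states expand as they are visited.
  fst::SynchronizeFst<fst::StdArc> delayed(*input);
  // Eager synchronization into a mutable result.
  fst::StdVectorFst result;
  fst::Synchronize(*input, &result);
  result.Write("out.fst");  // assumed output path
  return 0;
}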
coqui_public_repos/STT/taskcluster/test-electronjs_v7.0-darwin-amd64-opt.yml
build:
  template_file: test-darwin-opt-base.tyml
  dependencies:
    - "darwin-amd64-cpu-opt"
    - "test-training_16k-linux-amd64-py36m-opt"
    - "homebrew_tests-darwin-amd64"
  test_model_task: "test-training_16k-linux-amd64-py36m-opt"
  system_setup: >
    ${nodejs.brew.prep_12}
  args:
    tests_cmdline: "$TASKCLUSTER_TASK_DIR/DeepSpeech/ds/taskcluster/tc-electron-tests.sh 12.x 7.0.1 16k"
  metadata:
    name: "DeepSpeech OSX AMD64 CPU ElectronJS v7.0 tests"
    description: "Testing DeepSpeech for OSX/AMD64 on ElectronJS v7.0, CPU only, optimized version"
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.7/src/include/fst/interval-set.h
// See www.openfst.org for extensive documentation on this weighted // finite-state transducer library. // // Class to represent and operate on sets of intervals. #ifndef FST_INTERVAL_SET_H_ #define FST_INTERVAL_SET_H_ #include <algorithm> #include <iostream> #include <vector> #include <fst/util.h> namespace fst { // Half-open integral interval [a, b) of signed integers of type T. template <class T> struct IntInterval { T begin; T end; IntInterval() : begin(-1), end(-1) {} IntInterval(T begin, T end) : begin(begin), end(end) {} bool operator<(const IntInterval<T> &i) const { return begin < i.begin || (begin == i.begin && end > i.end); } bool operator==(const IntInterval<T> &i) const { return begin == i.begin && end == i.end; } bool operator!=(const IntInterval<T> &i) const { return begin != i.begin || end != i.end; } std::istream &Read(std::istream &strm) { T n; ReadType(strm, &n); begin = n; ReadType(strm, &n); end = n; return strm; } std::ostream &Write(std::ostream &strm) const { T n = begin; WriteType(strm, n); n = end; WriteType(strm, n); return strm; } }; // Stores IntIntervals<T> in a vector. In addition, keeps the count of points in // all intervals. template <class T> class VectorIntervalStore { public: using Interval = IntInterval<T>; using Iterator = typename std::vector<Interval>::const_iterator; VectorIntervalStore() : count_(-1) {} std::vector<Interval> *MutableIntervals() { return &intervals_; } const Interval *Intervals() const { return intervals_.data(); } T Size() const { return intervals_.size(); } T Count() const { return count_; } void SetCount(T count) { count_ = count; } void Clear() { intervals_.clear(); count_ = 0; } Iterator begin() const { return intervals_.begin(); } Iterator end() const { return intervals_.end(); } std::istream &Read(std::istream &strm) { ReadType(strm, &intervals_); return ReadType(strm, &count_); } std::ostream &Write(std::ostream &strm) const { WriteType(strm, intervals_); return WriteType(strm, count_); } private: std::vector<Interval> intervals_; T count_; }; // Stores and operates on a set of half-open integral intervals [a, b) // of signed integers of type T. template <class T, class Store = VectorIntervalStore<T>> class IntervalSet { public: using Interval = IntInterval<T>; template <class... A> explicit IntervalSet(A... args) : intervals_(args...) {} // Returns the interval set as a vector. std::vector<Interval> *MutableIntervals() { return intervals_.MutableIntervals(); } // Returns a pointer to an array of Size() elements. const Interval *Intervals() const { return intervals_.Intervals(); } bool Empty() const { return Size() == 0; } T Size() const { return intervals_.Size(); } // Number of points in the intervals (undefined if not normalized). T Count() const { return intervals_.Count(); } void Clear() { intervals_.Clear(); } // Adds an interval set to the set. The result may not be normalized. void Union(const IntervalSet<T, Store> &iset) { intervals_.MutableIntervals()->insert(intervals_.MutableIntervals()->end(), iset.intervals_.begin(), iset.intervals_.end()); } // Requires intervals be normalized. bool Member(T value) const { const Interval interval(value, value); auto lb = std::lower_bound(intervals_.begin(), intervals_.end(), interval); if (lb == intervals_.begin()) return false; return (--lb)->end > value; } // Requires intervals be normalized. 
bool operator==(const IntervalSet<T, Store> &iset) const { return Size() == iset.Size() && std::equal(intervals_.begin(), intervals_.end(), iset.intervals_.begin()); } // Requires intervals be normalized. bool operator!=(const IntervalSet<T, Store> &iset) const { return Size() != iset.Size() || !std::equal(intervals_.begin(), intervals_.end(), iset.intervals_.begin()); } bool Singleton() const { return Size() == 1 && intervals_.begin()->begin + 1 == intervals_.begin()->end; } // Sorts, collapses overlapping and adjacent interals, and sets count. void Normalize(); // Intersects an interval set with the set. Requires intervals be normalized. // The result is normalized. void Intersect(const IntervalSet<T, Store> &iset, IntervalSet<T, Store> *oset) const; // Complements the set w.r.t [0, maxval). Requires intervals be normalized. // The result is normalized. void Complement(T maxval, IntervalSet<T, Store> *oset) const; // Subtract an interval set from the set. Requires intervals be normalized. // The result is normalized. void Difference(const IntervalSet<T, Store> &iset, IntervalSet<T, Store> *oset) const; // Determines if an interval set overlaps with the set. Requires intervals be // normalized. bool Overlaps(const IntervalSet<T, Store> &iset) const; // Determines if an interval set overlaps with the set but neither is // contained in the other. Requires intervals be normalized. bool StrictlyOverlaps(const IntervalSet<T, Store> &iset) const; // Determines if an interval set is contained within the set. Requires // intervals be normalized. bool Contains(const IntervalSet<T, Store> &iset) const; std::istream &Read(std::istream &strm) { return intervals_.Read(strm); } std::ostream &Write(std::ostream &strm) const { return intervals_.Write(strm); } typename Store::Iterator begin() const { return intervals_.begin(); } typename Store::Iterator end() const { return intervals_.end(); } private: Store intervals_; }; // Sorts, collapses overlapping and adjacent intervals, and sets count. template <typename T, class Store> void IntervalSet<T, Store>::Normalize() { auto &intervals = *intervals_.MutableIntervals(); std::sort(intervals.begin(), intervals.end()); T count = 0; T size = 0; for (T i = 0; i < intervals.size(); ++i) { auto &inti = intervals[i]; if (inti.begin == inti.end) continue; for (T j = i + 1; j < intervals.size(); ++j) { auto &intj = intervals[j]; if (intj.begin > inti.end) break; if (intj.end > inti.end) inti.end = intj.end; ++i; } count += inti.end - inti.begin; intervals[size++] = inti; } intervals.resize(size); intervals_.SetCount(count); } // Intersects an interval set with the set. Requires intervals be normalized. // The result is normalized. template <typename T, class Store> void IntervalSet<T, Store>::Intersect(const IntervalSet<T, Store> &iset, IntervalSet<T, Store> *oset) const { auto *ointervals = oset->MutableIntervals(); auto it1 = intervals_.begin(); auto it2 = iset.intervals_.begin(); ointervals->clear(); T count = 0; while (it1 != intervals_.end() && it2 != iset.intervals_.end()) { if (it1->end <= it2->begin) { ++it1; } else if (it2->end <= it1->begin) { ++it2; } else { ointervals->emplace_back(std::max(it1->begin, it2->begin), std::min(it1->end, it2->end)); count += ointervals->back().end - ointervals->back().begin; if (it1->end < it2->end) { ++it1; } else { ++it2; } } } oset->intervals_.SetCount(count); } // Complements the set w.r.t [0, maxval). Requires intervals be normalized. // The result is normalized. 
template <typename T, class Store> void IntervalSet<T, Store>::Complement(T maxval, IntervalSet<T, Store> *oset) const { auto *ointervals = oset->MutableIntervals(); ointervals->clear(); T count = 0; Interval interval; interval.begin = 0; for (auto it = intervals_.begin(); it != intervals_.end(); ++it) { interval.end = std::min(it->begin, maxval); if ((interval.begin) < (interval.end)) { ointervals->push_back(interval); count += interval.end - interval.begin; } interval.begin = it->end; } interval.end = maxval; if ((interval.begin) < (interval.end)) { ointervals->push_back(interval); count += interval.end - interval.begin; } oset->intervals_.SetCount(count); } // Subtract an interval set from the set. Requires intervals be normalized. // The result is normalized. template <typename T, class Store> void IntervalSet<T, Store>::Difference(const IntervalSet<T, Store> &iset, IntervalSet<T, Store> *oset) const { if (Empty()) { oset->MutableIntervals()->clear(); oset->intervals_.SetCount(0); } else { IntervalSet<T, Store> cset; iset.Complement(intervals_.Intervals()[intervals_.Size() - 1].end, &cset); Intersect(cset, oset); } } // Determines if an interval set overlaps with the set. Requires intervals be // normalized. template <typename T, class Store> bool IntervalSet<T, Store>::Overlaps(const IntervalSet<T, Store> &iset) const { auto it1 = intervals_.begin(); auto it2 = iset.intervals_.begin(); while (it1 != intervals_.end() && it2 != iset.intervals_.end()) { if (it1->end <= it2->begin) { ++it1; } else if (it2->end <= it1->begin) { ++it2; } else { return true; } } return false; } // Determines if an interval set overlaps with the set but neither is contained // in the other. Requires intervals be normalized. template <typename T, class Store> bool IntervalSet<T, Store>::StrictlyOverlaps( const IntervalSet<T, Store> &iset) const { auto it1 = intervals_.begin(); auto it2 = iset.intervals_.begin(); bool only1 = false; // Point in intervals_ but not intervals. bool only2 = false; // Point in intervals but not intervals_. bool overlap = false; // Point in both intervals_ and intervals. while (it1 != intervals_.end() && it2 != iset.intervals_.end()) { if (it1->end <= it2->begin) { // no overlap - it1 first only1 = true; ++it1; } else if (it2->end <= it1->begin) { // no overlap - it2 first only2 = true; ++it2; } else if (it2->begin == it1->begin && it2->end == it1->end) { // equals overlap = true; ++it1; ++it2; } else if (it2->begin <= it1->begin && it2->end >= it1->end) { // 1 c 2 only2 = true; overlap = true; ++it1; } else if (it1->begin <= it2->begin && it1->end >= it2->end) { // 2 c 1 only1 = true; overlap = true; ++it2; } else { // Strict overlap. only1 = true; only2 = true; overlap = true; } if (only1 == true && only2 == true && overlap == true) return true; } if (it1 != intervals_.end()) only1 = true; if (it2 != iset.intervals_.end()) only2 = true; return only1 == true && only2 == true && overlap == true; } // Determines if an interval set is contained within the set. Requires intervals // be normalized. template <typename T, class Store> bool IntervalSet<T, Store>::Contains(const IntervalSet<T, Store> &iset) const { if (iset.Count() > Count()) return false; auto it1 = intervals_.begin(); auto it2 = iset.intervals_.begin(); while (it1 != intervals_.end() && it2 != iset.intervals_.end()) { if ((it1->end) <= (it2->begin)) { // No overlap; it1 first. ++it1; } else if ((it2->begin) < (it1->begin) || (it2->end) > (it1->end)) { // No C. 
return false; } else if (it2->end == it1->end) { ++it1; ++it2; } else { ++it2; } } return it2 == iset.intervals_.end(); } template <typename T, class Store> std::ostream &operator<<(std::ostream &strm, const IntervalSet<T, Store> &s) { strm << "{"; for (T i = 0; i < s.Size(); ++i) { if (i > 0) { strm << ","; } const auto &interval = s.Intervals()[i]; strm << "[" << interval.begin << "," << interval.end << ")"; } strm << "}"; return strm; } } // namespace fst #endif // FST_INTERVAL_SET_H_
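A short self-contained example of the intended call pattern, built only from the types above: intervals are appended unnormalized, Normalize() sorts and merges them, and the query methods then assume normalized input. The numeric values are arbitrary.

// Sketch: build, normalize, and query a set of half-open intervals.
#include <iostream>
#include <fst/interval-set.h>

int main() {
  fst::IntervalSet<int> iset;
  auto *intervals = iset.MutableIntervals();
  intervals->emplace_back(1, 5);  // [1, 5)
  intervals->emplace_back(4, 9);  // overlaps the first; Normalize() merges
  iset.Normalize();               // sorts, collapses to [1, 9), sets Count()
  std::cout << iset << "\n";            // {[1,9)}
  std::cout << iset.Member(4) << "\n";  // 1 (true); requires normalization
  std::cout << iset.Count() << std::endl;  // 8 points
  return 0;
}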
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.9-win/src/extensions/python/Makefile.in
# Makefile.in generated by automake 1.15.1 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2017 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ # NB: we use the Cython-generated .cc files rather than the *.pxd/.pyx sources # used to generate them. Consequently, modifications to the .pyx files will not # influence the build unless the .cc files are regenerated using Cython. VPATH = @srcdir@ am__is_gnu_make = { \ if test -z '$(MAKELEVEL)'; then \ false; \ elif test -n '$(MAKE_HOST)'; then \ true; \ elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \ true; \ else \ false; \ fi; \ } am__make_running_with_option = \ case $${target_option-} in \ ?) ;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) pkgdatadir = $(datadir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ pkglibdir = $(libdir)/@PACKAGE@ pkglibexecdir = $(libexecdir)/@PACKAGE@ am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src/extensions/python ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/m4/ac_python_devel.m4 \ $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \ $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \ $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) DIST_COMMON = $(srcdir)/Makefile.am $(am__DIST_COMMON) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h \ $(top_builddir)/src/include/fst/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ 
esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__uninstall_files_from_dir = { \ test -z "$$files" \ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } am__installdirs = "$(DESTDIR)$(pythondir)" LTLIBRARIES = $(python_LTLIBRARIES) am__DEPENDENCIES_1 = pywrapfst_la_DEPENDENCIES = ../far/libfstfarscript.la \ ../far/libfstfar.la ../../script/libfstscript.la \ ../../lib/libfst.la $(am__DEPENDENCIES_1) am_pywrapfst_la_OBJECTS = pywrapfst_la-pywrapfst.lo pywrapfst_la_OBJECTS = $(am_pywrapfst_la_OBJECTS) AM_V_lt = $(am__v_lt_@AM_V@) am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@) am__v_lt_0 = --silent am__v_lt_1 = pywrapfst_la_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(AM_CXXFLAGS) \ $(CXXFLAGS) $(pywrapfst_la_LDFLAGS) $(LDFLAGS) -o $@ AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = DEFAULT_INCLUDES = depcomp = $(SHELL) $(top_srcdir)/depcomp am__depfiles_maybe = depfiles am__mv = mv -f CXXCOMPILE = $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) LTCXXCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ $(AM_CXXFLAGS) $(CXXFLAGS) AM_V_CXX = $(am__v_CXX_@AM_V@) am__v_CXX_ = $(am__v_CXX_@AM_DEFAULT_V@) am__v_CXX_0 = @echo " CXX " $@; am__v_CXX_1 = CXXLD = $(CXX) CXXLINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(AM_CXXFLAGS) \ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CXXLD = $(am__v_CXXLD_@AM_V@) am__v_CXXLD_ = $(am__v_CXXLD_@AM_DEFAULT_V@) am__v_CXXLD_0 = @echo " CXXLD " $@; am__v_CXXLD_1 = SOURCES = $(pywrapfst_la_SOURCES) DIST_SOURCES = $(pywrapfst_la_SOURCES) am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. 
am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags am__DIST_COMMON = $(srcdir)/Makefile.in $(top_srcdir)/depcomp DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) ACLOCAL = @ACLOCAL@ AMTAR = @AMTAR@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DL_LIBS = @DL_LIBS@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GREP = @GREP@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ LD = @LD@ LDFLAGS = @LDFLAGS@ LIBOBJS = @LIBOBJS@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBOBJS = @LTLIBOBJS@ LT_SYS_LIBRARY_PATH = @LT_SYS_LIBRARY_PATH@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ NM = @NM@ NMEDIT = @NMEDIT@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PYTHON = @PYTHON@ PYTHON_CPPFLAGS = @PYTHON_CPPFLAGS@ PYTHON_EXEC_PREFIX = @PYTHON_EXEC_PREFIX@ PYTHON_EXTRA_LDFLAGS = @PYTHON_EXTRA_LDFLAGS@ PYTHON_EXTRA_LIBS = @PYTHON_EXTRA_LIBS@ PYTHON_LDFLAGS = @PYTHON_LDFLAGS@ PYTHON_PLATFORM = @PYTHON_PLATFORM@ PYTHON_PREFIX = @PYTHON_PREFIX@ PYTHON_SITE_PKG = @PYTHON_SITE_PKG@ PYTHON_VERSION = @PYTHON_VERSION@ RANLIB = @RANLIB@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ STRIP = @STRIP@ VERSION = @VERSION@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ bindir = @bindir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ datadir = @datadir@ datarootdir = @datarootdir@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ libfstdir = @libfstdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgpyexecdir = @pkgpyexecdir@ pkgpythondir = @pkgpythondir@ prefix = @prefix@ program_transform_name = @program_transform_name@ psdir = @psdir@ pyexecdir = @pyexecdir@ pythondir = @pythondir@ runstatedir = @runstatedir@ sbindir = @sbindir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ 
target_alias = @target_alias@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ python_LTLIBRARIES = pywrapfst.la pyexec_LTLIBRARIES = pywrapfst.la pywrapfst_la_SOURCES = pywrapfst.cc pywrapfst_la_CPPFLAGS = -I$(srcdir)/../../include $(PYTHON_CPPFLAGS) pywrapfst_la_LDFLAGS = $(PYTHON_LDFLAGS) -avoid-version -module pywrapfst_la_LIBADD = ../far/libfstfarscript.la ../far/libfstfar.la \ ../../script/libfstscript.la ../../lib/libfst.la \ -lm $(DL_LIBS) # Exports the *.pxd/*.pyx source files. EXTRA_DIST = basictypes.pxd fst.pxd ios.pxd memory.pxd pywrapfst.pxd \ pywrapfst.pyx all: all-am .SUFFIXES: .SUFFIXES: .cc .lo .o .obj $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/extensions/python/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign src/extensions/python/Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): install-pythonLTLIBRARIES: $(python_LTLIBRARIES) @$(NORMAL_INSTALL) @list='$(python_LTLIBRARIES)'; test -n "$(pythondir)" || list=; \ list2=; for p in $$list; do \ if test -f $$p; then \ list2="$$list2 $$p"; \ else :; fi; \ done; \ test -z "$$list2" || { \ echo " $(MKDIR_P) '$(DESTDIR)$(pythondir)'"; \ $(MKDIR_P) "$(DESTDIR)$(pythondir)" || exit 1; \ echo " $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL) $(INSTALL_STRIP_FLAG) $$list2 '$(DESTDIR)$(pythondir)'"; \ $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL) $(INSTALL_STRIP_FLAG) $$list2 "$(DESTDIR)$(pythondir)"; \ } uninstall-pythonLTLIBRARIES: @$(NORMAL_UNINSTALL) @list='$(python_LTLIBRARIES)'; test -n "$(pythondir)" || list=; \ for p in $$list; do \ $(am__strip_dir) \ echo " $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=uninstall rm -f '$(DESTDIR)$(pythondir)/$$f'"; \ $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=uninstall rm -f "$(DESTDIR)$(pythondir)/$$f"; \ done clean-pythonLTLIBRARIES: -test -z "$(python_LTLIBRARIES)" || rm -f $(python_LTLIBRARIES) @list='$(python_LTLIBRARIES)'; \ locs=`for p in $$list; do echo $$p; done | \ sed 's|^[^/]*$$|.|; s|/[^/]*$$||; s|$$|/so_locations|' | \ sort -u`; \ test -z "$$locs" || { \ echo rm -f $${locs}; \ rm -f $${locs}; \ } pywrapfst.la: $(pywrapfst_la_OBJECTS) $(pywrapfst_la_DEPENDENCIES) $(EXTRA_pywrapfst_la_DEPENDENCIES) $(AM_V_CXXLD)$(pywrapfst_la_LINK) -rpath $(pythondir) $(pywrapfst_la_OBJECTS) $(pywrapfst_la_LIBADD) $(LIBS) mostlyclean-compile: -rm -f *.$(OBJEXT) distclean-compile: -rm -f *.tab.c @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/pywrapfst_la-pywrapfst.Plo@am__quote@ .cc.o: @am__fastdepCXX_TRUE@ 
$(AM_V_CXX)depbase=`echo $@ | sed 's|[^/]*$$|$(DEPDIR)/&|;s|\.o$$||'`;\ @am__fastdepCXX_TRUE@ $(CXXCOMPILE) -MT $@ -MD -MP -MF $$depbase.Tpo -c -o $@ $< &&\ @am__fastdepCXX_TRUE@ $(am__mv) $$depbase.Tpo $$depbase.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ $< .cc.obj: @am__fastdepCXX_TRUE@ $(AM_V_CXX)depbase=`echo $@ | sed 's|[^/]*$$|$(DEPDIR)/&|;s|\.obj$$||'`;\ @am__fastdepCXX_TRUE@ $(CXXCOMPILE) -MT $@ -MD -MP -MF $$depbase.Tpo -c -o $@ `$(CYGPATH_W) '$<'` &&\ @am__fastdepCXX_TRUE@ $(am__mv) $$depbase.Tpo $$depbase.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'` .cc.lo: @am__fastdepCXX_TRUE@ $(AM_V_CXX)depbase=`echo $@ | sed 's|[^/]*$$|$(DEPDIR)/&|;s|\.lo$$||'`;\ @am__fastdepCXX_TRUE@ $(LTCXXCOMPILE) -MT $@ -MD -MP -MF $$depbase.Tpo -c -o $@ $< &&\ @am__fastdepCXX_TRUE@ $(am__mv) $$depbase.Tpo $$depbase.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LTCXXCOMPILE) -c -o $@ $< pywrapfst_la-pywrapfst.lo: pywrapfst.cc @am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(pywrapfst_la_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -MT pywrapfst_la-pywrapfst.lo -MD -MP -MF $(DEPDIR)/pywrapfst_la-pywrapfst.Tpo -c -o pywrapfst_la-pywrapfst.lo `test -f 'pywrapfst.cc' || echo '$(srcdir)/'`pywrapfst.cc @am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/pywrapfst_la-pywrapfst.Tpo $(DEPDIR)/pywrapfst_la-pywrapfst.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='pywrapfst.cc' object='pywrapfst_la-pywrapfst.lo' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(pywrapfst_la_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -c -o pywrapfst_la-pywrapfst.lo `test -f 'pywrapfst.cc' || echo '$(srcdir)/'`pywrapfst.cc mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-am TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-am CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" 
cscopelist: cscopelist-am cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(LTLIBRARIES) installdirs: for dir in "$(DESTDIR)$(pythondir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-am clean-am: clean-generic clean-libtool clean-pythonLTLIBRARIES \ mostlyclean-am distclean: distclean-am -rm -rf ./$(DEPDIR) -rm -f Makefile distclean-am: clean-am distclean-compile distclean-generic \ distclean-tags dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-pythonLTLIBRARIES install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -rf ./$(DEPDIR) -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-compile mostlyclean-generic \ mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: uninstall-pythonLTLIBRARIES .MAKE: install-am install-strip .PHONY: CTAGS GTAGS TAGS all all-am check check-am clean clean-generic \ clean-libtool clean-pythonLTLIBRARIES cscopelist-am ctags \ ctags-am distclean distclean-compile distclean-generic \ distclean-libtool distclean-tags distdir dvi dvi-am html \ html-am info info-am install install-am install-data \ install-data-am install-dvi install-dvi-am install-exec \ install-exec-am install-html install-html-am install-info \ install-info-am install-man install-pdf install-pdf-am \ install-ps install-ps-am install-pythonLTLIBRARIES \ install-strip installcheck installcheck-am installdirs \ maintainer-clean maintainer-clean-generic mostlyclean \ mostlyclean-compile mostlyclean-generic mostlyclean-libtool \ pdf pdf-am ps ps-am tags tags-am uninstall uninstall-am \ uninstall-pythonLTLIBRARIES .PRECIOUS: Makefile # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT:
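# The note near the top of this file points out that the build consumes the
# Cython-generated pywrapfst.cc rather than the .pyx/.pxd sources. As a hedged
# sketch (not one of the generated rules above; the exact flags depend on the
# installed Cython version), the translation unit could be refreshed with:
#
#   cython --cplus -o pywrapfst.cc pywrapfst.pyx
#
# after which a regular "make" rebuilds pywrapfst.la from the new .cc file.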
0
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.7/src/extensions
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.7/src/extensions/far/Makefile.am
AM_CPPFLAGS = -I$(srcdir)/../../include $(ICU_CPPFLAGS) if HAVE_SCRIPT lib_LTLIBRARIES = libfstfar.la libfstfarscript.la else lib_LTLIBRARIES = libfstfar.la endif libfstfar_la_SOURCES = sttable.cc stlist.cc libfstfar_la_LDFLAGS = -version-info 10:0:0 libfstfar_la_LIBADD = ../../lib/libfst.la -lm $(DL_LIBS) if HAVE_SCRIPT libfstfarscript_la_SOURCES = far-class.cc farscript.cc getters.cc script-impl.cc \ strings.cc libfstfarscript_la_LDFLAGS = -version-info 10:0:0 libfstfarscript_la_LIBADD = \ libfstfar.la ../../script/libfstscript.la \ ../../lib/libfst.la -lm $(DL_LIBS) endif if HAVE_BIN bin_PROGRAMS = farcompilestrings farcreate farequal farextract farinfo \ farisomorphic farprintstrings LDADD = libfstfarscript.la ../../script/libfstscript.la \ ../../lib/libfst.la -lm $(DL_LIBS) farcompilestrings_SOURCES = farcompilestrings.cc farcreate_SOURCES = farcreate.cc farequal_SOURCES = farequal.cc farextract_SOURCES = farextract.cc farinfo_SOURCES = farinfo.cc farisomorphic_SOURCES = farisomorphic.cc farprintstrings_SOURCES = farprintstrings.cc endif
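# A hedged note on the -version-info triples above: under libtool's documented
# current:revision:age scheme, the shared-object suffix is derived as
# (current - age).(age).(revision), so 10:0:0 yields e.g. libfstfar.so.10.0.0
# on ELF platforms.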
0
coqui_public_repos/TTS/tests
coqui_public_repos/TTS/tests/vocoder_tests/test_vocoder_gan_datasets.py
import os

import numpy as np
from torch.utils.data import DataLoader

from tests import get_tests_output_path, get_tests_path
from TTS.utils.audio import AudioProcessor
from TTS.vocoder.configs import BaseGANVocoderConfig
from TTS.vocoder.datasets.gan_dataset import GANDataset
from TTS.vocoder.datasets.preprocess import load_wav_data

file_path = os.path.dirname(os.path.realpath(__file__))
OUTPATH = os.path.join(get_tests_output_path(), "loader_tests/")
os.makedirs(OUTPATH, exist_ok=True)

C = BaseGANVocoderConfig()

test_data_path = os.path.join(get_tests_path(), "data/ljspeech/")
ok_ljspeech = os.path.exists(test_data_path)


def gan_dataset_case(
    batch_size, seq_len, hop_len, conv_pad, return_pairs, return_segments, use_noise_augment, use_cache, num_workers
):
    """Run dataloader with given parameters and check conditions"""
    ap = AudioProcessor(**C.audio)
    _, train_items = load_wav_data(test_data_path, 10)
    dataset = GANDataset(
        ap,
        train_items,
        seq_len=seq_len,
        hop_len=hop_len,
        pad_short=2000,
        conv_pad=conv_pad,
        return_pairs=return_pairs,
        return_segments=return_segments,
        use_noise_augment=use_noise_augment,
        use_cache=use_cache,
    )
    loader = DataLoader(
        dataset=dataset, batch_size=batch_size, shuffle=True, num_workers=num_workers, pin_memory=True, drop_last=True
    )

    max_iter = 10
    count_iter = 0

    def check_item(feat, wav):
        """Pass a single pair of features and waveform"""
        feat = feat.numpy()
        wav = wav.numpy()
        expected_feat_shape = (batch_size, ap.num_mels, seq_len // hop_len + conv_pad * 2)

        # check shapes
        assert np.all(feat.shape == expected_feat_shape), f" [!] {feat.shape} vs {expected_feat_shape}"
        assert (feat.shape[2] - conv_pad * 2) * hop_len == wav.shape[2]

        # check feature vs audio match
        if not use_noise_augment:
            for idx in range(batch_size):
                audio = wav[idx].squeeze()
                # index into a fresh name; reassigning `feat` here would break
                # every iteration after the first
                feat_item = feat[idx]
                mel = ap.melspectrogram(audio)
                # the first 2 and the last 2 frames are skipped due to the padding
                # differences in stft
                max_diff = abs((feat_item - mel[:, : feat_item.shape[-1]])[:, 2:-2]).max()
                assert max_diff <= 1e-6, f" [!] {max_diff}"

    # return random segments or return the whole audio
    if return_segments:
        if return_pairs:
            for item1, item2 in loader:
                feat1, wav1 = item1
                feat2, wav2 = item2
                check_item(feat1, wav1)
                check_item(feat2, wav2)
                count_iter += 1
        else:
            for item1 in loader:
                feat1, wav1 = item1
                check_item(feat1, wav1)
                count_iter += 1
    else:
        for item in loader:
            feat, wav = item
            expected_feat_shape = (batch_size, ap.num_mels, (wav.shape[-1] // hop_len) + (conv_pad * 2))
            assert np.all(feat.shape == expected_feat_shape), f" [!] {feat.shape} vs {expected_feat_shape}"
            assert (feat.shape[2] - conv_pad * 2) * hop_len == wav.shape[2]
            count_iter += 1
            if count_iter == max_iter:
                break


def test_parametrized_gan_dataset():
    """test dataloader with different parameters"""
    params = [
        [32, C.audio["hop_length"] * 10, C.audio["hop_length"], 0, True, True, False, True, 0],
        [32, C.audio["hop_length"] * 10, C.audio["hop_length"], 0, True, True, False, True, 4],
        [1, C.audio["hop_length"] * 10, C.audio["hop_length"], 0, True, True, True, True, 0],
        [1, C.audio["hop_length"], C.audio["hop_length"], 0, True, True, True, True, 0],
        [1, C.audio["hop_length"] * 10, C.audio["hop_length"], 2, True, True, True, True, 0],
        [1, C.audio["hop_length"] * 10, C.audio["hop_length"], 0, True, False, True, True, 0],
        [1, C.audio["hop_length"] * 10, C.audio["hop_length"], 0, True, True, False, True, 0],
        [1, C.audio["hop_length"] * 10, C.audio["hop_length"], 0, False, True, True, False, 0],
        [1, C.audio["hop_length"] * 10, C.audio["hop_length"], 0, True, False, False, False, 0],
        [1, C.audio["hop_length"] * 10, C.audio["hop_length"], 0, True, False, False, False, 0],
    ]
    for param in params:
        print(param)
        gan_dataset_case(*param)
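# A hedged ad-hoc invocation of a single configuration (keyword names shown for
# clarity; the positional lists in `params` above follow this same order):
#
# gan_dataset_case(
#     batch_size=1,
#     seq_len=C.audio["hop_length"] * 10,
#     hop_len=C.audio["hop_length"],
#     conv_pad=0,
#     return_pairs=True,
#     return_segments=True,
#     use_noise_augment=False,
#     use_cache=True,
#     num_workers=0,
# )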
0
coqui_public_repos/inference-engine/third_party
coqui_public_repos/inference-engine/third_party/onnxruntime/GIT_COMMIT_ID
d4106deeb65c21eed3ed40df149efefeb72fe9a4
0
coqui_public_repos
coqui_public_repos/STT/LICENSE
Mozilla Public License Version 2.0 ================================== 1. Definitions -------------- 1.1. "Contributor" means each individual or legal entity that creates, contributes to the creation of, or owns Covered Software. 1.2. "Contributor Version" means the combination of the Contributions of others (if any) used by a Contributor and that particular Contributor's Contribution. 1.3. "Contribution" means Covered Software of a particular Contributor. 1.4. "Covered Software" means Source Code Form to which the initial Contributor has attached the notice in Exhibit A, the Executable Form of such Source Code Form, and Modifications of such Source Code Form, in each case including portions thereof. 1.5. "Incompatible With Secondary Licenses" means (a) that the initial Contributor has attached the notice described in Exhibit B to the Covered Software; or (b) that the Covered Software was made available under the terms of version 1.1 or earlier of the License, but not also under the terms of a Secondary License. 1.6. "Executable Form" means any form of the work other than Source Code Form. 1.7. "Larger Work" means a work that combines Covered Software with other material, in a separate file or files, that is not Covered Software. 1.8. "License" means this document. 1.9. "Licensable" means having the right to grant, to the maximum extent possible, whether at the time of the initial grant or subsequently, any and all of the rights conveyed by this License. 1.10. "Modifications" means any of the following: (a) any file in Source Code Form that results from an addition to, deletion from, or modification of the contents of Covered Software; or (b) any new file in Source Code Form that contains any Covered Software. 1.11. "Patent Claims" of a Contributor means any patent claim(s), including without limitation, method, process, and apparatus claims, in any patent Licensable by such Contributor that would be infringed, but for the grant of the License, by the making, using, selling, offering for sale, having made, import, or transfer of either its Contributions or its Contributor Version. 1.12. "Secondary License" means either the GNU General Public License, Version 2.0, the GNU Lesser General Public License, Version 2.1, the GNU Affero General Public License, Version 3.0, or any later versions of those licenses. 1.13. "Source Code Form" means the form of the work preferred for making modifications. 1.14. "You" (or "Your") means an individual or a legal entity exercising rights under this License. For legal entities, "You" includes any entity that controls, is controlled by, or is under common control with You. For purposes of this definition, "control" means (a) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (b) ownership of more than fifty percent (50%) of the outstanding shares or beneficial ownership of such entity. 2. License Grants and Conditions -------------------------------- 2.1. 
Grants Each Contributor hereby grants You a world-wide, royalty-free, non-exclusive license: (a) under intellectual property rights (other than patent or trademark) Licensable by such Contributor to use, reproduce, make available, modify, display, perform, distribute, and otherwise exploit its Contributions, either on an unmodified basis, with Modifications, or as part of a Larger Work; and (b) under Patent Claims of such Contributor to make, use, sell, offer for sale, have made, import, and otherwise transfer either its Contributions or its Contributor Version. 2.2. Effective Date The licenses granted in Section 2.1 with respect to any Contribution become effective for each Contribution on the date the Contributor first distributes such Contribution. 2.3. Limitations on Grant Scope The licenses granted in this Section 2 are the only rights granted under this License. No additional rights or licenses will be implied from the distribution or licensing of Covered Software under this License. Notwithstanding Section 2.1(b) above, no patent license is granted by a Contributor: (a) for any code that a Contributor has removed from Covered Software; or (b) for infringements caused by: (i) Your and any other third party's modifications of Covered Software, or (ii) the combination of its Contributions with other software (except as part of its Contributor Version); or (c) under Patent Claims infringed by Covered Software in the absence of its Contributions. This License does not grant any rights in the trademarks, service marks, or logos of any Contributor (except as may be necessary to comply with the notice requirements in Section 3.4). 2.4. Subsequent Licenses No Contributor makes additional grants as a result of Your choice to distribute the Covered Software under a subsequent version of this License (see Section 10.2) or under the terms of a Secondary License (if permitted under the terms of Section 3.3). 2.5. Representation Each Contributor represents that the Contributor believes its Contributions are its original creation(s) or it has sufficient rights to grant the rights to its Contributions conveyed by this License. 2.6. Fair Use This License is not intended to limit any rights You have under applicable copyright doctrines of fair use, fair dealing, or other equivalents. 2.7. Conditions Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in Section 2.1. 3. Responsibilities ------------------- 3.1. Distribution of Source Form All distribution of Covered Software in Source Code Form, including any Modifications that You create or to which You contribute, must be under the terms of this License. You must inform recipients that the Source Code Form of the Covered Software is governed by the terms of this License, and how they can obtain a copy of this License. You may not attempt to alter or restrict the recipients' rights in the Source Code Form. 3.2. 
Distribution of Executable Form If You distribute Covered Software in Executable Form then: (a) such Covered Software must also be made available in Source Code Form, as described in Section 3.1, and You must inform recipients of the Executable Form how they can obtain a copy of such Source Code Form by reasonable means in a timely manner, at a charge no more than the cost of distribution to the recipient; and (b) You may distribute such Executable Form under the terms of this License, or sublicense it under different terms, provided that the license for the Executable Form does not attempt to limit or alter the recipients' rights in the Source Code Form under this License. 3.3. Distribution of a Larger Work You may create and distribute a Larger Work under terms of Your choice, provided that You also comply with the requirements of this License for the Covered Software. If the Larger Work is a combination of Covered Software with a work governed by one or more Secondary Licenses, and the Covered Software is not Incompatible With Secondary Licenses, this License permits You to additionally distribute such Covered Software under the terms of such Secondary License(s), so that the recipient of the Larger Work may, at their option, further distribute the Covered Software under the terms of either this License or such Secondary License(s). 3.4. Notices You may not remove or alter the substance of any license notices (including copyright notices, patent notices, disclaimers of warranty, or limitations of liability) contained within the Source Code Form of the Covered Software, except that You may alter any license notices to the extent required to remedy known factual inaccuracies. 3.5. Application of Additional Terms You may choose to offer, and to charge a fee for, warranty, support, indemnity or liability obligations to one or more recipients of Covered Software. However, You may do so only on Your own behalf, and not on behalf of any Contributor. You must make it absolutely clear that any such warranty, support, indemnity, or liability obligation is offered by You alone, and You hereby agree to indemnify every Contributor for any liability incurred by such Contributor as a result of warranty, support, indemnity or liability terms You offer. You may include additional disclaimers of warranty and limitations of liability specific to any jurisdiction. 4. Inability to Comply Due to Statute or Regulation --------------------------------------------------- If it is impossible for You to comply with any of the terms of this License with respect to some or all of the Covered Software due to statute, judicial order, or regulation then You must: (a) comply with the terms of this License to the maximum extent possible; and (b) describe the limitations and the code they affect. Such description must be placed in a text file included with all distributions of the Covered Software under this License. Except to the extent prohibited by statute or regulation, such description must be sufficiently detailed for a recipient of ordinary skill to be able to understand it. 5. Termination -------------- 5.1. The rights granted under this License will terminate automatically if You fail to comply with any of its terms. 
However, if You become compliant, then the rights granted under this License from a particular Contributor are reinstated (a) provisionally, unless and until such Contributor explicitly and finally terminates Your grants, and (b) on an ongoing basis, if such Contributor fails to notify You of the non-compliance by some reasonable means prior to 60 days after You have come back into compliance. Moreover, Your grants from a particular Contributor are reinstated on an ongoing basis if such Contributor notifies You of the non-compliance by some reasonable means, this is the first time You have received notice of non-compliance with this License from such Contributor, and You become compliant prior to 30 days after Your receipt of the notice. 5.2. If You initiate litigation against any entity by asserting a patent infringement claim (excluding declaratory judgment actions, counter-claims, and cross-claims) alleging that a Contributor Version directly or indirectly infringes any patent, then the rights granted to You by any and all Contributors for the Covered Software under Section 2.1 of this License shall terminate. 5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user license agreements (excluding distributors and resellers) which have been validly granted by You or Your distributors under this License prior to termination shall survive termination. ************************************************************************ * * * 6. Disclaimer of Warranty * * ------------------------- * * * * Covered Software is provided under this License on an "as is" * * basis, without warranty of any kind, either expressed, implied, or * * statutory, including, without limitation, warranties that the * * Covered Software is free of defects, merchantable, fit for a * * particular purpose or non-infringing. The entire risk as to the * * quality and performance of the Covered Software is with You. * * Should any Covered Software prove defective in any respect, You * * (not any Contributor) assume the cost of any necessary servicing, * * repair, or correction. This disclaimer of warranty constitutes an * * essential part of this License. No use of any Covered Software is * * authorized under this License except under this disclaimer. * * * ************************************************************************ ************************************************************************ * * * 7. Limitation of Liability * * -------------------------- * * * * Under no circumstances and under no legal theory, whether tort * * (including negligence), contract, or otherwise, shall any * * Contributor, or anyone who distributes Covered Software as * * permitted above, be liable to You for any direct, indirect, * * special, incidental, or consequential damages of any character * * including, without limitation, damages for lost profits, loss of * * goodwill, work stoppage, computer failure or malfunction, or any * * and all other commercial damages or losses, even if such party * * shall have been informed of the possibility of such damages. This * * limitation of liability shall not apply to liability for death or * * personal injury resulting from such party's negligence to the * * extent applicable law prohibits such limitation. Some * * jurisdictions do not allow the exclusion or limitation of * * incidental or consequential damages, so this exclusion and * * limitation may not apply to You. * * * ************************************************************************ 8. 
Litigation ------------- Any litigation relating to this License may be brought only in the courts of a jurisdiction where the defendant maintains its principal place of business and such litigation shall be governed by laws of that jurisdiction, without reference to its conflict-of-law provisions. Nothing in this Section shall prevent a party's ability to bring cross-claims or counter-claims. 9. Miscellaneous ---------------- This License represents the complete agreement concerning the subject matter hereof. If any provision of this License is held to be unenforceable, such provision shall be reformed only to the extent necessary to make it enforceable. Any law or regulation which provides that the language of a contract shall be construed against the drafter shall not be used to construe this License against a Contributor. 10. Versions of the License --------------------------- 10.1. New Versions Mozilla Foundation is the license steward. Except as provided in Section 10.3, no one other than the license steward has the right to modify or publish new versions of this License. Each version will be given a distinguishing version number. 10.2. Effect of New Versions You may distribute the Covered Software under the terms of the version of the License under which You originally received the Covered Software, or under the terms of any subsequent version published by the license steward. 10.3. Modified Versions If you create software not governed by this License, and you want to create a new license for such software, you may create and use a modified version of this License if you rename the license and remove any references to the name of the license steward (except to note that such modified license differs from this License). 10.4. Distributing Source Code Form that is Incompatible With Secondary Licenses If You choose to distribute Source Code Form that is Incompatible With Secondary Licenses under the terms of this version of the License, the notice described in Exhibit B of this License must be attached. Exhibit A - Source Code Form License Notice ------------------------------------------- This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain one at http://mozilla.org/MPL/2.0/. If it is not possible or desirable to put the notice in a particular file, then You may include the notice in a location (such as a LICENSE file in a relevant directory) where a recipient would be likely to look for such a notice. You may add additional accurate notices of copyright ownership. Exhibit B - "Incompatible With Secondary Licenses" Notice --------------------------------------------------------- This Source Code Form is "Incompatible With Secondary Licenses", as defined by the Mozilla Public License, v. 2.0.
0
coqui_public_repos
coqui_public_repos/snakepit/package.json
{ "name": "snakepit", "version": "0.0.1", "description": "Machine learning job scheduler", "bin": { "pit": "src/service.js" }, "scripts": { "start": "node src/service.js", "postinstall": "node node_modules/pegjs/bin/pegjs src/clusterParser.pegjs" }, "repository": { "type": "git", "url": "git+https://github.com/mozilla/snakepit.git" }, "keywords": [ "machine", "learning", "job", "scheduler", "daemon", "cli", "cuda", "gpu", "cluster", "worker", "node", "js" ], "author": "Tilman Kamp", "license": "MPL-2.0", "bugs": { "url": "https://github.com/mozilla/snakepit/issues" }, "homepage": "https://github.com/mozilla/snakepit#readme", "dependencies": { "assign-deep": "^1.0.1", "async": "^2.6.1", "async-parallel": "^1.2.3", "axios": "^0.18.1", "bcrypt": "^3.0.6", "body-parser": "^1.18.3", "buffer-serializer": "^1.1.0", "commander": "^2.14.1", "express": "^4.16.2", "express-promise-router": "^3.0.3", "fs-extra": "^7.0.1", "js-yaml": "^3.12.0", "jsonwebtoken": "^8.2.2", "morgan": "^1.9.0", "multi-integer-range": "^4.0.4", "parse-duration": "^0.1.1", "pegjs": "^0.10.0", "pg": "^7.7.1", "range-parser": "^1.2.0", "sequelize": ">=5.3.0", "tail": "^2.0.2", "uuid": "^8.3.0", "ws": "^6.2.0" } }
0
coqui_public_repos/STT/native_client/dotnet
coqui_public_repos/STT/native_client/dotnet/STTWPF/STT.WPF.sln
 Microsoft Visual Studio Solution File, Format Version 12.00 # Visual Studio Version 17 VisualStudioVersion = 17.2.32630.192 MinimumVisualStudioVersion = 10.0.40219.1 Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "STT.WPF", "STT.WPF.csproj", "{54BFD766-4305-4F4C-BA59-AF45505DF3C1}" EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "STTClient", "..\STTClient\STTClient.csproj", "{56DE4091-BBBE-47E4-852D-7268B33B971F}" EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU Debug|x64 = Debug|x64 Release|Any CPU = Release|Any CPU Release|x64 = Release|x64 EndGlobalSection GlobalSection(ProjectConfigurationPlatforms) = postSolution {54BFD766-4305-4F4C-BA59-AF45505DF3C1}.Debug|Any CPU.ActiveCfg = Debug|x64 {54BFD766-4305-4F4C-BA59-AF45505DF3C1}.Debug|Any CPU.Build.0 = Debug|x64 {54BFD766-4305-4F4C-BA59-AF45505DF3C1}.Debug|x64.ActiveCfg = Debug|x64 {54BFD766-4305-4F4C-BA59-AF45505DF3C1}.Debug|x64.Build.0 = Debug|x64 {54BFD766-4305-4F4C-BA59-AF45505DF3C1}.Release|Any CPU.ActiveCfg = Release|x64 {54BFD766-4305-4F4C-BA59-AF45505DF3C1}.Release|Any CPU.Build.0 = Release|x64 {54BFD766-4305-4F4C-BA59-AF45505DF3C1}.Release|x64.ActiveCfg = Release|x64 {54BFD766-4305-4F4C-BA59-AF45505DF3C1}.Release|x64.Build.0 = Release|x64 {56DE4091-BBBE-47E4-852D-7268B33B971F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {56DE4091-BBBE-47E4-852D-7268B33B971F}.Debug|Any CPU.Build.0 = Debug|Any CPU {56DE4091-BBBE-47E4-852D-7268B33B971F}.Debug|x64.ActiveCfg = Debug|Any CPU {56DE4091-BBBE-47E4-852D-7268B33B971F}.Debug|x64.Build.0 = Debug|Any CPU {56DE4091-BBBE-47E4-852D-7268B33B971F}.Release|Any CPU.ActiveCfg = Release|Any CPU {56DE4091-BBBE-47E4-852D-7268B33B971F}.Release|Any CPU.Build.0 = Release|Any CPU {56DE4091-BBBE-47E4-852D-7268B33B971F}.Release|x64.ActiveCfg = Release|x64 {56DE4091-BBBE-47E4-852D-7268B33B971F}.Release|x64.Build.0 = Release|x64 EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE EndGlobalSection GlobalSection(ExtensibilityGlobals) = postSolution SolutionGuid = {19C58802-CCEC-4FD1-8D17-A6EB766116F7} EndGlobalSection EndGlobal
0
coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src/include/fst
coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src/include/fst/script/prune.h
// See www.openfst.org for extensive documentation on this weighted // finite-state transducer library. #ifndef FST_SCRIPT_PRUNE_H_ #define FST_SCRIPT_PRUNE_H_ #include <tuple> #include <utility> #include <fst/prune.h> #include <fst/script/fst-class.h> #include <fst/script/weight-class.h> namespace fst { namespace script { using PruneArgs1 = std::tuple<const FstClass &, MutableFstClass *, const WeightClass &, int64, float>; template <class Arc> void Prune(PruneArgs1 *args) { using Weight = typename Arc::Weight; const Fst<Arc> &ifst = *(std::get<0>(*args).GetFst<Arc>()); MutableFst<Arc> *ofst = std::get<1>(*args)->GetMutableFst<Arc>(); const auto weight_threshold = *(std::get<2>(*args).GetWeight<Weight>()); Prune(ifst, ofst, weight_threshold, std::get<3>(*args), std::get<4>(*args)); } using PruneArgs2 = std::tuple<MutableFstClass *, const WeightClass &, int64, float>; template <class Arc> void Prune(PruneArgs2 *args) { using Weight = typename Arc::Weight; MutableFst<Arc> *fst = std::get<0>(*args)->GetMutableFst<Arc>(); const auto weight_threshold = *(std::get<1>(*args).GetWeight<Weight>()); Prune(fst, weight_threshold, std::get<2>(*args), std::get<3>(*args)); } void Prune(const FstClass &ifst, MutableFstClass *ofst, const WeightClass &weight_threshold, int64 state_threshold = kNoStateId, float delta = kDelta); void Prune(MutableFstClass *fst, const WeightClass &weight_threshold, int64 state_threshold = kNoStateId, float delta = kDelta); } // namespace script } // namespace fst #endif // FST_SCRIPT_PRUNE_H_
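// A minimal usage sketch of the scripted interface declared above
// (illustrative only; "in.fst" is a hypothetical input file and the threshold
// string "3" is an arbitrary weight value):
//
//   namespace s = fst::script;
//   std::unique_ptr<s::FstClass> ifst(s::FstClass::Read("in.fst"));
//   s::VectorFstClass ofst(ifst->ArcType());
//   const s::WeightClass threshold(ifst->WeightType(), "3");
//   s::Prune(*ifst, &ofst, threshold);  // default state_threshold and delta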
0
coqui_public_repos/STT-examples/uwp
coqui_public_repos/STT-examples/uwp/STTUWP/App.xaml.cs
using System; using System.Collections.Generic; using System.IO; using System.Linq; using System.Runtime.InteropServices.WindowsRuntime; using Windows.ApplicationModel; using Windows.ApplicationModel.Activation; using Windows.Foundation; using Windows.Foundation.Collections; using Windows.UI.ViewManagement; using Windows.UI.Xaml; using Windows.UI.Xaml.Controls; using Windows.UI.Xaml.Controls.Primitives; using Windows.UI.Xaml.Data; using Windows.UI.Xaml.Input; using Windows.UI.Xaml.Media; using Windows.UI.Xaml.Navigation; namespace STTUWP { /// <summary> /// Provides application-specific behavior to supplement the default Application class. /// </summary> sealed partial class App : Application { /// <summary> /// Initializes the singleton application object. This is the first line of authored code /// executed, and as such is the logical equivalent of main() or WinMain(). /// </summary> public App() { this.InitializeComponent(); this.Suspending += OnSuspending; } /// <summary> /// Invoked when the application is launched normally by the end user. Other entry points /// will be used such as when the application is launched to open a specific file. /// </summary> /// <param name="e">Details about the launch request and process.</param> protected override void OnLaunched(LaunchActivatedEventArgs e) { Frame rootFrame = Window.Current.Content as Frame; // Do not repeat app initialization when the Window already has content, // just ensure that the window is active if (rootFrame == null) { // Create a Frame to act as the navigation context and navigate to the first page rootFrame = new Frame(); rootFrame.NavigationFailed += OnNavigationFailed; if (e.PreviousExecutionState == ApplicationExecutionState.Terminated) { //TODO: Load state from previously suspended application } // Place the frame in the current Window Window.Current.Content = rootFrame; } if (e.PrelaunchActivated == false) { if (rootFrame.Content == null) { // When the navigation stack isn't restored navigate to the first page, // configuring the new page by passing required information as a navigation // parameter rootFrame.Navigate(typeof(MainPage), e.Arguments); } ApplicationView.PreferredLaunchViewSize = new Size(760, 640); ApplicationView.PreferredLaunchWindowingMode = ApplicationViewWindowingMode.PreferredLaunchViewSize; // Ensure the current window is active Window.Current.Activate(); } } /// <summary> /// Invoked when Navigation to a certain page fails /// </summary> /// <param name="sender">The Frame which failed navigation</param> /// <param name="e">Details about the navigation failure</param> void OnNavigationFailed(object sender, NavigationFailedEventArgs e) { throw new Exception("Failed to load Page " + e.SourcePageType.FullName); } /// <summary> /// Invoked when application execution is being suspended. Application state is saved /// without knowing whether the application will be terminated or resumed with the contents /// of memory still intact. /// </summary> /// <param name="sender">The source of the suspend request.</param> /// <param name="e">Details about the suspend request.</param> private void OnSuspending(object sender, SuspendingEventArgs e) { var deferral = e.SuspendingOperation.GetDeferral(); //TODO: Save application state and stop any background activity deferral.Complete(); } } }
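// A hedged sketch of what the OnSuspending TODO above could look like (the
// LocalSettings key name "lastPage" is illustrative, not part of this project):
//
//   var settings = Windows.Storage.ApplicationData.Current.LocalSettings;
//   settings.Values["lastPage"] =
//       (Window.Current.Content as Frame)?.CurrentSourcePageType?.FullName;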
0
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.9-win/src/include
coqui_public_repos/STT/native_client/ctcdecode/third_party/openfst-1.6.9-win/src/include/fst/expectation-weight.h
// See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.
//
// Expectation semiring as described by Jason Eisner:
// See: doi=10.1.1.22.9398
// Multiplex semiring operations and identities:
//    One: <One, Zero>
//    Zero: <Zero, Zero>
//    Plus: <a1, b1> + <a2, b2> = < (a1 + a2) , (b1 + b2) >
//    Times: <a1, b1> * <a2, b2> = < (a1 * a2) , [(a1 * b2) + (a2 * b1)] >
//    Division: Undefined (currently)
//
// Usually used to store the pair <probability, random_variable> so that
// ShortestDistance[Fst<ArcTpl<ExpectationWeight<P, V>>>]
//    == < PosteriorProbability, Expected_Value[V] >

#ifndef FST_EXPECTATION_WEIGHT_H_
#define FST_EXPECTATION_WEIGHT_H_

#include <string>

#include <fst/log.h>

#include <fst/pair-weight.h>
#include <fst/product-weight.h>


namespace fst {

// X1 is usually a probability weight like LogWeight.
// X2 is usually a random variable or vector (see SignedLogWeight or
// SparsePowerWeight).
//
// If X1 is distinct from X2, it is required that there is an external product
// between X1 and X2, and if both semirings are commutative, or left or right
// semirings, then the result must have those properties.
template <class X1, class X2>
class ExpectationWeight : public PairWeight<X1, X2> {
 public:
  using PairWeight<X1, X2>::Value1;
  using PairWeight<X1, X2>::Value2;

  using PairWeight<X1, X2>::Reverse;
  using PairWeight<X1, X2>::Quantize;
  using PairWeight<X1, X2>::Member;

  using ReverseWeight = ExpectationWeight<typename X1::ReverseWeight,
                                          typename X2::ReverseWeight>;

  ExpectationWeight() : PairWeight<X1, X2>(Zero()) {}

  ExpectationWeight(const ExpectationWeight &weight)
      : PairWeight<X1, X2>(weight) {}

  explicit ExpectationWeight(const PairWeight<X1, X2> &weight)
      : PairWeight<X1, X2>(weight) {}

  ExpectationWeight(const X1 &x1, const X2 &x2) : PairWeight<X1, X2>(x1, x2) {}

  static const ExpectationWeight &Zero() {
    static const ExpectationWeight zero(X1::Zero(), X2::Zero());
    return zero;
  }

  static const ExpectationWeight &One() {
    static const ExpectationWeight one(X1::One(), X2::Zero());
    return one;
  }

  static const ExpectationWeight &NoWeight() {
    static const ExpectationWeight no_weight(X1::NoWeight(), X2::NoWeight());
    return no_weight;
  }

  static const string &Type() {
    static const string *const type =
        new string("expectation_" + X1::Type() + "_" + X2::Type());
    return *type;
  }

  PairWeight<X1, X2> Quantize(float delta = kDelta) const {
    // Forward the caller's delta instead of silently dropping it.
    return ExpectationWeight(PairWeight<X1, X2>::Quantize(delta));
  }

  ReverseWeight Reverse() const {
    return ReverseWeight(PairWeight<X1, X2>::Reverse());
  }

  bool Member() const { return PairWeight<X1, X2>::Member(); }

  static constexpr uint64_t Properties() {
    return X1::Properties() & X2::Properties() &
           (kLeftSemiring | kRightSemiring | kCommutative | kIdempotent);
  }
};

template <class X1, class X2>
inline ExpectationWeight<X1, X2> Plus(const ExpectationWeight<X1, X2> &w1,
                                      const ExpectationWeight<X1, X2> &w2) {
  return ExpectationWeight<X1, X2>(Plus(w1.Value1(), w2.Value1()),
                                   Plus(w1.Value2(), w2.Value2()));
}

template <class X1, class X2>
inline ExpectationWeight<X1, X2> Times(const ExpectationWeight<X1, X2> &w1,
                                       const ExpectationWeight<X1, X2> &w2) {
  return ExpectationWeight<X1, X2>(
      Times(w1.Value1(), w2.Value1()),
      Plus(Times(w1.Value1(), w2.Value2()), Times(w1.Value2(), w2.Value1())));
}

template <class X1, class X2>
inline ExpectationWeight<X1, X2> Divide(const ExpectationWeight<X1, X2> &w1,
                                        const ExpectationWeight<X1, X2> &w2,
                                        DivideType typ = DIVIDE_ANY) {
  FSTERROR() << "ExpectationWeight::Divide: Not implemented";
  return ExpectationWeight<X1, X2>::NoWeight();
}

// This function object generates weights by calling the underlying generators
// for the template weight types, like all other pair weight types. This is
// intended primarily for testing.
template <class X1, class X2>
class WeightGenerate<ExpectationWeight<X1, X2>>
    : public WeightGenerate<PairWeight<X1, X2>> {
 public:
  using Weight = ExpectationWeight<X1, X2>;
  using Generate = WeightGenerate<PairWeight<X1, X2>>;

  explicit WeightGenerate(bool allow_zero = true) : Generate(allow_zero) {}

  Weight operator()() const { return Weight(Generate::operator()()); }
};

}  // namespace fst

#endif  // FST_EXPECTATION_WEIGHT_H_
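// A hedged worked example of the pair operations documented at the top of this
// header (the LogWeight values are illustrative):
//
//   using EW = fst::ExpectationWeight<fst::LogWeight, fst::LogWeight>;
//   EW a(fst::LogWeight(0.5), fst::LogWeight(1.0));
//   EW b(fst::LogWeight(0.7), fst::LogWeight(2.0));
//   EW sum = Plus(a, b);    // <a1 (+) a2, b1 (+) b2>
//   EW prod = Times(a, b);  // <a1 (*) a2, a1 (*) b2 (+) a2 (*) b1>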
0
coqui_public_repos
coqui_public_repos/STT/GRAPH_VERSION
6
0
coqui_public_repos/snakepit-client
coqui_public_repos/snakepit-client/src/pit.js
#! /usr/bin/env node const fs = require('fs-extra') const os = require('os') const net = require('net') const url = require('url') const tmp = require('tmp') const path = require('path') const zlib = require('zlib') const tar = require('tar-fs') const ignore = require('ignore') const WebSocket = require('ws') const websocket = require('websocket-stream') const request = require('request') const filesize = require('filesize') const program = require('commander') const multiplex = require('multiplex') const ProgressBar = require('progress') const randomstring = require("randomstring") const readlineSync = require('readline-sync') const { spawn, execFileSync } = require('child_process') const USER_FILE = '.pituser.txt' const CONNECT_FILE = '.pitconnect.txt' const REQUEST_FILE = '.pitrequest.txt' const JOB_ARCHIVE = '.job-upload.tar.gz' const JOB_COMMAND = '.job-upload.json' const githubGitPrefix = 'git@github.com:' const githubHttpsPrefix = 'https://github.com/' var globalunmount function fail(message) { console.error('Command failed: ' + message) process.exit(1) } function promptUserInfo(user) { user = user || {} if (!user.fullname) { user.fullname = readlineSync.question('Full name: ') } if (!user.email) { user.email = readlineSync.questionEMail('E-Mail address: ') } if (!user.password) { user.password = readlineSync.questionNewPassword('New password: ') } return user } function promptNodeInfo(node) { node = node || {} if (!node.endpoint) { node.endpoint = readlineSync.question('LXD endpoint: ') } if (!node.password) { node.password = readlineSync.question('LXD endpoint password: ', { hideEchoBack: true }) } return node } function promptAliasInfo(alias) { alias = alias || {} if (!alias.name) { alias.name = readlineSync.question('Exact resource name: ') } return alias } function promptGroupInfo(group) { group = group || {} if (!group.title) { group.title = readlineSync.question('Group title: ') } return group } function callPit(verb, resource, content, callback, callOptions) { if (content instanceof Function) { callOptions = callback callback = content content = undefined } var connectFile = CONNECT_FILE if(!fs.existsSync(connectFile)) { connectFile = path.join(os.homedir(), connectFile) if(!fs.existsSync(connectFile)) { console.error('Unable to find connectivity info about your pit.') console.error( 'If you know your pit\'s URL, ' + 'use "pit connect <URL>" to configure the connection.' ) console.error( 'If your pit admin provided a "' + CONNECT_FILE + '" file, place it either in your home directory ' + '(as default pit) or the (overruling) project root.' 
) process.exit(1) } } var connectContent = fs.readFileSync(connectFile, 'utf-8').split('\n') var pitUrl = connectContent[0] connectContent.shift() var agentOptions = null if (connectContent.length > 0) { agentOptions = { ca: connectContent.join('\n') } } var userFile = USER_FILE var username var token = '' function sendRequest(verb, resource, content, callback, callOptions) { if (content instanceof Function) { callOptions = callback callback = content content = undefined } let headers = { 'X-Auth-Token': token, 'Content-Type': 'application/json' } if (callOptions && callOptions.offset) { headers['Range'] = 'bytes=' + callOptions.offset + '-' } if (callOptions && callOptions.headers) { headers = Object.assign(headers, callOptions.headers) } let creqoptions = { url: pitUrl + '/' + resource, agentOptions: agentOptions, headers: headers } if (content && (typeof content.pipe != 'function')) { creqoptions.body = JSON.stringify(content) } let creq = request[verb](creqoptions) .on('error', err => fail('Unable to reach pit: ' + err.code)) .on('response', res => { if (res.statusCode === 401) { var password = readlineSync.question('Please enter password: ', { hideEchoBack: true }) authenticate( username, password, () => sendRequest(verb, resource, content, callback, callOptions) ) } else if (callOptions && callOptions.asStream) { callback(res.statusCode, creq) } else { let chunks = [] creq.on('data', chunk => chunks.push(chunk)) creq.on('end', () => { let body = Buffer.concat(chunks) let contentType = res.headers['content-type'] if (contentType && contentType.startsWith('application/json')) { try { body = JSON.parse(body.toString()) } catch (ex) { fail('Problem parsing pit response.') } } callback(res.statusCode, body) }) } }) if (content && (typeof content.pipe == 'function')) { content.pipe(creq) } } function authenticate(username, password, callback) { sendRequest('post', 'users/' + username + '/authenticate', { password: password }, function(code, body) { if (code == 200) { token = body.token fs.writeFile(userFile, username + '\n' + token, { mode: parseInt('600', 8) }, function(err) { if(err) { console.error('Unable to store user info: ' + err) process.exit(1) } else { if (callback instanceof Function) { callback() } } }) } else { console.error( 'Unable to authenticate. If user "' + username + '" is not valid anymore, remove "' + USER_FILE + '" from this directory or your home folder and start over.' ) process.exit(1) } }) } function loadUser() { var userContent = fs.readFileSync(userFile, 'utf-8').split('\n') username = userContent[0] token = userContent[1] } function sendCommand() { if (verb == 'connection') { callback({ url: pitUrl, token: token, ca: agentOptions && agentOptions.ca, user: username }) } else { sendRequest(verb, resource, content, callback, callOptions) } } if(!fs.existsSync(userFile)) { userFile = path.join(os.homedir(), userFile) if(!fs.existsSync(userFile)) { userFile = USER_FILE console.log('No user info found. 
Seems like a new user or first time login from this machine.') username = readlineSync.question('Please enter an existing or new username: ') var userPath = 'users/' + username sendRequest('get', userPath + '/exists', function(code, body) { if (code == 200) { console.log('The user already exists.') var password = readlineSync.question( 'Please enter password (or Ctrl-C to abort): ', { hideEchoBack: true } ) authenticate(username, password, sendCommand) } else { console.log('Found no user of that name.') var register = readlineSync.question( 'Do you want to register this username (yN)? ', { trueValue: ['yes', 'y'] } ) if (register === true) { let user = promptUserInfo() sendRequest('put', userPath, user, function(code, body) { if (code == 200) { authenticate(username, user.password, sendCommand) } else { console.error('Unable to register user.') process.exit(1) } }) } else { process.exit(0) } } }) } else { loadUser() sendCommand() } } else { loadUser() sendCommand() } } function getConnectionSettings(callback) { callPit('connection', null, null, callback) } const jobStates = { NEW: 0, PREPARING: 1, WAITING: 2, STARTING: 3, RUNNING: 4, STOPPING: 5, CLEANING: 6, DONE: 7, FAILED: 8 } const jobStateNames = [ 'NEW', 'PRE', 'WAI', 'STA', 'RUN', 'STO', 'CLN', 'FIN', 'ARC' ] const nodeStateNames = [ 'OFFLINE', 'ONLINE' ] const indent = ' ' const entityUser = 'user:<username>' const entityGroup = 'group:<group name>' const entityNode = 'node:<node name>' const entityJob = 'job:<job number>' const entityAlias = 'alias:<alias>' const entityDescriptors = { 'user': { 'id': 'Username', 'fullname': 'Full name', 'email': 'E-Mail address', 'groups': (o, v) => v && ['Groups', v.join(' ')], 'autoshare': (o, v) => v && ['Auto share', v.join(' ')], 'admin': 'Is administrator' }, 'node': { 'id': 'Node name', 'address': 'Address', 'online': (o, v) => ['State', v ? 'ONLINE' : 'OFFLINE'], 'since': 'Since', 'resources': (o, v) => v && [ 'Resources', '\n' + v.map((r, i) => ' ' + i + ': "' + r.name + '"' + (r.alias ? ' aka "' + r.alias + '"' : '') + ' (' + r.type + ' ' + r.index + ')' + (r.groups ? ' - Groups: ' + r.groups.join(' ') : '') ).join('\n') ] }, 'job': { 'id': 'Job number', 'continueJob': 'Continued job', 'description': 'Title', 'user': 'Owner', 'groups': (o, v) => v && ['Groups', v.join(' ')], 'error': (o, v) => v && ['Error', '"' + v + '"'], 'provisioning': 'Provisioning', 'resources': 'Resources', 'utilComp': (o, v) => v && ['Util. GPU', Math.round(v * 100.0) + ' %'], 'utilMem': (o, v) => v && ['Util. memory', Math.round(v * 100.0) + ' %'], 'state': (o, v) => ['State', jobStateNames[v] + (v == jobStates.WAITING ? ' (position ' + o.schedulePosition + ')' : '')], 'processes': (o, v) => v && [ 'Processes', '\n' + v.map(p => ' [' + p.groupIndex + ', ' + p.processIndex + ']: Status code: ' + p.status + (p.result ? (' - ' + p.result) : '')).join('\n') ], 'stateChanges': (o, v) => v && [ 'State changes', '\n' + v.map(sc => ' ' + jobStateNames[sc.state] + ': ' + sc.since + (sc.reason ? 
(' - ' + sc.reason) : '')).join('\n') ] }, 'alias': { 'id': 'Alias', 'name': 'For' }, 'group': { 'id': 'Name', 'title': 'Title' } } const httpCodes = { 400: 'Bad Request', 401: 'Unauthorized', 402: 'Payment Required', 403: 'Forbidden', 404: 'Not Found', 405: 'Method Not Allowed', 406: 'Not Acceptable', 407: 'Proxy Authentication Required', 408: 'Request Time-out', 409: 'Conflict', 410: 'Gone', 411: 'Length Required', 412: 'Precondition Failed', 413: 'Request Entity Too Large', 414: 'Request-URI Too Large', 415: 'Unsupported Media Type', 416: 'Requested Range Not Satisfiable', 417: 'Expectation Failed', 418: 'I\'m a teapot', 422: 'Unprocessable Entity', 423: 'Locked', 424: 'Failed Dependency', 425: 'Unordered Collection', 426: 'Upgrade Required', 428: 'Precondition Required', 429: 'Too Many Requests', 431: 'Request Header Fields Too Large', 500: 'Internal Server Error', 501: 'Not Implemented', 502: 'Bad Gateway', 503: 'Service Unavailable', 504: 'Gateway Time-out', 505: 'HTTP Version Not Supported', 506: 'Variant Also Negotiates', 507: 'Insufficient Storage', 509: 'Bandwidth Limit Exceeded', 510: 'Not Extended', 511: 'Network Authentication Required' } function printLine(msg) { console.log(msg ? (indent + (msg || '')) : '') } function printIntro() { printLine() printLine(indent + 'Examples:') printLine() } function printEntityHelp() { printLine('Accepted values for "entity": ' + Array.prototype.slice.call(arguments).join(', ') + '.') } function printJobNumberHelp() { printLine('"jobNumber": Number of the targeted job') } function printPropertyHelp() { printLine('Properties are pairs of property-name and value of the form "property=value".') } function printUserPropertyHelp() { printLine('User properties: "fullname", "email", "password" (prompted if omitted), "admin" ("yes" or "no").') } function printNodePropertyHelp() { printLine('Node properties: "address" (mandatory), "port", "minPort", "maxPort", "cvd" (CUDA_VISIBLE_DEVICES), "user".') } function printAliasPropertyHelp() { printLine('alias properties: "name".') } function printExample(line) { printLine(indent + '$ ' + line) } function splitPair(value, separator, ...names) { var obj = {} var parts = value.split(separator) for (let index in parts) { obj[names[index]] = parts[index] } return obj } function parseEntity(entity, indexAllowed) { let pair = splitPair(entity, ':', 'type', 'id', 'index') pair.plural = (pair.type == 'alias') ? 'aliases' : (pair.type + 's') if (!indexAllowed && pair.hasOwnProperty('index')) { fail('Indices not allowed for ' + pair.type + ' entities') } return pair } function parseAssignment(assignment) { return splitPair(assignment, '=', 'property', 'value') } function parseEntityProperties(entity, properties) { let obj = {} if (properties) { properties.forEach(assignment => { assignment = parseAssignment(assignment) if (assignment.property == 'cvd') { assignment.value = assignment.value.split(',').map(v => Number(v)) } else if (assignment.property == 'autoshare') { assignment.value = assignment.value.split(',') } obj[assignment.property] = assignment.value }) } return obj } function formatDuration(d) { let two = n => ('0' + n).slice(-2) return d.days > 99 ? 
d.days + 'd' : two(d.days) + 'd ' + two(d.hours) + ':' + two(d.minutes) + ':' + two(d.seconds) } function evaluateResponse(code, body) { if (code > 299) { fail((body && body.message) || httpCodes[code] || code) } } function _runCommand(args, exitOnError) { let file = args.shift() let options = { encoding: 'utf8' } if (!exitOnError) { options.stdio = ['pipe', 'pipe', 'ignore'] } try { return execFileSync(file, args, options).trim() } catch (err) { if (!exitOnError) { return } var message = err.message.includes('ENOENT') ? 'Not found' : err.message fail('Problem executing "' + file + '": ' + message) } } function tryCommand() { return _runCommand(Array.prototype.slice.call(arguments), false) } function runCommand() { return _runCommand(Array.prototype.slice.call(arguments), true) } function showLog(jobNumber) { let logPath = 'jobs/' + jobNumber + '/log' callPit('get', logPath, (code, res) => { evaluateResponse(code) res.on('data', chunk => { process.stdout.write(chunk) }) }, { asStream: true }) } function printJobGroups(groups, asDate) { let fixed = 6 + 3 + (asDate ? 24 : 12) + 3 + 3 + 10 + 40 + 7 let rest = process.stdout.columns if (rest && rest >= fixed) { rest = rest - fixed } else { rest = 30 } writeFragment('JOB', 6, true, ' ') writeFragment('S', 3, true, ' ') writeFragment(asDate ? 'DATE' : 'SINCE', asDate ? 24 : 12, false, ' ') writeFragment('UC%', 3, true, ' ') writeFragment('UM%', 3, true, ' ') writeFragment('USER', 10, false, ' ') writeFragment('TITLE', 40, false, ' ') writeFragment('RESOURCE', rest, false, '\n') let printJobs = (jobs, caption) => { if (jobs.length > 0) { if (caption) { console.log(caption + ':') } for(let job of jobs) { writeFragment(job.id, 6, true, ' ') writeFragment(jobStateNames[job.state], 3, true, ' ') writeFragment(asDate ? job.date : formatDuration(job.since), asDate ? 24 : 12, false, ' ') writeFragment(Math.round(job.utilComp * 100.0), 3, true, ' ') writeFragment(Math.round(job.utilMem * 100.0), 3, true, ' ') writeFragment(job.user, 10, false, ' ') writeFragment(job.description, 40, false, ' ') writeFragment(job.resources, rest, false, '\n') } } } for(let group of groups) { printJobs(group.jobs, group.caption) } } function getEntityPath (entitySpec) { entity = parseEntity(entitySpec) if (entity.type == 'home') { return 'users/~' } if (entity.type == 'group' || entity.type == 'user' || entity.type == 'job') { return '' + entity.plural + '/' + entity.id } if (entity.type == 'shared') { return 'shared' } if (entitySpec.match(/^[0-9]+$/)) { return getEntityPath('job:' + entitySpec); } fail('Unsupported entity type "' + entity.type + '"') } function getResourcePath (remotePath) { return remotePath ? (remotePath.startsWith('/') ? 
remotePath.slice(1) : remotePath) : '' } function createProgressBar (caption, offset, size) { let bar = new ProgressBar(' ' + caption + ' [:bar] :percent :speed :etas', { complete: '=', incomplete: ' ', width: 40, total: size }) bar.tick(offset) let origTick = bar.tick let intervalStart = Date.now() let intervalTicks = 0 let pastTicks = [{time: intervalStart, ticks: 0}] let speed = '' bar.tick = function(ticks) { let now = Date.now() intervalTicks += ticks if (now - intervalStart > 100) { pastTicks.push({time: intervalStart, ticks: intervalTicks}) intervalStart = now intervalTicks = 0 pastTicks = pastTicks.reverse().slice(0, 10).reverse() let transfer = pastTicks.map(t => t.ticks).reduce((t, v) => t + v, 0) let timeDiff = (now - pastTicks[0].time) / 1000 speed = filesize(transfer / timeDiff, {round: 0}) + '/s' } origTick.apply(bar, [ticks, { speed: speed }]) } return bar } function pullContent (entity, remotePath, localPath, options) { options = options || {} let entityPath = getEntityPath(entity) let resource = getResourcePath(remotePath) callPit('get', entityPath + '/simplefs/stats/' + resource, (code, stats) => { evaluateResponse(code) if (stats.isFile) { if (localPath) { let offset = 0 if (fs.existsSync(localPath)) { let localStats = fs.statSync(localPath) if (localStats.isDirectory()) { let rname = remotePath.substring(remotePath.lastIndexOf('/') + 1) if (rname.length > 0) { localPath = path.join(localPath, rname) } else { fail('Cannot construct target filename.') } } else if (localStats.isFile()) { if (options.force) { console.error('Target file existing: Re-downloading...') } else if (localStats.size > stats.size) { fail('Larger local file already existing. Remove it or use force option to overwrite.') } else if (options.continue) { console.error('Local file already existing and smaller than remote file: Continuing download...') offset = localStats.size } else { let answer = readlineSync.question('Remote file larger than local one. Continue interrupted download (yN)? ', { trueValue: ['y', 'yes'] }) if (answer === true) { offset = localStats.size } else { fail('Aborted') } } } else { fail('Target path is neither a directory nor a file.') } } else { let dirname = path.dirname(localPath) if (fs.existsSync(dirname)) { if (!fs.statSync(dirname).isDirectory()) { fail('Specified target directory is not a directory.') } } else { fail('Target directory not existing.') } } if (localStats.size === stats.size && !options.force) { console.error('Local file of same size already existing: Skipped download.') options.callback && options.callback() } else { callPit('get', entityPath + '/simplefs/content/' + resource, (code, res) => { evaluateResponse(code) let bar = createProgressBar('downloading', offset, stats.size) res.on('data', buf => bar.tick(buf.length)) let target = fs.createWriteStream(localPath, {flags: offset > 0 ? 
'a' : 'w'}) res.pipe(target) options.callback && target.on('finish', options.callback) }, { asStream: true, headers: { 'Range': 'bytes=' + offset + '-' } }) } } else { callPit('get', entityPath + '/simplefs/content/' + resource, (code, res) => { evaluateResponse(code) res.pipe(process.stdout) options.callback && process.stdout.on('finish', options.callback) }, { asStream: true }) } } else { fail('Command only supports file transfers.') } }) } function pushContent (entity, remotePath, localPath, options) { options = options || {} let entityPath = getEntityPath(entity) let resource = getResourcePath(remotePath) let localStats let size = 0 if (localPath) { if (fs.existsSync(localPath)) { localStats = fs.statSync(localPath) size = localStats.size } else { fail('Source file not found.') } } let transferContent = (offset) => { let targetPath = entityPath + '/simplefs/content/' + resource if (localStats) { let stream = fs.createReadStream(localPath, { start: offset }) let bar = createProgressBar('uploading', offset, size) stream.on('data', buf => bar.tick(buf.length)) callPit('put', targetPath, stream, (code, res) => { evaluateResponse(code) options.callback && options.callback() }, { headers: { 'Content-Type': 'application/octet-stream', 'Content-Offset': offset } }) } else { callPit('put', targetPath, process.stdin, (code, res) => { evaluateResponse(code) options.callback && options.callback() }, { headers: { 'Content-Type': 'application/octet-stream' } }) } } let statsPath = entityPath + '/simplefs/stats/' + resource callPit('get', statsPath, (code, stats) => { if (code === 404) { console.error('Remote file not existing - creating...') callPit('put', statsPath, { type: 'file' }, (code, res) => { evaluateResponse(code) transferContent(0) }) } else { evaluateResponse(code) if (stats.isFile) { if (stats.size === size) { if (options.force) { console.error('Remote file of same size as the local one - re-uploading...') transferContent(0) } else { options.callback && options.callback() } } else if (stats.size < size) { if (options.continue) { console.error('Remote file smaller than local one - continuing upload...') transferContent(stats.size) } else { if (options.force) { console.error('Remote file existing - re-uploading...') transferContent(0) } else { let answer = readlineSync.question('Remote file smaller than local one. Continue interrupted upload (yN)? 
', { trueValue: ['y', 'yes'] }) if (answer === true) { transferContent(stats.size) } else { fail('Aborted') } } } } else { if (options.force) { console.error('Remote file is larger than local one - re-uploading...') transferContent(0) } else { fail('Remote file is larger than local one.') } } } else { fail('Target path is existing, but not a file.') } } }) } function deleteFromEntity (entity, remotePath, callback) { let entityPath = getEntityPath(entity) let resource = getResourcePath(remotePath) callPit('delete', entityPath + '/simplefs/stats/' + resource, (code) => { evaluateResponse(code) callback && callback() }) } function toWebSocketUrl (httpurl) { let endpoint = url.parse(httpurl) if (endpoint.protocol == 'https:') { endpoint.protocol = 'wss' } else { endpoint.protocol = 'ws' } return url.format(endpoint) } program .version('0.0.1') program .command('add <entity> [properties...]') .description('adds an entity to the system') .on('--help', function() { printIntro() printExample('pit add user:paul email=paul@x.y password=secret') printExample('pit add node:machine1 endpoint=192.168.2.2 password=secret') printExample('pit add alias:gtx1070 name="GeForce GTX 1070"') printExample('pit add group:students title="Students of machine learning department"') printLine() printEntityHelp(entityUser, entityNode, entityAlias, entityGroup) printPropertyHelp() printUserPropertyHelp() printNodePropertyHelp() printAliasPropertyHelp() }) .action(function(entity, properties) { entity = parseEntity(entity) if(entity.type == 'user' || entity.type == 'node' || entity.type == 'alias' || entity.type == 'group') { let obj = parseEntityProperties(entity, properties) if (entity.type == 'user') { obj = promptUserInfo(obj) } else if (entity.type == 'node') { obj = promptNodeInfo(obj) } else if (entity.type == 'alias') { obj = promptAliasInfo(obj) } else { obj = promptGroupInfo(obj) } callPit('put', entity.plural + '/' + entity.id, obj, evaluateResponse) } else { fail('Unknown entity type "' + entity.type + '"') } }) program .command('remove <entity>') .alias('rm') .description('removes an entity from the system') .on('--help', function() { printIntro() printExample('pit remove user:anna') printExample('pit remove node:machine1') printExample('pit remove job:123') printExample('pit remove alias:gtx1070') printExample('pit remove group:students') printLine() printEntityHelp(entityUser, entityNode, entityJob, entityAlias, entityGroup) }) .action(function(entity) { entity = parseEntity(entity) if(entity.type == 'user' || entity.type == 'node' || entity.type == 'job' || entity.type == 'alias' || entity.type == 'group') { callPit('del', entity.plural + '/' + entity.id, evaluateResponse) } else { fail('Unsupported entity type "' + entity.type + '"') } }) program .command('set <entity> <assignments...>') .description('sets properties of an entity') .on('--help', function() { printIntro() printExample('pit set user:paul email=x@y.z fullname="Paul Smith"') printExample('pit set node:machine1 endpoint=192.168.2.1') printExample('pit set alias:gtx1070 name="GeForce GTX 1070"') printExample('pit set group:students title="Different title"') printExample('pit set job:123 autoshare=students,professors') printLine() printEntityHelp(entityUser, entityNode, entityAlias, entityGroup, entityJob) printPropertyHelp() printUserPropertyHelp() printNodePropertyHelp() printAliasPropertyHelp() }) .action(function(entity, assignments) { entity = parseEntity(entity) if(entity.type == 'user' || entity.type == 'node' || entity.type == 'alias' 
|| entity.type == 'group' || entity.type == 'job') { let obj = parseEntityProperties(entity, assignments) callPit('put', entity.plural + '/' + entity.id, obj, evaluateResponse) } else { fail('Unsupported entity type "' + entity.type + '"') } }) program .command('get <entity> <property>') .description('gets a property of an entity') .on('--help', function() { printIntro() printExample('pit get user:anna email') printExample('pit get node:machine1 address') printExample('pit get alias:gtx1070 name') printExample('pit get job:123 autoshare') printLine() printEntityHelp(entityUser, entityNode, entityJob, entityAlias) printPropertyHelp() printUserPropertyHelp() printNodePropertyHelp() printAliasPropertyHelp() }) .action(function(entity, property) { entity = parseEntity(entity) var descriptor = entityDescriptors[entity.type] if(descriptor) { callPit('get', entity.plural + '/' + entity.id, function(code, body) { if (code == 200) { console.log(body[property]) } else { evaluateResponse(code, body) } }) } else { fail('Unsupported entity type "' + entity.type + '"') } }) program .command('show <entity> [params...]') .description('shows info about an entity') .on('--help', function() { printIntro() printExample('pit show me') printExample('pit show users') printExample('pit show groups') printExample('pit show nodes') printExample('pit show aliases') printExample('pit show jobs') printExample('pit show jobs user=jill') printExample('pit show jobs since=4/2010 asc=date title="%test%"') printExample('pit show user:paul') printExample('pit show node:machine1') printExample('pit show job:235') printExample('pit show alias:gtx1070') printExample('pit show group:students') printLine() printEntityHelp('me', 'users', 'groups', 'nodes', 'jobs', 'aliases', entityUser, entityNode, entityJob, entityAlias, entityGroup) printLine() printLine('For "show jobs" the following query parameters (combined by AND) are supported:') printLine(' since=<date value> - shows jobs with a state change date past the provided date') printLine(' till=<date value> - shows jobs with a state change date before the provided date') printLine(' user=<username> - shows jobs owned by the provided user') printLine(' title=<wildcard> - shows jobs whose titles match the provided wildcard') printLine(' asc=<field> - orders jobs ascending by provided field (date|user|title|state)') printLine(' desc=<field> - orders jobs descending by provided field (date|user|title|state)') printLine(' limit=<number> - shows first N results') printLine(' offset=<number> - shows jobs beginning with N-th result') }) .action(function(entity, params, options) { if(entity === 'users' || entity === 'groups' || entity === 'nodes' || entity === 'aliases') { callPit('get', entity, function(code, body) { if (code == 200) { body.forEach(obj => console.log(obj)) } else { evaluateResponse(code, body) } }) } else if(entity === 'jobs') { let obj = parseEntityProperties(entity, params) let query = [] for(let param of Object.keys(obj)) { query.push(encodeURI(param) + '=' + encodeURI(obj[param])) } query = query.length > 0 ? '?' 
+ query.join('&') : '' callPit('get', entity + query, function(code, body) { if (code == 200) { printJobGroups([{ jobs: body }], true) } else { evaluateResponse(code, body) } }) } else { if (entity == 'me') { entity = { type: 'user', plural: 'users', id: '~' } } else { entity = parseEntity(entity) } var descriptor = entityDescriptors[entity.type] if(descriptor) { callPit('get', entity.plural + '/' + entity.id, function(code, body) { if (code == 200) { let attributes = [] let maxLen = 0 for (let property of Object.keys(descriptor)) { let name = descriptor[property] let attribute if (name instanceof Function) { attribute = name(body, body[property]) } else if (body.hasOwnProperty(property)) { attribute = [name, body[property]] } if (attribute) { if (attribute[0].length > maxLen) { maxLen = attribute[0].length } attributes.push(attribute) } } for (let attribute of attributes) { let name = attribute[0] + ':' + Array(maxLen - attribute[0].length + 1).join(' ') console.log(name + ' ' + attribute[1]) } } else { evaluateResponse(code, body) } }) } else { fail('Unsupported entity type "' + entity.type + '"') } } }) program .command('add-group <entity> <group>') .description('adds the entity to the access group') .on('--help', function() { printIntro() printExample('pit add-group node:machine1 professors') printExample('pit add-group node:machine1:0 students') printExample('pit add-group user:anna students') printExample('pit add-group job:123 students') printLine() printEntityHelp(entityUser, entityNode, entityJob, 'node:<node name>:<resource index>') }) .action(function(entity, group) { entity = parseEntity(entity, true) if (entity.type == 'node' || entity.type == 'user' || entity.type == 'job') { let resource = entity.hasOwnProperty('index') ? '/resources/' + entity.index : '' let p = entity.plural + '/' + entity.id + resource + '/groups/' + group callPit('put', p, evaluateResponse) } else { fail('Unsupported entity type "' + entity.type + '"') } }) program .command('remove-group <entity> <group>') .description('removes the entity from the access group') .on('--help', function() { printIntro() printExample('pit remove-group node:machine1 professors') printExample('pit remove-group node:machine1:0 students') printExample('pit remove-group user:paul students') printExample('pit remove-group job:123 students') printLine() printEntityHelp(entityUser, entityNode, entityJob, 'node:<node name>:<resource index>') }) .action(function(entity, group) { entity = parseEntity(entity, true) if (entity.type == 'node' || entity.type == 'user' || entity.type == 'job') { let resource = entity.hasOwnProperty('index') ? 
'/resources/' + entity.index : '' let p = entity.plural + '/' + entity.id + resource + '/groups/' + group callPit('del', p, evaluateResponse) } else { fail('Unsupported entity type "' + entity.type + '"') } }) program .command('stop <jobNumber>') .description('stops a running job') .on('--help', function() { printIntro() printExample('pit stop 1234') printLine() printJobNumberHelp() }) .action(function(jobNumber) { callPit('post', 'jobs/' + jobNumber + '/stop', evaluateResponse) }) program .command('run <title> [clusterRequest]') .alias('put') .description('enqueues current directory as new job') .option('-p, --private', 'prevents automatic sharing of this job') .option('-a, --archive', 'archive based job upload of current directory') .option('-c, --continue <jobNumber>', 'continues job with provided number by copying its "keep" directory over to the new job') .option('-d, --direct <commands>', 'directly executes provided commands through bash instead of loading .compute file') .option('-l, --log', 'waits for and prints job\'s log output') .on('--help', function() { printIntro() printExample('pit run "My task" 2:[8:gtx1070]') printExample('pit run "My command" [] -d \'hostname; env\'') printLine() printLine('"title" is a short text that will later help identifying the job and its purpose.') printLine('"clusterRequest" is an expression to specify resources this job requires from the cluster.') printLine('It\'s a comma separated list of "process requests".') printLine('Each "process request" specifies the number of process instances and (divided by colon and in braces) which resources to allocate for one process instances (on one node).') printLine('The first example will allocate 2 process instances. For each process, 8 "gtx1070" resources will get allocated.') printLine('You can also provide a "' + REQUEST_FILE + '" file with the same content in your project root as default value.') }) .action(function(title, clusterRequest, options) { getConnectionSettings(connection => { if (!clusterRequest && fs.existsSync(REQUEST_FILE)) { clusterRequest = fs.readFileSync(REQUEST_FILE, 'utf-8').trim() } if (!clusterRequest) { fail('No resources requested from cluster. Please provide them either through command line or through a "' + REQUEST_FILE + '" file in your project root.') } if (title.length > 40) { fail('Job title too long (20 characters max)') } let user = 'user:' + connection.user let job = { clusterRequest: clusterRequest, description: title, private: options.private, continueJob: options.continue, script: options.direct } let sendJob = () => { callPit('post', 'jobs', job, (code, body) => { if (code == 200) { console.log('Job scheduled: ' + body.id) if (job.archive) { deleteFromEntity(user, job.archive) fs.unlink(JOB_ARCHIVE) fs.unlink(JOB_COMMAND) } if (options.log) { console.log() showLog(body.id) } } else { evaluateResponse(code, body) } }) } let printJob = pj => { console.log('- Title: ' + pj.description) console.log('- Resource request: ' + pj.clusterRequest) console.log('- Private: ' + (pj.private ? 
'Yes' : 'No')) pj.continueJob && console.log('- Continues job: ' + pj.continueJob) pj.script && console.log('- Command: ' + pj.script) pj.origin && console.log('- Remote: ' + pj.origin.replace(/\/\/.*@/g, '//')) pj.hash && console.log('- Hash: ' + pj.hash) pj.diff && console.log('- Diff: ' + pj.diff.split('\n').length + ' LoC') } if (!options.archive && fs.existsSync('.git')) { var tracking = tryCommand('git', 'rev-parse', '--abbrev-ref', '--symbolic-full-name', '@{u}') || 'origin/master' var ob = tracking.split('/') if (ob.length != 2) { fail('Problem getting tracked git remote and branch') } var origin = ob[0] var branch = ob[1] var hash = tryCommand('git', 'rev-parse', tracking) if (!hash) { fail('Problem getting remote branch "' + tracking + '"') } var originUrl = runCommand('git', 'remote', 'get-url', '--push', origin) if (originUrl.startsWith(githubGitPrefix)) { originUrl = githubHttpsPrefix + originUrl.substr(githubGitPrefix.length) } var diff = runCommand('git', 'diff', '--no-prefix', tracking) job = Object.assign(job, { origin: originUrl, hash: hash, diff: diff, }) printJob(job) sendJob() } else { console.log('Scheduling job an archive of current directory') console.log() if (fs.existsSync(JOB_ARCHIVE)) { if (fs.existsSync(JOB_COMMAND)) { let incompleteJob = JSON.parse(fs.readFileSync(JOB_COMMAND)) console.log('Found incomplete job upload in current directory (.job-upload*):') printJob(incompleteJob) let answer = readlineSync.question('Continue incomplete upload (y) or delete it and proceed scheduling a new job (N)? ', { trueValue: ['y', 'yes'] }) if (answer) { console.log('Continuing interrupted job upload...') job = incompleteJob } else { console.log('Removing archive of interrupted job upload...') fs.unlinkSync(JOB_ARCHIVE) } } else { fail('Found job upload archive ' + JOB_ARCHIVE + ' without meta data. Please delete or move before continuing.') } } if (!fs.existsSync(JOB_ARCHIVE)) { job.archive = '.upload-' + randomstring.generate() + '.tar.gz' fs.writeFileSync(JOB_COMMAND, JSON.stringify(job)) let tmpFile = tmp.tmpNameSync() let archive = fs.createWriteStream(tmpFile) const ig = ignore() ig.add('.git') ig.add('.pituser.txt') if (fs.existsSync('.gitignore')) { ig.add(fs.readFileSync('.gitignore').toString()) } if (fs.existsSync('.pitignore')) { ig.add(fs.readFileSync('.pitignore').toString()) } tar .pack('.', { ignore: name => { if (ig.ignores(name)) { console.log('Ignoring', name) return true } console.log('Archiving ' + name + '...') } }) .pipe(zlib.Gzip()) .pipe(archive) archive.on('finish', () => { fs.renameSync(tmpFile, JOB_ARCHIVE) pushContent(user, job.archive, JOB_ARCHIVE, { continue: true, callback: sendJob }) }) } else { pushContent(user, job.archive, JOB_ARCHIVE, { continue: true, callback: sendJob }) } } }) }) program .command('log <jobNumber>') .description('show job\'s log') .on('--help', function() { printIntro() printExample('pit log 1234') printLine() printJobNumberHelp() }) .action(jobNumber => showLog(jobNumber)) program .command('exec <jobNumber> -- ...') .usage('[options] <jobNumber> -- cmd arg1 ... 
argN') .description('execute command on a job\'s worker') .option('-w, --worker <workerIndex>', 'index of the target worker (defaults to 0)') .on('--help', function() { printIntro() printExample('pit exec 1234 -- bash') printExample('pit exec 1234 -- ls -la /') printExample('pit exec -w 1 1234 -- cat /data/rw/pit/src/.compute >1234.compute') printLine() printJobNumberHelp() }) .action((jobNumber, options) => { let instance = '' + (options.worker || 0) getConnectionSettings(connection => { let endpoint = toWebSocketUrl(connection.url) let stdin = process.stdin let stdout = process.stdout let stderr = process.stderr let context = JSON.stringify({ command: shellCommand, environment: { TERM: process.env.TERM }, interactive: !!stdin.setRawMode, width: stdout.columns, height: stdout.rows }) let ws = new WebSocket(endpoint + 'jobs/' + jobNumber + '/instances/' + instance + '/exec?context=' + encodeURIComponent(context), { headers: { 'X-Auth-Token': connection.token }, ca: connection.ca }) if (stdin.setRawMode) { stdin.setRawMode(true) stdin.resume() } let buffers = [] stdin.on('data', data => { if ( data === '\u0003' ) { process.exit() } let buffer = Buffer.concat([new Buffer([1]), data]) if (buffers) { buffers.push(buffer) } else { ws.send(buffer) } }) stdout.on('resize', () => { let data = JSON.stringify({ "command": "window-resize", "args": { "width": "" + stdout.columns, "height": "" + stdout.rows } }) ws.send(Buffer.concat([new Buffer([0]), Buffer.from(data)])) }) ws.on('open', () => { for (let buffer of buffers) { ws.send(buffer) } buffers = undefined }) ws.on('message', data => { if (data[0] == 1) { stdout.write(data.slice(1)) } else if (data[0] == 2) { stderr.write(data.slice(1)) } }) ws.on('error', err => fail('Problem opening connection to pit: ' + err)) ws.on('close', () => process.exit(0)) }) }) program .command('forward <jobNumber> [ports...]') .description('forward ports of a job\'s worker to localhost') .option('-w, --worker <workerIndex>', 'index of the target worker (defaults to 0)') .on('--help', function() { printIntro() printExample('pit forward 1234 8080:80 7022:22') printExample('pit forward 1234 8080') printLine() printJobNumberHelp() printLine('"ports": All the ports to forward. 
Each port has to be provided either as one number (local and remote port being the same) or as a colon-separated pair where the first one is the local and the second one the remote counter-part.') }) .action((jobNumber, ports, options) => { let instance = '' + (options.worker || 0) let portPairs = {} for (let port of ports) { let [localPort, remotePort] = port.split(':').map(x => Number(x)) remotePort = remotePort || localPort if (!localPort) { fail('Wrong port pair format') } portPairs[localPort] = remotePort } getConnectionSettings(connection => { let endpoint = toWebSocketUrl(connection.url) let ws = websocket(endpoint + 'jobs/' + jobNumber + '/instances/' + instance + '/forward', { headers: { 'X-Auth-Token': connection.token }, ca: connection.ca }) let mp = multiplex() mp.pipe(ws) ws.pipe(mp) let idc = 0 let onConnection = socket => { let remotePort = portPairs[socket.localPort] let id = idc++ let stream = mp.createStream(id + '-' + remotePort) socket.pipe(stream) stream.pipe(socket) stream.on('error', err => { console.error('Remote', err.message || 'problem'); socket.end() }) } for (let localPort of Object.keys(portPairs)) { let remotePort = portPairs[localPort] console.log('Forwarding port ' + remotePort + ' of worker ' + instance + ' to port ' + localPort + ' on localhost...') let server = net.createServer(onConnection) server.listen(localPort, 'localhost') } console.log('Hit Ctrl-C to stop forwarding.') mp.on('error', err => fail('Problem with remote end - Closing')) ws.on('error', err => fail('Problem opening connection to pit: ' + err)) }) }) program .command('ls <entity> [remotePath]') .description('lists contents within a job directory') .on('--help', function() { printIntro() printExample('pit ls job:1234 sub-dir') printExample('pit ls home') printExample('pit ls group:students path/to/some/group/data') printExample('pit ls shared path/to/some/shared/data') printLine() printLine('"entity" is the entity whose data directory should be accessed') printEntityHelp('home', entityUser, entityJob, entityGroup, 'shared') printLine('"remotePath" is the path to list within the remote data directory.') }) .action((entity, remotePath) => { let entityPath = getEntityPath(entity) let resource = getResourcePath(remotePath) callPit('get', entityPath + '/simplefs/stats/' + resource, (code, stats) => { evaluateResponse(code) if (stats.isFile) { console.log('F ' + resource) } else { callPit('get', entityPath + '/simplefs/content/' + resource, (code, contents) => { evaluateResponse(code) for(let dir of contents.dirs) { console.log('D ' + dir) } for(let file of contents.files) { console.log('F ' + file) } }) } }) }) program .command('pull <entity> <remotePath> [localPath]') .alias('cp') .option('-f, --force', 'will overwrite existing target file if existing - always starting download from scratch') .option('-c, --continue', 'will try to continue interrupted download - starting from scratch, if target is not existing') .description('copies contents from an entity\'s file to a local file or stdout') .on('--help', function() { printIntro() printExample('pit pull job:1234 keep/checkpoint-0001.bin ./checkpoint.bin') printExample('pit pull home data/corpus.data ./corpus.data') printLine() printLine('"entity" is the entity whose data directory should be accessed') printEntityHelp('home', entityUser, entityJob, entityGroup, 'shared') printLine('"remotePath" is the source path within the remote data directory.') printLine('"localPath" is the destination path within the local filesystem. 
If omitted, data will be written to stdout.') }) .action((entity, remotePath, localPath, options) => pullContent(entity, remotePath, localPath, options)) program .command('cat <entity> <remotePath>') .description('copies contents from an entity\'s directory to stdout') .on('--help', function() { printIntro() printExample('pit cat job:1234 keep/results.txt') printExample('pit cat home data/some.txt') printLine() printLine('"entity" is the entity whose data directory should be accessed') printEntityHelp('home', entityUser, entityJob, entityGroup, 'shared') printLine('"remotePath" is the source path within the remote data directory.') }) .action((entity, remotePath) => pullContent(entity, remotePath)) program .command('push <entity> <remotePath> [localPath]') .option('-f, --force', 'will overwrite existing target file if existing - always starting upload from scratch') .option('-c, --continue', 'will try to continue interrupted upload - starting from scratch, if target is not existing') .description('copies contents from stdin or local file system to a file in an entity\'s tree') .on('--help', function() { printIntro() printExample('pit push group:students some/dir/data.bin ./data.bin') printExample('generate-some-data.py | pit push home keeping/some.data') printLine() printLine('"entity" is the entity whose data directory should be targeted') printEntityHelp('home', entityUser, entityGroup) printLine('"remotePath" is the target path within the remote entity\'s directory.') printLine('"localPath" is the path to a source file within the local filesystem. If omitted, data will be read from stdin.') }) .action((entity, remotePath, localPath, options) => pushContent(entity, remotePath, localPath, options)) program .command('mkdir <entity> <remotePath>') .description('creates an entity directory') .on('--help', function() { printIntro() printExample('pit mkdir group:students some/dir') printLine() printLine('"entity" is the entity whose data directory should be targeted') printEntityHelp('home', entityUser, entityGroup) printLine('"remotePath" is the target path within the remote entity\'s tree.') }) .action((entity, remotePath) => { let entityPath = getEntityPath(entity) let resource = getResourcePath(remotePath) callPit('put', entityPath + '/simplefs/stats/' + resource, { type: 'directory' }, (code) => { evaluateResponse(code) }) }) program .command('delete <entity> <remotePath>') .description('deletes a file or directory within an entity\'s tree') .on('--help', function() { printIntro() printExample('pit delete group:students some/dir') printExample('pit delete home some/file.txt') printLine() printLine('"entity" is the entity whose data directory should be targeted') printEntityHelp('home', entityUser, entityGroup) printLine('"remotePath" is the target path within the remote entity\'s tree.') }) .action((entity, remotePath) => deleteFromEntity(entity, remotePath)) program .command('mount <entity> [mountpoint]') .description('mounts the data directory of an entity to a local mountpoint') .option('--shell', 'starts a shell in the mounted directory. 
The mount will be automatically unmounted upon shell exit.') .on('--help', function() { printIntro() printExample('pit mount home') printExample('pit mount user:anna ~/annahome') printExample('pit mount --shell job:1234') printExample('pit mount group:students ./students') printExample('pit mount shared ./shared') printLine() printLine('"entity" is the entity whose data directory will be mounted') printEntityHelp('home', entityUser, entityJob, entityGroup, 'shared') printLine('"mountpoint" is the directory where the data directory will be mounted onto. Has to be empty. If omitted, a temporary directory will be used as mountpoint and automatically deleted on unmounting.') printLine('Home and group directories are write-enabled, all others are read-only.') }) .action((entity, mountpoint, options) => { let httpfs try { httpfs = require('./httpfs.js') } catch (ex) { fail( 'For mounting, package "fuse" has to be installed.\n' + 'Most likely it has been skipped due to missing dependencies.\n' + 'Please consult the following page for system specific requirements:\n' + '\thttps://github.com/mafintosh/fuse-bindings#requirements\n' + 'Once fulfilled, you can either re-install snakepit-client or\n' + 'call again "npm install" within its project root.' ) } getConnectionSettings(connection => { if (mountpoint) { mountpoint = { name: mountpoint, removeCallback: () => {} } } else { mountpoint = tmp.dirSync() } let mountOptions = { headers: { 'X-Auth-Token': connection.token }, cache: true, blocksize: 10 * 1024 * 1024 } if (connection.ca) { mountOptions.certificate = connection.ca } httpfs.mount( connection.url + getEntityPath(entity) + '/fs', mountpoint.name, mountOptions, (err, mount) => { if (err) { fail(err) } let unmount = () => mount.unmount(err => { if (err) { console.error('problem unmounting filesystem:', err) } else { mountpoint.removeCallback() } }) if (options.shell) { console.log('secondary shell: call "exit" to end and unmount') let sh = spawn(process.env.SHELL || 'bash', ['-i'], { stdio: 'inherit', cwd: mountpoint.name }) sh.on('close', unmount) } else { console.log('press Ctrl-C to unmount') globalunmount = unmount } } ) }) }) program .command('status') .description('prints a job status report') .on('--help', function() { printIntro() printExample('pit status') }) .action(function(options) { let updateStatus = () => { callPit('get', 'jobs/status', function(code, jobGroups) { if (code == 200) { printJobGroups([ { jobs: jobGroups.running, caption: 'Running' }, { jobs: jobGroups.waiting, caption: 'Waiting' }, { jobs: jobGroups.done, caption: 'Done' } ]) } else { evaluateResponse(code, jobGroups) } }) } updateStatus() }) program .command('*') .action(function() { fail("unknown command"); }) var argv = process.argv var shellCommand var dashSplitter = argv.indexOf('--') if (dashSplitter >= 0) { shellCommand = argv.slice(dashSplitter + 1) argv = argv.slice(0, dashSplitter) } program.parse(argv) if (!argv.slice(2).length) { program.outputHelp(); } function writeFragment(text, len, right, padding) { text = text + '' text = text.substr(0, len) padding = typeof padding == 'string' ? padding : '' let space = Array(len - text.length + 1).join(' ') text = right ? (space + text) : (text + space) process.stdout.write(text + padding) } function unmount() { if (globalunmount) { console.log('\runmounting...') globalunmount() globalunmount = null } } process.on('SIGINT', () => { unmount() process.exit(0) }) process.on('exit', () => { unmount() })
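// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original CLI): every subcommand above
// follows the same pattern - commander registration plus callPit() and
// evaluateResponse(). The "whoami" command below is hypothetical; it is kept
// commented out because commands must be registered before program.parse(argv)
// runs. The 'users/~' resource is the same "current user" endpoint the
// existing "show me" command uses.
//
// program
//     .command('whoami')
//     .description('prints the username this client is authenticated as')
//     .action(function() {
//         callPit('get', 'users/~', function(code, body) {
//             if (code == 200) {
//                 console.log(body.id)
//             } else {
//                 evaluateResponse(code, body)
//             }
//         })
//     })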
0
coqui_public_repos/inference-engine/third_party/cereal/include/cereal/external
coqui_public_repos/inference-engine/third_party/cereal/include/cereal/external/rapidjson/memorybuffer.h
// Tencent is pleased to support the open source community by making RapidJSON available.
//
// Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. All rights reserved.
//
// Licensed under the MIT License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.

#ifndef CEREAL_RAPIDJSON_MEMORYBUFFER_H_
#define CEREAL_RAPIDJSON_MEMORYBUFFER_H_

#include "stream.h"
#include "internal/stack.h"

CEREAL_RAPIDJSON_NAMESPACE_BEGIN

//! Represents an in-memory output byte stream.
/*!
    This class is mainly for being wrapped by EncodedOutputStream or AutoUTFOutputStream.

    It is similar to FileWriteBuffer but the destination is an in-memory buffer instead of a file.

    Differences between MemoryBuffer and StringBuffer:
    1. StringBuffer has Encoding but MemoryBuffer is only a byte buffer.
    2. StringBuffer::GetString() returns a null-terminated string. MemoryBuffer::GetBuffer() returns a buffer without terminator.

    \tparam Allocator type for allocating memory buffer.
    \note implements Stream concept
*/
template <typename Allocator = CrtAllocator>
struct GenericMemoryBuffer {
    typedef char Ch; // byte

    GenericMemoryBuffer(Allocator* allocator = 0, size_t capacity = kDefaultCapacity) : stack_(allocator, capacity) {}

    void Put(Ch c) { *stack_.template Push<Ch>() = c; }
    void Flush() {}

    void Clear() { stack_.Clear(); }
    void ShrinkToFit() { stack_.ShrinkToFit(); }
    Ch* Push(size_t count) { return stack_.template Push<Ch>(count); }
    void Pop(size_t count) { stack_.template Pop<Ch>(count); }

    const Ch* GetBuffer() const {
        return stack_.template Bottom<Ch>();
    }

    size_t GetSize() const { return stack_.GetSize(); }

    static const size_t kDefaultCapacity = 256;
    mutable internal::Stack<Allocator> stack_;
};

typedef GenericMemoryBuffer<> MemoryBuffer;

//! Implement specialized version of PutN() with memset() for better performance.
template<>
inline void PutN(MemoryBuffer& memoryBuffer, char c, size_t n) {
    std::memset(memoryBuffer.stack_.Push<char>(n), c, n * sizeof(c));
}

CEREAL_RAPIDJSON_NAMESPACE_END

#endif // CEREAL_RAPIDJSON_MEMORYBUFFER_H_
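// ---------------------------------------------------------------------------
// Usage sketch (not part of the original header; illustrative only, so it is
// wrapped in #if 0). GenericMemoryBuffer implements the Stream concept, so it
// can be handed to a Writer just like StringBuffer; the include paths below
// are an assumption about the consumer's include setup.
#if 0
#include <string>
#include "memorybuffer.h"
#include "writer.h"

inline std::string ToJsonBytes() {
    CEREAL_RAPIDJSON_NAMESPACE::MemoryBuffer buf;
    CEREAL_RAPIDJSON_NAMESPACE::Writer<CEREAL_RAPIDJSON_NAMESPACE::MemoryBuffer> writer(buf);
    writer.StartObject();
    writer.Key("ok");
    writer.Bool(true);
    writer.EndObject();
    // GetBuffer() is not null-terminated; always pair it with GetSize().
    return std::string(buf.GetBuffer(), buf.GetSize());
}
#endif
// ---------------------------------------------------------------------------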
0
coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src/include/fst
coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src/include/fst/script/draw.h
// See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.

#ifndef FST_SCRIPT_DRAW_H_
#define FST_SCRIPT_DRAW_H_

#include <ostream>

#include <fst/script/draw-impl.h>
#include <fst/script/fst-class.h>

namespace fst {
namespace script {

// Note: it is safe to pass these strings as references because
// this struct is only used to pass them deeper in the call graph.
// Be sure you understand why this is so before using this struct
// for anything else!
struct FstDrawerArgs {
  const FstClass &fst;
  const SymbolTable *isyms;
  const SymbolTable *osyms;
  const SymbolTable *ssyms;
  const bool accep;
  const string &title;
  const float width;
  const float height;
  const bool portrait;
  const bool vertical;
  const float ranksep;
  const float nodesep;
  const int fontsize;
  const int precision;
  const string &float_format;  // NOLINT
  const bool show_weight_one;
  std::ostream *ostrm;
  const string &dest;

  FstDrawerArgs(const FstClass &fst, const SymbolTable *isyms,
                const SymbolTable *osyms, const SymbolTable *ssyms, bool accep,
                const string &title, float width, float height, bool portrait,
                bool vertical, float ranksep, float nodesep, int fontsize,
                int precision, const string &float_format,
                bool show_weight_one, std::ostream *ostrm, const string &dest)
      : fst(fst),
        isyms(isyms),
        osyms(osyms),
        ssyms(ssyms),
        accep(accep),
        title(title),
        width(width),
        height(height),
        portrait(portrait),
        vertical(vertical),
        ranksep(ranksep),
        nodesep(nodesep),
        fontsize(fontsize),
        precision(precision),
        float_format(float_format),
        show_weight_one(show_weight_one),
        ostrm(ostrm),
        dest(dest) {}
};

template <class Arc>
void DrawFst(FstDrawerArgs *args) {
  const Fst<Arc> &fst = *(args->fst.GetFst<Arc>());
  FstDrawer<Arc> fstdrawer(fst, args->isyms, args->osyms, args->ssyms,
                           args->accep, args->title, args->width, args->height,
                           args->portrait, args->vertical, args->ranksep,
                           args->nodesep, args->fontsize, args->precision,
                           args->float_format, args->show_weight_one);
  fstdrawer.Draw(args->ostrm, args->dest);
}

void DrawFst(const FstClass &fst, const SymbolTable *isyms,
             const SymbolTable *osyms, const SymbolTable *ssyms, bool accep,
             const string &title, float width, float height, bool portrait,
             bool vertical, float ranksep, float nodesep, int fontsize,
             int precision, const string &float_format, bool show_weight_one,
             std::ostream *ostrm, const string &dest);

}  // namespace script
}  // namespace fst

#endif  // FST_SCRIPT_DRAW_H_
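// ---------------------------------------------------------------------------
// Usage sketch (not part of the original header; illustrative only, so it is
// wrapped in #if 0): emitting Graphviz DOT through the scripting-level
// DrawFst declared above. The parameter values mirror common fstdraw
// defaults and are an assumption, not part of this header.
#if 0
#include <ostream>
#include <fst/script/draw.h>

inline void DumpDot(const fst::script::FstClass &f, std::ostream &out) {
  fst::script::DrawFst(f, f.InputSymbols(), f.OutputSymbols(),
                       /*ssyms=*/nullptr, /*accep=*/false, /*title=*/"",
                       /*width=*/8.5, /*height=*/11, /*portrait=*/false,
                       /*vertical=*/false, /*ranksep=*/0.4, /*nodesep=*/0.25,
                       /*fontsize=*/14, /*precision=*/5, /*float_format=*/"g",
                       /*show_weight_one=*/false, &out, /*dest=*/"stdout");
}
#endif
// ---------------------------------------------------------------------------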
0
coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src/extensions
coqui_public_repos/inference-engine/third_party/openfst-1.6.7/src/extensions/compact/compact8_unweighted-fst.cc
// See www.openfst.org for extensive documentation on this weighted
// finite-state transducer library.

#include <fst/fst.h>
#include <fst/compact-fst.h>

namespace fst {

static FstRegisterer<CompactUnweightedFst<StdArc, uint8>>
    CompactUnweightedFst_StdArc_uint8_registerer;

static FstRegisterer<CompactUnweightedFst<LogArc, uint8>>
    CompactUnweightedFst_LogArc_uint8_registerer;

}  // namespace fst
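// Illustrative note (not part of the original source): the static registerers
// above make these compact FST variants loadable by type name at runtime via
// the generic read path. A minimal sketch, assuming a file of a matching type
// exists at the hypothetical path below:
//
//   fst::Fst<fst::StdArc> *f =
//       fst::Fst<fst::StdArc>::Read("model.compact8-unweighted.fst");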
0
coqui_public_repos
coqui_public_repos/TTS/.cardboardlint.yml
linters:
- pylint:
    # pylintrc: pylintrc
    filefilter: ['- test_*.py', '+ *.py', '- *.npy']
    # exclude:
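# Usage note (not part of the original config; assumes the cardboardlint
# package is installed): running
#   cardboardlinter --refspec master
# lints only the lines changed relative to the master branch, applying the
# pylint file filter defined above.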
0