You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
Copy file name to clipboardExpand all lines: notebooks/coding_projects/digital_processing_of_speech_signals/P2_HMM/hmm.bib
+2Lines changed: 2 additions & 0 deletions
Original file line number
Diff line number
Diff line change
@@ -34,3 +34,5 @@ @book{Zhou_2016
34
34
@inproceedings{Zhang_2023,
  title        = {Tractable Control for Autoregressive Language Generation},
  booktitle    = {Proceedings of the 40th International Conference on Machine Learning},
  publisher    = {PMLR},
  author       = {Zhang, Honghua and Dang, Meihua and Peng, Nanyun and Van den Broeck, Guy},
  year         = {2023},
  month        = jul,
  pages        = {40932--40945},
  ISSN         = {2640-3498},
  url          = {https://proceedings.mlr.press/v202/zhang23g.html},
  language     = {en},
  abstractNote = {Despite the success of autoregressive large language models in text generation, it remains a major challenge to generate text that satisfies complex constraints: sampling from the conditional distribution $\Pr(\text{text} \mid \alpha)$ is intractable for even the simplest lexical constraints $\alpha$. To overcome this challenge, we propose to use tractable probabilistic models (TPMs) to impose lexical constraints in autoregressive text generation models, which we refer to as GeLaTo (Generating Language with Tractable Constraints). To demonstrate the effectiveness of this framework, we use distilled hidden Markov models, where we can efficiently compute $\Pr(\text{text} \mid \alpha)$, to guide autoregressive generation from GPT2. GeLaTo achieves state-of-the-art performance on challenging benchmarks for constrained text generation (e.g., CommonGen), beating various strong baselines by a large margin. Our work not only opens up new avenues for controlling large language models but also motivates the development of more expressive TPMs.},
}
35
35
@article{Dang_2021,
  title        = {Juice: A {Julia} Package for Logic and Probabilistic Circuits},
  journal      = {Proceedings of the AAAI Conference on Artificial Intelligence},
  volume       = {35},
  number       = {18},
  author       = {Dang, Meihua and Khosravi, Pasha and Liang, Yitao and Vergari, Antonio and Van den Broeck, Guy},
  year         = {2021},
  month        = may,
  pages        = {16020--16023},
  ISSN         = {2374-3468, 2159-5399},
  DOI          = {10.1609/aaai.v35i18.17999},
  language     = {en},
  abstractNote = {JUICE is an open-source Julia package providing tools for logic and probabilistic reasoning and learning based on logic circuits (LCs) and probabilistic circuits (PCs). It provides a range of efficient algorithms for probabilistic inference queries, such as computing marginal probabilities (MAR), as well as many more advanced queries. Certain structural circuit properties are needed to achieve this tractability, which JUICE helps validate. Additionally, it supports several parameter and structure learning algorithms proposed in the recent literature. By leveraging parallelism (on both CPU and GPU), JUICE provides a fast implementation of circuit-based algorithms, which makes it suitable for tackling large-scale datasets and models.},
}
36
36
@inproceedings{Liu_2022,
  title         = {Scaling Up Probabilistic Circuits by Latent Variable Distillation},
  url           = {https://openreview.net/forum?id=067CGykiZTS},
  author        = {Liu, Anji and Zhang, Honghua and Van den Broeck, Guy},
  year          = {2022},
  month         = sep,
  language      = {en},
  internal-note = {NOTE(review): required booktitle is missing for @inproceedings; OpenReview URL suggests an ICLR submission -- confirm venue and add booktitle},
  abstractNote  = {Probabilistic Circuits (PCs) are a unified framework for tractable probabilistic models that support efficient computation of various probabilistic queries (e.g., marginal probabilities). One key challenge is to scale PCs to model large and high-dimensional real-world datasets: we observe that as the number of parameters in PCs increases, their performance immediately plateaus. This phenomenon suggests that the existing optimizers fail to exploit the full expressive power of large PCs. We propose to overcome such bottleneck by latent variable distillation: we leverage the less tractable but more expressive deep generative models to provide extra supervision over the latent variables of PCs. Specifically, we extract information from Transformer-based generative models to assign values to latent variables of PCs, providing guidance to PC optimizers. Experiments on both image and language modeling benchmarks (e.g., ImageNet and WikiText-2) show that latent variable distillation substantially boosts the performance of large PCs compared to their counterparts without latent variable distillation. In particular, on the image modeling benchmarks, PCs achieve competitive performance against some of the widely-used deep generative models, including variational autoencoders and flow-based models, opening up new avenues for tractable generative modeling. Our code can be found at https://github.com/UCLA-StarAI/LVD.},
}
37
+
@article{Xu_Luo_2021,
  title        = {Human Action Recognition Based on Mixed {Gaussian} Hidden {Markov} Model},
  journal      = {MATEC Web of Conferences},
  volume       = {336},
  author       = {Xu, Jiawei and Luo, Qian},
  editor       = {Barukčić, I.},
  year         = {2021},
  pages        = {06004},
  DOI          = {10.1051/matecconf/202133606004},
  ISSN         = {2261-236X},
  rights       = {https://creativecommons.org/licenses/by/4.0/},
  note         = {4 citations (Crossref) [2024-12-08]},
  language     = {en},
  abstractNote = {Human action recognition is a challenging field in recent years. Many traditional signal processing and machine learning methods are gradually trying to be applied in this field. This paper uses a hidden Markov model based on mixed Gaussian to solve the problem of human action recognition. The model treats the observed human actions as samples which conform to the Gaussian mixture model, and each Gaussian mixture model is determined by a state variable. The training of the model is the process that obtain the model parameters through the expectation maximization algorithm. The simulation results show that the Hidden Markov Model based on the mixed Gaussian distribution can perform well in human action recognition.},
}
38
+
@article{Liu_Wang_2017,
  title        = {Decoding {Chinese} Stock Market Returns: Three-State Hidden Semi-{Markov} Model},
  journal      = {Pacific-Basin Finance Journal},
  volume       = {44},
  author       = {Liu, Zhenya and Wang, Shixuan},
  year         = {2017},
  month        = sep,
  pages        = {127--149},
  ISSN         = {0927-538X},
  DOI          = {10.1016/j.pacfin.2017.06.007},
  language     = {en},
  abstractNote = {In this paper, we employ a three-state hidden semi-Markov model (HSMM) to explain the time-varying distribution of the Chinese stock market returns since 2005. Our results indicate that the time-varying distribution depends on the hidden states, which are represented by three market conditions, namely the bear, sidewalk, and bull markets. We find that the inflation, the PMI, and the exchange rate are significantly related to the market conditions in China. A simple trading strategy based on expanding window decoding shows profitability with a Sharpe ratio of 1.14.},
}
0 commit comments