QuickSearch:   Number of matching entries: 0.

Search Settings

    AuthorTitleYearJournal/ProceedingsDOI/URL
    Alexander Rehmer, Marco Klute, Andreas Kroll and Hans-Peter Heim An internal dynamics approach to predicting batch-end product quality in plastic injection molding using Recurrent Neural Networks 2022 IFAC-PapersOnLine, 6th IEEE Conference on Control Technology and Applications (CCTA), vol. 53, pp. 1427-1432, Elsevier, Trieste, Italy, IFAC, 22.-25. August
    Abstract: Recurrent Neural Networks are applied in areas such as speech recognition, natural language and video processing, and the identification of nonlinear state space models. Conventional Recurrent Neural Networks, e.g. the Elman Network, are hard to train. A more recently developed class of recurrent neural networks, so-called Gated Units, outperform their counterparts on virtually every task. This paper aims to provide additional insights into the differences between RNNs and Gated Units in order to explain the superior performance of gated recurrent units. It is argued that Gated Units are easier to optimize not because they solve the vanishing gradient problem, but because they circumvent the emergence of large local gradients.
    BibTeX:
    @inproceedings{Rehmer-CCTA-2022,
     abstract = {Recurrent Neural Networks are applied in areas such as speech recognition, natural language and video processing, and the identification of nonlinear state space models. Conventional Recurrent Neural Networks, e.g. the Elman Network, are hard to train. A more recently developed class of recurrent neural networks, so-called Gated Units, outperform their counterparts on virtually every task. This paper aims to provide additional insights into the differences between RNNs and Gated Units in order to explain the superior performance of gated recurrent units. It is argued that Gated Units are easier to optimize not because they solve the vanishing gradient problem, but because they circumvent the emergence of large local gradients.},
     address = {Trieste, Italy},
     author = {Rehmer, Alexander and Klute, Marco and Kroll, Andreas and Heim, Hans-Peter},
     booktitle = {6th {IEEE} Conference on Control Technology and Applications ({CCTA})},
     internal-note = {review: abstract appears copied from the Rehmer-IFAC-2020 gradient paper and does not match this title -- verify; entry also mixes an IEEE conference with IFAC-PapersOnLine/Elsevier publication data -- confirm venue},
     journal = {IFAC-PapersOnLine},
     language = {english},
     month = {22.-25. August},
     mrtnote = {peer,IdRNN},
     organization = {IFAC},
     owner = {rehmer},
     pages = {1427--1432},
     publisher = {Elsevier},
     timestamp = {2019.11.25},
     title = {An internal dynamics approach to predicting batch-end product quality in plastic injection molding using {Recurrent Neural Networks}},
     volume = {53},
     year = {2022}
    }
    
    
    Alexander Rehmer, Andreas Kroll On affine quasi-LPV System Identification with unknown state-scheduling using (deep) Recurrent Neural Networks 2022 IFAC-PapersOnLine, Proceedings of the 26th International Conference on System Theory Control and Computing (ICSTCC), pp. 446-451, Sinaia, Romania, 19.-21. October
    Abstract: Recurrent Neural Networks are applied in areas such as speech recognition, natural language and video processing, and the identification of nonlinear state space models. Conventional Recurrent Neural Networks, e.g. the Elman Network, are hard to train. A more recently developed class of recurrent neural networks, so-called Gated Units, outperform their counterparts on virtually every task. This paper aims to provide additional insights into the differences between RNNs and Gated Units in order to explain the superior performance of gated recurrent units. It is argued that Gated Units are easier to optimize not because they solve the vanishing gradient problem, but because they circumvent the emergence of large local gradients.
    BibTeX:
    @inproceedings{Rehmer-ICSTCC-2022,
     abstract = {Recurrent Neural Networks are applied in areas such as speech recognition, natural language and video processing, and the identification of nonlinear state space models. Conventional Recurrent Neural Networks, e.g. the Elman Network, are hard to train. A more recently developed class of recurrent neural networks, so-called Gated Units, outperform their counterparts on virtually every task. This paper aims to provide additional insights into the differences between RNNs and Gated Units in order to explain the superior performance of gated recurrent units. It is argued that Gated Units are easier to optimize not because they solve the vanishing gradient problem, but because they circumvent the emergence of large local gradients.},
     address = {Sinaia, Romania},
     author = {Rehmer, Alexander and Kroll, Andreas},
     booktitle = {Proceedings of the 26th International Conference on System Theory Control and Computing ({ICSTCC})},
     internal-note = {review: abstract is identical to the Rehmer-IFAC-2020 gradient paper but the title concerns quasi-LPV identification -- verify abstract},
     journal = {IFAC-PapersOnLine},
     language = {english},
     month = {19.-21. October},
     mrtnote = {peer,IdRNN},
     owner = {rehmer},
     pages = {446--451},
     timestamp = {2019.11.25},
     title = {On affine quasi-{LPV} System Identification with unknown state-scheduling using (deep) {Recurrent Neural Networks}},
     year = {2022}
    }
    
    
    Alexander Rehmer, Andreas Kroll Eine Python-Toolbox zur datengetriebenen Modellierung des Spritzgießprozesses und Lösung von Optimalsteuerungsproblemen zur Steuerung der Bauteilqualität 2022 32. Workshop Computational Intelligence, pp. 133-150, KIT Scientific Publishing, Berlin, GMA-FA 5.14, 1. - 2. Dezember 2022  URL
    BibTeX:
    @inproceedings{RehmerGMACI2022,
     address = {Berlin},
     author = {Rehmer, Alexander and Kroll, Andreas},
     booktitle = {32. Workshop Computational Intelligence},
     language = {german},
     month = {1.-2. Dezember},
     mrtnote = {nopeer, IdRNN},
     organization = {GMA-FA 5.14},
     owner = {rehmer},
     pages = {133--150},
     publisher = {KIT Scientific Publishing},
     timestamp = {2021.08.24},
     title = {Eine {Python}-Toolbox zur datengetriebenen Modellierung des Spritzgie{\ss}prozesses und L{\"o}sung von Optimalsteuerungsproblemen zur Steuerung der Bauteilqualit{\"a}t},
     url = {https://library.oapen.org/handle/20.500.12657/59840?show=full},
     year = {2022}
    }
    
    
    Alexander Rehmer, Andreas Kroll A Deep Recurrent Neural Network model for affine quasi-LPV System identification 2022 Preprints of the 20th European Control Conference (ECC), pp. 566-571, London, UK, 12.-15. July   
    BibTeX:
    @inproceedings{RehmerECC2022_2,
     address = {London, UK},
     author = {Rehmer, Alexander and Kroll, Andreas},
     booktitle = {Preprints of the 20th European Control Conference ({ECC})},
     month = {12.-15. July},
     mrtnote = {peer,IdRNN},
     owner = {gringard},
     pages = {566--571},
     timestamp = {2017.12.13},
     title = {A Deep {Recurrent Neural Network} model for affine quasi-{LPV} System identification},
     year = {2022}
    }
    
    
    Alexander Rehmer, Andreas Kroll The effect of the forget gate on bifurcation boundaries and dynamics in Recurrent Neural Networks and its implications for gradient-based optimization 2022 Preprints of the International Joint Conference on Neural Networks (IJCNN 2022), pp. 1-8, Padua, Italy, 18.-23. July
    BibTeX:
    @inproceedings{Rehmer_WCCI_2022,
     address = {Padua, Italy},
     author = {Rehmer, Alexander and Kroll, Andreas},
     booktitle = {Preprints of the International Joint Conference on Neural Networks ({IJCNN} 2022)},
     month = {18.-23. July},
     mrtnote = {nopeer,IdRNN},
     owner = {duerrbaum},
     pages = {1--8},
     timestamp = {2022.03.29},
     title = {The effect of the forget gate on bifurcation boundaries and dynamics in {Recurrent Neural Networks} and its implications for gradient-based optimization},
     year = {2022}
    }
    
    
    Alexander Rehmer, Andreas Kroll On the vanishing and exploding gradient problem in Gated Recurrent Units 2020 IFAC-PapersOnLine, 21st IFAC World Congress, vol. 53, no. 2, pp. 1243-1248, Elsevier, Berlin, Germany, IFAC, 12.-17. July
    Abstract: Recurrent Neural Networks are applied in areas such as speech recognition, natural language and video processing, and the identification of nonlinear state space models. Conventional Recurrent Neural Networks, e.g. the Elman Network, are hard to train. A more recently developed class of recurrent neural networks, so-called Gated Units, outperform their counterparts on virtually every task. This paper aims to provide additional insights into the differences between RNNs and Gated Units in order to explain the superior performance of gated recurrent units. It is argued that Gated Units are easier to optimize not because they solve the vanishing gradient problem, but because they circumvent the emergence of large local gradients.
    BibTeX:
    @inproceedings{Rehmer-IFAC-2020,
     abstract = {Recurrent Neural Networks are applied in areas such as speech recognition, natural language and video processing, and the identification of nonlinear state space models. Conventional Recurrent Neural Networks, e.g. the Elman Network, are hard to train. A more recently developed class of recurrent neural networks, so-called Gated Units, outperform their counterparts on virtually every task. This paper aims to provide additional insights into the differences between RNNs and Gated Units in order to explain the superior performance of gated recurrent units. It is argued that Gated Units are easier to optimize not because they solve the vanishing gradient problem, but because they circumvent the emergence of large local gradients.},
     address = {Berlin, Germany},
     author = {Rehmer, Alexander and Kroll, Andreas},
     booktitle = {21st {IFAC} World Congress},
     journal = {IFAC-PapersOnLine},
     language = {english},
     month = {12.-17. July},
     mrtnote = {peer,IdRNN},
     number = {2},
     organization = {IFAC},
     owner = {rehmer},
     pages = {1243--1248},
     publisher = {Elsevier},
     timestamp = {2019.11.25},
     title = {On the vanishing and exploding gradient problem in {Gated Recurrent Units}},
     volume = {53},
     year = {2020}
    }
    
    
    Alexander Rehmer, Andreas Kroll On Using Gated Recurrent Units for Nonlinear System Identification 2019 Preprints of the 18th European Control Conference (ECC), pp. 2504-2509, Naples, Italy, IFAC, 25.-28. Juni  URL  
    Abstract: This paper is concerned with the test signal design for the identification of the partition parameters of locally affine Takagi-Sugeno-(TS-)Models. The basic idea is that data should be generated in local model transition areas in the scheduling space. A reference system output that represents the desired path in the scheduling space is forced upon the underlying system through a combination of a feed-forward and feedback control scheme. The system is then identified in an iterative closed-loop identification procedure. This method has been applied to an artificial system to demonstrate its potential.
    BibTeX:
    @inproceedings{RehmerECC2019,
     abstract = {This paper is concerned with the test signal design for the identification of the partition parameters of locally affine Takagi-Sugeno-(TS-)Models. The basic idea is that data should be generated in local model transition areas in the scheduling space. A reference system output that represents the desired path in the scheduling space is forced upon the underlying system through a combination of a feed-forward and feedback control scheme. The system is then identified in an iterative closed-loop identification procedure. This method has been applied to an artificial system to demonstrate its potential.},
     address = {Naples, Italy},
     author = {Rehmer, Alexander and Kroll, Andreas},
     booktitle = {Preprints of the 18th European Control Conference ({ECC})},
     internal-note = {review: abstract describes TS-model test signal design and does not match the Gated-Recurrent-Units title -- verify abstract},
     month = {25.-28. Juni},
     mrtnote = {peer,IdRNN},
     organization = {IFAC},
     owner = {rehmer},
     pages = {2504--2509},
     timestamp = {2018.11.14},
     title = {On Using {Gated Recurrent Units} for Nonlinear System Identification},
     url = {https://www.ifac-control.org/events/european-control-conference-in-cooperation-with-ifac-ecc-2019},
     year = {2019}
    }
    
    

    Created by JabRef on 08.05.24.