@inproceedings{lutati-etal-2023-focus,
    title = "Focus Your Attention (with Adaptive {IIR} Filters)",
    author = "Lutati, Shahar and
      Zimerman, Itamar and
      Wolf, Lior",
    editor = "Bouamor, Houda and
      Pino, Juan and
      Bali, Kalika",
    booktitle = "Proceedings of the 2023 Conference on Empirical Methods in Natural Language Processing",
    month = dec,
    year = "2023",
    address = "Singapore",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2023.emnlp-main.772",
    doi = "10.18653/v1/2023.emnlp-main.772",
    pages = "12538--12549",
abstract = "We present a new layer in which dynamic (i.e., input-dependent) Infinite Impulse Response (IIR) filters of order two are used to process the input sequence prior to applying conventional attention. The input is split into chunks, and the coefficients of these filters are determined based on previous chunks to maintain causality. Despite their relatively low order, the causal adaptive filters are shown to focus attention on the relevant sequence elements. The new layer is grounded in control theory, and is shown to generalize diagonal state-space layers. The layer performs on-par with state-of-the-art networks, with a fraction of their parameters and with time complexity that is sub-quadratic with input size. The obtained layer is favorable to layers such as Heyna, GPT2, and Mega, both with respect to the number of parameters and the obtained level of performance on multiple long-range sequence problems.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
  <mods ID="lutati-etal-2023-focus">
    <titleInfo>
      <title>Focus Your Attention (with Adaptive IIR Filters)</title>
    </titleInfo>
    <name type="personal">
      <namePart type="given">Shahar</namePart>
      <namePart type="family">Lutati</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Itamar</namePart>
      <namePart type="family">Zimerman</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <name type="personal">
      <namePart type="given">Lior</namePart>
      <namePart type="family">Wolf</namePart>
      <role>
        <roleTerm authority="marcrelator" type="text">author</roleTerm>
      </role>
    </name>
    <originInfo>
      <dateIssued>2023-12</dateIssued>
    </originInfo>
    <typeOfResource>text</typeOfResource>
    <relatedItem type="host">
      <titleInfo>
        <title>Proceedings of the 2023 Conference on Empirical Methods in Natural Language Processing</title>
      </titleInfo>
      <name type="personal">
        <namePart type="given">Houda</namePart>
        <namePart type="family">Bouamor</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Juan</namePart>
        <namePart type="family">Pino</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <name type="personal">
        <namePart type="given">Kalika</namePart>
        <namePart type="family">Bali</namePart>
        <role>
          <roleTerm authority="marcrelator" type="text">editor</roleTerm>
        </role>
      </name>
      <originInfo>
        <publisher>Association for Computational Linguistics</publisher>
        <place>
          <placeTerm type="text">Singapore</placeTerm>
        </place>
      </originInfo>
      <genre authority="marcgt">conference publication</genre>
    </relatedItem>
    <abstract>We present a new layer in which dynamic (i.e., input-dependent) Infinite Impulse Response (IIR) filters of order two are used to process the input sequence prior to applying conventional attention. The input is split into chunks, and the coefficients of these filters are determined based on previous chunks to maintain causality. Despite their relatively low order, the causal adaptive filters are shown to focus attention on the relevant sequence elements. The new layer is grounded in control theory, and is shown to generalize diagonal state-space layers. The layer performs on par with state-of-the-art networks, with a fraction of their parameters and with time complexity that is sub-quadratic in the input size. The obtained layer compares favorably to layers such as Hyena, GPT-2, and Mega, both with respect to the number of parameters and the obtained level of performance on multiple long-range sequence problems.</abstract>
<identifier type="citekey">lutati-etal-2023-focus</identifier>
<identifier type="doi">10.18653/v1/2023.emnlp-main.772</identifier>
<location>
<url>https://aclanthology.org/2023.emnlp-main.772</url>
</location>
<part>
<date>2023-12</date>
<extent unit="page">
<start>12538</start>
<end>12549</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Focus Your Attention (with Adaptive IIR Filters)
%A Lutati, Shahar
%A Zimerman, Itamar
%A Wolf, Lior
%Y Bouamor, Houda
%Y Pino, Juan
%Y Bali, Kalika
%S Proceedings of the 2023 Conference on Empirical Methods in Natural Language Processing
%D 2023
%8 December
%I Association for Computational Linguistics
%C Singapore
%F lutati-etal-2023-focus
%X We present a new layer in which dynamic (i.e., input-dependent) Infinite Impulse Response (IIR) filters of order two are used to process the input sequence prior to applying conventional attention. The input is split into chunks, and the coefficients of these filters are determined based on previous chunks to maintain causality. Despite their relatively low order, the causal adaptive filters are shown to focus attention on the relevant sequence elements. The new layer is grounded in control theory, and is shown to generalize diagonal state-space layers. The layer performs on par with state-of-the-art networks, with a fraction of their parameters and with time complexity that is sub-quadratic in the input size. The obtained layer compares favorably to layers such as Hyena, GPT-2, and Mega, both with respect to the number of parameters and the obtained level of performance on multiple long-range sequence problems.
%R 10.18653/v1/2023.emnlp-main.772
%U https://aclanthology.org/2023.emnlp-main.772
%U https://doi.org/10.18653/v1/2023.emnlp-main.772
%P 12538-12549
[Focus Your Attention (with Adaptive IIR Filters)](https://aclanthology.org/2023.emnlp-main.772) (Lutati et al., EMNLP 2023)
Shahar Lutati, Itamar Zimerman, and Lior Wolf. 2023. Focus Your Attention (with Adaptive IIR Filters). In Proceedings of the 2023 Conference on Empirical Methods in Natural Language Processing, pages 12538–12549, Singapore. Association for Computational Linguistics.
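
The abstract describes the core mechanism: split the input into chunks, predict the coefficients of a second-order IIR filter from earlier chunks (preserving causality), filter the current chunk, and only then apply conventional attention. Below is a minimal PyTorch sketch of that idea; the module name `AdaptiveIIRAttention`, the linear coefficient-prediction network, the mean-pooled chunk summary, and the tanh stability clamp are all illustrative assumptions, not the authors' implementation.

```python
import torch
import torch.nn as nn

class AdaptiveIIRAttention(nn.Module):
    """Sketch: chunk the sequence, predict per-channel order-2 IIR
    coefficients from the *previous* chunk (so the filter stays causal),
    run the recurrence over the current chunk, then apply attention."""

    def __init__(self, d_model: int, chunk_len: int, n_heads: int = 4):
        super().__init__()
        self.chunk_len = chunk_len
        # Maps a summary of the previous chunk to (b1, b2, a1, a2) per channel.
        self.coeff_net = nn.Linear(d_model, 4 * d_model)
        self.attn = nn.MultiheadAttention(d_model, n_heads, batch_first=True)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        B, T, D = x.shape  # seq_len assumed divisible by chunk_len here
        chunks = x.view(B, T // self.chunk_len, self.chunk_len, D)
        prev_summary = torch.zeros(B, D, device=x.device)  # no history yet
        filtered = []
        for c in range(chunks.size(1)):
            b1, b2, a1, a2 = self.coeff_net(prev_summary).view(B, 4, D).unbind(1)
            # Bound the feedback coefficients so the recursion cannot blow up
            # (an illustrative stability heuristic, not from the paper).
            a1, a2 = 0.99 * torch.tanh(a1), 0.99 * torch.tanh(a2)
            filtered.append(self._iir2(chunks[:, c], b1, b2, a1, a2))
            prev_summary = chunks[:, c].mean(dim=1)  # causal: uses inputs only
        y = torch.cat(filtered, dim=1)
        # Standard causal self-attention over the filtered sequence.
        causal = torch.triu(torch.ones(T, T, dtype=torch.bool, device=x.device), 1)
        out, _ = self.attn(y, y, y, attn_mask=causal)
        return out

    @staticmethod
    def _iir2(u, b1, b2, a1, a2):
        # Order-2 IIR recurrence, elementwise per channel (b0 fixed to 1):
        # y[t] = u[t] + b1*u[t-1] + b2*u[t-2] - a1*y[t-1] - a2*y[t-2]
        u1 = u2 = y1 = y2 = torch.zeros_like(u[:, 0])
        ys = []
        for t in range(u.size(1)):
            y_t = u[:, t] + b1 * u1 + b2 * u2 - a1 * y1 - a2 * y2
            ys.append(y_t)
            u2, u1 = u1, u[:, t]
            y2, y1 = y1, y_t
        return torch.stack(ys, dim=1)
```

For example, `AdaptiveIIRAttention(d_model=16, chunk_len=8)(torch.randn(2, 64, 16))` returns a `(2, 64, 16)` tensor. The sketch uses full attention for brevity, which is quadratic in the sequence length; the sub-quadratic complexity claimed in the abstract comes from the paper's chunked design, not from this simplified attention step.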