{
unsigned int i;
u32 addr, desc_addr;
- unsigned long flags;
if (periods > ATI_MAX_DESCRIPTORS)
return -ENOMEM;
return 0;
/* reset DMA before changing the descriptor table */
- spin_lock_irqsave(&chip->reg_lock, flags);
- writel(0, chip->remap_addr + dma->ops->llp_offset);
- dma->ops->enable_dma(chip, 0);
- dma->ops->enable_dma(chip, 1);
- spin_unlock_irqrestore(&chip->reg_lock, flags);
+ scoped_guard(spinlock_irqsave, &chip->reg_lock) {
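+ /* reg_lock and the saved IRQ flags are restored automatically when this block ends */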
+ writel(0, chip->remap_addr + dma->ops->llp_offset);
+ dma->ops->enable_dma(chip, 0);
+ dma->ops->enable_dma(chip, 1);
+ }
/* fill the entries */
addr = (u32)substream->runtime->dma_addr;
!dma->ops->flush_dma))
return -EINVAL;
- spin_lock(&chip->reg_lock);
+ guard(spinlock)(&chip->reg_lock);
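+ /* the guard releases reg_lock automatically on every return path below */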
switch (cmd) {
case SNDRV_PCM_TRIGGER_START:
case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
snd_atiixp_check_bus_busy(chip);
}
}
- spin_unlock(&chip->reg_lock);
return err;
}
{
struct atiixp *chip = snd_pcm_substream_chip(substream);
- spin_lock_irq(&chip->reg_lock);
+ guard(spinlock_irq)(&chip->reg_lock);
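+ /* IRQ-disabling variant; interrupts are re-enabled when the guard goes out of scope */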
if (chip->spdif_over_aclink) {
unsigned int data;
/* enable slots 10/11 */
atiixp_update(chip, CMD, ATI_REG_CMD_SPDF_CONFIG_MASK, 0);
atiixp_update(chip, CMD, ATI_REG_CMD_INTERLEAVE_SPDF, 0);
}
- spin_unlock_irq(&chip->reg_lock);
return 0;
}
struct atiixp *chip = snd_pcm_substream_chip(substream);
unsigned int data;
- spin_lock_irq(&chip->reg_lock);
+ guard(spinlock_irq)(&chip->reg_lock);
data = atiixp_read(chip, OUT_DMA_SLOT) & ~ATI_REG_OUT_DMA_SLOT_MASK;
switch (substream->runtime->channels) {
case 8:
atiixp_update(chip, 6CH_REORDER, ATI_REG_6CH_REORDER_EN,
substream->runtime->channels >= 6 ? ATI_REG_6CH_REORDER_EN : 0);
- spin_unlock_irq(&chip->reg_lock);
return 0;
}
{
struct atiixp *chip = snd_pcm_substream_chip(substream);
- spin_lock_irq(&chip->reg_lock);
+ guard(spinlock_irq)(&chip->reg_lock);
atiixp_update(chip, CMD, ATI_REG_CMD_INTERLEAVE_IN,
substream->runtime->format == SNDRV_PCM_FORMAT_S16_LE ?
ATI_REG_CMD_INTERLEAVE_IN : 0);
- spin_unlock_irq(&chip->reg_lock);
return 0;
}
runtime->private_data = dma;
/* enable DMA bits */
- spin_lock_irq(&chip->reg_lock);
- dma->ops->enable_dma(chip, 1);
- spin_unlock_irq(&chip->reg_lock);
+ scoped_guard(spinlock_irq, &chip->reg_lock) {
+ dma->ops->enable_dma(chip, 1);
+ }
dma->opened = 1;
return 0;
/* disable DMA bits */
if (snd_BUG_ON(!dma->ops || !dma->ops->enable_dma))
return -EINVAL;
- spin_lock_irq(&chip->reg_lock);
- dma->ops->enable_dma(chip, 0);
- spin_unlock_irq(&chip->reg_lock);
+ scoped_guard(spinlock_irq, &chip->reg_lock) {
+ dma->ops->enable_dma(chip, 0);
+ }
dma->substream = NULL;
dma->opened = 0;
return 0;
if (status & CODEC_CHECK_BITS) {
unsigned int detected;
detected = status & CODEC_CHECK_BITS;
- spin_lock(&chip->reg_lock);
+ guard(spinlock)(&chip->reg_lock);
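+ /* guard scope is this if block, so reg_lock is dropped at its closing brace */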
chip->codec_not_ready_bits |= detected;
atiixp_update(chip, IER, detected, 0); /* disable the detected irqs */
- spin_unlock(&chip->reg_lock);
}
/* ack */
{
unsigned int i;
u32 addr, desc_addr;
- unsigned long flags;
if (periods > ATI_MAX_DESCRIPTORS)
return -ENOMEM;
return 0;
/* reset DMA before changing the descriptor table */
- spin_lock_irqsave(&chip->reg_lock, flags);
- writel(0, chip->remap_addr + dma->ops->llp_offset);
- dma->ops->enable_dma(chip, 0);
- dma->ops->enable_dma(chip, 1);
- spin_unlock_irqrestore(&chip->reg_lock, flags);
+ scoped_guard(spinlock_irqsave, &chip->reg_lock) {
+ writel(0, chip->remap_addr + dma->ops->llp_offset);
+ dma->ops->enable_dma(chip, 0);
+ dma->ops->enable_dma(chip, 1);
+ }
/* fill the entries */
addr = (u32)substream->runtime->dma_addr;
!dma->ops->flush_dma))
return -EINVAL;
- spin_lock(&chip->reg_lock);
+ guard(spinlock)(&chip->reg_lock);
switch (cmd) {
case SNDRV_PCM_TRIGGER_START:
dma->ops->enable_transfer(chip, 1);
snd_atiixp_check_bus_busy(chip);
}
}
- spin_unlock(&chip->reg_lock);
return err;
}
struct atiixp_modem *chip = snd_pcm_substream_chip(substream);
unsigned int data;
- spin_lock_irq(&chip->reg_lock);
+ guard(spinlock_irq)(&chip->reg_lock);
/* set output threshold */
data = atiixp_read(chip, MODEM_OUT_FIFO);
data &= ~ATI_REG_MODEM_OUT1_DMA_THRESHOLD_MASK;
data |= 0x04 << ATI_REG_MODEM_OUT1_DMA_THRESHOLD_SHIFT;
atiixp_write(chip, MODEM_OUT_FIFO, data);
- spin_unlock_irq(&chip->reg_lock);
return 0;
}
runtime->private_data = dma;
/* enable DMA bits */
- spin_lock_irq(&chip->reg_lock);
- dma->ops->enable_dma(chip, 1);
- spin_unlock_irq(&chip->reg_lock);
+ scoped_guard(spinlock_irq, &chip->reg_lock) {
+ dma->ops->enable_dma(chip, 1);
+ }
dma->opened = 1;
return 0;
/* disable DMA bits */
if (snd_BUG_ON(!dma->ops || !dma->ops->enable_dma))
return -EINVAL;
- spin_lock_irq(&chip->reg_lock);
- dma->ops->enable_dma(chip, 0);
- spin_unlock_irq(&chip->reg_lock);
+ scoped_guard(spinlock_irq, &chip->reg_lock) {
+ dma->ops->enable_dma(chip, 0);
+ }
dma->substream = NULL;
dma->opened = 0;
return 0;
if (status & CODEC_CHECK_BITS) {
unsigned int detected;
detected = status & CODEC_CHECK_BITS;
- spin_lock(&chip->reg_lock);
+ guard(spinlock)(&chip->reg_lock);
chip->codec_not_ready_bits |= detected;
atiixp_update(chip, IER, detected, 0); /* disable the detected irqs */
- spin_unlock(&chip->reg_lock);
}
/* ack */