root/drivers/dma/dw-edma/dw-edma-v0-core.c

/* [<][>][^][v][top][bottom][index][help] */

DEFINITIONS

This source file includes the following definitions:
  1. __dw_regs
  2. __dw_ch_regs
  3. writel_ch
  4. readl_ch
  5. dw_edma_v0_core_off
  6. dw_edma_v0_core_ch_count
  7. dw_edma_v0_core_ch_status
  8. dw_edma_v0_core_clear_done_int
  9. dw_edma_v0_core_clear_abort_int
  10. dw_edma_v0_core_status_done_int
  11. dw_edma_v0_core_status_abort_int
  12. dw_edma_v0_core_write_chunk
  13. dw_edma_v0_core_start
  14. dw_edma_v0_core_device_config
  15. dw_edma_v0_core_debugfs_on
  16. dw_edma_v0_core_debugfs_off

   1 // SPDX-License-Identifier: GPL-2.0
   2 /*
   3  * Copyright (c) 2018-2019 Synopsys, Inc. and/or its affiliates.
   4  * Synopsys DesignWare eDMA v0 core
   5  *
   6  * Author: Gustavo Pimentel <gustavo.pimentel@synopsys.com>
   7  */
   8 
   9 #include <linux/bitfield.h>
  10 
  11 #include "dw-edma-core.h"
  12 #include "dw-edma-v0-core.h"
  13 #include "dw-edma-v0-regs.h"
  14 #include "dw-edma-v0-debugfs.h"
  15 
/*
 * Control bits used in linked-list element / channel control registers.
 * Bit positions match the DW eDMA v0 data/link element layout.
 */
enum dw_edma_control {
        DW_EDMA_V0_CB                                   = BIT(0),       /* Cycle Bit */
        DW_EDMA_V0_TCB                                  = BIT(1),       /* Toggle Cycle Bit */
        DW_EDMA_V0_LLP                                  = BIT(2),       /* Load Link Pointer */
        DW_EDMA_V0_LIE                                  = BIT(3),       /* Local Interrupt Enable */
        DW_EDMA_V0_RIE                                  = BIT(4),       /* Remote Interrupt Enable */
        DW_EDMA_V0_CCS                                  = BIT(8),       /* Consumer Cycle State */
        DW_EDMA_V0_LLE                                  = BIT(9),       /* Linked List Enable */
};
  25 
  26 static inline struct dw_edma_v0_regs __iomem *__dw_regs(struct dw_edma *dw)
  27 {
  28         return dw->rg_region.vaddr;
  29 }
  30 
/* Write/read a register in the common (non-per-channel) block */
#define SET(dw, name, value)                            \
        writel(value, &(__dw_regs(dw)->name))

#define GET(dw, name)                                   \
        readl(&(__dw_regs(dw)->name))

/* Access the write-direction (wr_*) or read-direction (rd_*) copy of a
 * common register, selected by @dir.
 */
#define SET_RW(dw, dir, name, value)                    \
        do {                                            \
                if ((dir) == EDMA_DIR_WRITE)            \
                        SET(dw, wr_##name, value);      \
                else                                    \
                        SET(dw, rd_##name, value);      \
        } while (0)

#define GET_RW(dw, dir, name)                           \
        ((dir) == EDMA_DIR_WRITE                        \
          ? GET(dw, wr_##name)                          \
          : GET(dw, rd_##name))

/* Write the same value to both the wr_* and rd_* copies of a register */
#define SET_BOTH(dw, name, value)                       \
        do {                                            \
                SET(dw, wr_##name, value);              \
                SET(dw, rd_##name, value);              \
        } while (0)
  56 static inline struct dw_edma_v0_ch_regs __iomem *
  57 __dw_ch_regs(struct dw_edma *dw, enum dw_edma_dir dir, u16 ch)
  58 {
  59         if (dw->mode == EDMA_MODE_LEGACY)
  60                 return &(__dw_regs(dw)->type.legacy.ch);
  61 
  62         if (dir == EDMA_DIR_WRITE)
  63                 return &__dw_regs(dw)->type.unroll.ch[ch].wr;
  64 
  65         return &__dw_regs(dw)->type.unroll.ch[ch].rd;
  66 }
  67 
  68 static inline void writel_ch(struct dw_edma *dw, enum dw_edma_dir dir, u16 ch,
  69                              u32 value, void __iomem *addr)
  70 {
  71         if (dw->mode == EDMA_MODE_LEGACY) {
  72                 u32 viewport_sel;
  73                 unsigned long flags;
  74 
  75                 raw_spin_lock_irqsave(&dw->lock, flags);
  76 
  77                 viewport_sel = FIELD_PREP(EDMA_V0_VIEWPORT_MASK, ch);
  78                 if (dir == EDMA_DIR_READ)
  79                         viewport_sel |= BIT(31);
  80 
  81                 writel(viewport_sel,
  82                        &(__dw_regs(dw)->type.legacy.viewport_sel));
  83                 writel(value, addr);
  84 
  85                 raw_spin_unlock_irqrestore(&dw->lock, flags);
  86         } else {
  87                 writel(value, addr);
  88         }
  89 }
  90 
  91 static inline u32 readl_ch(struct dw_edma *dw, enum dw_edma_dir dir, u16 ch,
  92                            const void __iomem *addr)
  93 {
  94         u32 value;
  95 
  96         if (dw->mode == EDMA_MODE_LEGACY) {
  97                 u32 viewport_sel;
  98                 unsigned long flags;
  99 
 100                 raw_spin_lock_irqsave(&dw->lock, flags);
 101 
 102                 viewport_sel = FIELD_PREP(EDMA_V0_VIEWPORT_MASK, ch);
 103                 if (dir == EDMA_DIR_READ)
 104                         viewport_sel |= BIT(31);
 105 
 106                 writel(viewport_sel,
 107                        &(__dw_regs(dw)->type.legacy.viewport_sel));
 108                 value = readl(addr);
 109 
 110                 raw_spin_unlock_irqrestore(&dw->lock, flags);
 111         } else {
 112                 value = readl(addr);
 113         }
 114 
 115         return value;
 116 }
 117 
/* Per-channel register access; routes through the legacy viewport helpers */
#define SET_CH(dw, dir, ch, name, value) \
        writel_ch(dw, dir, ch, value, &(__dw_ch_regs(dw, dir, ch)->name))

#define GET_CH(dw, dir, ch, name) \
        readl_ch(dw, dir, ch, &(__dw_ch_regs(dw, dir, ch)->name))

/* Write one field of a linked-list (descriptor) element in LL memory */
#define SET_LL(ll, value) \
        writel(value, ll)
 126 
 127 /* eDMA management callbacks */
 128 void dw_edma_v0_core_off(struct dw_edma *dw)
 129 {
 130         SET_BOTH(dw, int_mask, EDMA_V0_DONE_INT_MASK | EDMA_V0_ABORT_INT_MASK);
 131         SET_BOTH(dw, int_clear, EDMA_V0_DONE_INT_MASK | EDMA_V0_ABORT_INT_MASK);
 132         SET_BOTH(dw, engine_en, 0);
 133 }
 134 
 135 u16 dw_edma_v0_core_ch_count(struct dw_edma *dw, enum dw_edma_dir dir)
 136 {
 137         u32 num_ch;
 138 
 139         if (dir == EDMA_DIR_WRITE)
 140                 num_ch = FIELD_GET(EDMA_V0_WRITE_CH_COUNT_MASK, GET(dw, ctrl));
 141         else
 142                 num_ch = FIELD_GET(EDMA_V0_READ_CH_COUNT_MASK, GET(dw, ctrl));
 143 
 144         if (num_ch > EDMA_V0_MAX_NR_CH)
 145                 num_ch = EDMA_V0_MAX_NR_CH;
 146 
 147         return (u16)num_ch;
 148 }
 149 
 150 enum dma_status dw_edma_v0_core_ch_status(struct dw_edma_chan *chan)
 151 {
 152         struct dw_edma *dw = chan->chip->dw;
 153         u32 tmp;
 154 
 155         tmp = FIELD_GET(EDMA_V0_CH_STATUS_MASK,
 156                         GET_CH(dw, chan->dir, chan->id, ch_control1));
 157 
 158         if (tmp == 1)
 159                 return DMA_IN_PROGRESS;
 160         else if (tmp == 3)
 161                 return DMA_COMPLETE;
 162         else
 163                 return DMA_ERROR;
 164 }
 165 
 166 void dw_edma_v0_core_clear_done_int(struct dw_edma_chan *chan)
 167 {
 168         struct dw_edma *dw = chan->chip->dw;
 169 
 170         SET_RW(dw, chan->dir, int_clear,
 171                FIELD_PREP(EDMA_V0_DONE_INT_MASK, BIT(chan->id)));
 172 }
 173 
 174 void dw_edma_v0_core_clear_abort_int(struct dw_edma_chan *chan)
 175 {
 176         struct dw_edma *dw = chan->chip->dw;
 177 
 178         SET_RW(dw, chan->dir, int_clear,
 179                FIELD_PREP(EDMA_V0_ABORT_INT_MASK, BIT(chan->id)));
 180 }
 181 
 182 u32 dw_edma_v0_core_status_done_int(struct dw_edma *dw, enum dw_edma_dir dir)
 183 {
 184         return FIELD_GET(EDMA_V0_DONE_INT_MASK, GET_RW(dw, dir, int_status));
 185 }
 186 
 187 u32 dw_edma_v0_core_status_abort_int(struct dw_edma *dw, enum dw_edma_dir dir)
 188 {
 189         return FIELD_GET(EDMA_V0_ABORT_INT_MASK, GET_RW(dw, dir, int_status));
 190 }
 191 
/*
 * Program the chunk's linked-list region: one data element per burst,
 * terminated by a link element that points back to the start of the
 * region. The cycle bit (CB) alternates per chunk so the hardware can
 * distinguish newly written elements from stale ones.
 */
static void dw_edma_v0_core_write_chunk(struct dw_edma_chunk *chunk)
{
        struct dw_edma_burst *child;
        struct dw_edma_v0_lli __iomem *lli;
        struct dw_edma_v0_llp __iomem *llp;
        u32 control = 0, i = 0;
        int j;

        lli = chunk->ll_region.vaddr;

        if (chunk->cb)
                control = DW_EDMA_V0_CB;

        /* Count down so the last burst gets the interrupt-enable bits */
        j = chunk->bursts_alloc;
        list_for_each_entry(child, &chunk->burst->list, list) {
                j--;
                if (!j)
                        control |= (DW_EDMA_V0_LIE | DW_EDMA_V0_RIE);

                /* Channel control */
                SET_LL(&lli[i].control, control);
                /* Transfer size */
                SET_LL(&lli[i].transfer_size, child->sz);
                /* SAR - low, high */
                SET_LL(&lli[i].sar_low, lower_32_bits(child->sar));
                SET_LL(&lli[i].sar_high, upper_32_bits(child->sar));
                /* DAR - low, high */
                SET_LL(&lli[i].dar_low, lower_32_bits(child->dar));
                SET_LL(&lli[i].dar_high, upper_32_bits(child->dar));
                i++;
        }

        /* Link element follows the last data element; its CB is the
         * inverse of the data elements' so the toggle works per chunk. */
        llp = (void __iomem *)&lli[i];
        control = DW_EDMA_V0_LLP | DW_EDMA_V0_TCB;
        if (!chunk->cb)
                control |= DW_EDMA_V0_CB;

        /* Channel control */
        SET_LL(&llp->control, control);
        /* Linked list  - low, high */
        SET_LL(&llp->llp_low, lower_32_bits(chunk->ll_region.paddr));
        SET_LL(&llp->llp_high, upper_32_bits(chunk->ll_region.paddr));
}
 235 
/*
 * Write the chunk's descriptors and kick the hardware. On the first
 * chunk of a transfer the engine, interrupts and linked-list pointer are
 * set up as well; subsequent chunks only need the doorbell ring.
 */
void dw_edma_v0_core_start(struct dw_edma_chunk *chunk, bool first)
{
        struct dw_edma_chan *chan = chunk->chan;
        struct dw_edma *dw = chan->chip->dw;
        u32 tmp;

        dw_edma_v0_core_write_chunk(chunk);

        if (first) {
                /* Enable engine */
                SET_RW(dw, chan->dir, engine_en, BIT(0));
                /* Interrupt unmask - done, abort */
                tmp = GET_RW(dw, chan->dir, int_mask);
                tmp &= ~FIELD_PREP(EDMA_V0_DONE_INT_MASK, BIT(chan->id));
                tmp &= ~FIELD_PREP(EDMA_V0_ABORT_INT_MASK, BIT(chan->id));
                SET_RW(dw, chan->dir, int_mask, tmp);
                /* Linked list error */
                tmp = GET_RW(dw, chan->dir, linked_list_err_en);
                tmp |= FIELD_PREP(EDMA_V0_LINKED_LIST_ERR_MASK, BIT(chan->id));
                SET_RW(dw, chan->dir, linked_list_err_en, tmp);
                /* Channel control */
                SET_CH(dw, chan->dir, chan->id, ch_control1,
                       (DW_EDMA_V0_CCS | DW_EDMA_V0_LLE));
                /* Linked list - low, high */
                SET_CH(dw, chan->dir, chan->id, llp_low,
                       lower_32_bits(chunk->ll_region.paddr));
                SET_CH(dw, chan->dir, chan->id, llp_high,
                       upper_32_bits(chunk->ll_region.paddr));
        }
        /* Doorbell */
        SET_RW(dw, chan->dir, doorbell,
               FIELD_PREP(EDMA_V0_DOORBELL_CH_MASK, chan->id));
}
 269 
/*
 * Program the MSI "interrupt message write" (IMWR) registers for @chan:
 * the done/abort message addresses and the per-channel message data.
 *
 * Each chXY_imwr_data register packs the MSI data for an even/odd
 * channel pair, so the update is a read-modify-write that preserves the
 * sibling channel's half.
 *
 * Always returns 0.
 */
int dw_edma_v0_core_device_config(struct dw_edma_chan *chan)
{
        struct dw_edma *dw = chan->chip->dw;
        u32 tmp = 0;

        /* MSI done addr - low, high */
        SET_RW(dw, chan->dir, done_imwr_low, chan->msi.address_lo);
        SET_RW(dw, chan->dir, done_imwr_high, chan->msi.address_hi);
        /* MSI abort addr - low, high */
        SET_RW(dw, chan->dir, abort_imwr_low, chan->msi.address_lo);
        SET_RW(dw, chan->dir, abort_imwr_high, chan->msi.address_hi);
        /* MSI data - low, high */
        switch (chan->id) {
        case 0:
        case 1:
                tmp = GET_RW(dw, chan->dir, ch01_imwr_data);
                break;

        case 2:
        case 3:
                tmp = GET_RW(dw, chan->dir, ch23_imwr_data);
                break;

        case 4:
        case 5:
                tmp = GET_RW(dw, chan->dir, ch45_imwr_data);
                break;

        case 6:
        case 7:
                tmp = GET_RW(dw, chan->dir, ch67_imwr_data);
                break;
        }

        if (chan->id & BIT(0)) {
                /* Channel odd {1, 3, 5, 7}: keep the even half, set ours */
                tmp &= EDMA_V0_CH_EVEN_MSI_DATA_MASK;
                tmp |= FIELD_PREP(EDMA_V0_CH_ODD_MSI_DATA_MASK,
                                  chan->msi.data);
        } else {
                /* Channel even {0, 2, 4, 6}: keep the odd half, set ours */
                tmp &= EDMA_V0_CH_ODD_MSI_DATA_MASK;
                tmp |= FIELD_PREP(EDMA_V0_CH_EVEN_MSI_DATA_MASK,
                                  chan->msi.data);
        }

        /* Write the merged value back to the same pair register */
        switch (chan->id) {
        case 0:
        case 1:
                SET_RW(dw, chan->dir, ch01_imwr_data, tmp);
                break;

        case 2:
        case 3:
                SET_RW(dw, chan->dir, ch23_imwr_data, tmp);
                break;

        case 4:
        case 5:
                SET_RW(dw, chan->dir, ch45_imwr_data, tmp);
                break;

        case 6:
        case 7:
                SET_RW(dw, chan->dir, ch67_imwr_data, tmp);
                break;
        }

        return 0;
}
 340 
 341 /* eDMA debugfs callbacks */
/* Create the v0 debugfs entries for @chip (thin core-ops wrapper). */
void dw_edma_v0_core_debugfs_on(struct dw_edma_chip *chip)
{
        dw_edma_v0_debugfs_on(chip);
}
 346 
/* Tear down the v0 debugfs entries (thin core-ops wrapper). */
void dw_edma_v0_core_debugfs_off(void)
{
        dw_edma_v0_debugfs_off();
}

/* [<][>][^][v][top][bottom][index][help] */