aboutsummaryrefslogtreecommitdiff
path: root/src/kern/dma/dma_manager.c
blob: 39ae9a38b93e0f4f9390cfa758af1e31f047720e (plain) (blame)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
#include "kern/dma/dma_manager.h"

#include "arch/stm32l4xxx/peripherals/dma.h"
#include "arch/stm32l4xxx/peripherals/rcc.h"
#include "arch/stm32l4xxx/peripherals/spi.h"
#include "arch/stm32l4xxx/peripherals/usart.h"

/* Bitmasks of DMA channels in use: dma_inuse[0] = DMA1, dma_inuse[1] = DMA2. */
uint8_t dma_inuse[2];

void (*dma_channel_callbacks[14])(void*);
void* callback_args[14];

/*
 * ON_DMA(dma, chan) defines the interrupt handler on_dma<dma>_channel<chan>()
 * for DMA controller `dma` (1 or 2), channel `chan` (1..7).
 *
 * The handler looks up the user callback registered via
 * dma_chan_set_callback() at index (dma - 1) * 7 + (chan - 1) and, if one is
 * set, invokes it with the argument stored alongside it.  A NULL entry means
 * no callback is registered and the interrupt is silently ignored.
 */
#define ON_DMA(dma, chan)                                    \
  void on_dma##dma##_channel##chan()                         \
  {                                                          \
    if (dma_channel_callbacks[(dma - 1) * 7 + (chan - 1)]) { \
      dma_channel_callbacks[(dma - 1) * 7 + (chan - 1)](     \
          callback_args[(dma - 1) * 7 + (chan - 1)]);        \
    }                                                        \
  }

/* Instantiate the handlers for all 7 channels of both DMA controllers. */
ON_DMA(1, 1);
ON_DMA(1, 2);
ON_DMA(1, 3);
ON_DMA(1, 4);
ON_DMA(1, 5);
ON_DMA(1, 6);
ON_DMA(1, 7);
ON_DMA(2, 1);
ON_DMA(2, 2);
ON_DMA(2, 3);
ON_DMA(2, 4);
ON_DMA(2, 5);
ON_DMA(2, 6);
ON_DMA(2, 7);

/* Map a controller selector (0 = DMA1, non-zero = DMA2) to its register block. */
static inline dma_t* get_dma(int dma)
{
  return dma ? &DMA2 : &DMA1;
}

/* Resolve the register block of the controller that owns `chan`. */
static dma_t* get_raw_dma(dma_channel_t chan)
{
  dma_t* controller = get_dma(chan.dma);
  return controller;
}

/* Resolve the per-channel register group (CCR/CNDTR/CPAR/CMAR) for `chan`. */
static dma_channel_config_t* get_raw_channel_config(dma_channel_t chan)
{
  return &get_raw_dma(chan)->channel_config[chan.chan];
}

/*
 * Return the address of the peripheral data register that the DMA channel
 * should read from / write to for the given peripheral request, or 0 when
 * the peripheral is unknown (e.g. the mem2mem pseudo-peripheral).
 */
static uint32_t get_periph_location(dma_peripheral_t operipheral)
{
  switch (operipheral) {
    case DMA1_PERIPH_USART1_RX:
      return ptr2reg(&USART1.rd_r);
    case DMA1_PERIPH_USART1_TX:
      return ptr2reg(&USART1.td_r);
    case DMA1_PERIPH_USART2_RX:
      return ptr2reg(&USART2.rd_r);
    case DMA1_PERIPH_USART2_TX:
      return ptr2reg(&USART2.td_r);
    /* SPI uses a single data register for both directions. */
    case DMA2_PERIPH_SPI1_RX:
      return ptr2reg(&SPI1.d_r);
    case DMA2_PERIPH_SPI1_TX:
      return ptr2reg(&SPI1.d_r);
    case DMA1_PERIPH_SPI1_RX:
      return ptr2reg(&SPI1.d_r);
    case DMA1_PERIPH_SPI1_TX:
      return ptr2reg(&SPI1.d_r);
    case DMA2_PERIPH_SPI3_RX:
      return ptr2reg(&SPI3.d_r);
    case DMA2_PERIPH_SPI3_TX:
      return ptr2reg(&SPI3.d_r);
    default:
      return 0;
  }
}

/*
 * Decode a dma_peripheral_t request code into a (controller, channel) pair.
 *
 * The low byte of `operipheral` encodes the request; values at or above
 * DMA2_DMA1_SWITCH__ select DMA2 and are rebased by subtracting that marker.
 * The channel is the remainder modulo DMA_N_CHANNELS, and the CSELR request
 * mode is written through `modesel`.
 *
 * NOTE(review): the channel uses DMA_N_CHANNELS while `modesel` divides by a
 * literal 7 — these are presumably the same value (7 channels per controller
 * on STM32L4); confirm DMA_N_CHANNELS == 7 and unify the constants.
 */
static dma_channel_t allocate_dma_channel(
    dma_peripheral_t operipheral, int* modesel)
{
  dma_peripheral_t peripheral = operipheral & 0xff;
  int dmasel = peripheral >= DMA2_DMA1_SWITCH__;
  if (dmasel) {
    peripheral -= DMA2_DMA1_SWITCH__;
  }
  int chan = peripheral % DMA_N_CHANNELS;

  *modesel = peripheral / 7;
  return (dma_channel_t){.dma = dmasel, .chan = chan};
}

/*
 * Attempt to mark the channel as in-use with an atomic read-modify-write,
 * so two concurrent callers can never both claim the same channel.
 *
 * Returns non-zero on success, 0 if the channel was already reserved.
 */
static int try_reserve_dma_channel(dma_channel_t chan)
{
  uint8_t mask = 1 << chan.chan;
  uint8_t prev = __sync_fetch_and_or(&dma_inuse[chan.dma], mask);

  return (prev & mask) == 0;
}

/*
 * Release a previously reserved DMA channel: disable the channel, clear its
 * in-use bit, and gate the controller's clock when no channels remain active.
 */
void release_dma_channel(dma_channel_t chan)
{
  dma_channel_config_t* config = get_raw_channel_config(chan);
  regset(config->cc_r, dma_cc_en, 0); /* Disable the channel. */

  /* Clear the in-use bit atomically to pair with the atomic set in
   * try_reserve_dma_channel(); a plain `&=` is a non-atomic
   * read-modify-write that can race a concurrent reservation of another
   * channel on the same controller and lose its update. */
  uint8_t remaining =
      __sync_and_and_fetch(&dma_inuse[chan.dma], (uint8_t)~(1 << chan.chan));

  if (!remaining) {
    /* Power-down the controller when none of its channels are in use. */
    if (chan.dma) {
      regset(RCC.ahb1en_r, rcc_dma2en, 0);
    } else {
      regset(RCC.ahb1en_r, rcc_dma1en, 0);
    }
  }
}

/*
 * Program a DMA channel's configuration registers.
 *
 * Enables the owning controller's clock, writes the CSELR request selection
 * for the channel, then builds the channel configuration (CCR) in a local
 * variable before committing it in a single register write.  Finally the
 * peripheral-side address register is set from the peripheral request code
 * (0 when `operipheral` names no known peripheral, e.g. mem2mem).
 *
 * chan        - controller/channel pair to configure.
 * operipheral - peripheral request code, used only for the address lookup.
 * opts        - interrupt, increment, block-size, and priority options.
 * dir         - transfer direction (read from memory vs. peripheral).
 * selmode     - 4-bit CSELR request value for this channel.
 * mem2mem     - enable memory-to-memory mode.
 * error_out   - always set to 0; kept for interface symmetry with callers.
 */
void configure_dma_channel(
    dma_channel_t chan,
    dma_peripheral_t operipheral,
    dma_opts_t* opts,
    dma_dir_t dir,
    int selmode,
    bool mem2mem,
    int* error_out)
{
  /* Enable the clock of the controller being configured. */
  if (chan.dma) {
    regset(RCC.ahb1en_r, rcc_dma2en, 1);
  } else {
    regset(RCC.ahb1en_r, rcc_dma1en, 1);
  }

  dma_t* dma = get_raw_dma(chan);
  /* Each channel owns a 4-bit request-select field in CSELR. */
  regset(dma->csel_r, 0xF << (4 * chan.chan), selmode);
  dma_channel_config_t* config = &dma->channel_config[chan.chan];

  /* Assemble CCR locally so the hardware sees one coherent write. */
  uint32_t reg = 0;

  regset(reg, dma_cc_dir, dir);
  regset(reg, dma_cc_tcie, opts->transfer_complete_interrupt_enable);
  regset(reg, dma_cc_htie, opts->half_transfer_interrupt_enable);
  regset(reg, dma_cc_teie, opts->transfer_error_interrupt_enable);
  regset(reg, dma_cc_circ, opts->circular_mode);
  regset(reg, dma_cc_pinc, opts->peripheral_increment);
  regset(reg, dma_cc_minc, opts->memory_increment);
  regset(reg, dma_cc_psize, opts->peripheral_block_size);
  regset(reg, dma_cc_msize, opts->memory_block_size);
  regset(reg, dma_cc_pl, opts->priority);
  regset(reg, dma_cc_mem2mem, mem2mem);

  config->cc_r = reg;
  config->cpa_r = get_periph_location(operipheral);

  *error_out = 0;
}

/*
 * Reserve and configure a channel for memory-to-memory transfers.
 *
 * channel == -1 auto-selects: the highest free channel on DMA2 is preferred,
 * falling back to DMA1; otherwise `channel` explicitly names a channel
 * (0..6 on DMA1, 7..13 on DMA2).
 *
 * On failure sets *error_out (e.g. DMA_ERROR_CHANNEL_IN_USE) and returns the
 * DMA_CHAN_ERROR sentinel; on success sets *error_out = 0.
 */
dma_mem2mem_channel_t select_dma_channel_mem2mem(
    int channel, dma_opts_t* opts, int* error_out)
{
#define WRAP(c) ((dma_mem2mem_channel_t){.c_ = c})
  // TODO this should probably be in a critical section.
  dma_channel_t chan;
  if (channel == -1) {
    /* Prefer DMA2; fall back to DMA1 if all 7 channels are taken. */
    chan.dma = 1;
    if ((dma_inuse[chan.dma] & 0x7F) == 0x7F) {
      chan.dma = 0;
    }

    if ((dma_inuse[chan.dma] & 0x7F) == 0x7F) {
      *error_out = DMA_ERROR_CHANNEL_IN_USE;
      return WRAP(DMA_CHAN_ERROR);
    }

    /* Pick the highest free channel: shift so channel i occupies bit i+1,
     * complement to mark free channels, then locate the top set bit with
     * clz (t is 8-bit, so clz is offset by 24 on a 32-bit int). */
    uint8_t t = ~(dma_inuse[chan.dma] << 1);
    chan.chan = 6 - (__builtin_clz(t) - 24);
  } else {
    if (channel < 7) {
      chan.dma = 0;
      chan.chan = channel;
    } else {
      /* BUGFIX: channels 7..13 live on DMA2; this previously selected
       * DMA1 while still rebasing the channel index by 7. */
      chan.dma = 1;
      chan.chan = channel - 7;
    }
  }

  if (!try_reserve_dma_channel(chan)) {
    *error_out = DMA_ERROR_CHANNEL_IN_USE;
    return WRAP(DMA_CHAN_ERROR);
  }

  int ec = 0;
  configure_dma_channel(
      chan,
      -1 /* No peripheral */,
      opts,
      READ_FROM_PERIPHERAL,
      /* selmode = */ 0x8,
      /* mem2mem = */ true,
      &ec);

  if (ec) {
    *error_out = ec;
    return WRAP(DMA_CHAN_ERROR);
  }

  *error_out = 0;
  return WRAP(chan);
#undef WRAP
}

/*
 * Reserve and configure the channel dedicated to `peripheral` for
 * memory-to-peripheral transfers.  Sets *error_out to 0 on success or
 * DMA_ERROR_CHANNEL_IN_USE / a configuration error, returning the
 * DMA_CHAN_ERROR sentinel on failure.
 */
dma_mem2p_channel_t select_dma_channel_mem2p(
    dma_peripheral_t peripheral, dma_opts_t* opts_in, int* error_out)
{
#define WRAP(c) ((dma_mem2p_channel_t){.c_ = c})
  *error_out = 0;

  int selmode;
  dma_channel_t chan = allocate_dma_channel(peripheral, &selmode);

  if (!try_reserve_dma_channel(chan)) {
    *error_out = DMA_ERROR_CHANNEL_IN_USE;
    return WRAP(DMA_CHAN_ERROR);
  }

  configure_dma_channel(
      chan,
      peripheral,
      opts_in,
      READ_FROM_MEMORY,
      selmode,
      /* mem2mem = */ false,
      error_out);

  return *error_out ? WRAP(DMA_CHAN_ERROR) : WRAP(chan);
#undef WRAP
}

/*
 * Reserve and configure the channel dedicated to `peripheral` for
 * peripheral-to-memory transfers.  Sets *error_out to 0 on success or
 * DMA_ERROR_CHANNEL_IN_USE / a configuration error, returning the
 * DMA_CHAN_ERROR sentinel on failure.
 */
dma_p2mem_channel_t select_dma_channel_p2mem(
    dma_peripheral_t peripheral, dma_opts_t* opts_in, int* error_out)
{
#define WRAP(c) ((dma_p2mem_channel_t){.c_ = c})
  *error_out = 0;

  int selmode;
  dma_channel_t chan = allocate_dma_channel(peripheral, &selmode);

  if (!try_reserve_dma_channel(chan)) {
    *error_out = DMA_ERROR_CHANNEL_IN_USE;
    return WRAP(DMA_CHAN_ERROR);
  }

  configure_dma_channel(
      chan,
      peripheral,
      opts_in,
      READ_FROM_PERIPHERAL,
      selmode,
      /* mem2mem = */ false,
      error_out);

  return *error_out ? WRAP(DMA_CHAN_ERROR) : WRAP(chan);
#undef WRAP
}

/*
 * Start a memory-to-peripheral transfer of `nblocks` blocks beginning at
 * `from_loc` on a channel previously set up by select_dma_channel_mem2p().
 */
void dma_mem2p_initiate_transfer(
    dma_mem2p_channel_t chan, const void* from_loc, uint16_t nblocks)
{
  dma_channel_config_t* cfg = get_raw_channel_config(chan.c_);

  cfg->cma_r = ptr2reg(from_loc);
  cfg->cndt_r = nblocks;

  /* Setting EN kicks off the transfer. */
  regset(cfg->cc_r, dma_cc_en, 1);
}

/*
 * Start a memory-to-memory copy of `nblocks` blocks from `from_loc` to
 * `to_loc` on a channel previously set up by select_dma_channel_mem2mem().
 * In mem2mem mode the "peripheral" address register holds the source.
 */
void dma_mem2mem_initiate_transfer(
    dma_mem2mem_channel_t chan,
    void* to_loc,
    const void* from_loc,
    uint16_t nblocks)
{
  dma_channel_config_t* cfg = get_raw_channel_config(chan.c_);

  cfg->cma_r = ptr2reg(to_loc);
  cfg->cpa_r = ptr2reg(from_loc);
  cfg->cndt_r = nblocks;

  /* Setting EN kicks off the transfer. */
  regset(cfg->cc_r, dma_cc_en, 1);
}

/*
 * Start a peripheral-to-memory transfer of `nblocks` blocks into `to_loc`
 * on a channel previously set up by select_dma_channel_p2mem().
 */
void dma_p2mem_initiate_transfer(
    dma_p2mem_channel_t chan, void* to_loc, uint16_t nblocks)
{
  dma_channel_config_t* cfg = get_raw_channel_config(chan.c_);

  cfg->cma_r = ptr2reg(to_loc);
  cfg->cndt_r = nblocks;

  /* Setting EN kicks off the transfer. */
  regset(cfg->cc_r, dma_cc_en, 1);
}

/*
 * Register `callback` (invoked with `arg`) for the channel's interrupt and
 * enable that interrupt in the NVIC.  The slot layout matches the ON_DMA
 * handlers: 7 slots per controller.
 */
void dma_chan_set_callback(
    dma_channel_t chan, void (*callback)(void*), void* arg)
{
  int slot = chan.dma * 7 + chan.chan;

  dma_channel_callbacks[slot] = callback;
  callback_args[slot] = arg;
  enable_interrupt(dma_channel_get_interrupt(chan));
}

/*
 * Enable or disable the transfer-complete interrupt for `chan`, updating
 * both the channel's TCIE bit and the corresponding NVIC line.
 */
void dma_channel_interrupt_enable(dma_channel_t chan, bool enabled)
{
  interrupt_t irq = dma_channel_get_interrupt(chan);
  dma_channel_config_t* cfg = get_raw_channel_config(chan);

  regset(cfg->cc_r, dma_cc_tcie, enabled ? 1 : 0);
  if (enabled) {
    enable_interrupt(irq);
  } else {
    disable_interrupt(irq);
  }
}

/*
 * Map a (controller, channel) pair to its NVIC interrupt number.
 *
 * DMA1 channels 0..6 are contiguous from IRQ_DMA1_CHANNEL1_IRQ.  DMA2
 * channels 0..4 are contiguous from IRQ_DMA2_CHANNEL1_IRQ, but channels 5
 * and 6 are special-cased because their vectors are not contiguous with the
 * first five on STM32L4.
 *
 * NOTE(review): the special cases return IRQ_DMA1_CHANNEL6/7_IRQ for DMA2
 * channels 6/7 — verify against the interrupt enum that these constants
 * really alias the DMA2_Channel6/7 vector positions and are not a typo for
 * IRQ_DMA2_CHANNEL6/7_IRQ.
 */
interrupt_t dma_channel_get_interrupt(dma_channel_t chan)
{
  if (chan.dma == 0) {
    return IRQ_DMA1_CHANNEL1_IRQ + chan.chan;
  } else {
    switch (chan.chan) {
      case 5:
        return IRQ_DMA1_CHANNEL6_IRQ;
      case 6:
        return IRQ_DMA1_CHANNEL7_IRQ;
      default:
        return IRQ_DMA2_CHANNEL1_IRQ + chan.chan;
    }
  }
}