1 // SPDX-License-Identifier: GPL-2.0+
2 /*
3 * Copyright (c) 2013 Broadcom
4 */
5 /*
6
7 */
8
9 #ifndef __DMA_H_INCLUDED
10 #define __DMA_H_INCLUDED
11
12 /* File automatically generated by Reggae at 15/08/2013 10:54:36 */
13
14 #include "access_macros.h"
15 #include "packing.h"
16 #include "rdp_map.h"
17
18 /*****************************************************************************************/
19 /* The Direct Memory Access (DMA) module serves peripheral (EMACs and GPON) requests for */
20 /* writes and reads from DDR and packet SRAM.  DMA connects the peripherals to DDR.       */
21 /* SDMA connects the same peripherals to packet SRAM.                                      */
22 /*****************************************************************************************/
23
24 /*****************************************************************************************/
25 /* Blocks offsets */
26 /*****************************************************************************************/
27 /*****************************************************************************************/
28 /* Functions offsets and addresses */
29 /*****************************************************************************************/
30 #define DMA_REGS_0_CONFIG_OFFSET ( 0x00000000 )
31 #define DMA_REGS_0_CONFIG_ADDRESS ( DMA_REGS_0_OFFSET + DMA_REGS_0_CONFIG_OFFSET )
32
33 #define DMA_REGS_0_DEBUG_OFFSET ( 0x00000100 )
34 #define DMA_REGS_0_DEBUG_ADDRESS ( DMA_REGS_0_OFFSET + DMA_REGS_0_DEBUG_OFFSET )
35
36 #define DMA_REGS_1_CONFIG_OFFSET ( 0x00000000 )
37 #define DMA_REGS_1_CONFIG_ADDRESS ( DMA_REGS_1_OFFSET + DMA_REGS_1_CONFIG_OFFSET )
38
39 #define DMA_REGS_1_DEBUG_OFFSET ( 0x00000100 )
40 #define DMA_REGS_1_DEBUG_ADDRESS ( DMA_REGS_1_OFFSET + DMA_REGS_1_DEBUG_OFFSET )
41
42 /* 'd' is module index */
43 /* 'i' is block index */
44 /* 'j' is function index */
45 /* 'e' is function entry */
46 /* 'k' is register index */
47
48 /*****************************************************************************************/
49 /* BB_SOURCE */
50 /* Broadbus source address of DMA and SDMA */
51 /*****************************************************************************************/
52
53 #define DMA_REGS_CONFIG_SOURCE_R1_DEFAULT_VALUE ( 0x0 )
54 #define DMA_REGS_CONFIG_SOURCE_R1_DEFAULT_VALUE_RESET_VALUE ( 0x0 )
55 #define DMA_REGS_CONFIG_SOURCE_SOURCE_DEFAULT_DMA_VALUE ( 0x16 )
56 #define DMA_REGS_CONFIG_SOURCE_SOURCE_DEFAULT_DMA_VALUE_RESET_VALUE ( 0x16 )
57 #define DMA_REGS_CONFIG_SOURCE_SOURCE_DEFAULT_SDMA_VALUE ( 0x18 )
58 #define DMA_REGS_CONFIG_SOURCE_SOURCE_DEFAULT_SDMA_VALUE_RESET_VALUE ( 0x18 )
59
60
61 #define DMA_REGS_CONFIG_SOURCE_OFFSET ( 0x00000000 )
62
63 #define DMA_REGS_0_CONFIG_SOURCE_ADDRESS ( DMA_REGS_0_CONFIG_ADDRESS + DMA_REGS_CONFIG_SOURCE_OFFSET )
64 #define DMA_REGS_0_CONFIG_SOURCE_READ( r ) READ_32( ( DMA_REGS_0_CONFIG_SOURCE_ADDRESS ), (r) )
65 #define DMA_REGS_0_CONFIG_SOURCE_WRITE( v ) WRITE_32( ( DMA_REGS_0_CONFIG_SOURCE_ADDRESS ), (v) )
66
67 #define DMA_REGS_1_CONFIG_SOURCE_ADDRESS ( DMA_REGS_1_CONFIG_ADDRESS + DMA_REGS_CONFIG_SOURCE_OFFSET )
68 #define DMA_REGS_1_CONFIG_SOURCE_READ( r ) READ_32( ( DMA_REGS_1_CONFIG_SOURCE_ADDRESS ), (r) )
69 #define DMA_REGS_1_CONFIG_SOURCE_WRITE( v ) WRITE_32( ( DMA_REGS_1_CONFIG_SOURCE_ADDRESS ), (v) )
70
71
72 extern uint32_t DMA_REGS_CONFIG_SOURCE_ARRAY [ ] ;
73
74 #define DMA_REGS_CONFIG_SOURCE_WRITE( i, v ) WRITE_32( DMA_REGS_CONFIG_SOURCE_ARRAY [ i ], (v) )
75 #define DMA_REGS_CONFIG_SOURCE_READ( i, r ) READ_32( DMA_REGS_CONFIG_SOURCE_ARRAY [ i ], (r) )
76
77 #ifndef _BYTE_ORDER_LITTLE_ENDIAN_
78 typedef struct
79 {
80 /* reserved */
81 uint32_t r1 : 27 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
82
83 /* bb_source */
84 uint32_t source : 5 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
85 }
86 __PACKING_ATTRIBUTE_STRUCT_END__
87 DMA_REGS_CONFIG_SOURCE ;
88 #else
89 typedef struct
90 { uint32_t source : 5 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
91
92 /* bb_source */
93 uint32_t r1 : 27 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
94
95 /* reserved */
96 }
97 __PACKING_ATTRIBUTE_STRUCT_END__
98 DMA_REGS_CONFIG_SOURCE ;
99 #endif
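
/*
 * Illustrative sketch (not part of the generated register map): program the Broadbus
 * source address of DMA core 0 to its documented default. The helper name is
 * hypothetical; the sketch assumes the packed bitfield struct above occupies a single
 * 32-bit word and that the WRITE_32-based macro performs a plain register write.
 */
static inline void rdp_dma_example_set_bb_source_dma0(void)
{
    union {
        DMA_REGS_CONFIG_SOURCE fields;
        uint32_t word;
    } u;

    u.word = 0;
    u.fields.source = DMA_REGS_CONFIG_SOURCE_SOURCE_DEFAULT_DMA_VALUE; /* 0x16 */

    DMA_REGS_0_CONFIG_SOURCE_WRITE(u.word);
}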
100
101 /*****************************************************************************************/
102 /* MEMORY_ALLOCATION */
103 /* This array of registers defines the memory allocation for the peripherals, for         */
104 /* upstream. The allocation is a number of 128-byte buffers out of the total of 32        */
105 /* buffers for SDMA or 96 buffers for DMA in the upload data RAM.  For the DMA, the       */
106 /* buffers are divided between 2 physical RAMs (64 in the first, 32 in the second). The   */
107 /* decision which client's FIFO is located in which memory is done by the register at     */
108 /* address 0x98.  The allocation is done by defining a base address (aligned to 128       */
109 /* bytes) and the number of allocated buffers.  Note that the memory allocation should    */
110 /* not contain wrap-around.  For example, if three buffers are needed, do not allocate    */
111 /* buffers 30, 31 and 0.  The number of allocated CDs is the same as the number of data   */
112 /* buffers - one chunk descriptor per buffer, therefore the allocation in the CD RAM is   */
113 /* defined only by the offset address.  The order of peripherals within the array is:     */
114 /* Ethernet 0, Ethernet 1, Ethernet 2, Ethernet 3, Ethernet 4, GPON/EPON.                 */
115 /*****************************************************************************************/
116
117 #define DMA_REGS_CONFIG_MALLOC_R3_R3_VALUE ( 0x0 )
118 #define DMA_REGS_CONFIG_MALLOC_R3_R3_VALUE_RESET_VALUE ( 0x0 )
119 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_DMA_EMAC0_VALUE ( 0x0 )
120 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_DMA_EMAC0_VALUE_RESET_VALUE ( 0x0 )
121 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_SDMA_EMAC0_VALUE ( 0x0 )
122 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_SDMA_EMAC0_VALUE_RESET_VALUE ( 0x0 )
123 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_SDMA_EMAC1_VALUE ( 0x5 )
124 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_SDMA_EMAC1_VALUE_RESET_VALUE ( 0x5 )
125 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_DMA_EMAC1_VALUE ( 0x9 )
126 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_DMA_EMAC1_VALUE_RESET_VALUE ( 0x9 )
127 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_SDMA_EMAC2_VALUE ( 0xA )
128 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_SDMA_EMAC2_VALUE_RESET_VALUE ( 0xA )
129 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_SDMA_EMAC3_VALUE ( 0xF )
130 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_SDMA_EMAC3_VALUE_RESET_VALUE ( 0xF )
131 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_DMA_EMAC2_VALUE ( 0x12 )
132 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_DMA_EMAC2_VALUE_RESET_VALUE ( 0x12 )
133 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_SDMA_EMAC4_VALUE ( 0x14 )
134 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_SDMA_EMAC4_VALUE_RESET_VALUE ( 0x14 )
135 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_SDMA_GPON_VALUE ( 0x19 )
136 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_SDMA_GPON_VALUE_RESET_VALUE ( 0x19 )
137 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_DMA_EMAC3_VALUE ( 0x1B )
138 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_DMA_EMAC3_VALUE_RESET_VALUE ( 0x1B )
139 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_DMA_EMAC4_VALUE ( 0x24 )
140 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_DMA_EMAC4_VALUE_RESET_VALUE ( 0x24 )
141 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_DMA_GPON_VALUE ( 0x2D )
142 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_DMA_GPON_VALUE_RESET_VALUE ( 0x2D )
143 #define DMA_REGS_CONFIG_MALLOC_R2_R2_VALUE ( 0x0 )
144 #define DMA_REGS_CONFIG_MALLOC_R2_R2_VALUE_RESET_VALUE ( 0x0 )
145 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_MIN_DMA_SDMA_VALUE ( 0x0 )
146 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_SDMA_EMAC0_VALUE ( 0x5 )
147 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_SDMA_EMAC0_VALUE_RESET_VALUE ( 0x5 )
148 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_SDMA_EMAC1_VALUE ( 0x5 )
149 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_SDMA_EMAC1_VALUE_RESET_VALUE ( 0x5 )
150 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_SDMA_EMAC2_VALUE ( 0x5 )
151 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_SDMA_EMAC2_VALUE_RESET_VALUE ( 0x5 )
152 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_SDMA_EMAC3_VALUE ( 0x5 )
153 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_SDMA_EMAC3_VALUE_RESET_VALUE ( 0x5 )
154 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_SDMA_EMAC4_VALUE ( 0x5 )
155 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_SDMA_EMAC4_VALUE_RESET_VALUE ( 0x5 )
156 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_SDMA_GPON_VALUE ( 0x7 )
157 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_SDMA_GPON_VALUE_RESET_VALUE ( 0x7 )
158 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_DMA_EMAC0_VALUE ( 0x9 )
159 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_DMA_EMAC0_VALUE_RESET_VALUE ( 0x9 )
160 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_DMA_EMAC1_VALUE ( 0x9 )
161 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_DMA_EMAC1_VALUE_RESET_VALUE ( 0x9 )
162 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_DMA_EMAC2_VALUE ( 0x9 )
163 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_DMA_EMAC2_VALUE_RESET_VALUE ( 0x9 )
164 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_DMA_EMAC3_VALUE ( 0x9 )
165 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_DMA_EMAC3_VALUE_RESET_VALUE ( 0x9 )
166 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_DMA_EMAC4_VALUE ( 0x9 )
167 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_DMA_EMAC4_VALUE_RESET_VALUE ( 0x9 )
168 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_DMA_GPON_VALUE ( 0x12 )
169 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_DMA_GPON_VALUE_RESET_VALUE ( 0x12 )
170 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_MAX_SDMA_VALUE ( 0x20 )
171 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_MAX_DMA_VALUE ( 0x3F )
172 #define DMA_REGS_CONFIG_MALLOC_R1_R1_VALUE ( 0x0 )
173 #define DMA_REGS_CONFIG_MALLOC_R1_R1_VALUE_RESET_VALUE ( 0x0 )
174 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_DMA_EMAC0_VALUE ( 0x0 )
175 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_DMA_EMAC0_VALUE_RESET_VALUE ( 0x0 )
176 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_SDMA_EMAC0_VALUE ( 0x0 )
177 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_SDMA_EMAC0_VALUE_RESET_VALUE ( 0x0 )
178 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_SDMA_EMAC1_VALUE ( 0x5 )
179 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_SDMA_EMAC1_VALUE_RESET_VALUE ( 0x5 )
180 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_DMA_EMAC1_VALUE ( 0x9 )
181 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_DMA_EMAC1_VALUE_RESET_VALUE ( 0x9 )
182 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_SDMA_EMAC2_VALUE ( 0xA )
183 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_SDMA_EMAC2_VALUE_RESET_VALUE ( 0xA )
184 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_SDMA_EMAC3_VALUE ( 0xF )
185 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_SDMA_EMAC3_VALUE_RESET_VALUE ( 0xF )
186 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_DMA_EMAC2_VALUE ( 0x12 )
187 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_DMA_EMAC2_VALUE_RESET_VALUE ( 0x12 )
188 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_SDMA_EMAC4_VALUE ( 0x14 )
189 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_SDMA_EMAC4_VALUE_RESET_VALUE ( 0x14 )
190 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_SDMA_GPON_VALUE ( 0x19 )
191 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_SDMA_GPON_VALUE_RESET_VALUE ( 0x19 )
192 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_DMA_EMAC3_VALUE ( 0x1B )
193 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_DMA_EMAC3_VALUE_RESET_VALUE ( 0x1B )
194 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_DMA_EMAC4_VALUE ( 0x24 )
195 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_DMA_EMAC4_VALUE_RESET_VALUE ( 0x24 )
196 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_DMA_GPON_VALUE ( 0x2D )
197 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_DMA_GPON_VALUE_RESET_VALUE ( 0x2D )
198
199
200 #define DMA_REGS_CONFIG_MALLOC_OFFSET ( 0x00000004 )
201
202 #define DMA_REGS_0_CONFIG_MALLOC_ADDRESS ( DMA_REGS_0_CONFIG_ADDRESS + DMA_REGS_CONFIG_MALLOC_OFFSET )
203 #define DMA_REGS_0_CONFIG_MALLOC_READ_I( r, i ) READ_I_32( ( DMA_REGS_0_CONFIG_MALLOC_ADDRESS ), (i), (r) )
204 #define DMA_REGS_0_CONFIG_MALLOC_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_0_CONFIG_MALLOC_ADDRESS ), (i), (v) )
205
206 #define DMA_REGS_1_CONFIG_MALLOC_ADDRESS ( DMA_REGS_1_CONFIG_ADDRESS + DMA_REGS_CONFIG_MALLOC_OFFSET )
207 #define DMA_REGS_1_CONFIG_MALLOC_READ_I( r, i ) READ_I_32( ( DMA_REGS_1_CONFIG_MALLOC_ADDRESS ), (i), (r) )
208 #define DMA_REGS_1_CONFIG_MALLOC_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_1_CONFIG_MALLOC_ADDRESS ), (i), (v) )
209
210
211 extern uint32_t DMA_REGS_CONFIG_MALLOC_ARRAY [ ] ;
212
213 #define DMA_REGS_CONFIG_MALLOC_WRITE( i, k, v ) WRITE_I_32( DMA_REGS_CONFIG_MALLOC_ARRAY [ i ], (k), (v) )
214 #define DMA_REGS_CONFIG_MALLOC_READ( i, k, r ) READ_I_32( DMA_REGS_CONFIG_MALLOC_ARRAY [ i ], (k), (r) )
215
216 #ifndef _BYTE_ORDER_LITTLE_ENDIAN_
217 typedef struct
218 {
219 /* reserved3 */
220 uint32_t r3 : 2 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
221
222 /* CD_memory_offset_address */
223 uint32_t cdoffset : 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
224
225 /* reserved2 */
226 uint32_t r2 : 2 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
227
228 /* number_of_buffers */
229 uint32_t numofbuff : 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
230
231 /* reserved1 */
232 uint32_t r1 : 10 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
233
234 /* data_memory_offset_address */
235 uint32_t datatoffset : 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
236 }
237 __PACKING_ATTRIBUTE_STRUCT_END__
238 DMA_REGS_CONFIG_MALLOC ;
239 #else
240 typedef struct
241 { uint32_t datatoffset : 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
242
243 /* data_memory_offset_address */
244 uint32_t r1 : 10 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
245
246 /* reserved1 */
247 uint32_t numofbuff : 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
248
249 /* number_of_buffers */
250 uint32_t r2 : 2 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
251
252 /* reserved2 */
253 uint32_t cdoffset : 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
254
255 /* CD_memory_offset_address */
256 uint32_t r3 : 2 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
257
258 /* reserved3 */
259 }
260 __PACKING_ATTRIBUTE_STRUCT_END__
261 DMA_REGS_CONFIG_MALLOC ;
262 #endif
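
/*
 * Illustrative sketch (not part of the generated register map): program one entry of
 * the MEMORY_ALLOCATION array on DMA core 0. 'periph_idx' follows the peripheral order
 * given in the block comment above (Ethernet 0..4, then GPON/EPON). The helper name is
 * hypothetical, and WRITE_I_32 is assumed to perform an indexed 32-bit write, as its
 * use in the macros above suggests. The caller must keep the allocation free of
 * wrap-around, as required above.
 */
static inline void rdp_dma_example_set_malloc(uint32_t periph_idx,
                                              uint32_t data_offset,
                                              uint32_t num_of_buffers,
                                              uint32_t cd_offset)
{
    union {
        DMA_REGS_CONFIG_MALLOC fields;
        uint32_t word;
    } u;

    u.word = 0;
    u.fields.datatoffset = data_offset;  /* base of the data buffers, 128-byte units */
    u.fields.numofbuff = num_of_buffers; /* also the number of chunk descriptors */
    u.fields.cdoffset = cd_offset;       /* base of the CD allocation */

    DMA_REGS_0_CONFIG_MALLOC_WRITE_I(u.word, periph_idx);
}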
263
264 /*****************************************************************************************/
265 /* READ_REQ_BASE_ADDRESS */
266 /* This array of registers controls the base address of each peripheral within the read   */
267 /* requests RAM.  Each peripheral gets enough memory for storing up to 8 read requests    */
268 /* (a total of 48 requests in the RAM), starting from a configurable base address.  The   */
269 /* base address is aligned to 8, therefore the only valid values are: 0, 8, 16, 24, 32    */
270 /* and 40.                                                                                 */
271 /*****************************************************************************************/
272
273 #define DMA_REGS_CONFIG_READ_BASE_R1_R1_VALUE ( 0x0 )
274 #define DMA_REGS_CONFIG_READ_BASE_R1_R1_VALUE_RESET_VALUE ( 0x0 )
275 #define DMA_REGS_CONFIG_READ_BASE_BASE_MIN_VALUE ( 0x0 )
276 #define DMA_REGS_CONFIG_READ_BASE_BASE_DEFAULT_EMAC0_VALUE ( 0x0 )
277 #define DMA_REGS_CONFIG_READ_BASE_BASE_DEFAULT_EMAC0_VALUE_RESET_VALUE ( 0x0 )
278 #define DMA_REGS_CONFIG_READ_BASE_BASE_DEFAULT_EMAC1_VALUE ( 0x8 )
279 #define DMA_REGS_CONFIG_READ_BASE_BASE_DEFAULT_EMAC1_VALUE_RESET_VALUE ( 0x8 )
280 #define DMA_REGS_CONFIG_READ_BASE_BASE_DEFAULT_EMAC2_VALUE ( 0x10 )
281 #define DMA_REGS_CONFIG_READ_BASE_BASE_DEFAULT_EMAC2_VALUE_RESET_VALUE ( 0x10 )
282 #define DMA_REGS_CONFIG_READ_BASE_BASE_DEFAULT_EMAC3_VALUE ( 0x18 )
283 #define DMA_REGS_CONFIG_READ_BASE_BASE_DEFAULT_EMAC3_VALUE_RESET_VALUE ( 0x18 )
284 #define DMA_REGS_CONFIG_READ_BASE_BASE_DEFAULT_EMAC4_VALUE ( 0x20 )
285 #define DMA_REGS_CONFIG_READ_BASE_BASE_DEFAULT_EMAC4_VALUE_RESET_VALUE ( 0x20 )
286 #define DMA_REGS_CONFIG_READ_BASE_BASE_DEFAULT_GPON_VALUE ( 0x28 )
287 #define DMA_REGS_CONFIG_READ_BASE_BASE_DEFAULT_GPON_VALUE_RESET_VALUE ( 0x28 )
288 #define DMA_REGS_CONFIG_READ_BASE_BASE_MAX_VALUE ( 0x28 )
289
290
291 #define DMA_REGS_CONFIG_READ_BASE_OFFSET ( 0x0000001C )
292
293 #define DMA_REGS_0_CONFIG_READ_BASE_ADDRESS ( DMA_REGS_0_CONFIG_ADDRESS + DMA_REGS_CONFIG_READ_BASE_OFFSET )
294 #define DMA_REGS_0_CONFIG_READ_BASE_READ_I( r, i ) READ_I_32( ( DMA_REGS_0_CONFIG_READ_BASE_ADDRESS ), (i), (r) )
295 #define DMA_REGS_0_CONFIG_READ_BASE_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_0_CONFIG_READ_BASE_ADDRESS ), (i), (v) )
296
297 #define DMA_REGS_1_CONFIG_READ_BASE_ADDRESS ( DMA_REGS_1_CONFIG_ADDRESS + DMA_REGS_CONFIG_READ_BASE_OFFSET )
298 #define DMA_REGS_1_CONFIG_READ_BASE_READ_I( r, i ) READ_I_32( ( DMA_REGS_1_CONFIG_READ_BASE_ADDRESS ), (i), (r) )
299 #define DMA_REGS_1_CONFIG_READ_BASE_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_1_CONFIG_READ_BASE_ADDRESS ), (i), (v) )
300
301
302 extern uint32_t DMA_REGS_CONFIG_READ_BASE_ARRAY [ ] ;
303
304 #define DMA_REGS_CONFIG_READ_BASE_WRITE( i, k, v ) WRITE_I_32( DMA_REGS_CONFIG_READ_BASE_ARRAY [ i ], (k), (v) )
305 #define DMA_REGS_CONFIG_READ_BASE_READ( i, k, r ) READ_I_32( DMA_REGS_CONFIG_READ_BASE_ARRAY [ i ], (k), (r) )
306
307 #ifndef _BYTE_ORDER_LITTLE_ENDIAN_
308 typedef struct
309 {
310 /* reserved1 */
311 uint32_t r1 : 26 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
312
313 /* base_address */
314 uint32_t base : 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
315 }
316 __PACKING_ATTRIBUTE_STRUCT_END__
317 DMA_REGS_CONFIG_READ_BASE ;
318 #else
319 typedef struct
320 { uint32_t base : 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
321
322 /* base_address */
323 uint32_t r1 : 26 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
324
325 /* reserved1 */
326 }
327 __PACKING_ATTRIBUTE_STRUCT_END__
328 DMA_REGS_CONFIG_READ_BASE ;
329 #endif
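
/*
 * Illustrative sketch (not part of the generated register map): set the read-request
 * base address of one peripheral on DMA core 0. Per the block comment above, only
 * multiples of 8 up to 40 are valid, e.g. DMA_REGS_CONFIG_READ_BASE_BASE_DEFAULT_EMAC1_VALUE.
 * The helper name is hypothetical.
 */
static inline void rdp_dma_example_set_read_base(uint32_t periph_idx, uint32_t base)
{
    union {
        DMA_REGS_CONFIG_READ_BASE fields;
        uint32_t word;
    } u;

    u.word = 0;
    u.fields.base = base; /* aligned to 8 */

    DMA_REGS_0_CONFIG_READ_BASE_WRITE_I(u.word, periph_idx);
}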
330
331 /*****************************************************************************************/
332 /* URGENT_THRESHOLDS */
333 /* The into/out-of-urgent thresholds mark the number of write requests in the queue at    */
334 /* which the peripheral's priority is changed.  The two thresholds should create          */
335 /* hysteresis: the moving-into-urgent threshold must always be greater than the           */
336 /* moving-out-of-urgent threshold.                                                         */
337 /*****************************************************************************************/
338
339 #define DMA_REGS_CONFIG_U_THRESH_R2_R2_VALUE ( 0x0 )
340 #define DMA_REGS_CONFIG_U_THRESH_R2_R2_VALUE_RESET_VALUE ( 0x0 )
341 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_MIN_VALUE ( 0x0 )
342 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_SDMA_EMAC0_VALUE ( 0x2 )
343 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_SDMA_EMAC0_VALUE_RESET_VALUE ( 0x2 )
344 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_SDMA_EMAC1_VALUE ( 0x2 )
345 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_SDMA_EMAC1_VALUE_RESET_VALUE ( 0x2 )
346 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_SDMA_EMAC2_VALUE ( 0x2 )
347 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_SDMA_EMAC2_VALUE_RESET_VALUE ( 0x2 )
348 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_SDMA_EMAC3_VALUE ( 0x2 )
349 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_SDMA_EMAC3_VALUE_RESET_VALUE ( 0x2 )
350 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_SDMA_EMAC4_VALUE ( 0x2 )
351 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_SDMA_EMAC4_VALUE_RESET_VALUE ( 0x2 )
352 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_SDMA_GPON_VALUE ( 0x4 )
353 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_SDMA_GPON_VALUE_RESET_VALUE ( 0x4 )
354 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_DMA_EMAC0_VALUE ( 0x5 )
355 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_DMA_EMAC0_VALUE_RESET_VALUE ( 0x5 )
356 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_DMA_EMAC1_VALUE ( 0x5 )
357 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_DMA_EMAC1_VALUE_RESET_VALUE ( 0x5 )
358 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_DMA_EMAC2_VALUE ( 0x5 )
359 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_DMA_EMAC2_VALUE_RESET_VALUE ( 0x5 )
360 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_DMA_EMAC3_VALUE ( 0x5 )
361 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_DMA_EMAC3_VALUE_RESET_VALUE ( 0x5 )
362 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_DMA_EMAC4_VALUE ( 0x5 )
363 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_DMA_EMAC4_VALUE_RESET_VALUE ( 0x5 )
364 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_DMA_GPON_VALUE ( 0xC )
365 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_DMA_GPON_VALUE_RESET_VALUE ( 0xC )
366 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_MAX_VALUE ( 0x1F )
367 #define DMA_REGS_CONFIG_U_THRESH_R1_R1_VALUE ( 0x0 )
368 #define DMA_REGS_CONFIG_U_THRESH_R1_R1_VALUE_RESET_VALUE ( 0x0 )
369 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_MIN_VALUE ( 0x0 )
370 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_SDMA_EMAC0_VALUE ( 0x3 )
371 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_SDMA_EMAC0_VALUE_RESET_VALUE ( 0x3 )
372 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_SDMA_EMAC1_VALUE ( 0x3 )
373 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_SDMA_EMAC1_VALUE_RESET_VALUE ( 0x3 )
374 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_SDMA_EMAC2_VALUE ( 0x3 )
375 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_SDMA_EMAC2_VALUE_RESET_VALUE ( 0x3 )
376 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_SDMA_EMAC3_VALUE ( 0x3 )
377 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_SDMA_EMAC3_VALUE_RESET_VALUE ( 0x3 )
378 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_SDMA_EMAC4_VALUE ( 0x3 )
379 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_SDMA_EMAC4_VALUE_RESET_VALUE ( 0x3 )
380 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_SDMA_GPON_VALUE ( 0x5 )
381 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_SDMA_GPON_VALUE_RESET_VALUE ( 0x5 )
382 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_DMA_EMAC0_VALUE ( 0x7 )
383 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_DMA_EMAC0_VALUE_RESET_VALUE ( 0x7 )
384 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_DMA_EMAC1_VALUE ( 0x7 )
385 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_DMA_EMAC1_VALUE_RESET_VALUE ( 0x7 )
386 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_DMA_EMAC2_VALUE ( 0x7 )
387 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_DMA_EMAC2_VALUE_RESET_VALUE ( 0x7 )
388 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_DMA_EMAC3_VALUE ( 0x7 )
389 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_DMA_EMAC3_VALUE_RESET_VALUE ( 0x7 )
390 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_DMA_EMAC4_VALUE ( 0x7 )
391 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_DMA_EMAC4_VALUE_RESET_VALUE ( 0x7 )
392 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_DMA_GPON_VALUE ( 0xE )
393 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_DMA_GPON_VALUE_RESET_VALUE ( 0xE )
394 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_MAX_VALUE ( 0x1F )
395
396
397 #define DMA_REGS_CONFIG_U_THRESH_OFFSET ( 0x00000034 )
398
399 #define DMA_REGS_0_CONFIG_U_THRESH_ADDRESS ( DMA_REGS_0_CONFIG_ADDRESS + DMA_REGS_CONFIG_U_THRESH_OFFSET )
400 #define DMA_REGS_0_CONFIG_U_THRESH_READ_I( r, i ) READ_I_32( ( DMA_REGS_0_CONFIG_U_THRESH_ADDRESS ), (i), (r) )
401 #define DMA_REGS_0_CONFIG_U_THRESH_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_0_CONFIG_U_THRESH_ADDRESS ), (i), (v) )
402
403 #define DMA_REGS_1_CONFIG_U_THRESH_ADDRESS ( DMA_REGS_1_CONFIG_ADDRESS + DMA_REGS_CONFIG_U_THRESH_OFFSET )
404 #define DMA_REGS_1_CONFIG_U_THRESH_READ_I( r, i ) READ_I_32( ( DMA_REGS_1_CONFIG_U_THRESH_ADDRESS ), (i), (r) )
405 #define DMA_REGS_1_CONFIG_U_THRESH_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_1_CONFIG_U_THRESH_ADDRESS ), (i), (v) )
406
407
408 extern uint32_t DMA_REGS_CONFIG_U_THRESH_ARRAY [ ] ;
409
410 #define DMA_REGS_CONFIG_U_THRESH_WRITE( i, k, v ) WRITE_I_32( DMA_REGS_CONFIG_U_THRESH_ARRAY [ i ], (k), (v) )
411 #define DMA_REGS_CONFIG_U_THRESH_READ( i, k, r ) READ_I_32( DMA_REGS_CONFIG_U_THRESH_ARRAY [ i ], (k), (r) )
412
413 #ifndef _BYTE_ORDER_LITTLE_ENDIAN_
414 typedef struct
415 {
416 /* reserved2 */
417 uint32_t r2 : 18 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
418
419 /* out_of_urgent_threshold */
420 uint32_t out_of_u : 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
421
422 /* reserved1 */
423 uint32_t r1 : 2 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
424
425 /* into_urgent_threshold */
426 uint32_t into_u : 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
427 }
428 __PACKING_ATTRIBUTE_STRUCT_END__
429 DMA_REGS_CONFIG_U_THRESH ;
430 #else
431 typedef struct
432 { uint32_t into_u : 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
433
434 /* into_urgent_threshold */
435 uint32_t r1 : 2 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
436
437 /* reserved1 */
438 uint32_t out_of_u : 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
439
440 /* out_of_urgent_threshold */
441 uint32_t r2 : 18 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
442
443 /* reserved2 */
444 }
445 __PACKING_ATTRIBUTE_STRUCT_END__
446 DMA_REGS_CONFIG_U_THRESH ;
447 #endif
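
/*
 * Illustrative sketch (not part of the generated register map): program the urgent
 * thresholds of one peripheral on DMA core 0. The hysteresis rule from the block
 * comment above applies: 'into' must stay greater than 'out_of'. The helper name is
 * hypothetical and the indexed write convention is assumed from the macros above.
 */
static inline void rdp_dma_example_set_urgent_thresh(uint32_t periph_idx,
                                                     uint32_t into, uint32_t out_of)
{
    union {
        DMA_REGS_CONFIG_U_THRESH fields;
        uint32_t word;
    } u;

    u.word = 0;
    u.fields.into_u = into;     /* into_urgent_threshold */
    u.fields.out_of_u = out_of; /* out_of_urgent_threshold, must be below 'into' */

    DMA_REGS_0_CONFIG_U_THRESH_WRITE_I(u.word, periph_idx);
}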
448
449 /*****************************************************************************************/
450 /* STRICT_PRIORITY */
451 /* The arbitration between the requests of the different peripherals is done in two       */
452 /* stages: 1. Strict priority - chooses the peripherals with the highest priority among   */
453 /* all peripherals that have a request pending.  2. Weighted round-robin between all      */
454 /* peripherals with the same priority.  This array of registers allows configuration of   */
455 /* the priority of each peripheral (both rx and tx) in the following manner: there are    */
456 /* 8 levels of priority, where each bit in the register represents a different priority   */
457 /* level.  One should assert the relevant bit according to the desired priority - for     */
458 /* the lowest, 00000001; for the highest, 10000000.                                        */
459 /*****************************************************************************************/
460
461 #define DMA_REGS_CONFIG_PRI_R1_R2_VALUE ( 0x0 )
462 #define DMA_REGS_CONFIG_PRI_R1_R2_VALUE_RESET_VALUE ( 0x0 )
463 #define DMA_REGS_CONFIG_PRI_TXPRI_LOW_VALUE ( 0x1 )
464 #define DMA_REGS_CONFIG_PRI_TXPRI_DEFAULT_TX_VALUE ( 0x80 )
465 #define DMA_REGS_CONFIG_PRI_TXPRI_DEFAULT_TX_VALUE_RESET_VALUE ( 0x80 )
466 #define DMA_REGS_CONFIG_PRI_TXPRI_HIGH_VALUE ( 0x80 )
467 #define DMA_REGS_CONFIG_PRI_RXPRI_LOW_VALUE ( 0x1 )
468 #define DMA_REGS_CONFIG_PRI_RXPRI_DEFAULT_RX_VALUE ( 0x80 )
469 #define DMA_REGS_CONFIG_PRI_RXPRI_DEFAULT_RX_VALUE_RESET_VALUE ( 0x80 )
470 #define DMA_REGS_CONFIG_PRI_RXPRI_HIGH_VALUE ( 0x80 )
471
472
473 #define DMA_REGS_CONFIG_PRI_OFFSET ( 0x0000004C )
474
475 #define DMA_REGS_0_CONFIG_PRI_ADDRESS ( DMA_REGS_0_CONFIG_ADDRESS + DMA_REGS_CONFIG_PRI_OFFSET )
476 #define DMA_REGS_0_CONFIG_PRI_READ_I( r, i ) READ_I_32( ( DMA_REGS_0_CONFIG_PRI_ADDRESS ), (i), (r) )
477 #define DMA_REGS_0_CONFIG_PRI_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_0_CONFIG_PRI_ADDRESS ), (i), (v) )
478
479 #define DMA_REGS_1_CONFIG_PRI_ADDRESS ( DMA_REGS_1_CONFIG_ADDRESS + DMA_REGS_CONFIG_PRI_OFFSET )
480 #define DMA_REGS_1_CONFIG_PRI_READ_I( r, i ) READ_I_32( ( DMA_REGS_1_CONFIG_PRI_ADDRESS ), (i), (r) )
481 #define DMA_REGS_1_CONFIG_PRI_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_1_CONFIG_PRI_ADDRESS ), (i), (v) )
482
483
484 extern uint32_t DMA_REGS_CONFIG_PRI_ARRAY [ ] ;
485
486 #define DMA_REGS_CONFIG_PRI_WRITE( i, k, v ) WRITE_I_32( DMA_REGS_CONFIG_PRI_ARRAY [ i ], (k), (v) )
487 #define DMA_REGS_CONFIG_PRI_READ( i, k, r ) READ_I_32( DMA_REGS_CONFIG_PRI_ARRAY [ i ], (k), (r) )
488
489 #ifndef _BYTE_ORDER_LITTLE_ENDIAN_
490 typedef struct
491 {
492 /* reserved2 */
493 uint32_t r1 : 16 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
494
495 /* priority_of_tx_side */
496 uint32_t txpri : 8 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
497
498 /* priority_of_rx_side */
499 uint32_t rxpri : 8 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
500 }
501 __PACKING_ATTRIBUTE_STRUCT_END__
502 DMA_REGS_CONFIG_PRI ;
503 #else
504 typedef struct
505 { uint32_t rxpri : 8 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
506
507 /* priority_of_rx_side */
508 uint32_t txpri : 8 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
509
510 /* priority_of_tx_side */
511 uint32_t r1 : 16 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
512
513 /* reserved2 */
514 }
515 __PACKING_ATTRIBUTE_STRUCT_END__
516 DMA_REGS_CONFIG_PRI ;
517 #endif
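
/*
 * Illustrative sketch (not part of the generated register map): give both the rx and
 * tx sides of one peripheral the highest strict priority. The priority fields are
 * one-hot, as described above (0x01 = lowest, 0x80 = highest). The helper name is
 * hypothetical.
 */
static inline void rdp_dma_example_set_priority_highest(uint32_t periph_idx)
{
    union {
        DMA_REGS_CONFIG_PRI fields;
        uint32_t word;
    } u;

    u.word = 0;
    u.fields.rxpri = DMA_REGS_CONFIG_PRI_RXPRI_HIGH_VALUE; /* 0x80 */
    u.fields.txpri = DMA_REGS_CONFIG_PRI_TXPRI_HIGH_VALUE; /* 0x80 */

    DMA_REGS_0_CONFIG_PRI_WRITE_I(u.word, periph_idx);
}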
518
519 /*****************************************************************************************/
520 /* WEIGHT_OF_ROUND_ROBIN */
521 /* The second phase of the arbitration between requests is weighted round-robin between   */
522 /* requests of peripherals with the same priority.  This array of registers allows        */
523 /* configuration of the weight of each peripheral (rx and tx).  The actual weight is      */
524 /* weight + 1, meaning a configuration of 0 is an actual weight of 1.                     */
525 /*****************************************************************************************/
526
527 #define DMA_REGS_CONFIG_WEIGHT_R2_DEFAULT_VALUE ( 0x0 )
528 #define DMA_REGS_CONFIG_WEIGHT_R2_DEFAULT_VALUE_RESET_VALUE ( 0x0 )
529 #define DMA_REGS_CONFIG_WEIGHT_TXWEIGHT_MIN_VALUE ( 0x0 )
530 #define DMA_REGS_CONFIG_WEIGHT_TXWEIGHT_MIN_VALUE_RESET_VALUE ( 0x0 )
531 #define DMA_REGS_CONFIG_WEIGHT_TXWEIGHT_DEFAULT_SDMA_VALUE ( 0x0 )
532 #define DMA_REGS_CONFIG_WEIGHT_TXWEIGHT_DEFAULT_SDMA_VALUE_RESET_VALUE ( 0x0 )
533 #define DMA_REGS_CONFIG_WEIGHT_TXWEIGHT_DEFAULT_EMAC_DMA_VALUE ( 0x1 )
534 #define DMA_REGS_CONFIG_WEIGHT_TXWEIGHT_DEFAULT_EMAC_DMA_VALUE_RESET_VALUE ( 0x1 )
535 #define DMA_REGS_CONFIG_WEIGHT_TXWEIGHT_DEFAULT_GPON_DMA_VALUE ( 0x3 )
536 #define DMA_REGS_CONFIG_WEIGHT_TXWEIGHT_DEFAULT_GPON_DMA_VALUE_RESET_VALUE ( 0x3 )
537 #define DMA_REGS_CONFIG_WEIGHT_TXWEIGHT_MAX_VALUE ( 0x7 )
538 #define DMA_REGS_CONFIG_WEIGHT_R1_DEFAULT_VALUE ( 0x0 )
539 #define DMA_REGS_CONFIG_WEIGHT_R1_DEFAULT_VALUE_RESET_VALUE ( 0x0 )
540 #define DMA_REGS_CONFIG_WEIGHT_RXWEIGHT_DEFAULT_EMACS_DMA_VALUE ( 0x0 )
541 #define DMA_REGS_CONFIG_WEIGHT_RXWEIGHT_DEFAULT_EMACS_DMA_VALUE_RESET_VALUE ( 0x0 )
542 #define DMA_REGS_CONFIG_WEIGHT_RXWEIGHT_MIN_VALUE ( 0x0 )
543 #define DMA_REGS_CONFIG_WEIGHT_RXWEIGHT_MIN_VALUE_RESET_VALUE ( 0x0 )
544 #define DMA_REGS_CONFIG_WEIGHT_RXWEIGHT_DEFAULT_EMACS_SDMA_VALUE ( 0x0 )
545 #define DMA_REGS_CONFIG_WEIGHT_RXWEIGHT_DEFAULT_EMACS_SDMA_VALUE_RESET_VALUE ( 0x0 )
546 #define DMA_REGS_CONFIG_WEIGHT_RXWEIGHT_DEFAULT_GPON_DMA_VALUE ( 0x1 )
547 #define DMA_REGS_CONFIG_WEIGHT_RXWEIGHT_DEFAULT_GPON_DMA_VALUE_RESET_VALUE ( 0x1 )
548 #define DMA_REGS_CONFIG_WEIGHT_RXWEIGHT_DEFAULT_GPON_SDMA_VALUE ( 0x1 )
549 #define DMA_REGS_CONFIG_WEIGHT_RXWEIGHT_DEFAULT_GPON_SDMA_VALUE_RESET_VALUE ( 0x1 )
550 #define DMA_REGS_CONFIG_WEIGHT_RXWEIGHT_MAX_VALUE ( 0x7 )
551
552
553 #define DMA_REGS_CONFIG_WEIGHT_OFFSET ( 0x00000064 )
554
555 #define DMA_REGS_0_CONFIG_WEIGHT_ADDRESS ( DMA_REGS_0_CONFIG_ADDRESS + DMA_REGS_CONFIG_WEIGHT_OFFSET )
556 #define DMA_REGS_0_CONFIG_WEIGHT_READ_I( r, i ) READ_I_32( ( DMA_REGS_0_CONFIG_WEIGHT_ADDRESS ), (i), (r) )
557 #define DMA_REGS_0_CONFIG_WEIGHT_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_0_CONFIG_WEIGHT_ADDRESS ), (i), (v) )
558
559 #define DMA_REGS_1_CONFIG_WEIGHT_ADDRESS ( DMA_REGS_1_CONFIG_ADDRESS + DMA_REGS_CONFIG_WEIGHT_OFFSET )
560 #define DMA_REGS_1_CONFIG_WEIGHT_READ_I( r, i ) READ_I_32( ( DMA_REGS_1_CONFIG_WEIGHT_ADDRESS ), (i), (r) )
561 #define DMA_REGS_1_CONFIG_WEIGHT_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_1_CONFIG_WEIGHT_ADDRESS ), (i), (v) )
562
563
564 extern uint32_t DMA_REGS_CONFIG_WEIGHT_ARRAY [ ] ;
565
566 #define DMA_REGS_CONFIG_WEIGHT_WRITE( i, k, v ) WRITE_I_32( DMA_REGS_CONFIG_WEIGHT_ARRAY [ i ], (k), (v) )
567 #define DMA_REGS_CONFIG_WEIGHT_READ( i, k, r ) READ_I_32( DMA_REGS_CONFIG_WEIGHT_ARRAY [ i ], (k), (r) )
568
569 #ifndef _BYTE_ORDER_LITTLE_ENDIAN_
570 typedef struct
571 {
572 /* reserved */
573 uint32_t r2 : 21 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
574
575 /* weight_of_tx_side */
576 uint32_t txweight : 3 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
577
578 /* reserved */
579 uint32_t r1 : 5 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
580
581 /* weight_of_rx_side */
582 uint32_t rxweight : 3 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
583 }
584 __PACKING_ATTRIBUTE_STRUCT_END__
585 DMA_REGS_CONFIG_WEIGHT ;
586 #else
587 typedef struct
588 { uint32_t rxweight : 3 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
589
590 /* weight_of_rx_side */
591 uint32_t r1 : 5 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
592
593 /* reserved */
594 uint32_t txweight : 3 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
595
596 /* weight_of_tx_side */
597 uint32_t r2 : 21 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
598
599 /* reserved */
600 }
601 __PACKING_ATTRIBUTE_STRUCT_END__
602 DMA_REGS_CONFIG_WEIGHT ;
603 #endif
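
/*
 * Illustrative sketch (not part of the generated register map): program the round-robin
 * weights of one peripheral. Because the hardware adds one, writing 0 yields an
 * effective weight of 1 and writing 3 yields 4. The helper name is hypothetical.
 */
static inline void rdp_dma_example_set_weight(uint32_t periph_idx,
                                              uint32_t rx_weight, uint32_t tx_weight)
{
    union {
        DMA_REGS_CONFIG_WEIGHT fields;
        uint32_t word;
    } u;

    u.word = 0;
    u.fields.rxweight = rx_weight; /* effective weight is rx_weight + 1 */
    u.fields.txweight = tx_weight; /* effective weight is tx_weight + 1 */

    DMA_REGS_0_CONFIG_WEIGHT_WRITE_I(u.word, periph_idx);
}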
604
605 /*****************************************************************************************/
606 /* BB_ROUTE_DMA_PERIPH */
607 /* Broadbus route address from the DMA to the peripherals.  One register per peripheral   */
608 /* (rx and tx).  The route address is the same for DMA and SDMA because of the symmetry   */
609 /* of the BB tree.                                                                         */
610 /*****************************************************************************************/
611
612 #define DMA_REGS_CONFIG_BB_ROUTE_R2_R2_VALUE ( 0x0 )
613 #define DMA_REGS_CONFIG_BB_ROUTE_R2_R2_VALUE_RESET_VALUE ( 0x0 )
614 #define DMA_REGS_CONFIG_BB_ROUTE_TXROUTE_DEFAULT_GPON_VALUE ( 0x11 )
615 #define DMA_REGS_CONFIG_BB_ROUTE_TXROUTE_DEFAULT_GPON_VALUE_RESET_VALUE ( 0x11 )
616 #define DMA_REGS_CONFIG_BB_ROUTE_TXROUTE_DEFAULT_EMAC3_VALUE ( 0x12 )
617 #define DMA_REGS_CONFIG_BB_ROUTE_TXROUTE_DEFAULT_EMAC3_VALUE_RESET_VALUE ( 0x12 )
618 #define DMA_REGS_CONFIG_BB_ROUTE_TXROUTE_DEFAULT_EMAC1_VALUE ( 0x16 )
619 #define DMA_REGS_CONFIG_BB_ROUTE_TXROUTE_DEFAULT_EMAC1_VALUE_RESET_VALUE ( 0x16 )
620 #define DMA_REGS_CONFIG_BB_ROUTE_TXROUTE_DEFAULT_EMAC4_VALUE ( 0x19 )
621 #define DMA_REGS_CONFIG_BB_ROUTE_TXROUTE_DEFAULT_EMAC4_VALUE_RESET_VALUE ( 0x19 )
622 #define DMA_REGS_CONFIG_BB_ROUTE_TXROUTE_DEFAULT_EMAC2_VALUE ( 0x1A )
623 #define DMA_REGS_CONFIG_BB_ROUTE_TXROUTE_DEFAULT_EMAC2_VALUE_RESET_VALUE ( 0x1A )
624 #define DMA_REGS_CONFIG_BB_ROUTE_TXROUTE_DEFAULT_EMAC0_VALUE ( 0x1E )
625 #define DMA_REGS_CONFIG_BB_ROUTE_TXROUTE_DEFAULT_EMAC0_VALUE_RESET_VALUE ( 0x1E )
626 #define DMA_REGS_CONFIG_BB_ROUTE_R1_R1_VALUE ( 0x0 )
627 #define DMA_REGS_CONFIG_BB_ROUTE_R1_R1_VALUE_RESET_VALUE ( 0x0 )
628 #define DMA_REGS_CONFIG_BB_ROUTE_RXROUTE_DEFAULT_GPON_VALUE ( 0x1 )
629 #define DMA_REGS_CONFIG_BB_ROUTE_RXROUTE_DEFAULT_GPON_VALUE_RESET_VALUE ( 0x1 )
630 #define DMA_REGS_CONFIG_BB_ROUTE_RXROUTE_DEFAULT_EMAC3_VALUE ( 0x2 )
631 #define DMA_REGS_CONFIG_BB_ROUTE_RXROUTE_DEFAULT_EMAC3_VALUE_RESET_VALUE ( 0x2 )
632 #define DMA_REGS_CONFIG_BB_ROUTE_RXROUTE_DEFAULT_EMAC1_VALUE ( 0x6 )
633 #define DMA_REGS_CONFIG_BB_ROUTE_RXROUTE_DEFAULT_EMAC1_VALUE_RESET_VALUE ( 0x6 )
634 #define DMA_REGS_CONFIG_BB_ROUTE_RXROUTE_DEFAULT_EMAC4_VALUE ( 0x9 )
635 #define DMA_REGS_CONFIG_BB_ROUTE_RXROUTE_DEFAULT_EMAC4_VALUE_RESET_VALUE ( 0x9 )
636 #define DMA_REGS_CONFIG_BB_ROUTE_RXROUTE_DEFAULT_EMAC2_VALUE ( 0xA )
637 #define DMA_REGS_CONFIG_BB_ROUTE_RXROUTE_DEFAULT_EMAC2_VALUE_RESET_VALUE ( 0xA )
638 #define DMA_REGS_CONFIG_BB_ROUTE_RXROUTE_DEFAULT_EMAC0_VALUE ( 0xE )
639 #define DMA_REGS_CONFIG_BB_ROUTE_RXROUTE_DEFAULT_EMAC0_VALUE_RESET_VALUE ( 0xE )
640
641
642 #define DMA_REGS_CONFIG_BB_ROUTE_OFFSET ( 0x0000007C )
643
644 #define DMA_REGS_0_CONFIG_BB_ROUTE_ADDRESS ( DMA_REGS_0_CONFIG_ADDRESS + DMA_REGS_CONFIG_BB_ROUTE_OFFSET )
645 #define DMA_REGS_0_CONFIG_BB_ROUTE_READ_I( r, i ) READ_I_32( ( DMA_REGS_0_CONFIG_BB_ROUTE_ADDRESS ), (i), (r) )
646 #define DMA_REGS_0_CONFIG_BB_ROUTE_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_0_CONFIG_BB_ROUTE_ADDRESS ), (i), (v) )
647
648 #define DMA_REGS_1_CONFIG_BB_ROUTE_ADDRESS ( DMA_REGS_1_CONFIG_ADDRESS + DMA_REGS_CONFIG_BB_ROUTE_OFFSET )
649 #define DMA_REGS_1_CONFIG_BB_ROUTE_READ_I( r, i ) READ_I_32( ( DMA_REGS_1_CONFIG_BB_ROUTE_ADDRESS ), (i), (r) )
650 #define DMA_REGS_1_CONFIG_BB_ROUTE_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_1_CONFIG_BB_ROUTE_ADDRESS ), (i), (v) )
651
652
653 extern uint32_t DMA_REGS_CONFIG_BB_ROUTE_ARRAY [ ] ;
654
655 #define DMA_REGS_CONFIG_BB_ROUTE_WRITE( i, k, v ) WRITE_I_32( DMA_REGS_CONFIG_BB_ROUTE_ARRAY [ i ], (k), (v) )
656 #define DMA_REGS_CONFIG_BB_ROUTE_READ( i, k, r ) READ_I_32( DMA_REGS_CONFIG_BB_ROUTE_ARRAY [ i ], (k), (r) )
657
658 #ifndef _BYTE_ORDER_LITTLE_ENDIAN_
659 typedef struct
660 {
661 /* reserved2 */
662 uint32_t r2 : 17 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
663
664 /* bb_route_to_tx_side */
665 uint32_t txroute : 7 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
666
667 /* reserved1 */
668 uint32_t r1 : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
669
670 /* bb_route_to_rx_side */
671 uint32_t rxroute : 7 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
672 }
673 __PACKING_ATTRIBUTE_STRUCT_END__
674 DMA_REGS_CONFIG_BB_ROUTE ;
675 #else
676 typedef struct
677 { uint32_t rxroute : 7 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
678
679 /* bb_route_to_rx_side */
680 uint32_t r1 : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
681
682 /* reserved1 */
683 uint32_t txroute : 7 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
684
685 /* bb_route_to_tx_side */
686 uint32_t r2 : 17 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
687
688 /* reserved2 */
689 }
690 __PACKING_ATTRIBUTE_STRUCT_END__
691 DMA_REGS_CONFIG_BB_ROUTE ;
692 #endif
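
/*
 * Illustrative sketch (not part of the generated register map): program the Broadbus
 * route of one peripheral, e.g. with the per-peripheral defaults listed above
 * (DMA_REGS_CONFIG_BB_ROUTE_RXROUTE_DEFAULT_EMAC0_VALUE and
 * DMA_REGS_CONFIG_BB_ROUTE_TXROUTE_DEFAULT_EMAC0_VALUE). The helper name is hypothetical.
 */
static inline void rdp_dma_example_set_bb_route(uint32_t periph_idx,
                                                uint32_t rx_route, uint32_t tx_route)
{
    union {
        DMA_REGS_CONFIG_BB_ROUTE fields;
        uint32_t word;
    } u;

    u.word = 0;
    u.fields.rxroute = rx_route; /* route towards the rx side */
    u.fields.txroute = tx_route; /* route towards the tx side */

    DMA_REGS_0_CONFIG_BB_ROUTE_WRITE_I(u.word, periph_idx);
}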
693
694 /*****************************************************************************************/
695 /* POINTERS_RESET */
696 /* Resets the pointers of the peripherals' FIFOs within the DMA.  One bit per peripheral  */
697 /* side (rx and tx).  For the rx side it resets the data and CD FIFOs; for the tx side    */
698 /* it resets the read requests FIFO.                                                       */
699 /*****************************************************************************************/
700
701 #define DMA_REGS_CONFIG_PTRRST_R1_DEFAULT_VALUE ( 0x0 )
702 #define DMA_REGS_CONFIG_PTRRST_R1_DEFAULT_VALUE_RESET_VALUE ( 0x0 )
703 #define DMA_REGS_CONFIG_PTRRST_GPONTX_OFF_VALUE ( 0x0 )
704 #define DMA_REGS_CONFIG_PTRRST_GPONTX_OFF_VALUE_RESET_VALUE ( 0x0 )
705 #define DMA_REGS_CONFIG_PTRRST_GPONTX_RESET_VALUE ( 0x1 )
706 #define DMA_REGS_CONFIG_PTRRST_GPONRX_OFF_VALUE ( 0x0 )
707 #define DMA_REGS_CONFIG_PTRRST_GPONRX_OFF_VALUE_RESET_VALUE ( 0x0 )
708 #define DMA_REGS_CONFIG_PTRRST_GPONRX_RESET_VALUE ( 0x1 )
709 #define DMA_REGS_CONFIG_PTRRST_ETH4TX_OFF_VALUE ( 0x0 )
710 #define DMA_REGS_CONFIG_PTRRST_ETH4TX_OFF_VALUE_RESET_VALUE ( 0x0 )
711 #define DMA_REGS_CONFIG_PTRRST_ETH4TX_RESET_VALUE ( 0x1 )
712 #define DMA_REGS_CONFIG_PTRRST_ETH4RX_OFF_VALUE ( 0x0 )
713 #define DMA_REGS_CONFIG_PTRRST_ETH4RX_OFF_VALUE_RESET_VALUE ( 0x0 )
714 #define DMA_REGS_CONFIG_PTRRST_ETH4RX_RESET_VALUE ( 0x1 )
715 #define DMA_REGS_CONFIG_PTRRST_ETH3TX_OFF_VALUE ( 0x0 )
716 #define DMA_REGS_CONFIG_PTRRST_ETH3TX_OFF_VALUE_RESET_VALUE ( 0x0 )
717 #define DMA_REGS_CONFIG_PTRRST_ETH3TX_RESET_VALUE ( 0x1 )
718 #define DMA_REGS_CONFIG_PTRRST_ETH3RX_OFF_VALUE ( 0x0 )
719 #define DMA_REGS_CONFIG_PTRRST_ETH3RX_OFF_VALUE_RESET_VALUE ( 0x0 )
720 #define DMA_REGS_CONFIG_PTRRST_ETH3RX_RESET_VALUE ( 0x1 )
721 #define DMA_REGS_CONFIG_PTRRST_ETH2TX_OFF_VALUE ( 0x0 )
722 #define DMA_REGS_CONFIG_PTRRST_ETH2TX_OFF_VALUE_RESET_VALUE ( 0x0 )
723 #define DMA_REGS_CONFIG_PTRRST_ETH2TX_RESET_VALUE ( 0x1 )
724 #define DMA_REGS_CONFIG_PTRRST_ETH2RX_OFF_VALUE ( 0x0 )
725 #define DMA_REGS_CONFIG_PTRRST_ETH2RX_OFF_VALUE_RESET_VALUE ( 0x0 )
726 #define DMA_REGS_CONFIG_PTRRST_ETH2RX_RESET_VALUE ( 0x1 )
727 #define DMA_REGS_CONFIG_PTRRST_ETH1TX_OFF_VALUE ( 0x0 )
728 #define DMA_REGS_CONFIG_PTRRST_ETH1TX_OFF_VALUE_RESET_VALUE ( 0x0 )
729 #define DMA_REGS_CONFIG_PTRRST_ETH1TX_RESET_VALUE ( 0x1 )
730 #define DMA_REGS_CONFIG_PTRRST_ETH1RX_OFF_VALUE ( 0x0 )
731 #define DMA_REGS_CONFIG_PTRRST_ETH1RX_OFF_VALUE_RESET_VALUE ( 0x0 )
732 #define DMA_REGS_CONFIG_PTRRST_ETH1RX_RESET_VALUE ( 0x1 )
733 #define DMA_REGS_CONFIG_PTRRST_ETH0TX_OFF_VALUE ( 0x0 )
734 #define DMA_REGS_CONFIG_PTRRST_ETH0TX_OFF_VALUE_RESET_VALUE ( 0x0 )
735 #define DMA_REGS_CONFIG_PTRRST_ETH0TX_RESET_VALUE ( 0x1 )
736 #define DMA_REGS_CONFIG_PTRRST_ETH0RX_OFF_VALUE ( 0x0 )
737 #define DMA_REGS_CONFIG_PTRRST_ETH0RX_OFF_VALUE_RESET_VALUE ( 0x0 )
738 #define DMA_REGS_CONFIG_PTRRST_ETH0RX_RESET_VALUE ( 0x1 )
739
740
741 #define DMA_REGS_CONFIG_PTRRST_OFFSET ( 0x00000094 )
742
743 #define DMA_REGS_0_CONFIG_PTRRST_ADDRESS ( DMA_REGS_0_CONFIG_ADDRESS + DMA_REGS_CONFIG_PTRRST_OFFSET )
744 #define DMA_REGS_0_CONFIG_PTRRST_READ( r ) READ_32( ( DMA_REGS_0_CONFIG_PTRRST_ADDRESS ), (r) )
745 #define DMA_REGS_0_CONFIG_PTRRST_WRITE( v ) WRITE_32( ( DMA_REGS_0_CONFIG_PTRRST_ADDRESS ), (v) )
746
747 #define DMA_REGS_1_CONFIG_PTRRST_ADDRESS ( DMA_REGS_1_CONFIG_ADDRESS + DMA_REGS_CONFIG_PTRRST_OFFSET )
748 #define DMA_REGS_1_CONFIG_PTRRST_READ( r ) READ_32( ( DMA_REGS_1_CONFIG_PTRRST_ADDRESS ), (r) )
749 #define DMA_REGS_1_CONFIG_PTRRST_WRITE( v ) WRITE_32( ( DMA_REGS_1_CONFIG_PTRRST_ADDRESS ), (v) )
750
751
752 extern uint32_t DMA_REGS_CONFIG_PTRRST_ARRAY [ ] ;
753
754 #define DMA_REGS_CONFIG_PTRRST_WRITE( i, v ) WRITE_32( DMA_REGS_CONFIG_PTRRST_ARRAY [ i ], (v) )
755 #define DMA_REGS_CONFIG_PTRRST_READ( i, r ) READ_32( DMA_REGS_CONFIG_PTRRST_ARRAY [ i ], (r) )
756
757 #ifndef _BYTE_ORDER_LITTLE_ENDIAN_
758 typedef struct
759 {
760 /* reserved */
761 uint32_t r1 : 20 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
762
763 /* gpon_tx_reset */
764 uint32_t gpontx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
765
766 /* gpon_rx_reset */
767 uint32_t gponrx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
768
769 /* ethernet_4_tx_reset */
770 uint32_t eth4tx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
771
772 /* ethernet_4_rx_reset */
773 uint32_t eth4rx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
774
775 /* ethernet_3_tx_reset */
776 uint32_t eth3tx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
777
778 /* ethernet_3_rx_reset */
779 uint32_t eth3rx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
780
781 /* ethernet_2_tx_reset */
782 uint32_t eth2tx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
783
784 /* ethernet_2_rx_reset */
785 uint32_t eth2rx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
786
787 /* ethernet_1_tx_reset */
788 uint32_t eth1tx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
789
790 /* ethernet_1_rx_reset */
791 uint32_t eth1rx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
792
793 /* ethernet_0_tx_reset */
794 uint32_t eth0tx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
795
796 /* ethernet_0_rx_reset */
797 uint32_t eth0rx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
798 }
799 __PACKING_ATTRIBUTE_STRUCT_END__
800 DMA_REGS_CONFIG_PTRRST ;
801 #else
802 typedef struct
803 { uint32_t eth0rx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
804
805 /* ethernet_0_rx_reset */
806 uint32_t eth0tx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
807
808 /* ethernet_0_tx_reset */
809 uint32_t eth1rx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
810
811 /* ethernet_1_rx_reset */
812 uint32_t eth1tx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
813
814 /* ethernet_1_tx_reset */
815 uint32_t eth2rx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
816
817 /* ethernet_2_rx_reset */
818 uint32_t eth2tx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
819
820 /* ethernet_2_tx_reset */
821 uint32_t eth3rx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
822
823 /* ethernet_3_rx_reset */
824 uint32_t eth3tx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
825
826 /* ethernet_3_tx_reset */
827 uint32_t eth4rx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
828
829 /* ethernet_4_rx_reset */
830 uint32_t eth4tx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
831
832 /* ethernet_4_tx_reset */
833 uint32_t gponrx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
834
835 /* gpon_rx_reset */
836 uint32_t gpontx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
837
838 /* gpon_tx_reset */
839 uint32_t r1 : 20 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
840
841 /* reserved */
842 }
843 __PACKING_ATTRIBUTE_STRUCT_END__
844 DMA_REGS_CONFIG_PTRRST ;
845 #endif
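
/*
 * Illustrative sketch (not part of the generated register map): reset the rx and tx
 * FIFO pointers of Ethernet 0 on DMA core 0. Whether the reset bits self-clear is not
 * stated in this header, so the sketch releases them explicitly. The helper name is
 * hypothetical.
 */
static inline void rdp_dma_example_reset_eth0_pointers(void)
{
    union {
        DMA_REGS_CONFIG_PTRRST fields;
        uint32_t word;
    } u;

    u.word = 0;
    u.fields.eth0rx = DMA_REGS_CONFIG_PTRRST_ETH0RX_RESET_VALUE; /* 0x1 */
    u.fields.eth0tx = DMA_REGS_CONFIG_PTRRST_ETH0TX_RESET_VALUE; /* 0x1 */
    DMA_REGS_0_CONFIG_PTRRST_WRITE(u.word);

    /* release the reset */
    DMA_REGS_0_CONFIG_PTRRST_WRITE(0);
}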
846
847 /*****************************************************************************************/
848 /* MEM_SEL */
849 /* For DMA, there are 2 data memories for write data (upstream).  Each client has a       */
850 /* configurable number of 128-byte buffers in one of the memories (see the                */
851 /* MEMORY_ALLOCATION register).  The first memory has a total of 64 buffers, while the    */
852 /* second has 32 buffers.  This register configures in which one of the memories each     */
853 /* client's buffers are located (1 bit per client: 0 - first memory, 1 - second memory).  */
854 /* The CD buffers will also be located accordingly.                                       */
855 /*****************************************************************************************/
856
857 #define DMA_REGS_CONFIG_MEM_SEL_R1_RESERVED_VALUE ( 0x0 )
858 #define DMA_REGS_CONFIG_MEM_SEL_R1_RESERVED_VALUE_RESET_VALUE ( 0x0 )
859 #define DMA_REGS_CONFIG_MEM_SEL_MEM_SEL_FIRST_VALUE ( 0x0 )
860 #define DMA_REGS_CONFIG_MEM_SEL_MEM_SEL_FIRST_VALUE_RESET_VALUE ( 0x0 )
861 #define DMA_REGS_CONFIG_MEM_SEL_MEM_SEL_SECOND_VALUE ( 0x1 )
862
863
864 #define DMA_REGS_CONFIG_MEM_SEL_OFFSET ( 0x00000098 )
865
866 #define DMA_REGS_0_CONFIG_MEM_SEL_ADDRESS ( DMA_REGS_0_CONFIG_ADDRESS + DMA_REGS_CONFIG_MEM_SEL_OFFSET )
867 #define DMA_REGS_0_CONFIG_MEM_SEL_READ( r ) READ_32( ( DMA_REGS_0_CONFIG_MEM_SEL_ADDRESS ), (r) )
868 #define DMA_REGS_0_CONFIG_MEM_SEL_WRITE( v ) WRITE_32( ( DMA_REGS_0_CONFIG_MEM_SEL_ADDRESS ), (v) )
869
870 #define DMA_REGS_1_CONFIG_MEM_SEL_ADDRESS ( DMA_REGS_1_CONFIG_ADDRESS + DMA_REGS_CONFIG_MEM_SEL_OFFSET )
871 #define DMA_REGS_1_CONFIG_MEM_SEL_READ( r ) READ_32( ( DMA_REGS_1_CONFIG_MEM_SEL_ADDRESS ), (r) )
872 #define DMA_REGS_1_CONFIG_MEM_SEL_WRITE( v ) WRITE_32( ( DMA_REGS_1_CONFIG_MEM_SEL_ADDRESS ), (v) )
873
874
875 extern uint32_t DMA_REGS_CONFIG_MEM_SEL_ARRAY [ ] ;
876
877 #define DMA_REGS_CONFIG_MEM_SEL_WRITE( i, v ) WRITE_32( DMA_REGS_CONFIG_MEM_SEL_ARRAY [ i ], (v) )
878 #define DMA_REGS_CONFIG_MEM_SEL_READ( i, r ) READ_32( DMA_REGS_CONFIG_MEM_SEL_ARRAY [ i ], (r) )
879
880 #ifndef _BYTE_ORDER_LITTLE_ENDIAN_
881 typedef struct
882 {
883 /* reserved */
884 uint32_t r1 : 26 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
885
886 /* mem_sel */
887 uint32_t mem_sel : 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
888 }
889 __PACKING_ATTRIBUTE_STRUCT_END__
890 DMA_REGS_CONFIG_MEM_SEL ;
891 #else
892 typedef struct
893 { uint32_t mem_sel : 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
894
895 /* mem_sel */
896 uint32_t r1 : 26 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
897
898 /* reserved */
899 }
900 __PACKING_ATTRIBUTE_STRUCT_END__
901 DMA_REGS_CONFIG_MEM_SEL ;
902 #endif
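
/*
 * Illustrative sketch (not part of the generated register map): select, for all six
 * clients at once, which data memory holds their buffers. 'second_memory_clients' is a
 * 6-bit vector with one bit per client (1 = second memory), matching the mem_sel field
 * above. The helper name is hypothetical.
 */
static inline void rdp_dma_example_set_mem_sel(uint32_t second_memory_clients)
{
    union {
        DMA_REGS_CONFIG_MEM_SEL fields;
        uint32_t word;
    } u;

    u.word = 0;
    u.fields.mem_sel = second_memory_clients; /* one bit per client */

    DMA_REGS_0_CONFIG_MEM_SEL_WRITE(u.word);
}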
903
904 /*****************************************************************************************/
905 /* NOT_EMPTY_VECTOR */
906 /* Each peripheral, according to its source address, is represented by a bit in the       */
907 /* not-empty vector.  If the bit is asserted, the requests queue of the relevant          */
908 /* peripheral is not empty.  The not-empty vector is used by the DMA scheduler to         */
909 /* determine which peripheral is the next to be served.                                   */
910 /*****************************************************************************************/
911
912 #define DMA_REGS_DEBUG_NEMPTY_R1_R1_VALUE ( 0x0 )
913 #define DMA_REGS_DEBUG_NEMPTY_R1_R1_VALUE_RESET_VALUE ( 0x0 )
914 #define DMA_REGS_DEBUG_NEMPTY_GPONTXNE_EMPTY_VALUE ( 0x0 )
915 #define DMA_REGS_DEBUG_NEMPTY_GPONTXNE_EMPTY_VALUE_RESET_VALUE ( 0x0 )
916 #define DMA_REGS_DEBUG_NEMPTY_GPONTXNE_NOT_EMPTY_VALUE ( 0x1 )
917 #define DMA_REGS_DEBUG_NEMPTY_ETH4TXNE_EMPTY_VALUE ( 0x0 )
918 #define DMA_REGS_DEBUG_NEMPTY_ETH4TXNE_EMPTY_VALUE_RESET_VALUE ( 0x0 )
919 #define DMA_REGS_DEBUG_NEMPTY_ETH4TXNE_NOT_EMPTY_VALUE ( 0x1 )
920 #define DMA_REGS_DEBUG_NEMPTY_ETH3TXNE_EMPTY_VALUE ( 0x0 )
921 #define DMA_REGS_DEBUG_NEMPTY_ETH3TXNE_EMPTY_VALUE_RESET_VALUE ( 0x0 )
922 #define DMA_REGS_DEBUG_NEMPTY_ETH3TXNE_NOT_EMPTY_VALUE ( 0x1 )
923 #define DMA_REGS_DEBUG_NEMPTY_ETH2TXNE_EMPTY_VALUE ( 0x0 )
924 #define DMA_REGS_DEBUG_NEMPTY_ETH2TXNE_EMPTY_VALUE_RESET_VALUE ( 0x0 )
925 #define DMA_REGS_DEBUG_NEMPTY_ETH2TXNE_NOT_EMPTY_VALUE ( 0x1 )
926 #define DMA_REGS_DEBUG_NEMPTY_ETH1TXNE_EMPTY_VALUE ( 0x0 )
927 #define DMA_REGS_DEBUG_NEMPTY_ETH1TXNE_EMPTY_VALUE_RESET_VALUE ( 0x0 )
928 #define DMA_REGS_DEBUG_NEMPTY_ETH1TXNE_NOT_EMPTY_VALUE ( 0x1 )
929 #define DMA_REGS_DEBUG_NEMPTY_ETH0TXNE_EMPTY_VALUE ( 0x0 )
930 #define DMA_REGS_DEBUG_NEMPTY_ETH0TXNE_EMPTY_VALUE_RESET_VALUE ( 0x0 )
931 #define DMA_REGS_DEBUG_NEMPTY_ETH0TXNE_NOT_EMPTY_VALUE ( 0x1 )
932 #define DMA_REGS_DEBUG_NEMPTY_GPONRXNE_EMPTY_VALUE ( 0x0 )
933 #define DMA_REGS_DEBUG_NEMPTY_GPONRXNE_EMPTY_VALUE_RESET_VALUE ( 0x0 )
934 #define DMA_REGS_DEBUG_NEMPTY_GPONRXNE_NOT_EMPTY_VALUE ( 0x1 )
935 #define DMA_REGS_DEBUG_NEMPTY_ETH4RXNE_EMPTY_VALUE ( 0x0 )
936 #define DMA_REGS_DEBUG_NEMPTY_ETH4RXNE_EMPTY_VALUE_RESET_VALUE ( 0x0 )
937 #define DMA_REGS_DEBUG_NEMPTY_ETH4RXNE_NOT_EMPTY_VALUE ( 0x1 )
938 #define DMA_REGS_DEBUG_NEMPTY_ETH3RXNE_EMPTY_VALUE ( 0x0 )
939 #define DMA_REGS_DEBUG_NEMPTY_ETH3RXNE_EMPTY_VALUE_RESET_VALUE ( 0x0 )
940 #define DMA_REGS_DEBUG_NEMPTY_ETH3RXNE_NOR_EMPTY_VALUE ( 0x1 )
941 #define DMA_REGS_DEBUG_NEMPTY_ETH2RXNE_EMPTY_VALUE ( 0x0 )
942 #define DMA_REGS_DEBUG_NEMPTY_ETH2RXNE_EMPTY_VALUE_RESET_VALUE ( 0x0 )
943 #define DMA_REGS_DEBUG_NEMPTY_ETH2RXNE_NOT_EMPTY_VALUE ( 0x1 )
944 #define DMA_REGS_DEBUG_NEMPTY_ETH1RXNE_EMPTY_VALUE ( 0x0 )
945 #define DMA_REGS_DEBUG_NEMPTY_ETH1RXNE_EMPTY_VALUE_RESET_VALUE ( 0x0 )
946 #define DMA_REGS_DEBUG_NEMPTY_ETH1RXNE_NOT_EMPTY_VALUE ( 0x1 )
947 #define DMA_REGS_DEBUG_NEMPTY_ETH0RXNE_EMPTY_VALUE ( 0x0 )
948 #define DMA_REGS_DEBUG_NEMPTY_ETH0RXNE_EMPTY_VALUE_RESET_VALUE ( 0x0 )
949 #define DMA_REGS_DEBUG_NEMPTY_ETH0RXNE_NOT_EMPTY_VALUE ( 0x1 )
950
951
952 #define DMA_REGS_DEBUG_NEMPTY_OFFSET ( 0x00000000 )
953
954 #define DMA_REGS_0_DEBUG_NEMPTY_ADDRESS ( DMA_REGS_0_DEBUG_ADDRESS + DMA_REGS_DEBUG_NEMPTY_OFFSET )
955 #define DMA_REGS_0_DEBUG_NEMPTY_READ( r ) READ_32( ( DMA_REGS_0_DEBUG_NEMPTY_ADDRESS ), (r) )
956 #define DMA_REGS_0_DEBUG_NEMPTY_WRITE( v ) WRITE_32( ( DMA_REGS_0_DEBUG_NEMPTY_ADDRESS ), (v) )
957
958 #define DMA_REGS_1_DEBUG_NEMPTY_ADDRESS ( DMA_REGS_1_DEBUG_ADDRESS + DMA_REGS_DEBUG_NEMPTY_OFFSET )
959 #define DMA_REGS_1_DEBUG_NEMPTY_READ( r ) READ_32( ( DMA_REGS_1_DEBUG_NEMPTY_ADDRESS ), (r) )
960 #define DMA_REGS_1_DEBUG_NEMPTY_WRITE( v ) WRITE_32( ( DMA_REGS_1_DEBUG_NEMPTY_ADDRESS ), (v) )
961
962
963 extern uint32_t DMA_REGS_DEBUG_NEMPTY_ARRAY [ ] ;
964
965 #define DMA_REGS_DEBUG_NEMPTY_WRITE( i, v ) WRITE_32( DMA_REGS_DEBUG_NEMPTY_ARRAY [ i ], (v) )
966 #define DMA_REGS_DEBUG_NEMPTY_READ( i, r ) READ_32( DMA_REGS_DEBUG_NEMPTY_ARRAY [ i ], (r) )
967
968 #ifndef _BYTE_ORDER_LITTLE_ENDIAN_
969 typedef struct
970 {
971 /* reserved1 */
972 uint32_t r1 : 20 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
973
974 /* GPON_TX_not_empty_indications */
975 uint32_t gpontxne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
976
977 /* Ethernet4_TX_not_empty_indications */
978 uint32_t eth4txne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
979
980 /* Ethernet3_TX_not_empty_indications */
981 uint32_t eth3txne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
982
983 /* Ethernet2_TX_not_empty_indications */
984 uint32_t eth2txne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
985
986 /* Ethernet1_TX_not_empty_indications */
987 uint32_t eth1txne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
988
989 /* Ethernet0_TX_not_empty_indications */
990 uint32_t eth0txne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
991
992 /* GPON_RX_not_empty_indications */
993 uint32_t gponrxne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
994
995 /* Ethernet4_RX_not_empty_indications */
996 uint32_t eth4rxne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
997
998 /* Ethernet3_RX_not_empty_indications */
999 uint32_t eth3rxne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1000
1001 /* Ethernet2_RX_not_empty_indications */
1002 uint32_t eth2rxne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1003
1004 /* Ethernet1_RX_not_empty_indications */
1005 uint32_t eth1rxne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1006
1007 /* Ethernet0_RX_not_empty_indications */
1008 uint32_t eth0rxne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1009 }
1010 __PACKING_ATTRIBUTE_STRUCT_END__
1011 DMA_REGS_DEBUG_NEMPTY ;
1012 #else
1013 typedef struct
1014 { uint32_t eth0rxne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1015
1016 /* Ethernet0_RX_not_empty_indications */
1017 uint32_t eth1rxne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1018
1019 /* Ethernet1_RX_not_empty_indications */
1020 uint32_t eth2rxne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1021
1022 /* Ethernet2_RX_not_empty_indications */
1023 uint32_t eth3rxne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1024
1025 /* Ethernet3_RX_not_empty_indications */
1026 uint32_t eth4rxne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1027
1028 /* Ethernet4_RX_not_empty_indications */
1029 uint32_t gponrxne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1030
1031 /* GPON_RX_not_empty_indications */
1032 uint32_t eth0txne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1033
1034 /* Ethernet0_TX_not_empty_indications */
1035 uint32_t eth1txne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1036
1037 /* Ethernet1_TX_not_empty_indications */
1038 uint32_t eth2txne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1039
1040 /* Ethernet2_TX_not_empty_indications */
1041 uint32_t eth3txne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1042
1043 /* Ethernet3_TX_not_empty_indications */
1044 uint32_t eth4txne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1045
1046 /* Ethernet4_TX_not_empty_indications */
1047 uint32_t gpontxne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1048
1049 /* GPON_TX_not_empty_indications */
1050 uint32_t r1 : 20 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1051
1052 /* reserved1 */
1053 }
1054 __PACKING_ATTRIBUTE_STRUCT_END__
1055 DMA_REGS_DEBUG_NEMPTY ;
1056 #endif
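
/*
 * Illustrative sketch (not part of the generated register map): poll the debug
 * not-empty vector of DMA core 0 and report whether the GPON rx queue has pending
 * requests. READ_32 is assumed to load the register into its second argument,
 * mirroring the value-style WRITE_32 macros; the helper name is hypothetical.
 */
static inline int rdp_dma_example_gpon_rx_not_empty(void)
{
    union {
        DMA_REGS_DEBUG_NEMPTY fields;
        uint32_t word;
    } u;

    u.word = 0;
    DMA_REGS_0_DEBUG_NEMPTY_READ(u.word);

    return u.fields.gponrxne == DMA_REGS_DEBUG_NEMPTY_GPONRXNE_NOT_EMPTY_VALUE;
}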
1057
1058 /*****************************************************************************************/
1059 /* URGENT_VECTOR */
1060 /* Each peripheral, according to its source address, is represented by a bit in the       */
1061 /* urgent vector.  If the bit is asserted, the requests queue of the relevant peripheral  */
1062 /* is in the urgent state.  The urgent vector is used by the DMA scheduler to determine   */
1063 /* which peripheral is the next to be served.                                             */
1064 /*****************************************************************************************/
1065
1066 #define DMA_REGS_DEBUG_URGNT_R1_R1_VALUE ( 0x0 )
1067 #define DMA_REGS_DEBUG_URGNT_R1_R1_VALUE_RESET_VALUE ( 0x0 )
1068 #define DMA_REGS_DEBUG_URGNT_GPONTXU_NOT_URGENT_VALUE ( 0x0 )
1069 #define DMA_REGS_DEBUG_URGNT_GPONTXU_NOT_URGENT_VALUE_RESET_VALUE ( 0x0 )
1070 #define DMA_REGS_DEBUG_URGNT_GPONTXU_URGENT_VALUE ( 0x1 )
1071 #define DMA_REGS_DEBUG_URGNT_ETH4TXU_NOT_URGENT_VALUE ( 0x0 )
1072 #define DMA_REGS_DEBUG_URGNT_ETH4TXU_NOT_URGENT_VALUE_RESET_VALUE ( 0x0 )
1073 #define DMA_REGS_DEBUG_URGNT_ETH4TXU_URGENT_VALUE ( 0x1 )
1074 #define DMA_REGS_DEBUG_URGNT_ETH3TXU_NOT_URGENT_VALUE ( 0x0 )
1075 #define DMA_REGS_DEBUG_URGNT_ETH3TXU_NOT_URGENT_VALUE_RESET_VALUE ( 0x0 )
1076 #define DMA_REGS_DEBUG_URGNT_ETH3TXU_URGENT_VALUE ( 0x1 )
1077 #define DMA_REGS_DEBUG_URGNT_ETH2TXU_NOT_URGENT_VALUE ( 0x0 )
1078 #define DMA_REGS_DEBUG_URGNT_ETH2TXU_NOT_URGENT_VALUE_RESET_VALUE ( 0x0 )
1079 #define DMA_REGS_DEBUG_URGNT_ETH2TXU_URGENT_VALUE ( 0x1 )
1080 #define DMA_REGS_DEBUG_URGNT_ETH1TXU_NOT_URGENT_VALUE ( 0x0 )
1081 #define DMA_REGS_DEBUG_URGNT_ETH1TXU_NOT_URGENT_VALUE_RESET_VALUE ( 0x0 )
1082 #define DMA_REGS_DEBUG_URGNT_ETH1TXU_URGENT_VALUE ( 0x1 )
1083 #define DMA_REGS_DEBUG_URGNT_ETH0TXU_NOT_URGENT_VALUE ( 0x0 )
1084 #define DMA_REGS_DEBUG_URGNT_ETH0TXU_NOT_URGENT_VALUE_RESET_VALUE ( 0x0 )
1085 #define DMA_REGS_DEBUG_URGNT_ETH0TXU_URGENT_VALUE ( 0x1 )
1086 #define DMA_REGS_DEBUG_URGNT_GPONRXU_NOT_URGENT_VALUE ( 0x0 )
1087 #define DMA_REGS_DEBUG_URGNT_GPONRXU_NOT_URGENT_VALUE_RESET_VALUE ( 0x0 )
1088 #define DMA_REGS_DEBUG_URGNT_GPONRXU_URGENT_VALUE ( 0x1 )
1089 #define DMA_REGS_DEBUG_URGNT_ETH4RXU_NOT_URGENT_VALUE ( 0x0 )
1090 #define DMA_REGS_DEBUG_URGNT_ETH4RXU_NOT_URGENT_VALUE_RESET_VALUE ( 0x0 )
1091 #define DMA_REGS_DEBUG_URGNT_ETH4RXU_URGENT_VALUE ( 0x1 )
1092 #define DMA_REGS_DEBUG_URGNT_ETH3RXU_NOT_URGENT_VALUE ( 0x0 )
1093 #define DMA_REGS_DEBUG_URGNT_ETH3RXU_NOT_URGENT_VALUE_RESET_VALUE ( 0x0 )
1094 #define DMA_REGS_DEBUG_URGNT_ETH3RXU_URGENT_VALUE ( 0x1 )
1095 #define DMA_REGS_DEBUG_URGNT_ETH2RXU_NOT_URGENT_VALUE ( 0x0 )
1096 #define DMA_REGS_DEBUG_URGNT_ETH2RXU_NOT_URGENT_VALUE_RESET_VALUE ( 0x0 )
1097 #define DMA_REGS_DEBUG_URGNT_ETH2RXU_URGENT_VALUE ( 0x1 )
1098 #define DMA_REGS_DEBUG_URGNT_ETH1RXU_NOT_URGENT_VALUE ( 0x0 )
1099 #define DMA_REGS_DEBUG_URGNT_ETH1RXU_NOT_URGENT_VALUE_RESET_VALUE ( 0x0 )
1100 #define DMA_REGS_DEBUG_URGNT_ETH1RXU_URGENT_VALUE ( 0x1 )
1101 #define DMA_REGS_DEBUG_URGNT_ETH0RXU_NOT_URGENT_VALUE ( 0x0 )
1102 #define DMA_REGS_DEBUG_URGNT_ETH0RXU_NOT_URGENT_VALUE_RESET_VALUE ( 0x0 )
1103 #define DMA_REGS_DEBUG_URGNT_ETH0RXU_URGENT_VALUE ( 0x1 )
1104
1105
1106 #define DMA_REGS_DEBUG_URGNT_OFFSET ( 0x00000004 )
1107
1108 #define DMA_REGS_0_DEBUG_URGNT_ADDRESS ( DMA_REGS_0_DEBUG_ADDRESS + DMA_REGS_DEBUG_URGNT_OFFSET )
1109 #define DMA_REGS_0_DEBUG_URGNT_READ( r ) READ_32( ( DMA_REGS_0_DEBUG_URGNT_ADDRESS ), (r) )
1110 #define DMA_REGS_0_DEBUG_URGNT_WRITE( v ) WRITE_32( ( DMA_REGS_0_DEBUG_URGNT_ADDRESS ), (v) )
1111
1112 #define DMA_REGS_1_DEBUG_URGNT_ADDRESS ( DMA_REGS_1_DEBUG_ADDRESS + DMA_REGS_DEBUG_URGNT_OFFSET )
1113 #define DMA_REGS_1_DEBUG_URGNT_READ( r ) READ_32( ( DMA_REGS_1_DEBUG_URGNT_ADDRESS ), (r) )
1114 #define DMA_REGS_1_DEBUG_URGNT_WRITE( v ) WRITE_32( ( DMA_REGS_1_DEBUG_URGNT_ADDRESS ), (v) )
1115
1116
1117 extern uint32_t DMA_REGS_DEBUG_URGNT_ARRAY [ ] ;
1118
1119 #define DMA_REGS_DEBUG_URGNT_WRITE( i, v ) WRITE_32( DMA_REGS_DEBUG_URGNT_ARRAY [ i ], (v) )
1120 #define DMA_REGS_DEBUG_URGNT_READ( i, r ) READ_32( DMA_REGS_DEBUG_URGNT_ARRAY [ i ], (r) )
1121
1122 #ifndef _BYTE_ORDER_LITTLE_ENDIAN_
1123 typedef struct
1124 {
1125 /* reserved1 */
1126 uint32_t r1 : 20 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1127
1128 /* GPON_TX_urgent_indication */
1129 uint32_t gpontxu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1130
1131 /* Ethernet4_TX_urgent_indication */
1132 uint32_t eth4txu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1133
1134 /* Ethernet3_TX_urgent_indication */
1135 uint32_t eth3txu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1136
1137 /* Ethernet2_TX_urgent_indication */
1138 uint32_t eth2txu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1139
1140 /* Ethernet1_TX_urgent_indication */
1141 uint32_t eth1txu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1142
1143 /* Ethernet0_TX_urgent_indication */
1144 uint32_t eth0txu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1145
1146 /* GPON_RX_urgent_indication */
1147 uint32_t gponrxu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1148
1149 /* Ethernet4_RX_urgent_indication */
1150 uint32_t eth4rxu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1151
1152 /* Ethernet3_RX_urgent_indication */
1153 uint32_t eth3rxu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1154
1155 /* Ethernet2_RX_urgent_indication */
1156 uint32_t eth2rxu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1157
1158 /* Ethernet1_RX_urgent_indication */
1159 uint32_t eth1rxu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1160
1161 /* Ethernet0_RX_urgent_indication */
1162 uint32_t eth0rxu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1163 }
1164 __PACKING_ATTRIBUTE_STRUCT_END__
1165 DMA_REGS_DEBUG_URGNT ;
1166 #else
1167 typedef struct
1168 { uint32_t eth0rxu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1169
1170 /* Ethernet0_RX_urgent_indication */
1171 uint32_t eth1rxu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1172
1173 /* Ethernet1_RX_urgent_indication */
1174 uint32_t eth2rxu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1175
1176 /* Ethernet2_RX_urgent_indication */
1177 uint32_t eth3rxu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1178
1179 /* Ethernet3_RX_urgent_indication */
1180 uint32_t eth4rxu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1181
1182 /* Ethernet4_RX_urgent_indication */
1183 uint32_t gponrxu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1184
1185 /* GPON_RX_urgent_indication */
1186 uint32_t eth0txu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1187
1188 /* Ethernet0_TX_urgent_indication */
1189 uint32_t eth1txu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1190
1191 /* Ethernet1_TX_urgent_indication */
1192 uint32_t eth2txu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1193
1194 /* Ethernet2_TX_urgent_indication */
1195 uint32_t eth3txu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1196
1197 /* Ethernet3_TX_urgent_indication */
1198 uint32_t eth4txu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1199
1200 /* Ethernet4_TX_urgent_indication */
1201 uint32_t gpontxu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1202
1203 /* GPON_TX_urgent_indication */
1204 uint32_t r1 : 20 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1205
1206 /* reserved1 */
1207 }
1208 __PACKING_ATTRIBUTE_STRUCT_END__
1209 DMA_REGS_DEBUG_URGNT ;
1210 #endif
1211
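/* Usage sketch (illustrative only, not part of the Reggae-generated map): reading the
 * URGENT_VECTOR of DMA block 0 and testing one bit through the packed overlay type.
 * This assumes READ_32() from access_macros.h copies the 32-bit register at the given
 * address into its second argument, so a one-word packed struct can be the destination. */
#if 0
static int dma0_gpon_tx_is_urgent(void)
{
	DMA_REGS_DEBUG_URGNT urgnt;

	/* Read the whole urgent vector of DMA0 in one access. */
	DMA_REGS_0_DEBUG_URGNT_READ(urgnt);

	/* A set bit means the GPON TX request queue is in urgent state. */
	return urgnt.gpontxu == DMA_REGS_DEBUG_URGNT_GPONTXU_URGENT_VALUE;
}
#endif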
1212 /*****************************************************************************************/
1213 /* SELECTED_SOURCE_NUM */
1214 /* The decision of the DMA scheduler: the next peripheral to be served, represented      */
1215 /* by its source address.                                                                */
1216 /*****************************************************************************************/
1217
1218 #define DMA_REGS_DEBUG_SELSRC_R1_R1_VALUE ( 0x0 )
1219 #define DMA_REGS_DEBUG_SELSRC_R1_R1_VALUE_RESET_VALUE ( 0x0 )
1220 #define DMA_REGS_DEBUG_SELSRC_SEL_SRC_ETH0_RX_VALUE ( 0x0 )
1221 #define DMA_REGS_DEBUG_SELSRC_SEL_SRC_ETH0_RX_VALUE_RESET_VALUE ( 0x0 )
1222 #define DMA_REGS_DEBUG_SELSRC_SEL_SRC_ETH1_RX_VALUE ( 0x1 )
1223 #define DMA_REGS_DEBUG_SELSRC_SEL_SRC_ETH2_RX_VALUE ( 0x2 )
1224 #define DMA_REGS_DEBUG_SELSRC_SEL_SRC_ETH3_RX_VALUE ( 0x3 )
1225 #define DMA_REGS_DEBUG_SELSRC_SEL_SRC_ETH4_RX_VALUE ( 0x4 )
1226 #define DMA_REGS_DEBUG_SELSRC_SEL_SRC_GPON_RX_VALUE ( 0x5 )
1227 #define DMA_REGS_DEBUG_SELSRC_SEL_SRC_ETH0_TX_VALUE ( 0x8 )
1228 #define DMA_REGS_DEBUG_SELSRC_SEL_SRC_ETH1_TX_VALUE ( 0x9 )
1229 #define DMA_REGS_DEBUG_SELSRC_SEL_SRC_ETH2_TX_VALUE ( 0xA )
1230 #define DMA_REGS_DEBUG_SELSRC_SEL_SRC_ETH3_TX_VALUE ( 0xB )
1231 #define DMA_REGS_DEBUG_SELSRC_SEL_SRC_ETH4_TX_VALUE ( 0xC )
1232 #define DMA_REGS_DEBUG_SELSRC_SEL_SRC_GPON_TX_VALUE ( 0xD )
1233 #define DMA_REGS_DEBUG_SELSRC_SEL_SRC_DEFAULT_VALUE ( 0x1F )
1234 #define DMA_REGS_DEBUG_SELSRC_SEL_SRC_DEFAULT_VALUE_RESET_VALUE ( 0x1F )
1235
1236
1237 #define DMA_REGS_DEBUG_SELSRC_OFFSET ( 0x00000008 )
1238
1239 #define DMA_REGS_0_DEBUG_SELSRC_ADDRESS ( DMA_REGS_0_DEBUG_ADDRESS + DMA_REGS_DEBUG_SELSRC_OFFSET )
1240 #define DMA_REGS_0_DEBUG_SELSRC_READ( r ) READ_32( ( DMA_REGS_0_DEBUG_SELSRC_ADDRESS ), (r) )
1241 #define DMA_REGS_0_DEBUG_SELSRC_WRITE( v ) WRITE_32( ( DMA_REGS_0_DEBUG_SELSRC_ADDRESS ), (v) )
1242
1243 #define DMA_REGS_1_DEBUG_SELSRC_ADDRESS ( DMA_REGS_1_DEBUG_ADDRESS + DMA_REGS_DEBUG_SELSRC_OFFSET )
1244 #define DMA_REGS_1_DEBUG_SELSRC_READ( r ) READ_32( ( DMA_REGS_1_DEBUG_SELSRC_ADDRESS ), (r) )
1245 #define DMA_REGS_1_DEBUG_SELSRC_WRITE( v ) WRITE_32( ( DMA_REGS_1_DEBUG_SELSRC_ADDRESS ), (v) )
1246
1247
1248 extern uint32_t DMA_REGS_DEBUG_SELSRC_ARRAY [ ] ;
1249
1250 #define DMA_REGS_DEBUG_SELSRC_WRITE( i, v ) WRITE_32( DMA_REGS_DEBUG_SELSRC_ARRAY [ i ], (v) )
1251 #define DMA_REGS_DEBUG_SELSRC_READ( i, r ) READ_32( DMA_REGS_DEBUG_SELSRC_ARRAY [ i ], (r) )
1252
1253 #ifndef _BYTE_ORDER_LITTLE_ENDIAN_
1254 typedef struct
1255 {
1256 /* reserved1 */
1257 uint32_t r1 : 27 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1258
1259 /* selected_source */
1260 uint32_t sel_src : 5 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1261 }
1262 __PACKING_ATTRIBUTE_STRUCT_END__
1263 DMA_REGS_DEBUG_SELSRC ;
1264 #else
1265 typedef struct
1266 { uint32_t sel_src : 5 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1267
1268 /* selected_source */
1269 uint32_t r1 : 27 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1270
1271 /* reserved1 */
1272 }
1273 __PACKING_ATTRIBUTE_STRUCT_END__
1274 DMA_REGS_DEBUG_SELSRC ;
1275 #endif
1276
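/* Usage sketch (illustrative only): decoding SELECTED_SOURCE_NUM of DMA block 0 against
 * the SEL_SRC value defines above. The sel_src field holds the Broadbus source address
 * of the peripheral chosen by the scheduler, or the default value (0x1F) when nothing
 * is selected. READ_32() is assumed to copy the register into its second argument. */
#if 0
static int dma0_scheduler_selected_eth0_rx(void)
{
	DMA_REGS_DEBUG_SELSRC selsrc;

	DMA_REGS_0_DEBUG_SELSRC_READ(selsrc);

	return selsrc.sel_src == DMA_REGS_DEBUG_SELSRC_SEL_SRC_ETH0_RX_VALUE;
}
#endif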
1277 /*****************************************************************************************/
1278 /* REQUEST_COUNTERS_RX */
1279 /* The number of write requests currently pending for each RX peripheral.                */
1280 /*****************************************************************************************/
1281
1282 #define DMA_REGS_DEBUG_REQ_CNT_RX_R1_R1_VALUE ( 0x0 )
1283 #define DMA_REGS_DEBUG_REQ_CNT_RX_R1_R1_VALUE_RESET_VALUE ( 0x0 )
1284 #define DMA_REGS_DEBUG_REQ_CNT_RX_REQ_CNT_MIN_VALUE ( 0x0 )
1285 #define DMA_REGS_DEBUG_REQ_CNT_RX_REQ_CNT_MIN_VALUE_RESET_VALUE ( 0x0 )
1286 #define DMA_REGS_DEBUG_REQ_CNT_RX_REQ_CNT_MAX_VALUE ( 0x20 )
1287
1288
1289 #define DMA_REGS_DEBUG_REQ_CNT_RX_OFFSET ( 0x0000000C )
1290
1291 #define DMA_REGS_0_DEBUG_REQ_CNT_RX_ADDRESS ( DMA_REGS_0_DEBUG_ADDRESS + DMA_REGS_DEBUG_REQ_CNT_RX_OFFSET )
1292 #define DMA_REGS_0_DEBUG_REQ_CNT_RX_READ_I( r, i ) READ_I_32( ( DMA_REGS_0_DEBUG_REQ_CNT_RX_ADDRESS ), (i), (r) )
1293 #define DMA_REGS_0_DEBUG_REQ_CNT_RX_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_0_DEBUG_REQ_CNT_RX_ADDRESS ), (i), (v) )
1294
1295 #define DMA_REGS_1_DEBUG_REQ_CNT_RX_ADDRESS ( DMA_REGS_1_DEBUG_ADDRESS + DMA_REGS_DEBUG_REQ_CNT_RX_OFFSET )
1296 #define DMA_REGS_1_DEBUG_REQ_CNT_RX_READ_I( r, i ) READ_I_32( ( DMA_REGS_1_DEBUG_REQ_CNT_RX_ADDRESS ), (i), (r) )
1297 #define DMA_REGS_1_DEBUG_REQ_CNT_RX_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_1_DEBUG_REQ_CNT_RX_ADDRESS ), (i), (v) )
1298
1299
1300 extern uint32_t DMA_REGS_DEBUG_REQ_CNT_RX_ARRAY [ ] ;
1301
1302 #define DMA_REGS_DEBUG_REQ_CNT_RX_WRITE( i, k, v ) WRITE_I_32( DMA_REGS_DEBUG_REQ_CNT_RX_ARRAY [ i ], (k), (v) )
1303 #define DMA_REGS_DEBUG_REQ_CNT_RX_READ( i, k, r ) READ_I_32( DMA_REGS_DEBUG_REQ_CNT_RX_ARRAY [ i ], (k), (r) )
1304
1305 #ifndef _BYTE_ORDER_LITTLE_ENDIAN_
1306 typedef struct
1307 {
1308 /* reserved1 */
1309 uint32_t r1 : 26 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1310
1311 /* write_requests_counter */
1312 uint32_t req_cnt : 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1313 }
1314 __PACKING_ATTRIBUTE_STRUCT_END__
1315 DMA_REGS_DEBUG_REQ_CNT_RX ;
1316 #else
1317 typedef struct
1318 { uint32_t req_cnt : 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1319
1320 /* write_requests_counter */
1321 uint32_t r1 : 26 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1322
1323 /* reserved1 */
1324 }
1325 __PACKING_ATTRIBUTE_STRUCT_END__
1326 DMA_REGS_DEBUG_REQ_CNT_RX ;
1327 #endif
1328
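/* Usage sketch (illustrative only): summing the pending write-request counters of all
 * RX peripherals of DMA block 0. REQUEST_COUNTERS_RX is a register array indexed by
 * peripheral (Ethernet 0-4, then GPON); READ_I_32() is assumed to read entry (i) of the
 * array into its last argument. DMA_REGS_DEBUG_REQ_CNT_RX_NUMBER is defined further
 * down in this header. */
#if 0
static uint32_t dma0_pending_rx_write_requests(void)
{
	DMA_REGS_DEBUG_REQ_CNT_RX req;
	uint32_t i;
	uint32_t total = 0;

	for (i = 0; i < DMA_REGS_DEBUG_REQ_CNT_RX_NUMBER; i++) {
		DMA_REGS_0_DEBUG_REQ_CNT_RX_READ_I(req, i);
		total += req.req_cnt;
	}

	return total;
}
#endif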
1329 /*****************************************************************************************/
1330 /* REQUEST_COUNTERS_TX */
1331 /* The number of read requests currently pending for each TX peripheral.                 */
1332 /*****************************************************************************************/
1333
1334 #define DMA_REGS_DEBUG_REQ_CNT_TX_R1_R3_VALUE ( 0x0 )
1335 #define DMA_REGS_DEBUG_REQ_CNT_TX_R1_R3_VALUE_RESET_VALUE ( 0x0 )
1336 #define DMA_REGS_DEBUG_REQ_CNT_TX_REQ_CNT_MIN_VALUE ( 0x0 )
1337 #define DMA_REGS_DEBUG_REQ_CNT_TX_REQ_CNT_MIN_VALUE_RESET_VALUE ( 0x0 )
1338 #define DMA_REGS_DEBUG_REQ_CNT_TX_REQ_CNT_MAX_VALUE ( 0x8 )
1339
1340
1341 #define DMA_REGS_DEBUG_REQ_CNT_TX_OFFSET ( 0x00000024 )
1342
1343 #define DMA_REGS_0_DEBUG_REQ_CNT_TX_ADDRESS ( DMA_REGS_0_DEBUG_ADDRESS + DMA_REGS_DEBUG_REQ_CNT_TX_OFFSET )
1344 #define DMA_REGS_0_DEBUG_REQ_CNT_TX_READ_I( r, i ) READ_I_32( ( DMA_REGS_0_DEBUG_REQ_CNT_TX_ADDRESS ), (i), (r) )
1345 #define DMA_REGS_0_DEBUG_REQ_CNT_TX_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_0_DEBUG_REQ_CNT_TX_ADDRESS ), (i), (v) )
1346
1347 #define DMA_REGS_1_DEBUG_REQ_CNT_TX_ADDRESS ( DMA_REGS_1_DEBUG_ADDRESS + DMA_REGS_DEBUG_REQ_CNT_TX_OFFSET )
1348 #define DMA_REGS_1_DEBUG_REQ_CNT_TX_READ_I( r, i ) READ_I_32( ( DMA_REGS_1_DEBUG_REQ_CNT_TX_ADDRESS ), (i), (r) )
1349 #define DMA_REGS_1_DEBUG_REQ_CNT_TX_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_1_DEBUG_REQ_CNT_TX_ADDRESS ), (i), (v) )
1350
1351
1352 extern uint32_t DMA_REGS_DEBUG_REQ_CNT_TX_ARRAY [ ] ;
1353
1354 #define DMA_REGS_DEBUG_REQ_CNT_TX_WRITE( i, k, v ) WRITE_I_32( DMA_REGS_DEBUG_REQ_CNT_TX_ARRAY [ i ], (k), (v) )
1355 #define DMA_REGS_DEBUG_REQ_CNT_TX_READ( i, k, r ) READ_I_32( DMA_REGS_DEBUG_REQ_CNT_TX_ARRAY [ i ], (k), (r) )
1356
1357 #ifndef _BYTE_ORDER_LITTLE_ENDIAN_
1358 typedef struct
1359 {
1360 /* reserved */
1361 uint32_t r1 : 28 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1362
1363 /* read_requests_counter */
1364 uint32_t req_cnt : 4 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1365 }
1366 __PACKING_ATTRIBUTE_STRUCT_END__
1367 DMA_REGS_DEBUG_REQ_CNT_TX ;
1368 #else
1369 typedef struct
1370 { uint32_t req_cnt : 4 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1371
1372 /* read_requests_counter */
1373 uint32_t r1 : 28 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1374
1375 /* reserved */
1376 }
1377 __PACKING_ATTRIBUTE_STRUCT_END__
1378 DMA_REGS_DEBUG_REQ_CNT_TX ;
1379 #endif
1380
1381 /*****************************************************************************************/
1382 /* ACC_REQUEST_COUNTERS_RX */
1383 /* The accumulated number of write requests served so far for each peripheral. Wraps     */
1384 /* around on the maximum value; not cleared on read.                                     */
1385 /*****************************************************************************************/
1386
1387 #define DMA_REGS_DEBUG_REQ_CNT_RX_ACC_REQ_CNT_CNT_VALUE ( 0x0 )
1388 #define DMA_REGS_DEBUG_REQ_CNT_RX_ACC_REQ_CNT_CNT_VALUE_RESET_VALUE ( 0x0 )
1389
1390
1391 #define DMA_REGS_DEBUG_REQ_CNT_RX_ACC_OFFSET ( 0x0000003C )
1392
1393 #define DMA_REGS_0_DEBUG_REQ_CNT_RX_ACC_ADDRESS ( DMA_REGS_0_DEBUG_ADDRESS + DMA_REGS_DEBUG_REQ_CNT_RX_ACC_OFFSET )
1394 #define DMA_REGS_0_DEBUG_REQ_CNT_RX_ACC_READ_I( r, i ) READ_I_32( ( DMA_REGS_0_DEBUG_REQ_CNT_RX_ACC_ADDRESS ), (i), (r) )
1395 #define DMA_REGS_0_DEBUG_REQ_CNT_RX_ACC_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_0_DEBUG_REQ_CNT_RX_ACC_ADDRESS ), (i), (v) )
1396
1397 #define DMA_REGS_1_DEBUG_REQ_CNT_RX_ACC_ADDRESS ( DMA_REGS_1_DEBUG_ADDRESS + DMA_REGS_DEBUG_REQ_CNT_RX_ACC_OFFSET )
1398 #define DMA_REGS_1_DEBUG_REQ_CNT_RX_ACC_READ_I( r, i ) READ_I_32( ( DMA_REGS_1_DEBUG_REQ_CNT_RX_ACC_ADDRESS ), (i), (r) )
1399 #define DMA_REGS_1_DEBUG_REQ_CNT_RX_ACC_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_1_DEBUG_REQ_CNT_RX_ACC_ADDRESS ), (i), (v) )
1400
1401
1402 extern uint32_t DMA_REGS_DEBUG_REQ_CNT_RX_ACC_ARRAY [ ] ;
1403
1404 #define DMA_REGS_DEBUG_REQ_CNT_RX_ACC_WRITE( i, k, v ) WRITE_I_32( DMA_REGS_DEBUG_REQ_CNT_RX_ACC_ARRAY [ i ], (k), (v) )
1405 #define DMA_REGS_DEBUG_REQ_CNT_RX_ACC_READ( i, k, r ) READ_I_32( DMA_REGS_DEBUG_REQ_CNT_RX_ACC_ARRAY [ i ], (k), (r) )
1406
1407 #ifndef _BYTE_ORDER_LITTLE_ENDIAN_
1408 typedef struct
1409 {
1410 /* write_requests_counter */
1411 uint32_t req_cnt : 32 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1412 }
1413 __PACKING_ATTRIBUTE_STRUCT_END__
1414 DMA_REGS_DEBUG_REQ_CNT_RX_ACC ;
1415 #else
1416 typedef struct
1417 { uint32_t req_cnt : 32 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1418
1419 /* write_requests_counter */
1420 }
1421 __PACKING_ATTRIBUTE_STRUCT_END__
1422 DMA_REGS_DEBUG_REQ_CNT_RX_ACC ;
1423 #endif
1424
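/* Usage sketch (illustrative only): sampling the accumulated write-request counter of
 * one RX peripheral on DMA block 0 and computing the number of requests served since
 * the previous sample. Because the counter is a free-running 32-bit value that wraps
 * around, the delta is taken with unsigned subtraction, which stays correct across a
 * single wrap. */
#if 0
static uint32_t dma0_rx_acc_delta(uint32_t peripheral, uint32_t *last_sample)
{
	DMA_REGS_DEBUG_REQ_CNT_RX_ACC acc;
	uint32_t delta;

	DMA_REGS_0_DEBUG_REQ_CNT_RX_ACC_READ_I(acc, peripheral);

	delta = acc.req_cnt - *last_sample;
	*last_sample = acc.req_cnt;

	return delta;
}
#endif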
1425 /*****************************************************************************************/
1426 /* ACC_REQUEST_COUNTERS_TX */
1427 /* The accumulated number of read requests served so far for each peripheral. Wraps      */
1428 /* around on the maximum value; not cleared on read.                                     */
1429 /*****************************************************************************************/
1430
1431 #define DMA_REGS_DEBUG_REQ_CNT_TX_ACC_REQ_CNT_CNT_VALUE ( 0x0 )
1432 #define DMA_REGS_DEBUG_REQ_CNT_TX_ACC_REQ_CNT_CNT_VALUE_RESET_VALUE ( 0x0 )
1433
1434
1435 #define DMA_REGS_DEBUG_REQ_CNT_TX_ACC_OFFSET ( 0x00000054 )
1436
1437 #define DMA_REGS_0_DEBUG_REQ_CNT_TX_ACC_ADDRESS ( DMA_REGS_0_DEBUG_ADDRESS + DMA_REGS_DEBUG_REQ_CNT_TX_ACC_OFFSET )
1438 #define DMA_REGS_0_DEBUG_REQ_CNT_TX_ACC_READ_I( r, i ) READ_I_32( ( DMA_REGS_0_DEBUG_REQ_CNT_TX_ACC_ADDRESS ), (i), (r) )
1439 #define DMA_REGS_0_DEBUG_REQ_CNT_TX_ACC_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_0_DEBUG_REQ_CNT_TX_ACC_ADDRESS ), (i), (v) )
1440
1441 #define DMA_REGS_1_DEBUG_REQ_CNT_TX_ACC_ADDRESS ( DMA_REGS_1_DEBUG_ADDRESS + DMA_REGS_DEBUG_REQ_CNT_TX_ACC_OFFSET )
1442 #define DMA_REGS_1_DEBUG_REQ_CNT_TX_ACC_READ_I( r, i ) READ_I_32( ( DMA_REGS_1_DEBUG_REQ_CNT_TX_ACC_ADDRESS ), (i), (r) )
1443 #define DMA_REGS_1_DEBUG_REQ_CNT_TX_ACC_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_1_DEBUG_REQ_CNT_TX_ACC_ADDRESS ), (i), (v) )
1444
1445
1446 extern uint32_t DMA_REGS_DEBUG_REQ_CNT_TX_ACC_ARRAY [ ] ;
1447
1448 #define DMA_REGS_DEBUG_REQ_CNT_TX_ACC_WRITE( i, k, v ) WRITE_I_32( DMA_REGS_DEBUG_REQ_CNT_TX_ACC_ARRAY [ i ], (k), (v) )
1449 #define DMA_REGS_DEBUG_REQ_CNT_TX_ACC_READ( i, k, r ) READ_I_32( DMA_REGS_DEBUG_REQ_CNT_TX_ACC_ARRAY [ i ], (k), (r) )
1450
1451 #ifndef _BYTE_ORDER_LITTLE_ENDIAN_
1452 typedef struct
1453 {
1454 /* read_requests_counter */
1455 uint32_t req_cnt : 32 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1456 }
1457 __PACKING_ATTRIBUTE_STRUCT_END__
1458 DMA_REGS_DEBUG_REQ_CNT_TX_ACC ;
1459 #else
1460 typedef struct
1461 { uint32_t req_cnt : 32 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1462
1463 /* read_requests_counter */
1464 }
1465 __PACKING_ATTRIBUTE_STRUCT_END__
1466 DMA_REGS_DEBUG_REQ_CNT_TX_ACC ;
1467 #endif
1468
1469 /*****************************************************************************************/
1470 /* RAM_ADDRESS                                                                           */
1471 /* The address and chip select (cs) of the RAM the user wishes to read using the         */
1472 /* indirect access read mechanism.                                                       */
1473 /*****************************************************************************************/
1474
1475 #define DMA_REGS_DEBUG_RDADD_R2_DEFAULT_VALUE ( 0x0 )
1476 #define DMA_REGS_DEBUG_RDADD_R2_DEFAULT_VALUE_RESET_VALUE ( 0x0 )
1477 #define DMA_REGS_DEBUG_RDADD_CDCS1_OFF_VALUE ( 0x0 )
1478 #define DMA_REGS_DEBUG_RDADD_CDCS1_OFF_VALUE_RESET_VALUE ( 0x0 )
1479 #define DMA_REGS_DEBUG_RDADD_CDCS1_ON_VALUE ( 0x1 )
1480 #define DMA_REGS_DEBUG_RDADD_DATACS1_OFF_VALUE ( 0x0 )
1481 #define DMA_REGS_DEBUG_RDADD_DATACS1_OFF_VALUE_RESET_VALUE ( 0x0 )
1482 #define DMA_REGS_DEBUG_RDADD_DATACS1_ON_VALUE ( 0x1 )
1483 #define DMA_REGS_DEBUG_RDADD_RDCS_OFF_VALUE ( 0x0 )
1484 #define DMA_REGS_DEBUG_RDADD_RDCS_OFF_VALUE_RESET_VALUE ( 0x0 )
1485 #define DMA_REGS_DEBUG_RDADD_RDCS_ON_VALUE ( 0x1 )
1486 #define DMA_REGS_DEBUG_RDADD_RRCS_OFF_VALUE ( 0x0 )
1487 #define DMA_REGS_DEBUG_RDADD_RRCS_OFF_VALUE_RESET_VALUE ( 0x0 )
1488 #define DMA_REGS_DEBUG_RDADD_RRCS_ON_VALUE ( 0x1 )
1489 #define DMA_REGS_DEBUG_RDADD_CDCS_OFF_VALUE ( 0x0 )
1490 #define DMA_REGS_DEBUG_RDADD_CDCS_OFF_VALUE_RESET_VALUE ( 0x0 )
1491 #define DMA_REGS_DEBUG_RDADD_CDCS_ON_VALUE ( 0x1 )
1492 #define DMA_REGS_DEBUG_RDADD_DATACS_OFF_VALUE ( 0x0 )
1493 #define DMA_REGS_DEBUG_RDADD_DATACS_OFF_VALUE_RESET_VALUE ( 0x0 )
1494 #define DMA_REGS_DEBUG_RDADD_DATACS_ON_VALUE ( 0x1 )
1495 #define DMA_REGS_DEBUG_RDADD_R1_DEFAULT_VALUE ( 0x0 )
1496 #define DMA_REGS_DEBUG_RDADD_R1_DEFAULT_VALUE_RESET_VALUE ( 0x0 )
1497 #define DMA_REGS_DEBUG_RDADD_ADDRESS_ADD_VALUE ( 0x0 )
1498 #define DMA_REGS_DEBUG_RDADD_ADDRESS_ADD_VALUE_RESET_VALUE ( 0x0 )
1499
1500
1501 #define DMA_REGS_DEBUG_RDADD_OFFSET ( 0x00000100 )
1502
1503 #define DMA_REGS_0_DEBUG_RDADD_ADDRESS ( DMA_REGS_0_DEBUG_ADDRESS + DMA_REGS_DEBUG_RDADD_OFFSET )
1504 #define DMA_REGS_0_DEBUG_RDADD_READ( r ) READ_32( ( DMA_REGS_0_DEBUG_RDADD_ADDRESS ), (r) )
1505 #define DMA_REGS_0_DEBUG_RDADD_WRITE( v ) WRITE_32( ( DMA_REGS_0_DEBUG_RDADD_ADDRESS ), (v) )
1506
1507 #define DMA_REGS_1_DEBUG_RDADD_ADDRESS ( DMA_REGS_1_DEBUG_ADDRESS + DMA_REGS_DEBUG_RDADD_OFFSET )
1508 #define DMA_REGS_1_DEBUG_RDADD_READ( r ) READ_32( ( DMA_REGS_1_DEBUG_RDADD_ADDRESS ), (r) )
1509 #define DMA_REGS_1_DEBUG_RDADD_WRITE( v ) WRITE_32( ( DMA_REGS_1_DEBUG_RDADD_ADDRESS ), (v) )
1510
1511
1512 extern uint32_t DMA_REGS_DEBUG_RDADD_ARRAY [ ] ;
1513
1514 #define DMA_REGS_DEBUG_RDADD_WRITE( i, v ) WRITE_32( DMA_REGS_DEBUG_RDADD_ARRAY [ i ], (v) )
1515 #define DMA_REGS_DEBUG_RDADD_READ( i, r ) READ_32( DMA_REGS_DEBUG_RDADD_ARRAY [ i ], (r) )
1516
1517 #ifndef _BYTE_ORDER_LITTLE_ENDIAN_
1518 typedef struct
1519 {
1520 /* reserved */
1521 uint32_t r2 : 10 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1522
1523 /* cd_ram_cs1 */
1524 uint32_t cdcs1 : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1525
1526 /* data_ram_cs_1 */
1527 uint32_t datacs1 : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1528
1529 /* rd_data_cs */
1530 uint32_t rdcs : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1531
1532 /* rr_ram_cs */
1533 uint32_t rrcs : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1534
1535 /* cd_ram_cs */
1536 uint32_t cdcs : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1537
1538 /* data_ram_cs */
1539 uint32_t datacs : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1540
1541 /* reserved */
1542 uint32_t r1 : 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1543
1544 /* address */
1545 uint32_t address : 10 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1546 }
1547 __PACKING_ATTRIBUTE_STRUCT_END__
1548 DMA_REGS_DEBUG_RDADD ;
1549 #else
1550 typedef struct
1551 { uint32_t address : 10 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1552
1553 /* address */
1554 uint32_t r1 : 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1555
1556 /* reserved */
1557 uint32_t datacs : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1558
1559 /* data_ram_cs */
1560 uint32_t cdcs : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1561
1562 /* cd_ram_cs */
1563 uint32_t rrcs : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1564
1565 /* rr_ram_cs */
1566 uint32_t rdcs : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1567
1568 /* rd_data_cs */
1569 uint32_t datacs1 : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1570
1571 /* data_ram_cs_1 */
1572 uint32_t cdcs1 : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1573
1574 /* cd_ram_cs1 */
1575 uint32_t r2 : 10 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1576
1577 /* reserved */
1578 }
1579 __PACKING_ATTRIBUTE_STRUCT_END__
1580 DMA_REGS_DEBUG_RDADD ;
1581 #endif
1582
1583 /*****************************************************************************************/
1584 /* INDIRECT_READ_REQUEST_VALID */
1585 /* After determining the address and cs, the user should assert this bit to indicate     */
1586 /* that the address and cs are valid.                                                    */
1587 /*****************************************************************************************/
1588
1589 #define DMA_REGS_DEBUG_RDVALID_R1_DEFAULT_VALUE ( 0x0 )
1590 #define DMA_REGS_DEBUG_RDVALID_R1_DEFAULT_VALUE_RESET_VALUE ( 0x0 )
1591 #define DMA_REGS_DEBUG_RDVALID_VALID_NOT_VALID_VALUE ( 0x0 )
1592 #define DMA_REGS_DEBUG_RDVALID_VALID_NOT_VALID_VALUE_RESET_VALUE ( 0x0 )
1593 #define DMA_REGS_DEBUG_RDVALID_VALID_VALID_VALUE ( 0x1 )
1594
1595
1596 #define DMA_REGS_DEBUG_RDVALID_OFFSET ( 0x00000104 )
1597
1598 #define DMA_REGS_0_DEBUG_RDVALID_ADDRESS ( DMA_REGS_0_DEBUG_ADDRESS + DMA_REGS_DEBUG_RDVALID_OFFSET )
1599 #define DMA_REGS_0_DEBUG_RDVALID_READ( r ) READ_32( ( DMA_REGS_0_DEBUG_RDVALID_ADDRESS ), (r) )
1600 #define DMA_REGS_0_DEBUG_RDVALID_WRITE( v ) WRITE_32( ( DMA_REGS_0_DEBUG_RDVALID_ADDRESS ), (v) )
1601
1602 #define DMA_REGS_1_DEBUG_RDVALID_ADDRESS ( DMA_REGS_1_DEBUG_ADDRESS + DMA_REGS_DEBUG_RDVALID_OFFSET )
1603 #define DMA_REGS_1_DEBUG_RDVALID_READ( r ) READ_32( ( DMA_REGS_1_DEBUG_RDVALID_ADDRESS ), (r) )
1604 #define DMA_REGS_1_DEBUG_RDVALID_WRITE( v ) WRITE_32( ( DMA_REGS_1_DEBUG_RDVALID_ADDRESS ), (v) )
1605
1606
1607 extern uint32_t DMA_REGS_DEBUG_RDVALID_ARRAY [ ] ;
1608
1609 #define DMA_REGS_DEBUG_RDVALID_WRITE( i, v ) WRITE_32( DMA_REGS_DEBUG_RDVALID_ARRAY [ i ], (v) )
1610 #define DMA_REGS_DEBUG_RDVALID_READ( i, r ) READ_32( DMA_REGS_DEBUG_RDVALID_ARRAY [ i ], (r) )
1611
1612 #ifndef _BYTE_ORDER_LITTLE_ENDIAN_
1613 typedef struct
1614 {
1615 /* reserved */
1616 uint32_t r1 : 31 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1617
1618 /* valid */
1619 uint32_t valid : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1620 }
1621 __PACKING_ATTRIBUTE_STRUCT_END__
1622 DMA_REGS_DEBUG_RDVALID ;
1623 #else
1624 typedef struct
1625 { uint32_t valid : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1626
1627 /* valid */
1628 uint32_t r1 : 31 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1629
1630 /* reserved */
1631 }
1632 __PACKING_ATTRIBUTE_STRUCT_END__
1633 DMA_REGS_DEBUG_RDVALID ;
1634 #endif
1635
1636 /*****************************************************************************************/
1637 /* INDIRECT_READ_DATA */
1638 /* The returned read data from the selected RAM. Array of 4 registers (128 bits total). */
1639 /* The width of the different memories is as follows: write data - 128 bits, chunk       */
1640 /* descriptors - 36 bits, read requests - 42 bits, read data - 64 bits. For memories     */
1641 /* narrower than 128 bits the data appears in the first registers of the array; for      */
1642 /* example, data from the cd RAM will appear in {reg1[5:0], reg0[31:0]}.                 */
1643 /*****************************************************************************************/
1644
1645 #define DMA_REGS_DEBUG_RDDATA_DATA_DATA_VALUE ( 0x0 )
1646 #define DMA_REGS_DEBUG_RDDATA_DATA_DATA_VALUE_RESET_VALUE ( 0x0 )
1647
1648
1649 #define DMA_REGS_DEBUG_RDDATA_OFFSET ( 0x00000108 )
1650
1651 #define DMA_REGS_0_DEBUG_RDDATA_ADDRESS ( DMA_REGS_0_DEBUG_ADDRESS + DMA_REGS_DEBUG_RDDATA_OFFSET )
1652 #define DMA_REGS_0_DEBUG_RDDATA_READ_I( r, i ) READ_I_32( ( DMA_REGS_0_DEBUG_RDDATA_ADDRESS ), (i), (r) )
1653 #define DMA_REGS_0_DEBUG_RDDATA_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_0_DEBUG_RDDATA_ADDRESS ), (i), (v) )
1654
1655 #define DMA_REGS_1_DEBUG_RDDATA_ADDRESS ( DMA_REGS_1_DEBUG_ADDRESS + DMA_REGS_DEBUG_RDDATA_OFFSET )
1656 #define DMA_REGS_1_DEBUG_RDDATA_READ_I( r, i ) READ_I_32( ( DMA_REGS_1_DEBUG_RDDATA_ADDRESS ), (i), (r) )
1657 #define DMA_REGS_1_DEBUG_RDDATA_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_1_DEBUG_RDDATA_ADDRESS ), (i), (v) )
1658
1659
1660 extern uint32_t DMA_REGS_DEBUG_RDDATA_ARRAY [ ] ;
1661
1662 #define DMA_REGS_DEBUG_RDDATA_WRITE( i, k, v ) WRITE_I_32( DMA_REGS_DEBUG_RDDATA_ARRAY [ i ], (k), (v) )
1663 #define DMA_REGS_DEBUG_RDDATA_READ( i, k, r ) READ_I_32( DMA_REGS_DEBUG_RDDATA_ARRAY [ i ], (k), (r) )
1664
1665 #ifndef _BYTE_ORDER_LITTLE_ENDIAN_
1666 typedef struct
1667 {
1668 /* data */
1669 uint32_t data : 32 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1670 }
1671 __PACKING_ATTRIBUTE_STRUCT_END__
1672 DMA_REGS_DEBUG_RDDATA ;
1673 #else
1674 typedef struct
1675 { uint32_t data : 32 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1676
1677 /* data */
1678 }
1679 __PACKING_ATTRIBUTE_STRUCT_END__
1680 DMA_REGS_DEBUG_RDDATA ;
1681 #endif
1682
1683 /*****************************************************************************************/
1684 /* READ_DATA_READY */
1685 /* When asserted, indicates that the data in the previous array is valid. Will remain    */
1686 /* asserted until the user deasserts the valid bit in register RDVALID.                  */
1687 /*****************************************************************************************/
1688
1689 #define DMA_REGS_DEBUG_RDDATARDY_R1_DEFAULT_VALUE ( 0x0 )
1690 #define DMA_REGS_DEBUG_RDDATARDY_R1_DEFAULT_VALUE_RESET_VALUE ( 0x0 )
1691 #define DMA_REGS_DEBUG_RDDATARDY_READY_NOT_READY_VALUE ( 0x0 )
1692 #define DMA_REGS_DEBUG_RDDATARDY_READY_NOT_READY_VALUE_RESET_VALUE ( 0x0 )
1693 #define DMA_REGS_DEBUG_RDDATARDY_READY_READY_VALUE ( 0x1 )
1694
1695
1696 #define DMA_REGS_DEBUG_RDDATARDY_OFFSET ( 0x00000118 )
1697
1698 #define DMA_REGS_0_DEBUG_RDDATARDY_ADDRESS ( DMA_REGS_0_DEBUG_ADDRESS + DMA_REGS_DEBUG_RDDATARDY_OFFSET )
1699 #define DMA_REGS_0_DEBUG_RDDATARDY_READ( r ) READ_32( ( DMA_REGS_0_DEBUG_RDDATARDY_ADDRESS ), (r) )
1700 #define DMA_REGS_0_DEBUG_RDDATARDY_WRITE( v ) WRITE_32( ( DMA_REGS_0_DEBUG_RDDATARDY_ADDRESS ), (v) )
1701
1702 #define DMA_REGS_1_DEBUG_RDDATARDY_ADDRESS ( DMA_REGS_1_DEBUG_ADDRESS + DMA_REGS_DEBUG_RDDATARDY_OFFSET )
1703 #define DMA_REGS_1_DEBUG_RDDATARDY_READ( r ) READ_32( ( DMA_REGS_1_DEBUG_RDDATARDY_ADDRESS ), (r) )
1704 #define DMA_REGS_1_DEBUG_RDDATARDY_WRITE( v ) WRITE_32( ( DMA_REGS_1_DEBUG_RDDATARDY_ADDRESS ), (v) )
1705
1706
1707 extern uint32_t DMA_REGS_DEBUG_RDDATARDY_ARRAY [ ] ;
1708
1709 #define DMA_REGS_DEBUG_RDDATARDY_WRITE( i, v ) WRITE_32( DMA_REGS_DEBUG_RDDATARDY_ARRAY [ i ], (v) )
1710 #define DMA_REGS_DEBUG_RDDATARDY_READ( i, r ) READ_32( DMA_REGS_DEBUG_RDDATARDY_ARRAY [ i ], (r) )
1711
1712 #ifndef _BYTE_ORDER_LITTLE_ENDIAN_
1713 typedef struct
1714 {
1715 /* reserved */
1716 uint32_t r1 : 31 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1717
1718 /* ready */
1719 uint32_t ready : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1720 }
1721 __PACKING_ATTRIBUTE_STRUCT_END__
1722 DMA_REGS_DEBUG_RDDATARDY ;
1723 #else
1724 typedef struct
1725 { uint32_t ready : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1726
1727 /* ready */
1728 uint32_t r1 : 31 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1729
1730 /* reserved */
1731 }
1732 __PACKING_ATTRIBUTE_STRUCT_END__
1733 DMA_REGS_DEBUG_RDDATARDY ;
1734 #endif
1735
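/* Usage sketch (illustrative only) of the indirect RAM read flow described by the
 * RDADD, RDVALID, RDDATA and RDDATARDY registers above: program the address and a
 * chip select, assert valid, poll ready, copy the four data words (up to 128 bits)
 * and deassert valid so that ready can drop. WRITE_32()/READ_32()/READ_I_32() are
 * assumed to move whole 32-bit registers to/from their struct arguments; the busy
 * wait has no timeout, which production code would add. */
#if 0
static void dma0_indirect_read_data_ram(uint32_t address, uint32_t data[4])
{
	DMA_REGS_DEBUG_RDADD rdadd = { 0 };
	DMA_REGS_DEBUG_RDVALID rdvalid = { 0 };
	DMA_REGS_DEBUG_RDDATARDY rdy;
	uint32_t k;

	/* Select the data RAM and set the 10-bit entry address. */
	rdadd.datacs = DMA_REGS_DEBUG_RDADD_DATACS_ON_VALUE;
	rdadd.address = address;
	DMA_REGS_0_DEBUG_RDADD_WRITE(rdadd);

	/* Tell the hardware that the address and cs are now valid. */
	rdvalid.valid = DMA_REGS_DEBUG_RDVALID_VALID_VALID_VALUE;
	DMA_REGS_0_DEBUG_RDVALID_WRITE(rdvalid);

	/* Wait until the read data is ready. */
	do {
		DMA_REGS_0_DEBUG_RDDATARDY_READ(rdy);
	} while (rdy.ready != DMA_REGS_DEBUG_RDDATARDY_READY_READY_VALUE);

	/* Copy the returned data; narrower memories use only the first words. */
	for (k = 0; k < DMA_REGS_DEBUG_RDDATA_NUMBER; k++)
		DMA_REGS_0_DEBUG_RDDATA_READ_I(data[k], k);

	/* Deassert valid so RDDATARDY is released for the next access. */
	rdvalid.valid = DMA_REGS_DEBUG_RDVALID_VALID_NOT_VALID_VALUE;
	DMA_REGS_0_DEBUG_RDVALID_WRITE(rdvalid);
}
#endif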
1736 /*****************************************************************************************/
1737 /* The registers in this section allow configuration of the following: 1. memory alloca */
1738 /* tions 2. priority and weight for arbitration 3. urgent thresholds 4. route address */
1739 /* es Most of the registers control the configuration of a single peripheral. They ar */
1740 /* e arranged in arrays according to their configuration topic. The order of peripher */
1741 /* als within each array is: Ethernet 0 Ethernet 1 Ethernet 2 Ethernet 3 Ethernet 4 */
1742 /* GPON */
1743 /*****************************************************************************************/
1744
1745 /*****************************************************************************************/
1746 /* Registers array numbers */
1747 /*****************************************************************************************/
1748 #define DMA_REGS_CONFIG_MALLOC_NUMBER ( 6 )
1749 #define DMA_REGS_CONFIG_READ_BASE_NUMBER ( 6 )
1750 #define DMA_REGS_CONFIG_U_THRESH_NUMBER ( 6 )
1751 #define DMA_REGS_CONFIG_PRI_NUMBER ( 6 )
1752 #define DMA_REGS_CONFIG_WEIGHT_NUMBER ( 6 )
1753 #define DMA_REGS_CONFIG_BB_ROUTE_NUMBER ( 6 )
1754 typedef struct
1755 {
1756 /* BB_SOURCE */
1757 DMA_REGS_CONFIG_SOURCE source __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1758
1759 /* MEMORY_ALLOCATION */
1760 DMA_REGS_CONFIG_MALLOC malloc [ DMA_REGS_CONFIG_MALLOC_NUMBER ] __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1761
1762 /* READ_REQ_BASE_ADDRESS */
1763 DMA_REGS_CONFIG_READ_BASE read_base [ DMA_REGS_CONFIG_READ_BASE_NUMBER ] __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1764
1765 /* URGENT_THRESHOLDS */
1766 DMA_REGS_CONFIG_U_THRESH u_thresh [ DMA_REGS_CONFIG_U_THRESH_NUMBER ] __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1767
1768 /* STRICT_PRIORITY */
1769 DMA_REGS_CONFIG_PRI pri [ DMA_REGS_CONFIG_PRI_NUMBER ] __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1770
1771 /* WEIGHT_OF_ROUND_ROBIN */
1772 DMA_REGS_CONFIG_WEIGHT weight [ DMA_REGS_CONFIG_WEIGHT_NUMBER ] __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1773
1774 /* BB_ROUTE_DMA_PERIPH */
1775 DMA_REGS_CONFIG_BB_ROUTE bb_route [ DMA_REGS_CONFIG_BB_ROUTE_NUMBER ] __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1776
1777 /* POINTERS_RESET */
1778 DMA_REGS_CONFIG_PTRRST ptrrst __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1779
1780 /* MEM_SEL */
1781 DMA_REGS_CONFIG_MEM_SEL mem_sel __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1782 }
1783 __PACKING_ATTRIBUTE_STRUCT_END__
1784 DMA_REGS_CONFIG ;
1785
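/* Illustrative helper (hypothetical, not part of the generated map): names for the
 * documented per-peripheral array ordering (Ethernet 0-4, then GPON), usable as the
 * index into the malloc/read_base/u_thresh/pri/weight/bb_route arrays above. */
#if 0
typedef enum
{
	DMA_PERIPHERAL_ETH0 = 0,
	DMA_PERIPHERAL_ETH1 = 1,
	DMA_PERIPHERAL_ETH2 = 2,
	DMA_PERIPHERAL_ETH3 = 3,
	DMA_PERIPHERAL_ETH4 = 4,
	DMA_PERIPHERAL_GPON = 5
} DMA_PERIPHERAL_INDEX;
#endif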
1786 /*****************************************************************************************/
1787 /* request counters per peripheral */
1788 /*****************************************************************************************/
1789
1790 /*****************************************************************************************/
1791 /* Registers array numbers */
1792 /*****************************************************************************************/
1793 #define DMA_REGS_DEBUG_REQ_CNT_RX_NUMBER ( 6 )
1794 #define DMA_REGS_DEBUG_REQ_CNT_TX_NUMBER ( 6 )
1795 #define DMA_REGS_DEBUG_REQ_CNT_RX_ACC_NUMBER ( 6 )
1796 #define DMA_REGS_DEBUG_REQ_CNT_TX_ACC_NUMBER ( 6 )
1797 #define DMA_REGS_DEBUG_RDDATA_NUMBER ( 4 )
1798 typedef struct
1799 {
1800 /* NOT_EMPTY_VECTOR */
1801 DMA_REGS_DEBUG_NEMPTY nempty __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1802
1803 /* URGENT_VECTOR */
1804 DMA_REGS_DEBUG_URGNT urgnt __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1805
1806 /* SELECTED_SOURCE_NUM */
1807 DMA_REGS_DEBUG_SELSRC selsrc __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1808
1809 /* REQUEST_COUNTERS_RX */
1810 DMA_REGS_DEBUG_REQ_CNT_RX req_cnt_rx [ DMA_REGS_DEBUG_REQ_CNT_RX_NUMBER ] __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1811
1812 /* REQUEST_COUNTERS_TX */
1813 DMA_REGS_DEBUG_REQ_CNT_TX req_cnt_tx [ DMA_REGS_DEBUG_REQ_CNT_TX_NUMBER ] __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1814
1815 /* ACC_REQUEST_COUNTERS_RX */
1816 DMA_REGS_DEBUG_REQ_CNT_RX_ACC req_cnt_rx_acc [ DMA_REGS_DEBUG_REQ_CNT_RX_ACC_NUMBER ] __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1817
1818 /* ACC_REQUEST_COUNTERS_TX */
1819 DMA_REGS_DEBUG_REQ_CNT_TX_ACC req_cnt_tx_acc [ DMA_REGS_DEBUG_REQ_CNT_TX_ACC_NUMBER ] __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1820
1821 /* Reserved */
1822 uint8_t reserved1 [ 148 ] __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1823
1824 /* RAM_ADDRESS */
1825 DMA_REGS_DEBUG_RDADD rdadd __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1826
1827 /* INDIRECT_READ_REQUEST_VALID */
1828 DMA_REGS_DEBUG_RDVALID rdvalid __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1829
1830 /* INDIRECT_READ_DATA */
1831 DMA_REGS_DEBUG_RDDATA rddata [ DMA_REGS_DEBUG_RDDATA_NUMBER ] __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1832
1833 /* READ_DATA_READY */
1834 DMA_REGS_DEBUG_RDDATARDY rddatardy __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1835 }
1836 __PACKING_ATTRIBUTE_STRUCT_END__
1837 DMA_REGS_DEBUG ;
1838
1839 typedef struct
1840 {
1841 /* config function */
1842 DMA_REGS_CONFIG config __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1843
1844 /* Reserved */
1845 uint8_t reserved0 [ 100 ] __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1846
1847 /* debug function */
1848 DMA_REGS_DEBUG debug __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1849 }
1850 __PACKING_ATTRIBUTE_STRUCT_END__
1851 DMA_REGS ;
1852
1853 #define DMA_REGS_NUMBER ( 2 )
1854 typedef struct
1855 {
1856 /* REGS */
1857 DMA_REGS regs [ DMA_REGS_NUMBER ] __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
1858 }
1859 __PACKING_ATTRIBUTE_STRUCT_END__
1860 DMA_FOR_ALL ;
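/* Usage sketch (illustrative only): compile-time checks that the packed overlay
 * structures line up with the register offsets defined above. They assume the
 * packing attributes from packing.h yield a byte-exact layout, and they require a
 * C11 (or later) compiler for _Static_assert. */
#if 0
#include <stddef.h>

_Static_assert(sizeof(DMA_REGS_DEBUG_URGNT) == sizeof(uint32_t),
	       "register overlays must be exactly one 32-bit word");
_Static_assert(offsetof(DMA_REGS, debug) == DMA_REGS_0_DEBUG_OFFSET,
	       "debug block must start 0x100 bytes into DMA_REGS");
_Static_assert(offsetof(DMA_REGS_DEBUG, rdadd) == DMA_REGS_DEBUG_RDADD_OFFSET,
	       "RDADD must sit at offset 0x100 of the debug block");
#endif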
1861 #endif /* DMA_H_INCLUDED */
1862