1 // SPDX-License-Identifier: GPL-2.0+
3 * Copyright (c) 2013 Broadcom
9 #ifndef __DMA_H_INCLUDED
10 #define __DMA_H_INCLUDED
12 /* File automatically generated by Reggae at 15/08/2013 10:54:36 */
14 #include "access_macros.h"
18 /*****************************************************************************************/
19 /* The Direct Memory Access (DMA) module serves peripheral (EMACs and GPON) requests for */
20 /* writes and reads from DDR and packet SRAM. DMA connects the peripherals to DDR. S */
21 /* DMA connects the same peripherals to packet SRAM. */
22 /*****************************************************************************************/
24 /*****************************************************************************************/
26 /*****************************************************************************************/
27 /*****************************************************************************************/
28 /* Functions offsets and addresses */
29 /*****************************************************************************************/
/* Per-instance block offsets: each DMA_REGS instance (0 and 1) has a CONFIG
 * block at +0x000 and a DEBUG block at +0x100 from its base.
 * NOTE(review): DMA_REGS_0_OFFSET / DMA_REGS_1_OFFSET are not defined in this
 * chunk — presumably provided by a platform/base-address header; confirm. */
30 #define DMA_REGS_0_CONFIG_OFFSET ( 0x00000000 )
31 #define DMA_REGS_0_CONFIG_ADDRESS ( DMA_REGS_0_OFFSET + DMA_REGS_0_CONFIG_OFFSET )
33 #define DMA_REGS_0_DEBUG_OFFSET ( 0x00000100 )
34 #define DMA_REGS_0_DEBUG_ADDRESS ( DMA_REGS_0_OFFSET + DMA_REGS_0_DEBUG_OFFSET )
36 #define DMA_REGS_1_CONFIG_OFFSET ( 0x00000000 )
37 #define DMA_REGS_1_CONFIG_ADDRESS ( DMA_REGS_1_OFFSET + DMA_REGS_1_CONFIG_OFFSET )
39 #define DMA_REGS_1_DEBUG_OFFSET ( 0x00000100 )
40 #define DMA_REGS_1_DEBUG_ADDRESS ( DMA_REGS_1_OFFSET + DMA_REGS_1_DEBUG_OFFSET )
42 /* 'd' is module index */
43 /* 'i' is block index */
44 /* 'j' is function index */
45 /* 'e' is function entry */
46 /* 'k' is register index */
48 /*****************************************************************************************/
50 /* Broadbus source address of DMA and SDMA */
51 /*****************************************************************************************/
/* CONFIG_SOURCE register (offset 0x00 of each CONFIG block).
 * Reset values below: 0x16 for DMA, 0x18 for SDMA; r1 is the remaining
 * (reserved-looking) field with reset 0.
 * READ_32 / WRITE_32 and the indexed variants come from "access_macros.h". */
53 #define DMA_REGS_CONFIG_SOURCE_R1_DEFAULT_VALUE ( 0x0 )
54 #define DMA_REGS_CONFIG_SOURCE_R1_DEFAULT_VALUE_RESET_VALUE ( 0x0 )
55 #define DMA_REGS_CONFIG_SOURCE_SOURCE_DEFAULT_DMA_VALUE ( 0x16 )
56 #define DMA_REGS_CONFIG_SOURCE_SOURCE_DEFAULT_DMA_VALUE_RESET_VALUE ( 0x16 )
57 #define DMA_REGS_CONFIG_SOURCE_SOURCE_DEFAULT_SDMA_VALUE ( 0x18 )
58 #define DMA_REGS_CONFIG_SOURCE_SOURCE_DEFAULT_SDMA_VALUE_RESET_VALUE ( 0x18 )
/* Register offset within the CONFIG block. */
61 #define DMA_REGS_CONFIG_SOURCE_OFFSET ( 0x00000000 )
/* Fixed-address accessors, one pair per DMA_REGS instance. */
63 #define DMA_REGS_0_CONFIG_SOURCE_ADDRESS ( DMA_REGS_0_CONFIG_ADDRESS + DMA_REGS_CONFIG_SOURCE_OFFSET )
64 #define DMA_REGS_0_CONFIG_SOURCE_READ( r ) READ_32( ( DMA_REGS_0_CONFIG_SOURCE_ADDRESS ), (r) )
65 #define DMA_REGS_0_CONFIG_SOURCE_WRITE( v ) WRITE_32( ( DMA_REGS_0_CONFIG_SOURCE_ADDRESS ), (v) )
67 #define DMA_REGS_1_CONFIG_SOURCE_ADDRESS ( DMA_REGS_1_CONFIG_ADDRESS + DMA_REGS_CONFIG_SOURCE_OFFSET )
68 #define DMA_REGS_1_CONFIG_SOURCE_READ( r ) READ_32( ( DMA_REGS_1_CONFIG_SOURCE_ADDRESS ), (r) )
69 #define DMA_REGS_1_CONFIG_SOURCE_WRITE( v ) WRITE_32( ( DMA_REGS_1_CONFIG_SOURCE_ADDRESS ), (v) )
/* Table of register addresses indexed by module ('i' per the key above);
 * defined in a companion .c file. */
72 extern uint32_t DMA_REGS_CONFIG_SOURCE_ARRAY
[ ] ;
74 #define DMA_REGS_CONFIG_SOURCE_WRITE( i, v ) WRITE_32( DMA_REGS_CONFIG_SOURCE_ARRAY [ i ], (v) )
75 #define DMA_REGS_CONFIG_SOURCE_READ( i, r ) READ_32( DMA_REGS_CONFIG_SOURCE_ARRAY [ i ], (r) )
77 #ifndef _BYTE_ORDER_LITTLE_ENDIAN_
81 uint32_t r1
: 27 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
84 uint32_t source
: 5 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
86 __PACKING_ATTRIBUTE_STRUCT_END__
87 DMA_REGS_CONFIG_SOURCE
;
90 { uint32_t source
: 5 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
93 uint32_t r1
: 27 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
97 __PACKING_ATTRIBUTE_STRUCT_END__
98 DMA_REGS_CONFIG_SOURCE
;
101 /*****************************************************************************************/
102 /* MEMORY_ALLOCATION */
103 /* This array of registers defines the memory allocation for the peripherals, for upstre */
104 /* am. The allocation is of number of 128byte buffers out of the total 32 buffers for s */
105 /* dma or 96 buffers in dma in the upload data RAM. For the DMA, the buffers are divid */
106 /* ed between 2 physical RAMs (64 in the first, 32 in the second). The decision which cl */
107 /* ients FIFO is located in which memory is done by the register in address 0x98. The a */
108 /* llocation is done by defining a base address (aligned to 128 bytes) and the number of */
109 /* allocated buffers. Note that the memory allocation should not contain wrap around. */
110 /* For example, if three buffers are needed, do not allocate buffers 30, 31 and 0. The */
111 /* number of allocated CDs is the same of data buffers - one chunk descriptor per buffe */
112 /* r, therefore allocation in CD RAM is defined only by offset address. The order of */
113 /* peripherals within the array is: Ethernet 0 Ethernet 1 Ethernet 2 Ethernet 3 Eth */
114 /* ernet 4 GPON/EPON */
115 /*****************************************************************************************/
/* MEMORY_ALLOCATION register array (see banner above): per-peripheral
 * cdoffset (CD RAM offset), numofbuff (number of 128-byte buffers) and
 * datatoffset (data RAM offset) defaults, for both DMA and SDMA.
 * NOTE(review): "DATATOFFSET"/"datatoffset" spelling is as generated by the
 * tool — do not "fix" it; callers reference these exact names. */
117 #define DMA_REGS_CONFIG_MALLOC_R3_R3_VALUE ( 0x0 )
118 #define DMA_REGS_CONFIG_MALLOC_R3_R3_VALUE_RESET_VALUE ( 0x0 )
/* CD_memory_offset_address defaults, per peripheral. */
119 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_DMA_EMAC0_VALUE ( 0x0 )
120 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_DMA_EMAC0_VALUE_RESET_VALUE ( 0x0 )
121 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_SDMA_EMAC0_VALUE ( 0x0 )
122 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_SDMA_EMAC0_VALUE_RESET_VALUE ( 0x0 )
123 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_SDMA_EMAC1_VALUE ( 0x5 )
124 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_SDMA_EMAC1_VALUE_RESET_VALUE ( 0x5 )
125 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_DMA_EMAC1_VALUE ( 0x9 )
126 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_DMA_EMAC1_VALUE_RESET_VALUE ( 0x9 )
127 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_SDMA_EMAC2_VALUE ( 0xA )
128 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_SDMA_EMAC2_VALUE_RESET_VALUE ( 0xA )
129 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_SDMA_EMAC3_VALUE ( 0xF )
130 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_SDMA_EMAC3_VALUE_RESET_VALUE ( 0xF )
131 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_DMA_EMAC2_VALUE ( 0x12 )
132 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_DMA_EMAC2_VALUE_RESET_VALUE ( 0x12 )
133 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_SDMA_EMAC4_VALUE ( 0x14 )
134 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_SDMA_EMAC4_VALUE_RESET_VALUE ( 0x14 )
135 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_SDMA_GPON_VALUE ( 0x19 )
136 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_SDMA_GPON_VALUE_RESET_VALUE ( 0x19 )
137 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_DMA_EMAC3_VALUE ( 0x1B )
138 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_DMA_EMAC3_VALUE_RESET_VALUE ( 0x1B )
139 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_DMA_EMAC4_VALUE ( 0x24 )
140 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_DMA_EMAC4_VALUE_RESET_VALUE ( 0x24 )
141 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_DMA_GPON_VALUE ( 0x2D )
142 #define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_DMA_GPON_VALUE_RESET_VALUE ( 0x2D )
143 #define DMA_REGS_CONFIG_MALLOC_R2_R2_VALUE ( 0x0 )
144 #define DMA_REGS_CONFIG_MALLOC_R2_R2_VALUE_RESET_VALUE ( 0x0 )
/* number_of_buffers min/defaults/max: 0x20 max for SDMA, 0x3F max for DMA. */
145 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_MIN_DMA_SDMA_VALUE ( 0x0 )
146 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_SDMA_EMAC0_VALUE ( 0x5 )
147 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_SDMA_EMAC0_VALUE_RESET_VALUE ( 0x5 )
148 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_SDMA_EMAC1_VALUE ( 0x5 )
149 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_SDMA_EMAC1_VALUE_RESET_VALUE ( 0x5 )
150 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_SDMA_EMAC2_VALUE ( 0x5 )
151 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_SDMA_EMAC2_VALUE_RESET_VALUE ( 0x5 )
152 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_SDMA_EMAC3_VALUE ( 0x5 )
153 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_SDMA_EMAC3_VALUE_RESET_VALUE ( 0x5 )
154 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_SDMA_EMAC4_VALUE ( 0x5 )
155 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_SDMA_EMAC4_VALUE_RESET_VALUE ( 0x5 )
156 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_SDMA_GPON_VALUE ( 0x7 )
157 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_SDMA_GPON_VALUE_RESET_VALUE ( 0x7 )
158 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_DMA_EMAC0_VALUE ( 0x9 )
159 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_DMA_EMAC0_VALUE_RESET_VALUE ( 0x9 )
160 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_DMA_EMAC1_VALUE ( 0x9 )
161 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_DMA_EMAC1_VALUE_RESET_VALUE ( 0x9 )
162 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_DMA_EMAC2_VALUE ( 0x9 )
163 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_DMA_EMAC2_VALUE_RESET_VALUE ( 0x9 )
164 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_DMA_EMAC3_VALUE ( 0x9 )
165 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_DMA_EMAC3_VALUE_RESET_VALUE ( 0x9 )
166 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_DMA_EMAC4_VALUE ( 0x9 )
167 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_DMA_EMAC4_VALUE_RESET_VALUE ( 0x9 )
168 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_DMA_GPON_VALUE ( 0x12 )
169 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_DMA_GPON_VALUE_RESET_VALUE ( 0x12 )
170 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_MAX_SDMA_VALUE ( 0x20 )
171 #define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_MAX_DMA_VALUE ( 0x3F )
172 #define DMA_REGS_CONFIG_MALLOC_R1_R1_VALUE ( 0x0 )
173 #define DMA_REGS_CONFIG_MALLOC_R1_R1_VALUE_RESET_VALUE ( 0x0 )
/* data_memory_offset_address defaults, per peripheral. */
174 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_DMA_EMAC0_VALUE ( 0x0 )
175 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_DMA_EMAC0_VALUE_RESET_VALUE ( 0x0 )
176 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_SDMA_EMAC0_VALUE ( 0x0 )
177 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_SDMA_EMAC0_VALUE_RESET_VALUE ( 0x0 )
178 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_SDMA_EMAC1_VALUE ( 0x5 )
179 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_SDMA_EMAC1_VALUE_RESET_VALUE ( 0x5 )
180 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_DMA_EMAC1_VALUE ( 0x9 )
181 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_DMA_EMAC1_VALUE_RESET_VALUE ( 0x9 )
182 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_SDMA_EMAC2_VALUE ( 0xA )
183 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_SDMA_EMAC2_VALUE_RESET_VALUE ( 0xA )
184 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_SDMA_EMAC3_VALUE ( 0xF )
185 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_SDMA_EMAC3_VALUE_RESET_VALUE ( 0xF )
186 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_DMA_EMAC2_VALUE ( 0x12 )
187 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_DMA_EMAC2_VALUE_RESET_VALUE ( 0x12 )
188 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_SDMA_EMAC4_VALUE ( 0x14 )
189 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_SDMA_EMAC4_VALUE_RESET_VALUE ( 0x14 )
190 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_SDMA_GPON_VALUE ( 0x19 )
191 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_SDMA_GPON_VALUE_RESET_VALUE ( 0x19 )
192 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_DMA_EMAC3_VALUE ( 0x1B )
193 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_DMA_EMAC3_VALUE_RESET_VALUE ( 0x1B )
194 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_DMA_EMAC4_VALUE ( 0x24 )
195 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_DMA_EMAC4_VALUE_RESET_VALUE ( 0x24 )
196 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_DMA_GPON_VALUE ( 0x2D )
197 #define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_DMA_GPON_VALUE_RESET_VALUE ( 0x2D )
/* Register-array offset within the CONFIG block; accessed with the indexed
 * READ_I_32/WRITE_I_32 macros ('i' selects the entry). */
200 #define DMA_REGS_CONFIG_MALLOC_OFFSET ( 0x00000004 )
202 #define DMA_REGS_0_CONFIG_MALLOC_ADDRESS ( DMA_REGS_0_CONFIG_ADDRESS + DMA_REGS_CONFIG_MALLOC_OFFSET )
203 #define DMA_REGS_0_CONFIG_MALLOC_READ_I( r, i ) READ_I_32( ( DMA_REGS_0_CONFIG_MALLOC_ADDRESS ), (i), (r) )
204 #define DMA_REGS_0_CONFIG_MALLOC_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_0_CONFIG_MALLOC_ADDRESS ), (i), (v) )
206 #define DMA_REGS_1_CONFIG_MALLOC_ADDRESS ( DMA_REGS_1_CONFIG_ADDRESS + DMA_REGS_CONFIG_MALLOC_OFFSET )
207 #define DMA_REGS_1_CONFIG_MALLOC_READ_I( r, i ) READ_I_32( ( DMA_REGS_1_CONFIG_MALLOC_ADDRESS ), (i), (r) )
208 #define DMA_REGS_1_CONFIG_MALLOC_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_1_CONFIG_MALLOC_ADDRESS ), (i), (v) )
/* Base addresses indexed by module 'i'; 'k' is the register index within
 * the array. Defined in a companion .c file. */
211 extern uint32_t DMA_REGS_CONFIG_MALLOC_ARRAY
[ ] ;
213 #define DMA_REGS_CONFIG_MALLOC_WRITE( i, k, v ) WRITE_I_32( DMA_REGS_CONFIG_MALLOC_ARRAY [ i ], (k), (v) )
214 #define DMA_REGS_CONFIG_MALLOC_READ( i, k, r ) READ_I_32( DMA_REGS_CONFIG_MALLOC_ARRAY [ i ], (k), (r) )
216 #ifndef _BYTE_ORDER_LITTLE_ENDIAN_
220 uint32_t r3
: 2 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
222 /* CD_memory_offset_address */
223 uint32_t cdoffset
: 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
226 uint32_t r2
: 2 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
228 /* number_of_buffers */
229 uint32_t numofbuff
: 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
232 uint32_t r1
: 10 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
234 /* data_memory_offset_address */
235 uint32_t datatoffset
: 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
237 __PACKING_ATTRIBUTE_STRUCT_END__
238 DMA_REGS_CONFIG_MALLOC
;
241 { uint32_t datatoffset
: 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
243 /* data_memory_offset_address */
244 uint32_t r1
: 10 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
247 uint32_t numofbuff
: 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
249 /* number_of_buffers */
250 uint32_t r2
: 2 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
253 uint32_t cdoffset
: 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
255 /* CD_memory_offset_address */
256 uint32_t r3
: 2 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
260 __PACKING_ATTRIBUTE_STRUCT_END__
261 DMA_REGS_CONFIG_MALLOC
;
264 /*****************************************************************************************/
265 /* READ_REQ_BASE_ADDRESS */
266 /* This array of registers controls the base address of each peripheral within the read */
267 /* requests RAM. Each peripheral gets memory enough for storing up to 8 read requests */
268 /* (total of 48 requests in the RAM), starting from a configurable base address. The ba */
269 /* se address is aligned to 8 therefore the only valid values are: 0, 8, 16, 24, 32, 4 */
271 /*****************************************************************************************/
/* READ_REQ_BASE_ADDRESS register array (see banner above): per-peripheral
 * base address in the read-requests RAM, 8-aligned, range 0x00..0x28. */
273 #define DMA_REGS_CONFIG_READ_BASE_R1_R1_VALUE ( 0x0 )
274 #define DMA_REGS_CONFIG_READ_BASE_R1_R1_VALUE_RESET_VALUE ( 0x0 )
275 #define DMA_REGS_CONFIG_READ_BASE_BASE_MIN_VALUE ( 0x0 )
276 #define DMA_REGS_CONFIG_READ_BASE_BASE_DEFAULT_EMAC0_VALUE ( 0x0 )
277 #define DMA_REGS_CONFIG_READ_BASE_BASE_DEFAULT_EMAC0_VALUE_RESET_VALUE ( 0x0 )
278 #define DMA_REGS_CONFIG_READ_BASE_BASE_DEFAULT_EMAC1_VALUE ( 0x8 )
279 #define DMA_REGS_CONFIG_READ_BASE_BASE_DEFAULT_EMAC1_VALUE_RESET_VALUE ( 0x8 )
280 #define DMA_REGS_CONFIG_READ_BASE_BASE_DEFAULT_EMAC2_VALUE ( 0x10 )
281 #define DMA_REGS_CONFIG_READ_BASE_BASE_DEFAULT_EMAC2_VALUE_RESET_VALUE ( 0x10 )
282 #define DMA_REGS_CONFIG_READ_BASE_BASE_DEFAULT_EMAC3_VALUE ( 0x18 )
283 #define DMA_REGS_CONFIG_READ_BASE_BASE_DEFAULT_EMAC3_VALUE_RESET_VALUE ( 0x18 )
284 #define DMA_REGS_CONFIG_READ_BASE_BASE_DEFAULT_EMAC4_VALUE ( 0x20 )
285 #define DMA_REGS_CONFIG_READ_BASE_BASE_DEFAULT_EMAC4_VALUE_RESET_VALUE ( 0x20 )
286 #define DMA_REGS_CONFIG_READ_BASE_BASE_DEFAULT_GPON_VALUE ( 0x28 )
287 #define DMA_REGS_CONFIG_READ_BASE_BASE_DEFAULT_GPON_VALUE_RESET_VALUE ( 0x28 )
288 #define DMA_REGS_CONFIG_READ_BASE_BASE_MAX_VALUE ( 0x28 )
/* Register-array offset within the CONFIG block. */
291 #define DMA_REGS_CONFIG_READ_BASE_OFFSET ( 0x0000001C )
293 #define DMA_REGS_0_CONFIG_READ_BASE_ADDRESS ( DMA_REGS_0_CONFIG_ADDRESS + DMA_REGS_CONFIG_READ_BASE_OFFSET )
294 #define DMA_REGS_0_CONFIG_READ_BASE_READ_I( r, i ) READ_I_32( ( DMA_REGS_0_CONFIG_READ_BASE_ADDRESS ), (i), (r) )
295 #define DMA_REGS_0_CONFIG_READ_BASE_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_0_CONFIG_READ_BASE_ADDRESS ), (i), (v) )
297 #define DMA_REGS_1_CONFIG_READ_BASE_ADDRESS ( DMA_REGS_1_CONFIG_ADDRESS + DMA_REGS_CONFIG_READ_BASE_OFFSET )
298 #define DMA_REGS_1_CONFIG_READ_BASE_READ_I( r, i ) READ_I_32( ( DMA_REGS_1_CONFIG_READ_BASE_ADDRESS ), (i), (r) )
299 #define DMA_REGS_1_CONFIG_READ_BASE_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_1_CONFIG_READ_BASE_ADDRESS ), (i), (v) )
/* Base addresses indexed by module 'i'; defined in a companion .c file. */
302 extern uint32_t DMA_REGS_CONFIG_READ_BASE_ARRAY
[ ] ;
304 #define DMA_REGS_CONFIG_READ_BASE_WRITE( i, k, v ) WRITE_I_32( DMA_REGS_CONFIG_READ_BASE_ARRAY [ i ], (k), (v) )
305 #define DMA_REGS_CONFIG_READ_BASE_READ( i, k, r ) READ_I_32( DMA_REGS_CONFIG_READ_BASE_ARRAY [ i ], (k), (r) )
307 #ifndef _BYTE_ORDER_LITTLE_ENDIAN_
311 uint32_t r1
: 26 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
314 uint32_t base
: 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
316 __PACKING_ATTRIBUTE_STRUCT_END__
317 DMA_REGS_CONFIG_READ_BASE
;
320 { uint32_t base
: 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
323 uint32_t r1
: 26 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
327 __PACKING_ATTRIBUTE_STRUCT_END__
328 DMA_REGS_CONFIG_READ_BASE
;
331 /*****************************************************************************************/
332 /* URGENT_THRESHOLDS */
333 /* the in/out of urgent thresholds mark the number of write requests in the queue in whi */
334 /* ch the peripherals priority is changed. The two thresholds should create hysteresis. */
335 /* The moving into urgent threshold must always be greater than the moving out of urgen */
337 /*****************************************************************************************/
/* URGENT_THRESHOLDS register array (see banner above): per-peripheral
 * into-urgent / out-of-urgent write-queue thresholds, max 0x1F each.
 * Per the banner, into_u must stay above out_of_u (hysteresis). */
339 #define DMA_REGS_CONFIG_U_THRESH_R2_R2_VALUE ( 0x0 )
340 #define DMA_REGS_CONFIG_U_THRESH_R2_R2_VALUE_RESET_VALUE ( 0x0 )
/* out_of_urgent_threshold defaults. */
341 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_MIN_VALUE ( 0x0 )
342 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_SDMA_EMAC0_VALUE ( 0x2 )
343 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_SDMA_EMAC0_VALUE_RESET_VALUE ( 0x2 )
344 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_SDMA_EMAC1_VALUE ( 0x2 )
345 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_SDMA_EMAC1_VALUE_RESET_VALUE ( 0x2 )
346 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_SDMA_EMAC2_VALUE ( 0x2 )
347 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_SDMA_EMAC2_VALUE_RESET_VALUE ( 0x2 )
348 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_SDMA_EMAC3_VALUE ( 0x2 )
349 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_SDMA_EMAC3_VALUE_RESET_VALUE ( 0x2 )
350 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_SDMA_EMAC4_VALUE ( 0x2 )
351 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_SDMA_EMAC4_VALUE_RESET_VALUE ( 0x2 )
352 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_SDMA_GPON_VALUE ( 0x4 )
353 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_SDMA_GPON_VALUE_RESET_VALUE ( 0x4 )
354 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_DMA_EMAC0_VALUE ( 0x5 )
355 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_DMA_EMAC0_VALUE_RESET_VALUE ( 0x5 )
356 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_DMA_EMAC1_VALUE ( 0x5 )
357 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_DMA_EMAC1_VALUE_RESET_VALUE ( 0x5 )
358 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_DMA_EMAC2_VALUE ( 0x5 )
359 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_DMA_EMAC2_VALUE_RESET_VALUE ( 0x5 )
360 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_DMA_EMAC3_VALUE ( 0x5 )
361 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_DMA_EMAC3_VALUE_RESET_VALUE ( 0x5 )
362 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_DMA_EMAC4_VALUE ( 0x5 )
363 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_DMA_EMAC4_VALUE_RESET_VALUE ( 0x5 )
364 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_DMA_GPON_VALUE ( 0xC )
365 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_DMA_GPON_VALUE_RESET_VALUE ( 0xC )
366 #define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_MAX_VALUE ( 0x1F )
367 #define DMA_REGS_CONFIG_U_THRESH_R1_R1_VALUE ( 0x0 )
368 #define DMA_REGS_CONFIG_U_THRESH_R1_R1_VALUE_RESET_VALUE ( 0x0 )
/* into_urgent_threshold defaults. */
369 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_MIN_VALUE ( 0x0 )
370 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_SDMA_EMAC0_VALUE ( 0x3 )
371 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_SDMA_EMAC0_VALUE_RESET_VALUE ( 0x3 )
372 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_SDMA_EMAC1_VALUE ( 0x3 )
373 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_SDMA_EMAC1_VALUE_RESET_VALUE ( 0x3 )
374 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_SDMA_EMAC2_VALUE ( 0x3 )
375 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_SDMA_EMAC2_VALUE_RESET_VALUE ( 0x3 )
376 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_SDMA_EMAC3_VALUE ( 0x3 )
377 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_SDMA_EMAC3_VALUE_RESET_VALUE ( 0x3 )
378 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_SDMA_EMAC4_VALUE ( 0x3 )
379 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_SDMA_EMAC4_VALUE_RESET_VALUE ( 0x3 )
380 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_SDMA_GPON_VALUE ( 0x5 )
381 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_SDMA_GPON_VALUE_RESET_VALUE ( 0x5 )
382 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_DMA_EMAC0_VALUE ( 0x7 )
383 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_DMA_EMAC0_VALUE_RESET_VALUE ( 0x7 )
384 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_DMA_EMAC1_VALUE ( 0x7 )
385 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_DMA_EMAC1_VALUE_RESET_VALUE ( 0x7 )
386 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_DMA_EMAC2_VALUE ( 0x7 )
387 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_DMA_EMAC2_VALUE_RESET_VALUE ( 0x7 )
388 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_DMA_EMAC3_VALUE ( 0x7 )
389 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_DMA_EMAC3_VALUE_RESET_VALUE ( 0x7 )
390 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_DMA_EMAC4_VALUE ( 0x7 )
391 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_DMA_EMAC4_VALUE_RESET_VALUE ( 0x7 )
392 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_DMA_GPON_VALUE ( 0xE )
393 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_DMA_GPON_VALUE_RESET_VALUE ( 0xE )
394 #define DMA_REGS_CONFIG_U_THRESH_INTO_U_MAX_VALUE ( 0x1F )
/* Register-array offset within the CONFIG block. */
397 #define DMA_REGS_CONFIG_U_THRESH_OFFSET ( 0x00000034 )
399 #define DMA_REGS_0_CONFIG_U_THRESH_ADDRESS ( DMA_REGS_0_CONFIG_ADDRESS + DMA_REGS_CONFIG_U_THRESH_OFFSET )
400 #define DMA_REGS_0_CONFIG_U_THRESH_READ_I( r, i ) READ_I_32( ( DMA_REGS_0_CONFIG_U_THRESH_ADDRESS ), (i), (r) )
401 #define DMA_REGS_0_CONFIG_U_THRESH_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_0_CONFIG_U_THRESH_ADDRESS ), (i), (v) )
403 #define DMA_REGS_1_CONFIG_U_THRESH_ADDRESS ( DMA_REGS_1_CONFIG_ADDRESS + DMA_REGS_CONFIG_U_THRESH_OFFSET )
404 #define DMA_REGS_1_CONFIG_U_THRESH_READ_I( r, i ) READ_I_32( ( DMA_REGS_1_CONFIG_U_THRESH_ADDRESS ), (i), (r) )
405 #define DMA_REGS_1_CONFIG_U_THRESH_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_1_CONFIG_U_THRESH_ADDRESS ), (i), (v) )
/* Base addresses indexed by module 'i'; defined in a companion .c file. */
408 extern uint32_t DMA_REGS_CONFIG_U_THRESH_ARRAY
[ ] ;
410 #define DMA_REGS_CONFIG_U_THRESH_WRITE( i, k, v ) WRITE_I_32( DMA_REGS_CONFIG_U_THRESH_ARRAY [ i ], (k), (v) )
411 #define DMA_REGS_CONFIG_U_THRESH_READ( i, k, r ) READ_I_32( DMA_REGS_CONFIG_U_THRESH_ARRAY [ i ], (k), (r) )
413 #ifndef _BYTE_ORDER_LITTLE_ENDIAN_
417 uint32_t r2
: 18 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
419 /* out_of_urgent_threshold */
420 uint32_t out_of_u
: 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
423 uint32_t r1
: 2 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
425 /* into_urgent_threshold */
426 uint32_t into_u
: 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
428 __PACKING_ATTRIBUTE_STRUCT_END__
429 DMA_REGS_CONFIG_U_THRESH
;
432 { uint32_t into_u
: 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
434 /* into_urgent_threshold */
435 uint32_t r1
: 2 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
438 uint32_t out_of_u
: 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
440 /* out_of_urgent_threshold */
441 uint32_t r2
: 18 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
445 __PACKING_ATTRIBUTE_STRUCT_END__
446 DMA_REGS_CONFIG_U_THRESH
;
449 /*****************************************************************************************/
450 /* STRICT_PRIORITY */
451 /* The arbitration between the requests of the different peripherals is done in two stag */
452 /* es: 1. Strict priority - chooses the peripherals with the highest priority among all */
453 /* peripherals who have a request pending. 2. Weighted Round-Robin between all peripher */
454 /* als with the same priority. This array of registers allow configuration of the pri */
455 /* ority of each peripheral (both rx and tx) in the following manner: There are 8 level */
456 /* s of priorities, when each bit in the register represents a different level of priori */
457 /* ty. One should assert the relevant bit according to the desired priority - For the */
458 /* lowest - 00000001 For the highest - 10000000 */
459 /*****************************************************************************************/
/* STRICT_PRIORITY register array (see banner above): one-hot priority per
 * peripheral, rx and tx sides; 0x01 = lowest level, 0x80 = highest. */
461 #define DMA_REGS_CONFIG_PRI_R1_R2_VALUE ( 0x0 )
462 #define DMA_REGS_CONFIG_PRI_R1_R2_VALUE_RESET_VALUE ( 0x0 )
463 #define DMA_REGS_CONFIG_PRI_TXPRI_LOW_VALUE ( 0x1 )
464 #define DMA_REGS_CONFIG_PRI_TXPRI_DEFAULT_TX_VALUE ( 0x80 )
465 #define DMA_REGS_CONFIG_PRI_TXPRI_DEFAULT_TX_VALUE_RESET_VALUE ( 0x80 )
466 #define DMA_REGS_CONFIG_PRI_TXPRI_HIGH_VALUE ( 0x80 )
467 #define DMA_REGS_CONFIG_PRI_RXPRI_LOW_VALUE ( 0x1 )
468 #define DMA_REGS_CONFIG_PRI_RXPRI_DEFAULT_RX_VALUE ( 0x80 )
469 #define DMA_REGS_CONFIG_PRI_RXPRI_DEFAULT_RX_VALUE_RESET_VALUE ( 0x80 )
470 #define DMA_REGS_CONFIG_PRI_RXPRI_HIGH_VALUE ( 0x80 )
/* Register-array offset within the CONFIG block. */
473 #define DMA_REGS_CONFIG_PRI_OFFSET ( 0x0000004C )
475 #define DMA_REGS_0_CONFIG_PRI_ADDRESS ( DMA_REGS_0_CONFIG_ADDRESS + DMA_REGS_CONFIG_PRI_OFFSET )
476 #define DMA_REGS_0_CONFIG_PRI_READ_I( r, i ) READ_I_32( ( DMA_REGS_0_CONFIG_PRI_ADDRESS ), (i), (r) )
477 #define DMA_REGS_0_CONFIG_PRI_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_0_CONFIG_PRI_ADDRESS ), (i), (v) )
479 #define DMA_REGS_1_CONFIG_PRI_ADDRESS ( DMA_REGS_1_CONFIG_ADDRESS + DMA_REGS_CONFIG_PRI_OFFSET )
480 #define DMA_REGS_1_CONFIG_PRI_READ_I( r, i ) READ_I_32( ( DMA_REGS_1_CONFIG_PRI_ADDRESS ), (i), (r) )
481 #define DMA_REGS_1_CONFIG_PRI_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_1_CONFIG_PRI_ADDRESS ), (i), (v) )
/* Base addresses indexed by module 'i'; defined in a companion .c file. */
484 extern uint32_t DMA_REGS_CONFIG_PRI_ARRAY
[ ] ;
486 #define DMA_REGS_CONFIG_PRI_WRITE( i, k, v ) WRITE_I_32( DMA_REGS_CONFIG_PRI_ARRAY [ i ], (k), (v) )
487 #define DMA_REGS_CONFIG_PRI_READ( i, k, r ) READ_I_32( DMA_REGS_CONFIG_PRI_ARRAY [ i ], (k), (r) )
489 #ifndef _BYTE_ORDER_LITTLE_ENDIAN_
493 uint32_t r1
: 16 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
495 /* priority_of_tx_side */
496 uint32_t txpri
: 8 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
498 /* priority_of_rx_side */
499 uint32_t rxpri
: 8 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
501 __PACKING_ATTRIBUTE_STRUCT_END__
502 DMA_REGS_CONFIG_PRI
;
505 { uint32_t rxpri
: 8 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
507 /* priority_of_rx_side */
508 uint32_t txpri
: 8 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
510 /* priority_of_tx_side */
511 uint32_t r1
: 16 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
515 __PACKING_ATTRIBUTE_STRUCT_END__
516 DMA_REGS_CONFIG_PRI
;
519 /*****************************************************************************************/
520 /* WEIGHT_OF_ROUND_ROBIN */
521 /* The second phase of the arbitration between requests is weighted round robin between */
522 /* requests of peripherals with the same priority. This array of registers allow config */
523 /* uration of the weight of each peripheral (rx and tx). The actual weight will be weight */
524 /* + 1, meaning configuration of 0 is actual weight of 1. */
525 /*****************************************************************************************/
/* WEIGHT_OF_ROUND_ROBIN register array (see banner above): per-peripheral
 * rx/tx round-robin weight, 0..0x7; actual weight is the value + 1. */
527 #define DMA_REGS_CONFIG_WEIGHT_R2_DEFAULT_VALUE ( 0x0 )
528 #define DMA_REGS_CONFIG_WEIGHT_R2_DEFAULT_VALUE_RESET_VALUE ( 0x0 )
529 #define DMA_REGS_CONFIG_WEIGHT_TXWEIGHT_MIN_VALUE ( 0x0 )
530 #define DMA_REGS_CONFIG_WEIGHT_TXWEIGHT_MIN_VALUE_RESET_VALUE ( 0x0 )
531 #define DMA_REGS_CONFIG_WEIGHT_TXWEIGHT_DEFAULT_SDMA_VALUE ( 0x0 )
532 #define DMA_REGS_CONFIG_WEIGHT_TXWEIGHT_DEFAULT_SDMA_VALUE_RESET_VALUE ( 0x0 )
533 #define DMA_REGS_CONFIG_WEIGHT_TXWEIGHT_DEFAULT_EMAC_DMA_VALUE ( 0x1 )
534 #define DMA_REGS_CONFIG_WEIGHT_TXWEIGHT_DEFAULT_EMAC_DMA_VALUE_RESET_VALUE ( 0x1 )
535 #define DMA_REGS_CONFIG_WEIGHT_TXWEIGHT_DEFAULT_GPON_DMA_VALUE ( 0x3 )
536 #define DMA_REGS_CONFIG_WEIGHT_TXWEIGHT_DEFAULT_GPON_DMA_VALUE_RESET_VALUE ( 0x3 )
537 #define DMA_REGS_CONFIG_WEIGHT_TXWEIGHT_MAX_VALUE ( 0x7 )
538 #define DMA_REGS_CONFIG_WEIGHT_R1_DEFAULT_VALUE ( 0x0 )
539 #define DMA_REGS_CONFIG_WEIGHT_R1_DEFAULT_VALUE_RESET_VALUE ( 0x0 )
540 #define DMA_REGS_CONFIG_WEIGHT_RXWEIGHT_DEFAULT_EMACS_DMA_VALUE ( 0x0 )
541 #define DMA_REGS_CONFIG_WEIGHT_RXWEIGHT_DEFAULT_EMACS_DMA_VALUE_RESET_VALUE ( 0x0 )
542 #define DMA_REGS_CONFIG_WEIGHT_RXWEIGHT_MIN_VALUE ( 0x0 )
543 #define DMA_REGS_CONFIG_WEIGHT_RXWEIGHT_MIN_VALUE_RESET_VALUE ( 0x0 )
544 #define DMA_REGS_CONFIG_WEIGHT_RXWEIGHT_DEFAULT_EMACS_SDMA_VALUE ( 0x0 )
545 #define DMA_REGS_CONFIG_WEIGHT_RXWEIGHT_DEFAULT_EMACS_SDMA_VALUE_RESET_VALUE ( 0x0 )
546 #define DMA_REGS_CONFIG_WEIGHT_RXWEIGHT_DEFAULT_GPON_DMA_VALUE ( 0x1 )
547 #define DMA_REGS_CONFIG_WEIGHT_RXWEIGHT_DEFAULT_GPON_DMA_VALUE_RESET_VALUE ( 0x1 )
548 #define DMA_REGS_CONFIG_WEIGHT_RXWEIGHT_DEFAULT_GPON_SDMA_VALUE ( 0x1 )
549 #define DMA_REGS_CONFIG_WEIGHT_RXWEIGHT_DEFAULT_GPON_SDMA_VALUE_RESET_VALUE ( 0x1 )
550 #define DMA_REGS_CONFIG_WEIGHT_RXWEIGHT_MAX_VALUE ( 0x7 )
/* Register-array offset within the CONFIG block. */
553 #define DMA_REGS_CONFIG_WEIGHT_OFFSET ( 0x00000064 )
555 #define DMA_REGS_0_CONFIG_WEIGHT_ADDRESS ( DMA_REGS_0_CONFIG_ADDRESS + DMA_REGS_CONFIG_WEIGHT_OFFSET )
556 #define DMA_REGS_0_CONFIG_WEIGHT_READ_I( r, i ) READ_I_32( ( DMA_REGS_0_CONFIG_WEIGHT_ADDRESS ), (i), (r) )
557 #define DMA_REGS_0_CONFIG_WEIGHT_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_0_CONFIG_WEIGHT_ADDRESS ), (i), (v) )
559 #define DMA_REGS_1_CONFIG_WEIGHT_ADDRESS ( DMA_REGS_1_CONFIG_ADDRESS + DMA_REGS_CONFIG_WEIGHT_OFFSET )
560 #define DMA_REGS_1_CONFIG_WEIGHT_READ_I( r, i ) READ_I_32( ( DMA_REGS_1_CONFIG_WEIGHT_ADDRESS ), (i), (r) )
561 #define DMA_REGS_1_CONFIG_WEIGHT_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_1_CONFIG_WEIGHT_ADDRESS ), (i), (v) )
/* Base addresses indexed by module 'i'; defined in a companion .c file. */
564 extern uint32_t DMA_REGS_CONFIG_WEIGHT_ARRAY
[ ] ;
566 #define DMA_REGS_CONFIG_WEIGHT_WRITE( i, k, v ) WRITE_I_32( DMA_REGS_CONFIG_WEIGHT_ARRAY [ i ], (k), (v) )
567 #define DMA_REGS_CONFIG_WEIGHT_READ( i, k, r ) READ_I_32( DMA_REGS_CONFIG_WEIGHT_ARRAY [ i ], (k), (r) )
569 #ifndef _BYTE_ORDER_LITTLE_ENDIAN_
573 uint32_t r2
: 21 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
575 /* weight_of_tx_side */
576 uint32_t txweight
: 3 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
579 uint32_t r1
: 5 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
581 /* weight_of_rx_side */
582 uint32_t rxweight
: 3 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
584 __PACKING_ATTRIBUTE_STRUCT_END__
585 DMA_REGS_CONFIG_WEIGHT
;
588 { uint32_t rxweight
: 3 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
590 /* weight_of_rx_side */
591 uint32_t r1
: 5 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
594 uint32_t txweight
: 3 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
596 /* weight_of_tx_side */
597 uint32_t r2
: 21 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
601 __PACKING_ATTRIBUTE_STRUCT_END__
602 DMA_REGS_CONFIG_WEIGHT
;
605 /*****************************************************************************************/
606 /* BB_ROUTE_DMA_PERIPH */
607 /* Broadbus route address from the DMA to the peripherals. Register per peripheral (rx a */
608 /* nd tx). The route address is same for DMA and SDMA because of the symmetry of the BB */
610 /*****************************************************************************************/
/* BB_ROUTE_DMA_PERIPH register array (see banner above): Broadbus route
 * address from the DMA to each peripheral, rx and tx; shared by DMA/SDMA. */
612 #define DMA_REGS_CONFIG_BB_ROUTE_R2_R2_VALUE ( 0x0 )
613 #define DMA_REGS_CONFIG_BB_ROUTE_R2_R2_VALUE_RESET_VALUE ( 0x0 )
/* tx-side route defaults. */
614 #define DMA_REGS_CONFIG_BB_ROUTE_TXROUTE_DEFAULT_GPON_VALUE ( 0x11 )
615 #define DMA_REGS_CONFIG_BB_ROUTE_TXROUTE_DEFAULT_GPON_VALUE_RESET_VALUE ( 0x11 )
616 #define DMA_REGS_CONFIG_BB_ROUTE_TXROUTE_DEFAULT_EMAC3_VALUE ( 0x12 )
617 #define DMA_REGS_CONFIG_BB_ROUTE_TXROUTE_DEFAULT_EMAC3_VALUE_RESET_VALUE ( 0x12 )
618 #define DMA_REGS_CONFIG_BB_ROUTE_TXROUTE_DEFAULT_EMAC1_VALUE ( 0x16 )
619 #define DMA_REGS_CONFIG_BB_ROUTE_TXROUTE_DEFAULT_EMAC1_VALUE_RESET_VALUE ( 0x16 )
620 #define DMA_REGS_CONFIG_BB_ROUTE_TXROUTE_DEFAULT_EMAC4_VALUE ( 0x19 )
621 #define DMA_REGS_CONFIG_BB_ROUTE_TXROUTE_DEFAULT_EMAC4_VALUE_RESET_VALUE ( 0x19 )
622 #define DMA_REGS_CONFIG_BB_ROUTE_TXROUTE_DEFAULT_EMAC2_VALUE ( 0x1A )
623 #define DMA_REGS_CONFIG_BB_ROUTE_TXROUTE_DEFAULT_EMAC2_VALUE_RESET_VALUE ( 0x1A )
624 #define DMA_REGS_CONFIG_BB_ROUTE_TXROUTE_DEFAULT_EMAC0_VALUE ( 0x1E )
625 #define DMA_REGS_CONFIG_BB_ROUTE_TXROUTE_DEFAULT_EMAC0_VALUE_RESET_VALUE ( 0x1E )
626 #define DMA_REGS_CONFIG_BB_ROUTE_R1_R1_VALUE ( 0x0 )
627 #define DMA_REGS_CONFIG_BB_ROUTE_R1_R1_VALUE_RESET_VALUE ( 0x0 )
/* rx-side route defaults. */
628 #define DMA_REGS_CONFIG_BB_ROUTE_RXROUTE_DEFAULT_GPON_VALUE ( 0x1 )
629 #define DMA_REGS_CONFIG_BB_ROUTE_RXROUTE_DEFAULT_GPON_VALUE_RESET_VALUE ( 0x1 )
630 #define DMA_REGS_CONFIG_BB_ROUTE_RXROUTE_DEFAULT_EMAC3_VALUE ( 0x2 )
631 #define DMA_REGS_CONFIG_BB_ROUTE_RXROUTE_DEFAULT_EMAC3_VALUE_RESET_VALUE ( 0x2 )
632 #define DMA_REGS_CONFIG_BB_ROUTE_RXROUTE_DEFAULT_EMAC1_VALUE ( 0x6 )
633 #define DMA_REGS_CONFIG_BB_ROUTE_RXROUTE_DEFAULT_EMAC1_VALUE_RESET_VALUE ( 0x6 )
634 #define DMA_REGS_CONFIG_BB_ROUTE_RXROUTE_DEFAULT_EMAC4_VALUE ( 0x9 )
635 #define DMA_REGS_CONFIG_BB_ROUTE_RXROUTE_DEFAULT_EMAC4_VALUE_RESET_VALUE ( 0x9 )
636 #define DMA_REGS_CONFIG_BB_ROUTE_RXROUTE_DEFAULT_EMAC2_VALUE ( 0xA )
637 #define DMA_REGS_CONFIG_BB_ROUTE_RXROUTE_DEFAULT_EMAC2_VALUE_RESET_VALUE ( 0xA )
638 #define DMA_REGS_CONFIG_BB_ROUTE_RXROUTE_DEFAULT_EMAC0_VALUE ( 0xE )
639 #define DMA_REGS_CONFIG_BB_ROUTE_RXROUTE_DEFAULT_EMAC0_VALUE_RESET_VALUE ( 0xE )
/* Register-array offset within the CONFIG block. */
642 #define DMA_REGS_CONFIG_BB_ROUTE_OFFSET ( 0x0000007C )
644 #define DMA_REGS_0_CONFIG_BB_ROUTE_ADDRESS ( DMA_REGS_0_CONFIG_ADDRESS + DMA_REGS_CONFIG_BB_ROUTE_OFFSET )
645 #define DMA_REGS_0_CONFIG_BB_ROUTE_READ_I( r, i ) READ_I_32( ( DMA_REGS_0_CONFIG_BB_ROUTE_ADDRESS ), (i), (r) )
646 #define DMA_REGS_0_CONFIG_BB_ROUTE_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_0_CONFIG_BB_ROUTE_ADDRESS ), (i), (v) )
648 #define DMA_REGS_1_CONFIG_BB_ROUTE_ADDRESS ( DMA_REGS_1_CONFIG_ADDRESS + DMA_REGS_CONFIG_BB_ROUTE_OFFSET )
649 #define DMA_REGS_1_CONFIG_BB_ROUTE_READ_I( r, i ) READ_I_32( ( DMA_REGS_1_CONFIG_BB_ROUTE_ADDRESS ), (i), (r) )
650 #define DMA_REGS_1_CONFIG_BB_ROUTE_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_1_CONFIG_BB_ROUTE_ADDRESS ), (i), (v) )
653 extern uint32_t DMA_REGS_CONFIG_BB_ROUTE_ARRAY
[ ] ;
655 #define DMA_REGS_CONFIG_BB_ROUTE_WRITE( i, k, v ) WRITE_I_32( DMA_REGS_CONFIG_BB_ROUTE_ARRAY [ i ], (k), (v) )
656 #define DMA_REGS_CONFIG_BB_ROUTE_READ( i, k, r ) READ_I_32( DMA_REGS_CONFIG_BB_ROUTE_ARRAY [ i ], (k), (r) )
658 #ifndef _BYTE_ORDER_LITTLE_ENDIAN_
662 uint32_t r2
: 17 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
664 /* bb_route_to_tx_side */
665 uint32_t txroute
: 7 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
668 uint32_t r1
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
670 /* bb_route_to_rx_side */
671 uint32_t rxroute
: 7 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
673 __PACKING_ATTRIBUTE_STRUCT_END__
674 DMA_REGS_CONFIG_BB_ROUTE
;
677 { uint32_t rxroute
: 7 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
679 /* bb_route_to_rx_side */
680 uint32_t r1
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
683 uint32_t txroute
: 7 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
685 /* bb_route_to_tx_side */
686 uint32_t r2
: 17 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
690 __PACKING_ATTRIBUTE_STRUCT_END__
691 DMA_REGS_CONFIG_BB_ROUTE
;
/*****************************************************************************************/
/* Resets the pointers of the peripherals FIFOs within the DMA. One bit per peripheral   */
/* side (rx and tx). For the rx side it resets the data and CD FIFOs; for the tx side    */
/* it resets the read FIFOs.                                                             */
/*****************************************************************************************/
701 #define DMA_REGS_CONFIG_PTRRST_R1_DEFAULT_VALUE ( 0x0 )
702 #define DMA_REGS_CONFIG_PTRRST_R1_DEFAULT_VALUE_RESET_VALUE ( 0x0 )
703 #define DMA_REGS_CONFIG_PTRRST_GPONTX_OFF_VALUE ( 0x0 )
704 #define DMA_REGS_CONFIG_PTRRST_GPONTX_OFF_VALUE_RESET_VALUE ( 0x0 )
705 #define DMA_REGS_CONFIG_PTRRST_GPONTX_RESET_VALUE ( 0x1 )
706 #define DMA_REGS_CONFIG_PTRRST_GPONRX_OFF_VALUE ( 0x0 )
707 #define DMA_REGS_CONFIG_PTRRST_GPONRX_OFF_VALUE_RESET_VALUE ( 0x0 )
708 #define DMA_REGS_CONFIG_PTRRST_GPONRX_RESET_VALUE ( 0x1 )
709 #define DMA_REGS_CONFIG_PTRRST_ETH4TX_OFF_VALUE ( 0x0 )
710 #define DMA_REGS_CONFIG_PTRRST_ETH4TX_OFF_VALUE_RESET_VALUE ( 0x0 )
711 #define DMA_REGS_CONFIG_PTRRST_ETH4TX_RESET_VALUE ( 0x1 )
712 #define DMA_REGS_CONFIG_PTRRST_ETH4RX_OFF_VALUE ( 0x0 )
713 #define DMA_REGS_CONFIG_PTRRST_ETH4RX_OFF_VALUE_RESET_VALUE ( 0x0 )
714 #define DMA_REGS_CONFIG_PTRRST_ETH4RX_RESET_VALUE ( 0x1 )
715 #define DMA_REGS_CONFIG_PTRRST_ETH3TX_OFF_VALUE ( 0x0 )
716 #define DMA_REGS_CONFIG_PTRRST_ETH3TX_OFF_VALUE_RESET_VALUE ( 0x0 )
717 #define DMA_REGS_CONFIG_PTRRST_ETH3TX_RESET_VALUE ( 0x1 )
718 #define DMA_REGS_CONFIG_PTRRST_ETH3RX_OFF_VALUE ( 0x0 )
719 #define DMA_REGS_CONFIG_PTRRST_ETH3RX_OFF_VALUE_RESET_VALUE ( 0x0 )
720 #define DMA_REGS_CONFIG_PTRRST_ETH3RX_RESET_VALUE ( 0x1 )
721 #define DMA_REGS_CONFIG_PTRRST_ETH2TX_OFF_VALUE ( 0x0 )
722 #define DMA_REGS_CONFIG_PTRRST_ETH2TX_OFF_VALUE_RESET_VALUE ( 0x0 )
723 #define DMA_REGS_CONFIG_PTRRST_ETH2TX_RESET_VALUE ( 0x1 )
724 #define DMA_REGS_CONFIG_PTRRST_ETH2RX_OFF_VALUE ( 0x0 )
725 #define DMA_REGS_CONFIG_PTRRST_ETH2RX_OFF_VALUE_RESET_VALUE ( 0x0 )
726 #define DMA_REGS_CONFIG_PTRRST_ETH2RX_RESET_VALUE ( 0x1 )
727 #define DMA_REGS_CONFIG_PTRRST_ETH1TX_OFF_VALUE ( 0x0 )
728 #define DMA_REGS_CONFIG_PTRRST_ETH1TX_OFF_VALUE_RESET_VALUE ( 0x0 )
729 #define DMA_REGS_CONFIG_PTRRST_ETH1TX_RESET_VALUE ( 0x1 )
730 #define DMA_REGS_CONFIG_PTRRST_ETH1RX_OFF_VALUE ( 0x0 )
731 #define DMA_REGS_CONFIG_PTRRST_ETH1RX_OFF_VALUE_RESET_VALUE ( 0x0 )
732 #define DMA_REGS_CONFIG_PTRRST_ETH1RX_RESET_VALUE ( 0x1 )
733 #define DMA_REGS_CONFIG_PTRRST_ETH0TX_OFF_VALUE ( 0x0 )
734 #define DMA_REGS_CONFIG_PTRRST_ETH0TX_OFF_VALUE_RESET_VALUE ( 0x0 )
735 #define DMA_REGS_CONFIG_PTRRST_ETH0TX_RESET_VALUE ( 0x1 )
736 #define DMA_REGS_CONFIG_PTRRST_ETH0RX_OFF_VALUE ( 0x0 )
737 #define DMA_REGS_CONFIG_PTRRST_ETH0RX_OFF_VALUE_RESET_VALUE ( 0x0 )
738 #define DMA_REGS_CONFIG_PTRRST_ETH0RX_RESET_VALUE ( 0x1 )
741 #define DMA_REGS_CONFIG_PTRRST_OFFSET ( 0x00000094 )
743 #define DMA_REGS_0_CONFIG_PTRRST_ADDRESS ( DMA_REGS_0_CONFIG_ADDRESS + DMA_REGS_CONFIG_PTRRST_OFFSET )
744 #define DMA_REGS_0_CONFIG_PTRRST_READ( r ) READ_32( ( DMA_REGS_0_CONFIG_PTRRST_ADDRESS ), (r) )
745 #define DMA_REGS_0_CONFIG_PTRRST_WRITE( v ) WRITE_32( ( DMA_REGS_0_CONFIG_PTRRST_ADDRESS ), (v) )
747 #define DMA_REGS_1_CONFIG_PTRRST_ADDRESS ( DMA_REGS_1_CONFIG_ADDRESS + DMA_REGS_CONFIG_PTRRST_OFFSET )
748 #define DMA_REGS_1_CONFIG_PTRRST_READ( r ) READ_32( ( DMA_REGS_1_CONFIG_PTRRST_ADDRESS ), (r) )
749 #define DMA_REGS_1_CONFIG_PTRRST_WRITE( v ) WRITE_32( ( DMA_REGS_1_CONFIG_PTRRST_ADDRESS ), (v) )
752 extern uint32_t DMA_REGS_CONFIG_PTRRST_ARRAY
[ ] ;
754 #define DMA_REGS_CONFIG_PTRRST_WRITE( i, v ) WRITE_32( DMA_REGS_CONFIG_PTRRST_ARRAY [ i ], (v) )
755 #define DMA_REGS_CONFIG_PTRRST_READ( i, r ) READ_32( DMA_REGS_CONFIG_PTRRST_ARRAY [ i ], (r) )
757 #ifndef _BYTE_ORDER_LITTLE_ENDIAN_
761 uint32_t r1
: 20 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
764 uint32_t gpontx
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
767 uint32_t gponrx
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
769 /* ethernet_4_tx_reset */
770 uint32_t eth4tx
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
772 /* ethernet_4_rx_reset */
773 uint32_t eth4rx
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
775 /* ethernet_3_tx_reset */
776 uint32_t eth3tx
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
778 /* ethernet_3_rx_reset */
779 uint32_t eth3rx
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
781 /* ethernet_2_tx_reset */
782 uint32_t eth2tx
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
784 /* ethernet_2_rx_reset */
785 uint32_t eth2rx
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
787 /* ethernet_1_tx_reset */
788 uint32_t eth1tx
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
790 /* ethernet_1_rx_reset */
791 uint32_t eth1rx
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
793 /* ethernet_0_tx_reset */
794 uint32_t eth0tx
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
796 /* ethernet_0_rx_reset */
797 uint32_t eth0rx
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
799 __PACKING_ATTRIBUTE_STRUCT_END__
800 DMA_REGS_CONFIG_PTRRST
;
803 { uint32_t eth0rx
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
805 /* ethernet_0_rx_reset */
806 uint32_t eth0tx
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
808 /* ethernet_0_tx_reset */
809 uint32_t eth1rx
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
811 /* ethernet_1_rx_reset */
812 uint32_t eth1tx
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
814 /* ethernet_1_tx_reset */
815 uint32_t eth2rx
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
817 /* ethernet_2_rx_reset */
818 uint32_t eth2tx
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
820 /* ethernet_2_tx_reset */
821 uint32_t eth3rx
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
823 /* ethernet_3_rx_reset */
824 uint32_t eth3tx
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
826 /* ethernet_3_tx_reset */
827 uint32_t eth4rx
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
829 /* ethernet_4_rx_reset */
830 uint32_t eth4tx
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
832 /* ethernet_4_tx_reset */
833 uint32_t gponrx
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
836 uint32_t gpontx
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
839 uint32_t r1
: 20 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
843 __PACKING_ATTRIBUTE_STRUCT_END__
844 DMA_REGS_CONFIG_PTRRST
;
/*****************************************************************************************/
/* For DMA, there are 2 data memories for write data (upstream). Each client has a confi */
/* gurable number of 128-byte buffers in one of the memories (see MEMORY_ALLOCATION reg  */
/* ister). The first memory has a total of 64 buffers, while the second has 32 buffers.  */
/* This register configures in which one of the memories the clients buffers are located */
/* (1 bit per client, 0 - first memory, 1 - second memory). The CD buffers will also be  */
/* located accordingly.                                                                  */
/*****************************************************************************************/
857 #define DMA_REGS_CONFIG_MEM_SEL_R1_RESERVED_VALUE ( 0x0 )
858 #define DMA_REGS_CONFIG_MEM_SEL_R1_RESERVED_VALUE_RESET_VALUE ( 0x0 )
859 #define DMA_REGS_CONFIG_MEM_SEL_MEM_SEL_FIRST_VALUE ( 0x0 )
860 #define DMA_REGS_CONFIG_MEM_SEL_MEM_SEL_FIRST_VALUE_RESET_VALUE ( 0x0 )
861 #define DMA_REGS_CONFIG_MEM_SEL_MEM_SEL_SECOND_VALUE ( 0x1 )
864 #define DMA_REGS_CONFIG_MEM_SEL_OFFSET ( 0x00000098 )
866 #define DMA_REGS_0_CONFIG_MEM_SEL_ADDRESS ( DMA_REGS_0_CONFIG_ADDRESS + DMA_REGS_CONFIG_MEM_SEL_OFFSET )
867 #define DMA_REGS_0_CONFIG_MEM_SEL_READ( r ) READ_32( ( DMA_REGS_0_CONFIG_MEM_SEL_ADDRESS ), (r) )
868 #define DMA_REGS_0_CONFIG_MEM_SEL_WRITE( v ) WRITE_32( ( DMA_REGS_0_CONFIG_MEM_SEL_ADDRESS ), (v) )
870 #define DMA_REGS_1_CONFIG_MEM_SEL_ADDRESS ( DMA_REGS_1_CONFIG_ADDRESS + DMA_REGS_CONFIG_MEM_SEL_OFFSET )
871 #define DMA_REGS_1_CONFIG_MEM_SEL_READ( r ) READ_32( ( DMA_REGS_1_CONFIG_MEM_SEL_ADDRESS ), (r) )
872 #define DMA_REGS_1_CONFIG_MEM_SEL_WRITE( v ) WRITE_32( ( DMA_REGS_1_CONFIG_MEM_SEL_ADDRESS ), (v) )
875 extern uint32_t DMA_REGS_CONFIG_MEM_SEL_ARRAY
[ ] ;
877 #define DMA_REGS_CONFIG_MEM_SEL_WRITE( i, v ) WRITE_32( DMA_REGS_CONFIG_MEM_SEL_ARRAY [ i ], (v) )
878 #define DMA_REGS_CONFIG_MEM_SEL_READ( i, r ) READ_32( DMA_REGS_CONFIG_MEM_SEL_ARRAY [ i ], (r) )
880 #ifndef _BYTE_ORDER_LITTLE_ENDIAN_
884 uint32_t r1
: 26 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
887 uint32_t mem_sel
: 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
889 __PACKING_ATTRIBUTE_STRUCT_END__
890 DMA_REGS_CONFIG_MEM_SEL
;
893 { uint32_t mem_sel
: 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
896 uint32_t r1
: 26 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
900 __PACKING_ATTRIBUTE_STRUCT_END__
901 DMA_REGS_CONFIG_MEM_SEL
;
/*****************************************************************************************/
/* NOT_EMPTY_VECTOR                                                                      */
/* Each peripheral, according to its source address, is represented by a bit in the not- */
/* empty vector. If the bit is asserted, the requests queue of the relevant peripheral   */
/* is not empty. The not-empty vector is used by the DMA scheduler to determine which    */
/* peripheral is the next to be served.                                                  */
/*****************************************************************************************/
912 #define DMA_REGS_DEBUG_NEMPTY_R1_R1_VALUE ( 0x0 )
913 #define DMA_REGS_DEBUG_NEMPTY_R1_R1_VALUE_RESET_VALUE ( 0x0 )
914 #define DMA_REGS_DEBUG_NEMPTY_GPONTXNE_EMPTY_VALUE ( 0x0 )
915 #define DMA_REGS_DEBUG_NEMPTY_GPONTXNE_EMPTY_VALUE_RESET_VALUE ( 0x0 )
916 #define DMA_REGS_DEBUG_NEMPTY_GPONTXNE_NOT_EMPTY_VALUE ( 0x1 )
917 #define DMA_REGS_DEBUG_NEMPTY_ETH4TXNE_EMPTY_VALUE ( 0x0 )
918 #define DMA_REGS_DEBUG_NEMPTY_ETH4TXNE_EMPTY_VALUE_RESET_VALUE ( 0x0 )
919 #define DMA_REGS_DEBUG_NEMPTY_ETH4TXNE_NOT_EMPTY_VALUE ( 0x1 )
920 #define DMA_REGS_DEBUG_NEMPTY_ETH3TXNE_EMPTY_VALUE ( 0x0 )
921 #define DMA_REGS_DEBUG_NEMPTY_ETH3TXNE_EMPTY_VALUE_RESET_VALUE ( 0x0 )
922 #define DMA_REGS_DEBUG_NEMPTY_ETH3TXNE_NOT_EMPTY_VALUE ( 0x1 )
923 #define DMA_REGS_DEBUG_NEMPTY_ETH2TXNE_EMPTY_VALUE ( 0x0 )
924 #define DMA_REGS_DEBUG_NEMPTY_ETH2TXNE_EMPTY_VALUE_RESET_VALUE ( 0x0 )
925 #define DMA_REGS_DEBUG_NEMPTY_ETH2TXNE_NOT_EMPTY_VALUE ( 0x1 )
926 #define DMA_REGS_DEBUG_NEMPTY_ETH1TXNE_EMPTY_VALUE ( 0x0 )
927 #define DMA_REGS_DEBUG_NEMPTY_ETH1TXNE_EMPTY_VALUE_RESET_VALUE ( 0x0 )
928 #define DMA_REGS_DEBUG_NEMPTY_ETH1TXNE_NOT_EMPTY_VALUE ( 0x1 )
929 #define DMA_REGS_DEBUG_NEMPTY_ETH0TXNE_EMPTY_VALUE ( 0x0 )
930 #define DMA_REGS_DEBUG_NEMPTY_ETH0TXNE_EMPTY_VALUE_RESET_VALUE ( 0x0 )
931 #define DMA_REGS_DEBUG_NEMPTY_ETH0TXNE_NOT_EMPTY_VALUE ( 0x1 )
932 #define DMA_REGS_DEBUG_NEMPTY_GPONRXNE_EMPTY_VALUE ( 0x0 )
933 #define DMA_REGS_DEBUG_NEMPTY_GPONRXNE_EMPTY_VALUE_RESET_VALUE ( 0x0 )
934 #define DMA_REGS_DEBUG_NEMPTY_GPONRXNE_NOT_EMPTY_VALUE ( 0x1 )
935 #define DMA_REGS_DEBUG_NEMPTY_ETH4RXNE_EMPTY_VALUE ( 0x0 )
936 #define DMA_REGS_DEBUG_NEMPTY_ETH4RXNE_EMPTY_VALUE_RESET_VALUE ( 0x0 )
937 #define DMA_REGS_DEBUG_NEMPTY_ETH4RXNE_NOT_EMPTY_VALUE ( 0x1 )
938 #define DMA_REGS_DEBUG_NEMPTY_ETH3RXNE_EMPTY_VALUE ( 0x0 )
939 #define DMA_REGS_DEBUG_NEMPTY_ETH3RXNE_EMPTY_VALUE_RESET_VALUE ( 0x0 )
940 #define DMA_REGS_DEBUG_NEMPTY_ETH3RXNE_NOR_EMPTY_VALUE ( 0x1 )
941 #define DMA_REGS_DEBUG_NEMPTY_ETH2RXNE_EMPTY_VALUE ( 0x0 )
942 #define DMA_REGS_DEBUG_NEMPTY_ETH2RXNE_EMPTY_VALUE_RESET_VALUE ( 0x0 )
943 #define DMA_REGS_DEBUG_NEMPTY_ETH2RXNE_NOT_EMPTY_VALUE ( 0x1 )
944 #define DMA_REGS_DEBUG_NEMPTY_ETH1RXNE_EMPTY_VALUE ( 0x0 )
945 #define DMA_REGS_DEBUG_NEMPTY_ETH1RXNE_EMPTY_VALUE_RESET_VALUE ( 0x0 )
946 #define DMA_REGS_DEBUG_NEMPTY_ETH1RXNE_NOT_EMPTY_VALUE ( 0x1 )
947 #define DMA_REGS_DEBUG_NEMPTY_ETH0RXNE_EMPTY_VALUE ( 0x0 )
948 #define DMA_REGS_DEBUG_NEMPTY_ETH0RXNE_EMPTY_VALUE_RESET_VALUE ( 0x0 )
949 #define DMA_REGS_DEBUG_NEMPTY_ETH0RXNE_NOT_EMPTY_VALUE ( 0x1 )
952 #define DMA_REGS_DEBUG_NEMPTY_OFFSET ( 0x00000000 )
954 #define DMA_REGS_0_DEBUG_NEMPTY_ADDRESS ( DMA_REGS_0_DEBUG_ADDRESS + DMA_REGS_DEBUG_NEMPTY_OFFSET )
955 #define DMA_REGS_0_DEBUG_NEMPTY_READ( r ) READ_32( ( DMA_REGS_0_DEBUG_NEMPTY_ADDRESS ), (r) )
956 #define DMA_REGS_0_DEBUG_NEMPTY_WRITE( v ) WRITE_32( ( DMA_REGS_0_DEBUG_NEMPTY_ADDRESS ), (v) )
958 #define DMA_REGS_1_DEBUG_NEMPTY_ADDRESS ( DMA_REGS_1_DEBUG_ADDRESS + DMA_REGS_DEBUG_NEMPTY_OFFSET )
959 #define DMA_REGS_1_DEBUG_NEMPTY_READ( r ) READ_32( ( DMA_REGS_1_DEBUG_NEMPTY_ADDRESS ), (r) )
960 #define DMA_REGS_1_DEBUG_NEMPTY_WRITE( v ) WRITE_32( ( DMA_REGS_1_DEBUG_NEMPTY_ADDRESS ), (v) )
963 extern uint32_t DMA_REGS_DEBUG_NEMPTY_ARRAY
[ ] ;
965 #define DMA_REGS_DEBUG_NEMPTY_WRITE( i, v ) WRITE_32( DMA_REGS_DEBUG_NEMPTY_ARRAY [ i ], (v) )
966 #define DMA_REGS_DEBUG_NEMPTY_READ( i, r ) READ_32( DMA_REGS_DEBUG_NEMPTY_ARRAY [ i ], (r) )
968 #ifndef _BYTE_ORDER_LITTLE_ENDIAN_
972 uint32_t r1
: 20 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
974 /* GPON_TX_not_empty_indications */
975 uint32_t gpontxne
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
977 /* Ethernet4_TX_not_empty_indications */
978 uint32_t eth4txne
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
980 /* Ethernet3_TX_not_empty_indications */
981 uint32_t eth3txne
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
983 /* Ethernet2_TX_not_empty_indications */
984 uint32_t eth2txne
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
986 /* Ethernet1_TX_not_empty_indications */
987 uint32_t eth1txne
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
989 /* Ethernet0_TX_not_empty_indications */
990 uint32_t eth0txne
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
992 /* GPON_RX_not_empty_indications */
993 uint32_t gponrxne
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
995 /* Ethernet4_RX_not_empty_indications */
996 uint32_t eth4rxne
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
998 /* Ethernet3_RX_not_empty_indications */
999 uint32_t eth3rxne
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1001 /* Ethernet2_RX_not_empty_indications */
1002 uint32_t eth2rxne
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1004 /* Ethernet1_RX_not_empty_indications */
1005 uint32_t eth1rxne
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1007 /* Ethernet0_RX_not_empty_indications */
1008 uint32_t eth0rxne
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1010 __PACKING_ATTRIBUTE_STRUCT_END__
1011 DMA_REGS_DEBUG_NEMPTY
;
1014 { uint32_t eth0rxne
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1016 /* Ethernet0_RX_not_empty_indications */
1017 uint32_t eth1rxne
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1019 /* Ethernet1_RX_not_empty_indications */
1020 uint32_t eth2rxne
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1022 /* Ethernet2_RX_not_empty_indications */
1023 uint32_t eth3rxne
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1025 /* Ethernet3_RX_not_empty_indications */
1026 uint32_t eth4rxne
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1028 /* Ethernet4_RX_not_empty_indications */
1029 uint32_t gponrxne
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1031 /* GPON_RX_not_empty_indications */
1032 uint32_t eth0txne
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1034 /* Ethernet0_TX_not_empty_indications */
1035 uint32_t eth1txne
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1037 /* Ethernet1_TX_not_empty_indications */
1038 uint32_t eth2txne
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1040 /* Ethernet2_TX_not_empty_indications */
1041 uint32_t eth3txne
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1043 /* Ethernet3_TX_not_empty_indications */
1044 uint32_t eth4txne
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1046 /* Ethernet4_TX_not_empty_indications */
1047 uint32_t gpontxne
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1049 /* GPON_TX_not_empty_indications */
1050 uint32_t r1
: 20 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1054 __PACKING_ATTRIBUTE_STRUCT_END__
1055 DMA_REGS_DEBUG_NEMPTY
;
/*****************************************************************************************/
/* Each peripheral, according to its source address, is represented by a bit in the urg  */
/* ent vector. If the bit is asserted, the requests queue of the relevant peripheral is  */
/* in urgent state. The urgent vector is used by the DMA scheduler to determine which    */
/* peripheral is the next to be served.                                                  */
/*****************************************************************************************/
1066 #define DMA_REGS_DEBUG_URGNT_R1_R1_VALUE ( 0x0 )
1067 #define DMA_REGS_DEBUG_URGNT_R1_R1_VALUE_RESET_VALUE ( 0x0 )
1068 #define DMA_REGS_DEBUG_URGNT_GPONTXU_NOT_URGENT_VALUE ( 0x0 )
1069 #define DMA_REGS_DEBUG_URGNT_GPONTXU_NOT_URGENT_VALUE_RESET_VALUE ( 0x0 )
1070 #define DMA_REGS_DEBUG_URGNT_GPONTXU_URGENT_VALUE ( 0x1 )
1071 #define DMA_REGS_DEBUG_URGNT_ETH4TXU_NOT_URGENT_VALUE ( 0x0 )
1072 #define DMA_REGS_DEBUG_URGNT_ETH4TXU_NOT_URGENT_VALUE_RESET_VALUE ( 0x0 )
1073 #define DMA_REGS_DEBUG_URGNT_ETH4TXU_URGENT_VALUE ( 0x1 )
1074 #define DMA_REGS_DEBUG_URGNT_ETH3TXU_NOT_URGENT_VALUE ( 0x0 )
1075 #define DMA_REGS_DEBUG_URGNT_ETH3TXU_NOT_URGENT_VALUE_RESET_VALUE ( 0x0 )
1076 #define DMA_REGS_DEBUG_URGNT_ETH3TXU_URGENT_VALUE ( 0x1 )
1077 #define DMA_REGS_DEBUG_URGNT_ETH2TXU_NOT_URGENT_VALUE ( 0x0 )
1078 #define DMA_REGS_DEBUG_URGNT_ETH2TXU_NOT_URGENT_VALUE_RESET_VALUE ( 0x0 )
1079 #define DMA_REGS_DEBUG_URGNT_ETH2TXU_URGENT_VALUE ( 0x1 )
1080 #define DMA_REGS_DEBUG_URGNT_ETH1TXU_NOT_URGENT_VALUE ( 0x0 )
1081 #define DMA_REGS_DEBUG_URGNT_ETH1TXU_NOT_URGENT_VALUE_RESET_VALUE ( 0x0 )
1082 #define DMA_REGS_DEBUG_URGNT_ETH1TXU_URGENT_VALUE ( 0x1 )
1083 #define DMA_REGS_DEBUG_URGNT_ETH0TXU_NOT_URGENT_VALUE ( 0x0 )
1084 #define DMA_REGS_DEBUG_URGNT_ETH0TXU_NOT_URGENT_VALUE_RESET_VALUE ( 0x0 )
1085 #define DMA_REGS_DEBUG_URGNT_ETH0TXU_URGENT_VALUE ( 0x1 )
1086 #define DMA_REGS_DEBUG_URGNT_GPONRXU_NOT_URGENT_VALUE ( 0x0 )
1087 #define DMA_REGS_DEBUG_URGNT_GPONRXU_NOT_URGENT_VALUE_RESET_VALUE ( 0x0 )
1088 #define DMA_REGS_DEBUG_URGNT_GPONRXU_URGENT_VALUE ( 0x1 )
1089 #define DMA_REGS_DEBUG_URGNT_ETH4RXU_NOT_URGENT_VALUE ( 0x0 )
1090 #define DMA_REGS_DEBUG_URGNT_ETH4RXU_NOT_URGENT_VALUE_RESET_VALUE ( 0x0 )
1091 #define DMA_REGS_DEBUG_URGNT_ETH4RXU_URGENT_VALUE ( 0x1 )
1092 #define DMA_REGS_DEBUG_URGNT_ETH3RXU_NOT_URGENT_VALUE ( 0x0 )
1093 #define DMA_REGS_DEBUG_URGNT_ETH3RXU_NOT_URGENT_VALUE_RESET_VALUE ( 0x0 )
1094 #define DMA_REGS_DEBUG_URGNT_ETH3RXU_URGENT_VALUE ( 0x1 )
1095 #define DMA_REGS_DEBUG_URGNT_ETH2RXU_NOT_URGENT_VALUE ( 0x0 )
1096 #define DMA_REGS_DEBUG_URGNT_ETH2RXU_NOT_URGENT_VALUE_RESET_VALUE ( 0x0 )
1097 #define DMA_REGS_DEBUG_URGNT_ETH2RXU_URGENT_VALUE ( 0x1 )
1098 #define DMA_REGS_DEBUG_URGNT_ETH1RXU_NOT_URGENT_VALUE ( 0x0 )
1099 #define DMA_REGS_DEBUG_URGNT_ETH1RXU_NOT_URGENT_VALUE_RESET_VALUE ( 0x0 )
1100 #define DMA_REGS_DEBUG_URGNT_ETH1RXU_URGENT_VALUE ( 0x1 )
1101 #define DMA_REGS_DEBUG_URGNT_ETH0RXU_NOT_URGENT_VALUE ( 0x0 )
1102 #define DMA_REGS_DEBUG_URGNT_ETH0RXU_NOT_URGENT_VALUE_RESET_VALUE ( 0x0 )
1103 #define DMA_REGS_DEBUG_URGNT_ETH0RXU_URGENT_VALUE ( 0x1 )
1106 #define DMA_REGS_DEBUG_URGNT_OFFSET ( 0x00000004 )
1108 #define DMA_REGS_0_DEBUG_URGNT_ADDRESS ( DMA_REGS_0_DEBUG_ADDRESS + DMA_REGS_DEBUG_URGNT_OFFSET )
1109 #define DMA_REGS_0_DEBUG_URGNT_READ( r ) READ_32( ( DMA_REGS_0_DEBUG_URGNT_ADDRESS ), (r) )
1110 #define DMA_REGS_0_DEBUG_URGNT_WRITE( v ) WRITE_32( ( DMA_REGS_0_DEBUG_URGNT_ADDRESS ), (v) )
1112 #define DMA_REGS_1_DEBUG_URGNT_ADDRESS ( DMA_REGS_1_DEBUG_ADDRESS + DMA_REGS_DEBUG_URGNT_OFFSET )
1113 #define DMA_REGS_1_DEBUG_URGNT_READ( r ) READ_32( ( DMA_REGS_1_DEBUG_URGNT_ADDRESS ), (r) )
1114 #define DMA_REGS_1_DEBUG_URGNT_WRITE( v ) WRITE_32( ( DMA_REGS_1_DEBUG_URGNT_ADDRESS ), (v) )
1117 extern uint32_t DMA_REGS_DEBUG_URGNT_ARRAY
[ ] ;
1119 #define DMA_REGS_DEBUG_URGNT_WRITE( i, v ) WRITE_32( DMA_REGS_DEBUG_URGNT_ARRAY [ i ], (v) )
1120 #define DMA_REGS_DEBUG_URGNT_READ( i, r ) READ_32( DMA_REGS_DEBUG_URGNT_ARRAY [ i ], (r) )
1122 #ifndef _BYTE_ORDER_LITTLE_ENDIAN_
1126 uint32_t r1
: 20 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1128 /* GPON_TX_urgent_indication */
1129 uint32_t gpontxu
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1131 /* Ethernet4_TX_urgent_indication */
1132 uint32_t eth4txu
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1134 /* Ethernet3_TX_urgent_indication */
1135 uint32_t eth3txu
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1137 /* Ethernet2_TX_urgent_indication */
1138 uint32_t eth2txu
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1140 /* Ethernet1_TX_urgent_indication */
1141 uint32_t eth1txu
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1143 /* Ethernet0_TX_urgent_indication */
1144 uint32_t eth0txu
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1146 /* GPON_RX_urgent_indication */
1147 uint32_t gponrxu
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1149 /* Ethernet4_RX_urgent_indication */
1150 uint32_t eth4rxu
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1152 /* Ethernet3_RX_urgent_indication */
1153 uint32_t eth3rxu
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1155 /* Ethernet2_RX_urgent_indication */
1156 uint32_t eth2rxu
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1158 /* Ethernet1_RX_urgent_indication */
1159 uint32_t eth1rxu
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1161 /* Ethernet0_RX_urgent_indication */
1162 uint32_t eth0rxu
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1164 __PACKING_ATTRIBUTE_STRUCT_END__
1165 DMA_REGS_DEBUG_URGNT
;
1168 { uint32_t eth0rxu
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1170 /* Ethernet0_RX_urgent_indication */
1171 uint32_t eth1rxu
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1173 /* Ethernet1_RX_urgent_indication */
1174 uint32_t eth2rxu
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1176 /* Ethernet2_RX_urgent_indication */
1177 uint32_t eth3rxu
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1179 /* Ethernet3_RX_urgent_indication */
1180 uint32_t eth4rxu
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1182 /* Ethernet4_RX_urgent_indication */
1183 uint32_t gponrxu
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1185 /* GPON_RX_urgent_indication */
1186 uint32_t eth0txu
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1188 /* Ethernet0_TX_urgent_indication */
1189 uint32_t eth1txu
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1191 /* Ethernet1_TX_urgent_indication */
1192 uint32_t eth2txu
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1194 /* Ethernet2_TX_urgent_indication */
1195 uint32_t eth3txu
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1197 /* Ethernet3_TX_urgent_indication */
1198 uint32_t eth4txu
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1200 /* Ethernet4_TX_urgent_indication */
1201 uint32_t gpontxu
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1203 /* GPON_TX_urgent_indication */
1204 uint32_t r1
: 20 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1208 __PACKING_ATTRIBUTE_STRUCT_END__
1209 DMA_REGS_DEBUG_URGNT
;
/*****************************************************************************************/
/* SELECTED_SOURCE_NUM                                                                   */
/* The decision of the DMA scheduler: the next peripheral to be served, represented by   */
/* its source address.                                                                   */
/*****************************************************************************************/
1218 #define DMA_REGS_DEBUG_SELSRC_R1_R1_VALUE ( 0x0 )
1219 #define DMA_REGS_DEBUG_SELSRC_R1_R1_VALUE_RESET_VALUE ( 0x0 )
1220 #define DMA_REGS_DEBUG_SELSRC_SEL_SRC_ETH0_RX_VALUE ( 0x0 )
1221 #define DMA_REGS_DEBUG_SELSRC_SEL_SRC_ETH0_RX_VALUE_RESET_VALUE ( 0x0 )
1222 #define DMA_REGS_DEBUG_SELSRC_SEL_SRC_ETH1_RX_VALUE ( 0x1 )
1223 #define DMA_REGS_DEBUG_SELSRC_SEL_SRC_ETH2_RX_VALUE ( 0x2 )
1224 #define DMA_REGS_DEBUG_SELSRC_SEL_SRC_ETH3_RX_VALUE ( 0x3 )
1225 #define DMA_REGS_DEBUG_SELSRC_SEL_SRC_ETH4_RX_VALUE ( 0x4 )
1226 #define DMA_REGS_DEBUG_SELSRC_SEL_SRC_GPON_RX_VALUE ( 0x5 )
1227 #define DMA_REGS_DEBUG_SELSRC_SEL_SRC_ETH0_TX_VALUE ( 0x8 )
1228 #define DMA_REGS_DEBUG_SELSRC_SEL_SRC_ETH1_TX_VALUE ( 0x9 )
1229 #define DMA_REGS_DEBUG_SELSRC_SEL_SRC_ETH2_TX_VALUE ( 0xA )
1230 #define DMA_REGS_DEBUG_SELSRC_SEL_SRC_ETH3_TX_VALUE ( 0xB )
1231 #define DMA_REGS_DEBUG_SELSRC_SEL_SRC_ETH4_TX_VALUE ( 0xC )
1232 #define DMA_REGS_DEBUG_SELSRC_SEL_SRC_GPON_TX_VALUE ( 0xD )
1233 #define DMA_REGS_DEBUG_SELSRC_SEL_SRC_DEFAULT_VALUE ( 0x1F )
1234 #define DMA_REGS_DEBUG_SELSRC_SEL_SRC_DEFAULT_VALUE_RESET_VALUE ( 0x1F )
1237 #define DMA_REGS_DEBUG_SELSRC_OFFSET ( 0x00000008 )
1239 #define DMA_REGS_0_DEBUG_SELSRC_ADDRESS ( DMA_REGS_0_DEBUG_ADDRESS + DMA_REGS_DEBUG_SELSRC_OFFSET )
1240 #define DMA_REGS_0_DEBUG_SELSRC_READ( r ) READ_32( ( DMA_REGS_0_DEBUG_SELSRC_ADDRESS ), (r) )
1241 #define DMA_REGS_0_DEBUG_SELSRC_WRITE( v ) WRITE_32( ( DMA_REGS_0_DEBUG_SELSRC_ADDRESS ), (v) )
1243 #define DMA_REGS_1_DEBUG_SELSRC_ADDRESS ( DMA_REGS_1_DEBUG_ADDRESS + DMA_REGS_DEBUG_SELSRC_OFFSET )
1244 #define DMA_REGS_1_DEBUG_SELSRC_READ( r ) READ_32( ( DMA_REGS_1_DEBUG_SELSRC_ADDRESS ), (r) )
1245 #define DMA_REGS_1_DEBUG_SELSRC_WRITE( v ) WRITE_32( ( DMA_REGS_1_DEBUG_SELSRC_ADDRESS ), (v) )
1248 extern uint32_t DMA_REGS_DEBUG_SELSRC_ARRAY
[ ] ;
1250 #define DMA_REGS_DEBUG_SELSRC_WRITE( i, v ) WRITE_32( DMA_REGS_DEBUG_SELSRC_ARRAY [ i ], (v) )
1251 #define DMA_REGS_DEBUG_SELSRC_READ( i, r ) READ_32( DMA_REGS_DEBUG_SELSRC_ARRAY [ i ], (r) )
1253 #ifndef _BYTE_ORDER_LITTLE_ENDIAN_
1257 uint32_t r1
: 27 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1259 /* selected_source */
1260 uint32_t sel_src
: 5 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1262 __PACKING_ATTRIBUTE_STRUCT_END__
1263 DMA_REGS_DEBUG_SELSRC
;
1266 { uint32_t sel_src
: 5 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1268 /* selected_source */
1269 uint32_t r1
: 27 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1273 __PACKING_ATTRIBUTE_STRUCT_END__
1274 DMA_REGS_DEBUG_SELSRC
;
/*****************************************************************************************/
/* REQUEST_COUNTERS_RX                                                                   */
/* The number of write requests currently pending for each rx peripheral.                */
/*****************************************************************************************/
1282 #define DMA_REGS_DEBUG_REQ_CNT_RX_R1_R1_VALUE ( 0x0 )
1283 #define DMA_REGS_DEBUG_REQ_CNT_RX_R1_R1_VALUE_RESET_VALUE ( 0x0 )
1284 #define DMA_REGS_DEBUG_REQ_CNT_RX_REQ_CNT_MIN_VALUE ( 0x0 )
1285 #define DMA_REGS_DEBUG_REQ_CNT_RX_REQ_CNT_MIN_VALUE_RESET_VALUE ( 0x0 )
1286 #define DMA_REGS_DEBUG_REQ_CNT_RX_REQ_CNT_MAX_VALUE ( 0x20 )
1289 #define DMA_REGS_DEBUG_REQ_CNT_RX_OFFSET ( 0x0000000C )
1291 #define DMA_REGS_0_DEBUG_REQ_CNT_RX_ADDRESS ( DMA_REGS_0_DEBUG_ADDRESS + DMA_REGS_DEBUG_REQ_CNT_RX_OFFSET )
1292 #define DMA_REGS_0_DEBUG_REQ_CNT_RX_READ_I( r, i ) READ_I_32( ( DMA_REGS_0_DEBUG_REQ_CNT_RX_ADDRESS ), (i), (r) )
1293 #define DMA_REGS_0_DEBUG_REQ_CNT_RX_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_0_DEBUG_REQ_CNT_RX_ADDRESS ), (i), (v) )
1295 #define DMA_REGS_1_DEBUG_REQ_CNT_RX_ADDRESS ( DMA_REGS_1_DEBUG_ADDRESS + DMA_REGS_DEBUG_REQ_CNT_RX_OFFSET )
1296 #define DMA_REGS_1_DEBUG_REQ_CNT_RX_READ_I( r, i ) READ_I_32( ( DMA_REGS_1_DEBUG_REQ_CNT_RX_ADDRESS ), (i), (r) )
1297 #define DMA_REGS_1_DEBUG_REQ_CNT_RX_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_1_DEBUG_REQ_CNT_RX_ADDRESS ), (i), (v) )
1300 extern uint32_t DMA_REGS_DEBUG_REQ_CNT_RX_ARRAY
[ ] ;
1302 #define DMA_REGS_DEBUG_REQ_CNT_RX_WRITE( i, k, v ) WRITE_I_32( DMA_REGS_DEBUG_REQ_CNT_RX_ARRAY [ i ], (k), (v) )
1303 #define DMA_REGS_DEBUG_REQ_CNT_RX_READ( i, k, r ) READ_I_32( DMA_REGS_DEBUG_REQ_CNT_RX_ARRAY [ i ], (k), (r) )
1305 #ifndef _BYTE_ORDER_LITTLE_ENDIAN_
1309 uint32_t r1
: 26 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1311 /* write_requests_counter */
1312 uint32_t req_cnt
: 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1314 __PACKING_ATTRIBUTE_STRUCT_END__
1315 DMA_REGS_DEBUG_REQ_CNT_RX
;
1318 { uint32_t req_cnt
: 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1320 /* write_requests_counter */
1321 uint32_t r1
: 26 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1325 __PACKING_ATTRIBUTE_STRUCT_END__
1326 DMA_REGS_DEBUG_REQ_CNT_RX
;
/*****************************************************************************************/
/* REQUEST_COUNTERS_TX                                                                   */
/* The number of read requests currently pending for each tx peripheral.                 */
/*****************************************************************************************/
1334 #define DMA_REGS_DEBUG_REQ_CNT_TX_R1_R3_VALUE ( 0x0 )
1335 #define DMA_REGS_DEBUG_REQ_CNT_TX_R1_R3_VALUE_RESET_VALUE ( 0x0 )
1336 #define DMA_REGS_DEBUG_REQ_CNT_TX_REQ_CNT_MIN_VALUE ( 0x0 )
1337 #define DMA_REGS_DEBUG_REQ_CNT_TX_REQ_CNT_MIN_VALUE_RESET_VALUE ( 0x0 )
1338 #define DMA_REGS_DEBUG_REQ_CNT_TX_REQ_CNT_MAX_VALUE ( 0x8 )
1341 #define DMA_REGS_DEBUG_REQ_CNT_TX_OFFSET ( 0x00000024 )
1343 #define DMA_REGS_0_DEBUG_REQ_CNT_TX_ADDRESS ( DMA_REGS_0_DEBUG_ADDRESS + DMA_REGS_DEBUG_REQ_CNT_TX_OFFSET )
1344 #define DMA_REGS_0_DEBUG_REQ_CNT_TX_READ_I( r, i ) READ_I_32( ( DMA_REGS_0_DEBUG_REQ_CNT_TX_ADDRESS ), (i), (r) )
1345 #define DMA_REGS_0_DEBUG_REQ_CNT_TX_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_0_DEBUG_REQ_CNT_TX_ADDRESS ), (i), (v) )
1347 #define DMA_REGS_1_DEBUG_REQ_CNT_TX_ADDRESS ( DMA_REGS_1_DEBUG_ADDRESS + DMA_REGS_DEBUG_REQ_CNT_TX_OFFSET )
1348 #define DMA_REGS_1_DEBUG_REQ_CNT_TX_READ_I( r, i ) READ_I_32( ( DMA_REGS_1_DEBUG_REQ_CNT_TX_ADDRESS ), (i), (r) )
1349 #define DMA_REGS_1_DEBUG_REQ_CNT_TX_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_1_DEBUG_REQ_CNT_TX_ADDRESS ), (i), (v) )
1352 extern uint32_t DMA_REGS_DEBUG_REQ_CNT_TX_ARRAY
[ ] ;
1354 #define DMA_REGS_DEBUG_REQ_CNT_TX_WRITE( i, k, v ) WRITE_I_32( DMA_REGS_DEBUG_REQ_CNT_TX_ARRAY [ i ], (k), (v) )
1355 #define DMA_REGS_DEBUG_REQ_CNT_TX_READ( i, k, r ) READ_I_32( DMA_REGS_DEBUG_REQ_CNT_TX_ARRAY [ i ], (k), (r) )
1357 #ifndef _BYTE_ORDER_LITTLE_ENDIAN_
1361 uint32_t r1
: 28 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1363 /* read_requests_counter */
1364 uint32_t req_cnt
: 4 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1366 __PACKING_ATTRIBUTE_STRUCT_END__
1367 DMA_REGS_DEBUG_REQ_CNT_TX
;
1370 { uint32_t req_cnt
: 4 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1372 /* read_requests_counter */
1373 uint32_t r1
: 28 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1377 __PACKING_ATTRIBUTE_STRUCT_END__
1378 DMA_REGS_DEBUG_REQ_CNT_TX
;
1381 /*****************************************************************************************/
1382 /* ACC_REQUEST_COUNTERS_RX */
1383 /* the accumulated number of write requests served so far for each peripheral. Wrap arou */
1384 /* nd on max value, not read clear. */
1385 /*****************************************************************************************/
1387 #define DMA_REGS_DEBUG_REQ_CNT_RX_ACC_REQ_CNT_CNT_VALUE ( 0x0 )
1388 #define DMA_REGS_DEBUG_REQ_CNT_RX_ACC_REQ_CNT_CNT_VALUE_RESET_VALUE ( 0x0 )
/* Byte offset of this register array within each DEBUG block. */
1391 #define DMA_REGS_DEBUG_REQ_CNT_RX_ACC_OFFSET ( 0x0000003C )
1393 #define DMA_REGS_0_DEBUG_REQ_CNT_RX_ACC_ADDRESS ( DMA_REGS_0_DEBUG_ADDRESS + DMA_REGS_DEBUG_REQ_CNT_RX_ACC_OFFSET )
1394 #define DMA_REGS_0_DEBUG_REQ_CNT_RX_ACC_READ_I( r, i ) READ_I_32( ( DMA_REGS_0_DEBUG_REQ_CNT_RX_ACC_ADDRESS ), (i), (r) )
1395 #define DMA_REGS_0_DEBUG_REQ_CNT_RX_ACC_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_0_DEBUG_REQ_CNT_RX_ACC_ADDRESS ), (i), (v) )
1397 #define DMA_REGS_1_DEBUG_REQ_CNT_RX_ACC_ADDRESS ( DMA_REGS_1_DEBUG_ADDRESS + DMA_REGS_DEBUG_REQ_CNT_RX_ACC_OFFSET )
1398 #define DMA_REGS_1_DEBUG_REQ_CNT_RX_ACC_READ_I( r, i ) READ_I_32( ( DMA_REGS_1_DEBUG_REQ_CNT_RX_ACC_ADDRESS ), (i), (r) )
1399 #define DMA_REGS_1_DEBUG_REQ_CNT_RX_ACC_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_1_DEBUG_REQ_CNT_RX_ACC_ADDRESS ), (i), (v) )
/* Address table with one entry per DMA module; defined in the companion .c file. */
1402 extern uint32_t DMA_REGS_DEBUG_REQ_CNT_RX_ACC_ARRAY
[ ] ;
1404 #define DMA_REGS_DEBUG_REQ_CNT_RX_ACC_WRITE( i, k, v ) WRITE_I_32( DMA_REGS_DEBUG_REQ_CNT_RX_ACC_ARRAY [ i ], (k), (v) )
1405 #define DMA_REGS_DEBUG_REQ_CNT_RX_ACC_READ( i, k, r ) READ_I_32( DMA_REGS_DEBUG_REQ_CNT_RX_ACC_ARRAY [ i ], (k), (r) )
1407 #ifndef _BYTE_ORDER_LITTLE_ENDIAN_
1410 /* write_requests_counter */
1411 uint32_t req_cnt
: 32 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1413 __PACKING_ATTRIBUTE_STRUCT_END__
1414 DMA_REGS_DEBUG_REQ_CNT_RX_ACC
;
1417 { uint32_t req_cnt
: 32 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1419 /* write_requests_counter */
1421 __PACKING_ATTRIBUTE_STRUCT_END__
1422 DMA_REGS_DEBUG_REQ_CNT_RX_ACC
;
1425 /*****************************************************************************************/
1426 /* ACC_REQUEST_COUNTERS_TX */
1427 /* the accumulated number of read requests served so far for each peripheral. Wrap aroun */
1428 /* d on max value, not read clear. */
1429 /*****************************************************************************************/
1431 #define DMA_REGS_DEBUG_REQ_CNT_TX_ACC_REQ_CNT_CNT_VALUE ( 0x0 )
1432 #define DMA_REGS_DEBUG_REQ_CNT_TX_ACC_REQ_CNT_CNT_VALUE_RESET_VALUE ( 0x0 )
/* Byte offset of this register array within each DEBUG block. */
1435 #define DMA_REGS_DEBUG_REQ_CNT_TX_ACC_OFFSET ( 0x00000054 )
1437 #define DMA_REGS_0_DEBUG_REQ_CNT_TX_ACC_ADDRESS ( DMA_REGS_0_DEBUG_ADDRESS + DMA_REGS_DEBUG_REQ_CNT_TX_ACC_OFFSET )
1438 #define DMA_REGS_0_DEBUG_REQ_CNT_TX_ACC_READ_I( r, i ) READ_I_32( ( DMA_REGS_0_DEBUG_REQ_CNT_TX_ACC_ADDRESS ), (i), (r) )
1439 #define DMA_REGS_0_DEBUG_REQ_CNT_TX_ACC_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_0_DEBUG_REQ_CNT_TX_ACC_ADDRESS ), (i), (v) )
1441 #define DMA_REGS_1_DEBUG_REQ_CNT_TX_ACC_ADDRESS ( DMA_REGS_1_DEBUG_ADDRESS + DMA_REGS_DEBUG_REQ_CNT_TX_ACC_OFFSET )
1442 #define DMA_REGS_1_DEBUG_REQ_CNT_TX_ACC_READ_I( r, i ) READ_I_32( ( DMA_REGS_1_DEBUG_REQ_CNT_TX_ACC_ADDRESS ), (i), (r) )
1443 #define DMA_REGS_1_DEBUG_REQ_CNT_TX_ACC_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_1_DEBUG_REQ_CNT_TX_ACC_ADDRESS ), (i), (v) )
/* Address table with one entry per DMA module; defined in the companion .c file. */
1446 extern uint32_t DMA_REGS_DEBUG_REQ_CNT_TX_ACC_ARRAY
[ ] ;
1448 #define DMA_REGS_DEBUG_REQ_CNT_TX_ACC_WRITE( i, k, v ) WRITE_I_32( DMA_REGS_DEBUG_REQ_CNT_TX_ACC_ARRAY [ i ], (k), (v) )
1449 #define DMA_REGS_DEBUG_REQ_CNT_TX_ACC_READ( i, k, r ) READ_I_32( DMA_REGS_DEBUG_REQ_CNT_TX_ACC_ARRAY [ i ], (k), (r) )
1451 #ifndef _BYTE_ORDER_LITTLE_ENDIAN_
1454 /* write_requests_counter */
1455 uint32_t req_cnt
: 32 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1457 __PACKING_ATTRIBUTE_STRUCT_END__
1458 DMA_REGS_DEBUG_REQ_CNT_TX_ACC
;
1461 { uint32_t req_cnt
: 32 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1463 /* write_requests_counter */
1465 __PACKING_ATTRIBUTE_STRUCT_END__
1466 DMA_REGS_DEBUG_REQ_CNT_TX_ACC
;
1469 /*****************************************************************************************/
1471 /* The address and CS (chip-select) of the RAM the user wishes to read via the indirect */
1473 /*****************************************************************************************/
/* Field value constants for RDADD: each *_OFF_VALUE/_ON_VALUE pair encodes one of the */
/* chip-select bits (cdcs1, datacs1, rdcs, rrcs, cdcs, datacs); r1/r2 are reserved. */
1475 #define DMA_REGS_DEBUG_RDADD_R2_DEFAULT_VALUE ( 0x0 )
1476 #define DMA_REGS_DEBUG_RDADD_R2_DEFAULT_VALUE_RESET_VALUE ( 0x0 )
1477 #define DMA_REGS_DEBUG_RDADD_CDCS1_OFF_VALUE ( 0x0 )
1478 #define DMA_REGS_DEBUG_RDADD_CDCS1_OFF_VALUE_RESET_VALUE ( 0x0 )
1479 #define DMA_REGS_DEBUG_RDADD_CDCS1_ON_VALUE ( 0x1 )
1480 #define DMA_REGS_DEBUG_RDADD_DATACS1_OFF_VALUE ( 0x0 )
1481 #define DMA_REGS_DEBUG_RDADD_DATACS1_OFF_VALUE_RESET_VALUE ( 0x0 )
1482 #define DMA_REGS_DEBUG_RDADD_DATACS1_ON_VALUE ( 0x1 )
1483 #define DMA_REGS_DEBUG_RDADD_RDCS_OFF_VALUE ( 0x0 )
1484 #define DMA_REGS_DEBUG_RDADD_RDCS_OFF_VALUE_RESET_VALUE ( 0x0 )
1485 #define DMA_REGS_DEBUG_RDADD_RDCS_ON_VALUE ( 0x1 )
1486 #define DMA_REGS_DEBUG_RDADD_RRCS_OFF_VALUE ( 0x0 )
1487 #define DMA_REGS_DEBUG_RDADD_RRCS_OFF_VALUE_RESET_VALUE ( 0x0 )
1488 #define DMA_REGS_DEBUG_RDADD_RRCS_ON_VALUE ( 0x1 )
1489 #define DMA_REGS_DEBUG_RDADD_CDCS_OFF_VALUE ( 0x0 )
1490 #define DMA_REGS_DEBUG_RDADD_CDCS_OFF_VALUE_RESET_VALUE ( 0x0 )
1491 #define DMA_REGS_DEBUG_RDADD_CDCS_ON_VALUE ( 0x1 )
1492 #define DMA_REGS_DEBUG_RDADD_DATACS_OFF_VALUE ( 0x0 )
1493 #define DMA_REGS_DEBUG_RDADD_DATACS_OFF_VALUE_RESET_VALUE ( 0x0 )
1494 #define DMA_REGS_DEBUG_RDADD_DATACS_ON_VALUE ( 0x1 )
1495 #define DMA_REGS_DEBUG_RDADD_R1_DEFAULT_VALUE ( 0x0 )
1496 #define DMA_REGS_DEBUG_RDADD_R1_DEFAULT_VALUE_RESET_VALUE ( 0x0 )
1497 #define DMA_REGS_DEBUG_RDADD_ADDRESS_ADD_VALUE ( 0x0 )
1498 #define DMA_REGS_DEBUG_RDADD_ADDRESS_ADD_VALUE_RESET_VALUE ( 0x0 )
/* Byte offset of the RDADD register within each DEBUG block. */
1501 #define DMA_REGS_DEBUG_RDADD_OFFSET ( 0x00000100 )
1503 #define DMA_REGS_0_DEBUG_RDADD_ADDRESS ( DMA_REGS_0_DEBUG_ADDRESS + DMA_REGS_DEBUG_RDADD_OFFSET )
1504 #define DMA_REGS_0_DEBUG_RDADD_READ( r ) READ_32( ( DMA_REGS_0_DEBUG_RDADD_ADDRESS ), (r) )
1505 #define DMA_REGS_0_DEBUG_RDADD_WRITE( v ) WRITE_32( ( DMA_REGS_0_DEBUG_RDADD_ADDRESS ), (v) )
1507 #define DMA_REGS_1_DEBUG_RDADD_ADDRESS ( DMA_REGS_1_DEBUG_ADDRESS + DMA_REGS_DEBUG_RDADD_OFFSET )
1508 #define DMA_REGS_1_DEBUG_RDADD_READ( r ) READ_32( ( DMA_REGS_1_DEBUG_RDADD_ADDRESS ), (r) )
1509 #define DMA_REGS_1_DEBUG_RDADD_WRITE( v ) WRITE_32( ( DMA_REGS_1_DEBUG_RDADD_ADDRESS ), (v) )
/* Address table with one entry per DMA module; defined in the companion .c file. */
1512 extern uint32_t DMA_REGS_DEBUG_RDADD_ARRAY
[ ] ;
1514 #define DMA_REGS_DEBUG_RDADD_WRITE( i, v ) WRITE_32( DMA_REGS_DEBUG_RDADD_ARRAY [ i ], (v) )
1515 #define DMA_REGS_DEBUG_RDADD_READ( i, r ) READ_32( DMA_REGS_DEBUG_RDADD_ARRAY [ i ], (r) )
1517 #ifndef _BYTE_ORDER_LITTLE_ENDIAN_
1521 uint32_t r2
: 10 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1524 uint32_t cdcs1
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1527 uint32_t datacs1
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1530 uint32_t rdcs
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1533 uint32_t rrcs
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1536 uint32_t cdcs
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1539 uint32_t datacs
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1542 uint32_t r1
: 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1545 uint32_t address
: 10 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1547 __PACKING_ATTRIBUTE_STRUCT_END__
1548 DMA_REGS_DEBUG_RDADD
;
1551 { uint32_t address
: 10 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1554 uint32_t r1
: 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1557 uint32_t datacs
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1560 uint32_t cdcs
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1563 uint32_t rrcs
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1566 uint32_t rdcs
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1569 uint32_t datacs1
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1572 uint32_t cdcs1
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1575 uint32_t r2
: 10 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1579 __PACKING_ATTRIBUTE_STRUCT_END__
1580 DMA_REGS_DEBUG_RDADD
;
1583 /*****************************************************************************************/
1584 /* INDIRECT_READ_REQUEST_VALID */
1585 /* After determining the address and cs, the user should assert this bit for indicating */
1586 /* that the address and cs are valid. */
1587 /*****************************************************************************************/
1589 #define DMA_REGS_DEBUG_RDVALID_R1_DEFAULT_VALUE ( 0x0 )
1590 #define DMA_REGS_DEBUG_RDVALID_R1_DEFAULT_VALUE_RESET_VALUE ( 0x0 )
1591 #define DMA_REGS_DEBUG_RDVALID_VALID_NOT_VALID_VALUE ( 0x0 )
1592 #define DMA_REGS_DEBUG_RDVALID_VALID_NOT_VALID_VALUE_RESET_VALUE ( 0x0 )
1593 #define DMA_REGS_DEBUG_RDVALID_VALID_VALID_VALUE ( 0x1 )
/* Byte offset of the RDVALID register within each DEBUG block. */
1596 #define DMA_REGS_DEBUG_RDVALID_OFFSET ( 0x00000104 )
1598 #define DMA_REGS_0_DEBUG_RDVALID_ADDRESS ( DMA_REGS_0_DEBUG_ADDRESS + DMA_REGS_DEBUG_RDVALID_OFFSET )
1599 #define DMA_REGS_0_DEBUG_RDVALID_READ( r ) READ_32( ( DMA_REGS_0_DEBUG_RDVALID_ADDRESS ), (r) )
1600 #define DMA_REGS_0_DEBUG_RDVALID_WRITE( v ) WRITE_32( ( DMA_REGS_0_DEBUG_RDVALID_ADDRESS ), (v) )
1602 #define DMA_REGS_1_DEBUG_RDVALID_ADDRESS ( DMA_REGS_1_DEBUG_ADDRESS + DMA_REGS_DEBUG_RDVALID_OFFSET )
1603 #define DMA_REGS_1_DEBUG_RDVALID_READ( r ) READ_32( ( DMA_REGS_1_DEBUG_RDVALID_ADDRESS ), (r) )
1604 #define DMA_REGS_1_DEBUG_RDVALID_WRITE( v ) WRITE_32( ( DMA_REGS_1_DEBUG_RDVALID_ADDRESS ), (v) )
/* Address table with one entry per DMA module; defined in the companion .c file. */
1607 extern uint32_t DMA_REGS_DEBUG_RDVALID_ARRAY
[ ] ;
1609 #define DMA_REGS_DEBUG_RDVALID_WRITE( i, v ) WRITE_32( DMA_REGS_DEBUG_RDVALID_ARRAY [ i ], (v) )
1610 #define DMA_REGS_DEBUG_RDVALID_READ( i, r ) READ_32( DMA_REGS_DEBUG_RDVALID_ARRAY [ i ], (r) )
1612 #ifndef _BYTE_ORDER_LITTLE_ENDIAN_
1616 uint32_t r1
: 31 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1619 uint32_t valid
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1621 __PACKING_ATTRIBUTE_STRUCT_END__
1622 DMA_REGS_DEBUG_RDVALID
;
1625 { uint32_t valid
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1628 uint32_t r1
: 31 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1632 __PACKING_ATTRIBUTE_STRUCT_END__
1633 DMA_REGS_DEBUG_RDVALID
;
1636 /*****************************************************************************************/
1637 /* INDIRECT_READ_DATA */
1638 /* The returned read data from the selected RAM. Array of 4 registers (128 bits total). */
1639 /* The width of the different memories is as follows: write data - 128 bits chunk des */
1640 /* criptors - 36 bits read requests - 42 bits read data - 64 bits The the memories */
1641 /* with width smaller than 128, the data will appear in the first registers of the array */
1642 /* , for example: data from the cd RAM will appear in - {reg1[5:0], reg0[31:0]}. */
1643 /*****************************************************************************************/
1645 #define DMA_REGS_DEBUG_RDDATA_DATA_DATA_VALUE ( 0x0 )
1646 #define DMA_REGS_DEBUG_RDDATA_DATA_DATA_VALUE_RESET_VALUE ( 0x0 )
/* Byte offset of the RDDATA register array within each DEBUG block. */
1649 #define DMA_REGS_DEBUG_RDDATA_OFFSET ( 0x00000108 )
1651 #define DMA_REGS_0_DEBUG_RDDATA_ADDRESS ( DMA_REGS_0_DEBUG_ADDRESS + DMA_REGS_DEBUG_RDDATA_OFFSET )
1652 #define DMA_REGS_0_DEBUG_RDDATA_READ_I( r, i ) READ_I_32( ( DMA_REGS_0_DEBUG_RDDATA_ADDRESS ), (i), (r) )
1653 #define DMA_REGS_0_DEBUG_RDDATA_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_0_DEBUG_RDDATA_ADDRESS ), (i), (v) )
1655 #define DMA_REGS_1_DEBUG_RDDATA_ADDRESS ( DMA_REGS_1_DEBUG_ADDRESS + DMA_REGS_DEBUG_RDDATA_OFFSET )
1656 #define DMA_REGS_1_DEBUG_RDDATA_READ_I( r, i ) READ_I_32( ( DMA_REGS_1_DEBUG_RDDATA_ADDRESS ), (i), (r) )
1657 #define DMA_REGS_1_DEBUG_RDDATA_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_1_DEBUG_RDDATA_ADDRESS ), (i), (v) )
/* Address table with one entry per DMA module; defined in the companion .c file. */
1660 extern uint32_t DMA_REGS_DEBUG_RDDATA_ARRAY
[ ] ;
1662 #define DMA_REGS_DEBUG_RDDATA_WRITE( i, k, v ) WRITE_I_32( DMA_REGS_DEBUG_RDDATA_ARRAY [ i ], (k), (v) )
1663 #define DMA_REGS_DEBUG_RDDATA_READ( i, k, r ) READ_I_32( DMA_REGS_DEBUG_RDDATA_ARRAY [ i ], (k), (r) )
1665 #ifndef _BYTE_ORDER_LITTLE_ENDIAN_
1669 uint32_t data
: 32 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1671 __PACKING_ATTRIBUTE_STRUCT_END__
1672 DMA_REGS_DEBUG_RDDATA
;
1675 { uint32_t data
: 32 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1679 __PACKING_ATTRIBUTE_STRUCT_END__
1680 DMA_REGS_DEBUG_RDDATA
;
1683 /*****************************************************************************************/
1684 /* READ_DATA_READY */
1685 /* When asserted, indicates that the data in the previous array is valid. Will remain */
1686 /* asserted until the user deasserts the valid bit in register RDVALID. */
1687 /*****************************************************************************************/
1689 #define DMA_REGS_DEBUG_RDDATARDY_R1_DEFAULT_VALUE ( 0x0 )
1690 #define DMA_REGS_DEBUG_RDDATARDY_R1_DEFAULT_VALUE_RESET_VALUE ( 0x0 )
1691 #define DMA_REGS_DEBUG_RDDATARDY_READY_NOT_READY_VALUE ( 0x0 )
1692 #define DMA_REGS_DEBUG_RDDATARDY_READY_NOT_READY_VALUE_RESET_VALUE ( 0x0 )
1693 #define DMA_REGS_DEBUG_RDDATARDY_READY_READY_VALUE ( 0x1 )
/* Byte offset of the RDDATARDY register within each DEBUG block. */
1696 #define DMA_REGS_DEBUG_RDDATARDY_OFFSET ( 0x00000118 )
1698 #define DMA_REGS_0_DEBUG_RDDATARDY_ADDRESS ( DMA_REGS_0_DEBUG_ADDRESS + DMA_REGS_DEBUG_RDDATARDY_OFFSET )
1699 #define DMA_REGS_0_DEBUG_RDDATARDY_READ( r ) READ_32( ( DMA_REGS_0_DEBUG_RDDATARDY_ADDRESS ), (r) )
1700 #define DMA_REGS_0_DEBUG_RDDATARDY_WRITE( v ) WRITE_32( ( DMA_REGS_0_DEBUG_RDDATARDY_ADDRESS ), (v) )
1702 #define DMA_REGS_1_DEBUG_RDDATARDY_ADDRESS ( DMA_REGS_1_DEBUG_ADDRESS + DMA_REGS_DEBUG_RDDATARDY_OFFSET )
1703 #define DMA_REGS_1_DEBUG_RDDATARDY_READ( r ) READ_32( ( DMA_REGS_1_DEBUG_RDDATARDY_ADDRESS ), (r) )
1704 #define DMA_REGS_1_DEBUG_RDDATARDY_WRITE( v ) WRITE_32( ( DMA_REGS_1_DEBUG_RDDATARDY_ADDRESS ), (v) )
/* Address table with one entry per DMA module; defined in the companion .c file. */
1707 extern uint32_t DMA_REGS_DEBUG_RDDATARDY_ARRAY
[ ] ;
1709 #define DMA_REGS_DEBUG_RDDATARDY_WRITE( i, v ) WRITE_32( DMA_REGS_DEBUG_RDDATARDY_ARRAY [ i ], (v) )
1710 #define DMA_REGS_DEBUG_RDDATARDY_READ( i, r ) READ_32( DMA_REGS_DEBUG_RDDATARDY_ARRAY [ i ], (r) )
1712 #ifndef _BYTE_ORDER_LITTLE_ENDIAN_
1716 uint32_t r1
: 31 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1719 uint32_t ready
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1721 __PACKING_ATTRIBUTE_STRUCT_END__
1722 DMA_REGS_DEBUG_RDDATARDY
;
1725 { uint32_t ready
: 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1728 uint32_t r1
: 31 __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1732 __PACKING_ATTRIBUTE_STRUCT_END__
1733 DMA_REGS_DEBUG_RDDATARDY
;
1736 /*****************************************************************************************/
1737 /* The registers in this section allow configuration of the following: 1. memory alloca */
1738 /* tions 2. priority and weight for arbitration 3. urgent thresholds 4. route address */
1739 /* es Most of the registers control the configuration of a single peripheral. They ar */
1740 /* e arranged in arrays according to their configuration topic. The order of peripher */
1741 /* als within each array is: Ethernet 0 Ethernet 1 Ethernet 2 Ethernet 3 Ethernet 4 */
1743 /*****************************************************************************************/
1745 /*****************************************************************************************/
1746 /* Registers array numbers */
1747 /*****************************************************************************************/
/* Element counts of the per-peripheral CONFIG register arrays. All are 6; presumably */
/* one entry per peripheral (the banner above lists Ethernet 0-4) -- confirm against */
/* the hardware spec. */
1748 #define DMA_REGS_CONFIG_MALLOC_NUMBER ( 6 )
1749 #define DMA_REGS_CONFIG_READ_BASE_NUMBER ( 6 )
1750 #define DMA_REGS_CONFIG_U_THRESH_NUMBER ( 6 )
1751 #define DMA_REGS_CONFIG_PRI_NUMBER ( 6 )
1752 #define DMA_REGS_CONFIG_WEIGHT_NUMBER ( 6 )
1753 #define DMA_REGS_CONFIG_BB_ROUTE_NUMBER ( 6 )
1757 DMA_REGS_CONFIG_SOURCE source __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1759 /* MEMORY_ALLOCATION */
1760 DMA_REGS_CONFIG_MALLOC malloc
[ DMA_REGS_CONFIG_MALLOC_NUMBER
] __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1762 /* READ_REQ_BASE_ADDRESS */
1763 DMA_REGS_CONFIG_READ_BASE read_base
[ DMA_REGS_CONFIG_READ_BASE_NUMBER
] __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1765 /* URGENT_THRESHOLDS */
1766 DMA_REGS_CONFIG_U_THRESH u_thresh
[ DMA_REGS_CONFIG_U_THRESH_NUMBER
] __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1768 /* STRICT_PRIORITY */
1769 DMA_REGS_CONFIG_PRI pri
[ DMA_REGS_CONFIG_PRI_NUMBER
] __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1771 /* WEIGHT_OF_ROUND_ROBIN */
1772 DMA_REGS_CONFIG_WEIGHT weight
[ DMA_REGS_CONFIG_WEIGHT_NUMBER
] __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1774 /* BB_ROUTE_DMA_PERIPH */
1775 DMA_REGS_CONFIG_BB_ROUTE bb_route
[ DMA_REGS_CONFIG_BB_ROUTE_NUMBER
] __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1777 /* POINTERS_RESET */
1778 DMA_REGS_CONFIG_PTRRST ptrrst __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1781 DMA_REGS_CONFIG_MEM_SEL mem_sel __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1783 __PACKING_ATTRIBUTE_STRUCT_END__
1786 /*****************************************************************************************/
1787 /* request counters per peripheral */
1788 /*****************************************************************************************/
1790 /*****************************************************************************************/
1791 /* Registers array numbers */
1792 /*****************************************************************************************/
/* Element counts of the DEBUG register arrays: 6 per-peripheral request counters, and */
/* 4 indirect-read data words (128 bits total, per the INDIRECT_READ_DATA banner). */
1793 #define DMA_REGS_DEBUG_REQ_CNT_RX_NUMBER ( 6 )
1794 #define DMA_REGS_DEBUG_REQ_CNT_TX_NUMBER ( 6 )
1795 #define DMA_REGS_DEBUG_REQ_CNT_RX_ACC_NUMBER ( 6 )
1796 #define DMA_REGS_DEBUG_REQ_CNT_TX_ACC_NUMBER ( 6 )
1797 #define DMA_REGS_DEBUG_RDDATA_NUMBER ( 4 )
1800 /* NOT_EMPTY_VECTOR */
1801 DMA_REGS_DEBUG_NEMPTY nempty __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1804 DMA_REGS_DEBUG_URGNT urgnt __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1806 /* SELECTED_SOURCE_NUM */
1807 DMA_REGS_DEBUG_SELSRC selsrc __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1809 /* REQUEST_COUNTERS_RX */
1810 DMA_REGS_DEBUG_REQ_CNT_RX req_cnt_rx
[ DMA_REGS_DEBUG_REQ_CNT_RX_NUMBER
] __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1812 /* REQUEST_COUNTERS_TX */
1813 DMA_REGS_DEBUG_REQ_CNT_TX req_cnt_tx
[ DMA_REGS_DEBUG_REQ_CNT_TX_NUMBER
] __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1815 /* ACC_REQUEST_COUNTERS_RX */
1816 DMA_REGS_DEBUG_REQ_CNT_RX_ACC req_cnt_rx_acc
[ DMA_REGS_DEBUG_REQ_CNT_RX_ACC_NUMBER
] __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1818 /* ACC_REQUEST_COUNTERS_TX */
1819 DMA_REGS_DEBUG_REQ_CNT_TX_ACC req_cnt_tx_acc
[ DMA_REGS_DEBUG_REQ_CNT_TX_ACC_NUMBER
] __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1822 uint8_t reserved1
[ 148 ] __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1825 DMA_REGS_DEBUG_RDADD rdadd __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1827 /* INDIRECT_READ_REQUEST_VALID */
1828 DMA_REGS_DEBUG_RDVALID rdvalid __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1830 /* INDIRECT_READ_DATA */
1831 DMA_REGS_DEBUG_RDDATA rddata
[ DMA_REGS_DEBUG_RDDATA_NUMBER
] __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1833 /* READ_DATA_READY */
1834 DMA_REGS_DEBUG_RDDATARDY rddatardy __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1836 __PACKING_ATTRIBUTE_STRUCT_END__
1841 /* config function */
1842 DMA_REGS_CONFIG config __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1845 uint8_t reserved0
[ 100 ] __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1847 /* debug function */
1848 DMA_REGS_DEBUG debug __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1850 __PACKING_ATTRIBUTE_STRUCT_END__
1853 #define DMA_REGS_NUMBER ( 2 )
1857 DMA_REGS regs
[ DMA_REGS_NUMBER
] __PACKING_ATTRIBUTE_FIELD_LEVEL__
;
1859 __PACKING_ATTRIBUTE_STRUCT_END__
1861 #endif /* __DMA_H_INCLUDED */