--- /dev/null
+// SPDX-License-Identifier: GPL-2.0+
+/*
+ * Copyright (c) 2013 Broadcom
+ */
+/*
+
+*/
+
+#ifndef __DMA_H_INCLUDED
+#define __DMA_H_INCLUDED
+
+/* File automatically generated by Reggae at 15/08/2013 10:54:36 */
+
+#include "access_macros.h"
+#include "packing.h"
+#include "rdp_map.h"
+
+/*****************************************************************************************/
+/* The Direct Memory Access (DMA) module serves peripheral (EMACs and GPON) requests for */
+/* writes and reads from DDR and packet SRAM. DMA connects the peripherals to DDR. S */
+/* DMA connects the same peripherals to packet SRAM. */
+/*****************************************************************************************/
+
+/*****************************************************************************************/
+/* Blocks offsets */
+/*****************************************************************************************/
+/*****************************************************************************************/
+/* Functions offsets and addresses */
+/*****************************************************************************************/
+#define DMA_REGS_0_CONFIG_OFFSET ( 0x00000000 )
+#define DMA_REGS_0_CONFIG_ADDRESS ( DMA_REGS_0_OFFSET + DMA_REGS_0_CONFIG_OFFSET )
+
+#define DMA_REGS_0_DEBUG_OFFSET ( 0x00000100 )
+#define DMA_REGS_0_DEBUG_ADDRESS ( DMA_REGS_0_OFFSET + DMA_REGS_0_DEBUG_OFFSET )
+
+#define DMA_REGS_1_CONFIG_OFFSET ( 0x00000000 )
+#define DMA_REGS_1_CONFIG_ADDRESS ( DMA_REGS_1_OFFSET + DMA_REGS_1_CONFIG_OFFSET )
+
+#define DMA_REGS_1_DEBUG_OFFSET ( 0x00000100 )
+#define DMA_REGS_1_DEBUG_ADDRESS ( DMA_REGS_1_OFFSET + DMA_REGS_1_DEBUG_OFFSET )
+
+/* 'd' is module index */
+/* 'i' is block index */
+/* 'j' is function index */
+/* 'e' is function entry */
+/* 'k' is register index */
+
+/*****************************************************************************************/
+/* BB_SOURCE */
+/* Broadbus source address of DMA and SDMA */
+/*****************************************************************************************/
+
+#define DMA_REGS_CONFIG_SOURCE_R1_DEFAULT_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_SOURCE_R1_DEFAULT_VALUE_RESET_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_SOURCE_SOURCE_DEFAULT_DMA_VALUE ( 0x16 )
+#define DMA_REGS_CONFIG_SOURCE_SOURCE_DEFAULT_DMA_VALUE_RESET_VALUE ( 0x16 )
+#define DMA_REGS_CONFIG_SOURCE_SOURCE_DEFAULT_SDMA_VALUE ( 0x18 )
+#define DMA_REGS_CONFIG_SOURCE_SOURCE_DEFAULT_SDMA_VALUE_RESET_VALUE ( 0x18 )
+
+
+#define DMA_REGS_CONFIG_SOURCE_OFFSET ( 0x00000000 )
+
+#define DMA_REGS_0_CONFIG_SOURCE_ADDRESS ( DMA_REGS_0_CONFIG_ADDRESS + DMA_REGS_CONFIG_SOURCE_OFFSET )
+#define DMA_REGS_0_CONFIG_SOURCE_READ( r ) READ_32( ( DMA_REGS_0_CONFIG_SOURCE_ADDRESS ), (r) )
+#define DMA_REGS_0_CONFIG_SOURCE_WRITE( v ) WRITE_32( ( DMA_REGS_0_CONFIG_SOURCE_ADDRESS ), (v) )
+
+#define DMA_REGS_1_CONFIG_SOURCE_ADDRESS ( DMA_REGS_1_CONFIG_ADDRESS + DMA_REGS_CONFIG_SOURCE_OFFSET )
+#define DMA_REGS_1_CONFIG_SOURCE_READ( r ) READ_32( ( DMA_REGS_1_CONFIG_SOURCE_ADDRESS ), (r) )
+#define DMA_REGS_1_CONFIG_SOURCE_WRITE( v ) WRITE_32( ( DMA_REGS_1_CONFIG_SOURCE_ADDRESS ), (v) )
+
+
+extern uint32_t DMA_REGS_CONFIG_SOURCE_ARRAY [ ] ;
+
+#define DMA_REGS_CONFIG_SOURCE_WRITE( i, v ) WRITE_32( DMA_REGS_CONFIG_SOURCE_ARRAY [ i ], (v) )
+#define DMA_REGS_CONFIG_SOURCE_READ( i, r ) READ_32( DMA_REGS_CONFIG_SOURCE_ARRAY [ i ], (r) )
+
+#ifndef _BYTE_ORDER_LITTLE_ENDIAN_
+typedef struct
+{
+	/* reserved */
+	uint32_t	r1        	: 27 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+	/* bb_source: Broadbus source address of DMA / SDMA (see *_SOURCE_DEFAULT_* values above) */
+	uint32_t	source    	: 5 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+}
+__PACKING_ATTRIBUTE_STRUCT_END__
+DMA_REGS_CONFIG_SOURCE ;
+#else
+/* Little-endian mirror of the layout above; generated comments trail the field they describe. */
+typedef struct
+{	uint32_t	source    	: 5 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+	/* bb_source (field 'source' above): Broadbus source address of DMA / SDMA */
+	uint32_t	r1        	: 27 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+	/* reserved (field 'r1' above) */
+}
+__PACKING_ATTRIBUTE_STRUCT_END__
+DMA_REGS_CONFIG_SOURCE ;
+#endif
+
+/*****************************************************************************************/
+/* MEMORY_ALLOCATION */
+/* This array of registers defines the memory allocation for the peripherals, for upstre */
+/* am. The allocation is of number of 128byte buffers out of the total 32 buffers for s */
+/* dma or 96 buffers in dma in the upload data RAM. For the DMA, the buffers are divid */
+/* ed between 2 physical RAMs (64 in the first, 32 in the second). The decision which cl  */
+/* ients FIFO is located in which memory is done by the register in address 0x98. The a */
+/* llocation is done by defining a base address (aligned to 128 bytes) and the number of */
+/* allocated buffers. Note that the memory allocation should not contain wrap around. */
+/* For example, if three buffers are needed, do not allocate buffers 30, 31 and 0. The */
+/* number of allocated CDs is the same of data buffers - one chunk descriptor per buffe */
+/* r, therefore allocation in CD RAM is defined only by offset address. The order of */
+/* peripherals within the array is: Ethernet 0 Ethernet 1 Ethernet 2 Ethernet 3 Eth */
+/* ernet 4 GPON/EPON */
+/*****************************************************************************************/
+
+#define DMA_REGS_CONFIG_MALLOC_R3_R3_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_MALLOC_R3_R3_VALUE_RESET_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_DMA_EMAC0_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_DMA_EMAC0_VALUE_RESET_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_SDMA_EMAC0_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_SDMA_EMAC0_VALUE_RESET_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_SDMA_EMAC1_VALUE ( 0x5 )
+#define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_SDMA_EMAC1_VALUE_RESET_VALUE ( 0x5 )
+#define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_DMA_EMAC1_VALUE ( 0x9 )
+#define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_DMA_EMAC1_VALUE_RESET_VALUE ( 0x9 )
+#define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_SDMA_EMAC2_VALUE ( 0xA )
+#define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_SDMA_EMAC2_VALUE_RESET_VALUE ( 0xA )
+#define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_SDMA_EMAC3_VALUE ( 0xF )
+#define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_SDMA_EMAC3_VALUE_RESET_VALUE ( 0xF )
+#define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_DMA_EMAC2_VALUE ( 0x12 )
+#define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_DMA_EMAC2_VALUE_RESET_VALUE ( 0x12 )
+#define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_SDMA_EMAC4_VALUE ( 0x14 )
+#define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_SDMA_EMAC4_VALUE_RESET_VALUE ( 0x14 )
+#define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_SDMA_GPON_VALUE ( 0x19 )
+#define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_SDMA_GPON_VALUE_RESET_VALUE ( 0x19 )
+#define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_DMA_EMAC3_VALUE ( 0x1B )
+#define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_DMA_EMAC3_VALUE_RESET_VALUE ( 0x1B )
+#define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_DMA_EMAC4_VALUE ( 0x24 )
+#define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_DMA_EMAC4_VALUE_RESET_VALUE ( 0x24 )
+#define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_DMA_GPON_VALUE ( 0x2D )
+#define DMA_REGS_CONFIG_MALLOC_CDOFFSET_DEFAULT_DMA_GPON_VALUE_RESET_VALUE ( 0x2D )
+#define DMA_REGS_CONFIG_MALLOC_R2_R2_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_MALLOC_R2_R2_VALUE_RESET_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_MIN_DMA_SDMA_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_SDMA_EMAC0_VALUE ( 0x5 )
+#define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_SDMA_EMAC0_VALUE_RESET_VALUE ( 0x5 )
+#define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_SDMA_EMAC1_VALUE ( 0x5 )
+#define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_SDMA_EMAC1_VALUE_RESET_VALUE ( 0x5 )
+#define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_SDMA_EMAC2_VALUE ( 0x5 )
+#define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_SDMA_EMAC2_VALUE_RESET_VALUE ( 0x5 )
+#define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_SDMA_EMAC3_VALUE ( 0x5 )
+#define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_SDMA_EMAC3_VALUE_RESET_VALUE ( 0x5 )
+#define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_SDMA_EMAC4_VALUE ( 0x5 )
+#define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_SDMA_EMAC4_VALUE_RESET_VALUE ( 0x5 )
+#define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_SDMA_GPON_VALUE ( 0x7 )
+#define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_SDMA_GPON_VALUE_RESET_VALUE ( 0x7 )
+#define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_DMA_EMAC0_VALUE ( 0x9 )
+#define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_DMA_EMAC0_VALUE_RESET_VALUE ( 0x9 )
+#define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_DMA_EMAC1_VALUE ( 0x9 )
+#define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_DMA_EMAC1_VALUE_RESET_VALUE ( 0x9 )
+#define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_DMA_EMAC2_VALUE ( 0x9 )
+#define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_DMA_EMAC2_VALUE_RESET_VALUE ( 0x9 )
+#define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_DMA_EMAC3_VALUE ( 0x9 )
+#define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_DMA_EMAC3_VALUE_RESET_VALUE ( 0x9 )
+#define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_DMA_EMAC4_VALUE ( 0x9 )
+#define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_DMA_EMAC4_VALUE_RESET_VALUE ( 0x9 )
+#define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_DMA_GPON_VALUE ( 0x12 )
+#define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_DEFAULT_DMA_GPON_VALUE_RESET_VALUE ( 0x12 )
+#define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_MAX_SDMA_VALUE ( 0x20 )
+#define DMA_REGS_CONFIG_MALLOC_NUMOFBUFF_MAX_DMA_VALUE ( 0x3F )
+#define DMA_REGS_CONFIG_MALLOC_R1_R1_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_MALLOC_R1_R1_VALUE_RESET_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_DMA_EMAC0_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_DMA_EMAC0_VALUE_RESET_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_SDMA_EMAC0_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_SDMA_EMAC0_VALUE_RESET_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_SDMA_EMAC1_VALUE ( 0x5 )
+#define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_SDMA_EMAC1_VALUE_RESET_VALUE ( 0x5 )
+#define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_DMA_EMAC1_VALUE ( 0x9 )
+#define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_DMA_EMAC1_VALUE_RESET_VALUE ( 0x9 )
+#define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_SDMA_EMAC2_VALUE ( 0xA )
+#define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_SDMA_EMAC2_VALUE_RESET_VALUE ( 0xA )
+#define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_SDMA_EMAC3_VALUE ( 0xF )
+#define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_SDMA_EMAC3_VALUE_RESET_VALUE ( 0xF )
+#define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_DMA_EMAC2_VALUE ( 0x12 )
+#define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_DMA_EMAC2_VALUE_RESET_VALUE ( 0x12 )
+#define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_SDMA_EMAC4_VALUE ( 0x14 )
+#define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_SDMA_EMAC4_VALUE_RESET_VALUE ( 0x14 )
+#define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_SDMA_GPON_VALUE ( 0x19 )
+#define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_SDMA_GPON_VALUE_RESET_VALUE ( 0x19 )
+#define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_DMA_EMAC3_VALUE ( 0x1B )
+#define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_DMA_EMAC3_VALUE_RESET_VALUE ( 0x1B )
+#define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_DMA_EMAC4_VALUE ( 0x24 )
+#define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_DMA_EMAC4_VALUE_RESET_VALUE ( 0x24 )
+#define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_DMA_GPON_VALUE ( 0x2D )
+#define DMA_REGS_CONFIG_MALLOC_DATATOFFSET_DEFAULT_DMA_GPON_VALUE_RESET_VALUE ( 0x2D )
+
+
+#define DMA_REGS_CONFIG_MALLOC_OFFSET ( 0x00000004 )
+
+#define DMA_REGS_0_CONFIG_MALLOC_ADDRESS ( DMA_REGS_0_CONFIG_ADDRESS + DMA_REGS_CONFIG_MALLOC_OFFSET )
+#define DMA_REGS_0_CONFIG_MALLOC_READ_I( r, i ) READ_I_32( ( DMA_REGS_0_CONFIG_MALLOC_ADDRESS ), (i), (r) )
+#define DMA_REGS_0_CONFIG_MALLOC_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_0_CONFIG_MALLOC_ADDRESS ), (i), (v) )
+
+#define DMA_REGS_1_CONFIG_MALLOC_ADDRESS ( DMA_REGS_1_CONFIG_ADDRESS + DMA_REGS_CONFIG_MALLOC_OFFSET )
+#define DMA_REGS_1_CONFIG_MALLOC_READ_I( r, i ) READ_I_32( ( DMA_REGS_1_CONFIG_MALLOC_ADDRESS ), (i), (r) )
+#define DMA_REGS_1_CONFIG_MALLOC_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_1_CONFIG_MALLOC_ADDRESS ), (i), (v) )
+
+
+extern uint32_t DMA_REGS_CONFIG_MALLOC_ARRAY [ ] ;
+
+#define DMA_REGS_CONFIG_MALLOC_WRITE( i, k, v ) WRITE_I_32( DMA_REGS_CONFIG_MALLOC_ARRAY [ i ], (k), (v) )
+#define DMA_REGS_CONFIG_MALLOC_READ( i, k, r ) READ_I_32( DMA_REGS_CONFIG_MALLOC_ARRAY [ i ], (k), (r) )
+
+#ifndef _BYTE_ORDER_LITTLE_ENDIAN_
+typedef struct
+{
+	/* reserved3 */
+	uint32_t	r3        	: 2 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+	/* CD_memory_offset_address: base of the chunk-descriptor allocation (see *_CDOFFSET_* defaults above) */
+	uint32_t	cdoffset  	: 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+	/* reserved2 */
+	uint32_t	r2        	: 2 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+	/* number_of_buffers: count of 128-byte buffers allocated to the peripheral (see block comment) */
+	uint32_t	numofbuff 	: 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+	/* reserved1 */
+	uint32_t	r1        	: 10 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+	/* data_memory_offset_address: 128-byte-aligned base of the data-buffer allocation (see *_DATATOFFSET_* defaults above) */
+	uint32_t	datatoffset	: 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+}
+__PACKING_ATTRIBUTE_STRUCT_END__
+DMA_REGS_CONFIG_MALLOC ;
+#else
+/* Little-endian mirror of the layout above; generated comments trail the field they describe. */
+typedef struct
+{	uint32_t	datatoffset	: 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+	/* data_memory_offset_address (field 'datatoffset' above) */
+	uint32_t	r1        	: 10 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+	/* reserved1 (field 'r1' above) */
+	uint32_t	numofbuff 	: 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+	/* number_of_buffers (field 'numofbuff' above) */
+	uint32_t	r2        	: 2 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+	/* reserved2 (field 'r2' above) */
+	uint32_t	cdoffset  	: 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+	/* CD_memory_offset_address (field 'cdoffset' above) */
+	uint32_t	r3        	: 2 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+	/* reserved3 (field 'r3' above) */
+}
+__PACKING_ATTRIBUTE_STRUCT_END__
+DMA_REGS_CONFIG_MALLOC ;
+#endif
+
+/*****************************************************************************************/
+/* READ_REQ_BASE_ADDRESS */
+/* This array of registers controls the base address of each peripheral within the read */
+/* requests RAM. Each peripheral gets memory enough for storing up to 8 read requests */
+/* (total of 48 requests in the RAM), starting from a configurable base address. The ba */
+/* se address is aligned to 8 therefore the only valid values are: 0, 8, 16, 24, 32, 4 */
+/* 0. */
+/*****************************************************************************************/
+
+#define DMA_REGS_CONFIG_READ_BASE_R1_R1_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_READ_BASE_R1_R1_VALUE_RESET_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_READ_BASE_BASE_MIN_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_READ_BASE_BASE_DEFAULT_EMAC0_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_READ_BASE_BASE_DEFAULT_EMAC0_VALUE_RESET_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_READ_BASE_BASE_DEFAULT_EMAC1_VALUE ( 0x8 )
+#define DMA_REGS_CONFIG_READ_BASE_BASE_DEFAULT_EMAC1_VALUE_RESET_VALUE ( 0x8 )
+#define DMA_REGS_CONFIG_READ_BASE_BASE_DEFAULT_EMAC2_VALUE ( 0x10 )
+#define DMA_REGS_CONFIG_READ_BASE_BASE_DEFAULT_EMAC2_VALUE_RESET_VALUE ( 0x10 )
+#define DMA_REGS_CONFIG_READ_BASE_BASE_DEFAULT_EMAC3_VALUE ( 0x18 )
+#define DMA_REGS_CONFIG_READ_BASE_BASE_DEFAULT_EMAC3_VALUE_RESET_VALUE ( 0x18 )
+#define DMA_REGS_CONFIG_READ_BASE_BASE_DEFAULT_EMAC4_VALUE ( 0x20 )
+#define DMA_REGS_CONFIG_READ_BASE_BASE_DEFAULT_EMAC4_VALUE_RESET_VALUE ( 0x20 )
+#define DMA_REGS_CONFIG_READ_BASE_BASE_DEFAULT_GPON_VALUE ( 0x28 )
+#define DMA_REGS_CONFIG_READ_BASE_BASE_DEFAULT_GPON_VALUE_RESET_VALUE ( 0x28 )
+#define DMA_REGS_CONFIG_READ_BASE_BASE_MAX_VALUE ( 0x28 )
+
+
+#define DMA_REGS_CONFIG_READ_BASE_OFFSET ( 0x0000001C )
+
+#define DMA_REGS_0_CONFIG_READ_BASE_ADDRESS ( DMA_REGS_0_CONFIG_ADDRESS + DMA_REGS_CONFIG_READ_BASE_OFFSET )
+#define DMA_REGS_0_CONFIG_READ_BASE_READ_I( r, i ) READ_I_32( ( DMA_REGS_0_CONFIG_READ_BASE_ADDRESS ), (i), (r) )
+#define DMA_REGS_0_CONFIG_READ_BASE_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_0_CONFIG_READ_BASE_ADDRESS ), (i), (v) )
+
+#define DMA_REGS_1_CONFIG_READ_BASE_ADDRESS ( DMA_REGS_1_CONFIG_ADDRESS + DMA_REGS_CONFIG_READ_BASE_OFFSET )
+#define DMA_REGS_1_CONFIG_READ_BASE_READ_I( r, i ) READ_I_32( ( DMA_REGS_1_CONFIG_READ_BASE_ADDRESS ), (i), (r) )
+#define DMA_REGS_1_CONFIG_READ_BASE_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_1_CONFIG_READ_BASE_ADDRESS ), (i), (v) )
+
+
+extern uint32_t DMA_REGS_CONFIG_READ_BASE_ARRAY [ ] ;
+
+#define DMA_REGS_CONFIG_READ_BASE_WRITE( i, k, v ) WRITE_I_32( DMA_REGS_CONFIG_READ_BASE_ARRAY [ i ], (k), (v) )
+#define DMA_REGS_CONFIG_READ_BASE_READ( i, k, r ) READ_I_32( DMA_REGS_CONFIG_READ_BASE_ARRAY [ i ], (k), (r) )
+
+#ifndef _BYTE_ORDER_LITTLE_ENDIAN_
+typedef struct
+{
+	/* reserved1 */
+	uint32_t	r1        	: 26 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+	/* base_address: peripheral base within the read-requests RAM; aligned to 8 (valid values 0,8,16,24,32,40 per block comment above) */
+	uint32_t	base      	: 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+}
+__PACKING_ATTRIBUTE_STRUCT_END__
+DMA_REGS_CONFIG_READ_BASE ;
+#else
+/* Little-endian mirror of the layout above; generated comments trail the field they describe. */
+typedef struct
+{	uint32_t	base      	: 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+	/* base_address (field 'base' above): aligned to 8, see block comment */
+	uint32_t	r1        	: 26 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+	/* reserved1 (field 'r1' above) */
+}
+__PACKING_ATTRIBUTE_STRUCT_END__
+DMA_REGS_CONFIG_READ_BASE ;
+#endif
+
+/*****************************************************************************************/
+/* URGENT_THRESHOLDS */
+/* the in/out of urgent thresholds mark the number of write requests in the queue in whi */
+/* ch the peripherals priority is changed. The two thresholds should create hysteresis. */
+/* The moving into urgent threshold must always be greater than the moving out of urgen */
+/* t threshold. */
+/*****************************************************************************************/
+
+#define DMA_REGS_CONFIG_U_THRESH_R2_R2_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_U_THRESH_R2_R2_VALUE_RESET_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_MIN_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_SDMA_EMAC0_VALUE ( 0x2 )
+#define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_SDMA_EMAC0_VALUE_RESET_VALUE ( 0x2 )
+#define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_SDMA_EMAC1_VALUE ( 0x2 )
+#define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_SDMA_EMAC1_VALUE_RESET_VALUE ( 0x2 )
+#define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_SDMA_EMAC2_VALUE ( 0x2 )
+#define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_SDMA_EMAC2_VALUE_RESET_VALUE ( 0x2 )
+#define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_SDMA_EMAC3_VALUE ( 0x2 )
+#define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_SDMA_EMAC3_VALUE_RESET_VALUE ( 0x2 )
+#define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_SDMA_EMAC4_VALUE ( 0x2 )
+#define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_SDMA_EMAC4_VALUE_RESET_VALUE ( 0x2 )
+#define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_SDMA_GPON_VALUE ( 0x4 )
+#define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_SDMA_GPON_VALUE_RESET_VALUE ( 0x4 )
+#define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_DMA_EMAC0_VALUE ( 0x5 )
+#define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_DMA_EMAC0_VALUE_RESET_VALUE ( 0x5 )
+#define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_DMA_EMAC1_VALUE ( 0x5 )
+#define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_DMA_EMAC1_VALUE_RESET_VALUE ( 0x5 )
+#define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_DMA_EMAC2_VALUE ( 0x5 )
+#define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_DMA_EMAC2_VALUE_RESET_VALUE ( 0x5 )
+#define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_DMA_EMAC3_VALUE ( 0x5 )
+#define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_DMA_EMAC3_VALUE_RESET_VALUE ( 0x5 )
+#define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_DMA_EMAC4_VALUE ( 0x5 )
+#define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_DMA_EMAC4_VALUE_RESET_VALUE ( 0x5 )
+#define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_DMA_GPON_VALUE ( 0xC )
+#define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_DEFAULT_DMA_GPON_VALUE_RESET_VALUE ( 0xC )
+#define DMA_REGS_CONFIG_U_THRESH_OUT_OF_U_MAX_VALUE ( 0x1F )
+#define DMA_REGS_CONFIG_U_THRESH_R1_R1_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_U_THRESH_R1_R1_VALUE_RESET_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_U_THRESH_INTO_U_MIN_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_SDMA_EMAC0_VALUE ( 0x3 )
+#define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_SDMA_EMAC0_VALUE_RESET_VALUE ( 0x3 )
+#define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_SDMA_EMAC1_VALUE ( 0x3 )
+#define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_SDMA_EMAC1_VALUE_RESET_VALUE ( 0x3 )
+#define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_SDMA_EMAC2_VALUE ( 0x3 )
+#define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_SDMA_EMAC2_VALUE_RESET_VALUE ( 0x3 )
+#define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_SDMA_EMAC3_VALUE ( 0x3 )
+#define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_SDMA_EMAC3_VALUE_RESET_VALUE ( 0x3 )
+#define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_SDMA_EMAC4_VALUE ( 0x3 )
+#define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_SDMA_EMAC4_VALUE_RESET_VALUE ( 0x3 )
+#define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_SDMA_GPON_VALUE ( 0x5 )
+#define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_SDMA_GPON_VALUE_RESET_VALUE ( 0x5 )
+#define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_DMA_EMAC0_VALUE ( 0x7 )
+#define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_DMA_EMAC0_VALUE_RESET_VALUE ( 0x7 )
+#define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_DMA_EMAC1_VALUE ( 0x7 )
+#define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_DMA_EMAC1_VALUE_RESET_VALUE ( 0x7 )
+#define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_DMA_EMAC2_VALUE ( 0x7 )
+#define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_DMA_EMAC2_VALUE_RESET_VALUE ( 0x7 )
+#define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_DMA_EMAC3_VALUE ( 0x7 )
+#define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_DMA_EMAC3_VALUE_RESET_VALUE ( 0x7 )
+#define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_DMA_EMAC4_VALUE ( 0x7 )
+#define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_DMA_EMAC4_VALUE_RESET_VALUE ( 0x7 )
+#define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_DMA_GPON_VALUE ( 0xE )
+#define DMA_REGS_CONFIG_U_THRESH_INTO_U_DEFAULT_DMA_GPON_VALUE_RESET_VALUE ( 0xE )
+#define DMA_REGS_CONFIG_U_THRESH_INTO_U_MAX_VALUE ( 0x1F )
+
+
+#define DMA_REGS_CONFIG_U_THRESH_OFFSET ( 0x00000034 )
+
+#define DMA_REGS_0_CONFIG_U_THRESH_ADDRESS ( DMA_REGS_0_CONFIG_ADDRESS + DMA_REGS_CONFIG_U_THRESH_OFFSET )
+#define DMA_REGS_0_CONFIG_U_THRESH_READ_I( r, i ) READ_I_32( ( DMA_REGS_0_CONFIG_U_THRESH_ADDRESS ), (i), (r) )
+#define DMA_REGS_0_CONFIG_U_THRESH_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_0_CONFIG_U_THRESH_ADDRESS ), (i), (v) )
+
+#define DMA_REGS_1_CONFIG_U_THRESH_ADDRESS ( DMA_REGS_1_CONFIG_ADDRESS + DMA_REGS_CONFIG_U_THRESH_OFFSET )
+#define DMA_REGS_1_CONFIG_U_THRESH_READ_I( r, i ) READ_I_32( ( DMA_REGS_1_CONFIG_U_THRESH_ADDRESS ), (i), (r) )
+#define DMA_REGS_1_CONFIG_U_THRESH_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_1_CONFIG_U_THRESH_ADDRESS ), (i), (v) )
+
+
+extern uint32_t DMA_REGS_CONFIG_U_THRESH_ARRAY [ ] ;
+
+#define DMA_REGS_CONFIG_U_THRESH_WRITE( i, k, v ) WRITE_I_32( DMA_REGS_CONFIG_U_THRESH_ARRAY [ i ], (k), (v) )
+#define DMA_REGS_CONFIG_U_THRESH_READ( i, k, r ) READ_I_32( DMA_REGS_CONFIG_U_THRESH_ARRAY [ i ], (k), (r) )
+
+#ifndef _BYTE_ORDER_LITTLE_ENDIAN_
+typedef struct
+{
+	/* reserved2 */
+	uint32_t	r2        	: 18 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+	/* out_of_urgent_threshold: must stay below into_u to create hysteresis (see block comment).
+	   NOTE(review): field is 6 bits but documented MAX_VALUE is 0x1F - confirm against HW spec. */
+	uint32_t	out_of_u  	: 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+	/* reserved1 */
+	uint32_t	r1        	: 2 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+	/* into_urgent_threshold: write-request count at which the peripheral becomes urgent */
+	uint32_t	into_u    	: 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+}
+__PACKING_ATTRIBUTE_STRUCT_END__
+DMA_REGS_CONFIG_U_THRESH ;
+#else
+/* Little-endian mirror of the layout above; generated comments trail the field they describe. */
+typedef struct
+{	uint32_t	into_u    	: 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+	/* into_urgent_threshold (field 'into_u' above) */
+	uint32_t	r1        	: 2 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+	/* reserved1 (field 'r1' above) */
+	uint32_t	out_of_u  	: 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+	/* out_of_urgent_threshold (field 'out_of_u' above): must stay below into_u */
+	uint32_t	r2        	: 18 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+	/* reserved2 (field 'r2' above) */
+}
+__PACKING_ATTRIBUTE_STRUCT_END__
+DMA_REGS_CONFIG_U_THRESH ;
+#endif
+
+/*****************************************************************************************/
+/* STRICT_PRIORITY */
+/* The arbitration between the requests of the different peripherals is done in two stag */
+/* es: 1. Strict priority - chooses the peripherals with the highest priority among all */
+/* peripherals who have a request pending. 2. Weighted Round-Robin between all peripher */
+/* als with the same priority. This array of registers allow configuration of the pri */
+/* ority of each peripheral (both rx and tx) in the following manner: There are 8 level */
+/* s of priorities, when each bit in the register represents a different level of priori */
+/* ty. One should assert the relevant bit according to the desired priority - For the */
+/* lowest - 00000001 For the highest - 10000000 */
+/*****************************************************************************************/
+
+#define DMA_REGS_CONFIG_PRI_R1_R2_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_PRI_R1_R2_VALUE_RESET_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_PRI_TXPRI_LOW_VALUE ( 0x1 )
+#define DMA_REGS_CONFIG_PRI_TXPRI_DEFAULT_TX_VALUE ( 0x80 )
+#define DMA_REGS_CONFIG_PRI_TXPRI_DEFAULT_TX_VALUE_RESET_VALUE ( 0x80 )
+#define DMA_REGS_CONFIG_PRI_TXPRI_HIGH_VALUE ( 0x80 )
+#define DMA_REGS_CONFIG_PRI_RXPRI_LOW_VALUE ( 0x1 )
+#define DMA_REGS_CONFIG_PRI_RXPRI_DEFAULT_RX_VALUE ( 0x80 )
+#define DMA_REGS_CONFIG_PRI_RXPRI_DEFAULT_RX_VALUE_RESET_VALUE ( 0x80 )
+#define DMA_REGS_CONFIG_PRI_RXPRI_HIGH_VALUE ( 0x80 )
+
+
+#define DMA_REGS_CONFIG_PRI_OFFSET ( 0x0000004C )
+
+#define DMA_REGS_0_CONFIG_PRI_ADDRESS ( DMA_REGS_0_CONFIG_ADDRESS + DMA_REGS_CONFIG_PRI_OFFSET )
+#define DMA_REGS_0_CONFIG_PRI_READ_I( r, i ) READ_I_32( ( DMA_REGS_0_CONFIG_PRI_ADDRESS ), (i), (r) )
+#define DMA_REGS_0_CONFIG_PRI_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_0_CONFIG_PRI_ADDRESS ), (i), (v) )
+
+#define DMA_REGS_1_CONFIG_PRI_ADDRESS ( DMA_REGS_1_CONFIG_ADDRESS + DMA_REGS_CONFIG_PRI_OFFSET )
+#define DMA_REGS_1_CONFIG_PRI_READ_I( r, i ) READ_I_32( ( DMA_REGS_1_CONFIG_PRI_ADDRESS ), (i), (r) )
+#define DMA_REGS_1_CONFIG_PRI_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_1_CONFIG_PRI_ADDRESS ), (i), (v) )
+
+
+extern uint32_t DMA_REGS_CONFIG_PRI_ARRAY [ ] ;
+
+#define DMA_REGS_CONFIG_PRI_WRITE( i, k, v ) WRITE_I_32( DMA_REGS_CONFIG_PRI_ARRAY [ i ], (k), (v) )
+#define DMA_REGS_CONFIG_PRI_READ( i, k, r ) READ_I_32( DMA_REGS_CONFIG_PRI_ARRAY [ i ], (k), (r) )
+
+#ifndef _BYTE_ORDER_LITTLE_ENDIAN_
+typedef struct
+{
+	/* reserved (comment said 'reserved2' but there is only one reserved field, 'r1') */
+	uint32_t	r1        	: 16 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+	/* priority_of_tx_side: one-hot priority level, 0x01 lowest .. 0x80 highest (see block comment) */
+	uint32_t	txpri     	: 8 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+	/* priority_of_rx_side: one-hot priority level, 0x01 lowest .. 0x80 highest */
+	uint32_t	rxpri     	: 8 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+}
+__PACKING_ATTRIBUTE_STRUCT_END__
+DMA_REGS_CONFIG_PRI ;
+#else
+/* Little-endian mirror of the layout above; generated comments trail the field they describe. */
+typedef struct
+{	uint32_t	rxpri     	: 8 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+	/* priority_of_rx_side (field 'rxpri' above) */
+	uint32_t	txpri     	: 8 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+	/* priority_of_tx_side (field 'txpri' above) */
+	uint32_t	r1        	: 16 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+	/* reserved (field 'r1' above) */
+}
+__PACKING_ATTRIBUTE_STRUCT_END__
+DMA_REGS_CONFIG_PRI ;
+#endif
+
+/*****************************************************************************************/
+/* WEIGHT_OF_ROUND_ROBIN */
+/* The second phase of the arbitration between requests is weighted round robin between */
+/* requests of peripherals with the same priority. This array of registers allow config */
+/* uration of the weight of each peripheral (rx and tx). The actual weight will be weight */
+/* + 1, meaning configuration of 0 is actual weight of 1. */
+/*****************************************************************************************/
+
+#define DMA_REGS_CONFIG_WEIGHT_R2_DEFAULT_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_WEIGHT_R2_DEFAULT_VALUE_RESET_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_WEIGHT_TXWEIGHT_MIN_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_WEIGHT_TXWEIGHT_MIN_VALUE_RESET_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_WEIGHT_TXWEIGHT_DEFAULT_SDMA_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_WEIGHT_TXWEIGHT_DEFAULT_SDMA_VALUE_RESET_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_WEIGHT_TXWEIGHT_DEFAULT_EMAC_DMA_VALUE ( 0x1 )
+#define DMA_REGS_CONFIG_WEIGHT_TXWEIGHT_DEFAULT_EMAC_DMA_VALUE_RESET_VALUE ( 0x1 )
+#define DMA_REGS_CONFIG_WEIGHT_TXWEIGHT_DEFAULT_GPON_DMA_VALUE ( 0x3 )
+#define DMA_REGS_CONFIG_WEIGHT_TXWEIGHT_DEFAULT_GPON_DMA_VALUE_RESET_VALUE ( 0x3 )
+#define DMA_REGS_CONFIG_WEIGHT_TXWEIGHT_MAX_VALUE ( 0x7 )
+#define DMA_REGS_CONFIG_WEIGHT_R1_DEFAULT_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_WEIGHT_R1_DEFAULT_VALUE_RESET_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_WEIGHT_RXWEIGHT_DEFAULT_EMACS_DMA_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_WEIGHT_RXWEIGHT_DEFAULT_EMACS_DMA_VALUE_RESET_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_WEIGHT_RXWEIGHT_MIN_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_WEIGHT_RXWEIGHT_MIN_VALUE_RESET_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_WEIGHT_RXWEIGHT_DEFAULT_EMACS_SDMA_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_WEIGHT_RXWEIGHT_DEFAULT_EMACS_SDMA_VALUE_RESET_VALUE ( 0x0 )
+#define DMA_REGS_CONFIG_WEIGHT_RXWEIGHT_DEFAULT_GPON_DMA_VALUE ( 0x1 )
+#define DMA_REGS_CONFIG_WEIGHT_RXWEIGHT_DEFAULT_GPON_DMA_VALUE_RESET_VALUE ( 0x1 )
+#define DMA_REGS_CONFIG_WEIGHT_RXWEIGHT_DEFAULT_GPON_SDMA_VALUE ( 0x1 )
+#define DMA_REGS_CONFIG_WEIGHT_RXWEIGHT_DEFAULT_GPON_SDMA_VALUE_RESET_VALUE ( 0x1 )
+#define DMA_REGS_CONFIG_WEIGHT_RXWEIGHT_MAX_VALUE ( 0x7 )
+
+
+#define DMA_REGS_CONFIG_WEIGHT_OFFSET ( 0x00000064 )
+
+#define DMA_REGS_0_CONFIG_WEIGHT_ADDRESS ( DMA_REGS_0_CONFIG_ADDRESS + DMA_REGS_CONFIG_WEIGHT_OFFSET )
+#define DMA_REGS_0_CONFIG_WEIGHT_READ_I( r, i ) READ_I_32( ( DMA_REGS_0_CONFIG_WEIGHT_ADDRESS ), (i), (r) )
+#define DMA_REGS_0_CONFIG_WEIGHT_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_0_CONFIG_WEIGHT_ADDRESS ), (i), (v) )
+
+#define DMA_REGS_1_CONFIG_WEIGHT_ADDRESS ( DMA_REGS_1_CONFIG_ADDRESS + DMA_REGS_CONFIG_WEIGHT_OFFSET )
+#define DMA_REGS_1_CONFIG_WEIGHT_READ_I( r, i ) READ_I_32( ( DMA_REGS_1_CONFIG_WEIGHT_ADDRESS ), (i), (r) )
+#define DMA_REGS_1_CONFIG_WEIGHT_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_1_CONFIG_WEIGHT_ADDRESS ), (i), (v) )
+
+
+extern uint32_t DMA_REGS_CONFIG_WEIGHT_ARRAY [ ] ;
+
+#define DMA_REGS_CONFIG_WEIGHT_WRITE( i, k, v ) WRITE_I_32( DMA_REGS_CONFIG_WEIGHT_ARRAY [ i ], (k), (v) )
+#define DMA_REGS_CONFIG_WEIGHT_READ( i, k, r ) READ_I_32( DMA_REGS_CONFIG_WEIGHT_ARRAY [ i ], (k), (r) )
+
+#ifndef _BYTE_ORDER_LITTLE_ENDIAN_
+typedef struct
+{
+	/* reserved */
+	uint32_t	r2        	: 21 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+	/* weight_of_tx_side: round-robin weight; actual weight is value + 1 (see block comment) */
+	uint32_t	txweight  	: 3 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+	/* reserved */
+	uint32_t	r1        	: 5 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+	/* weight_of_rx_side: round-robin weight; actual weight is value + 1 */
+	uint32_t	rxweight  	: 3 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+}
+__PACKING_ATTRIBUTE_STRUCT_END__
+DMA_REGS_CONFIG_WEIGHT ;
+#else
+/* Little-endian mirror of the layout above; generated comments trail the field they describe. */
+typedef struct
+{	uint32_t	rxweight  	: 3 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+	/* weight_of_rx_side (field 'rxweight' above): actual weight is value + 1 */
+	uint32_t	r1        	: 5 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+	/* reserved (field 'r1' above) */
+	uint32_t	txweight  	: 3 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+	/* weight_of_tx_side (field 'txweight' above): actual weight is value + 1 */
+	uint32_t	r2        	: 21 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+	/* reserved (field 'r2' above) */
+}
+__PACKING_ATTRIBUTE_STRUCT_END__
+DMA_REGS_CONFIG_WEIGHT ;
+#endif
+
+/*****************************************************************************************/
+/* BB_ROUTE_DMA_PERIPH */
+/* Broadbus route address from the DMA to the peripherals. Register per peripheral (rx a */
+/* nd tx). The route address is same for DMA and SDMA because of the symmetry of the BB */
+/* tree. */
+/*****************************************************************************************/
+
/* Per-peripheral Broadbus route addresses (tx in bits 7:1 of the upper
 * route field, rx in the lower field of the BB_ROUTE register below).
 * *_RESET_VALUE names give the hardware power-on default for each field. */
#define DMA_REGS_CONFIG_BB_ROUTE_R2_R2_VALUE ( 0x0 )
#define DMA_REGS_CONFIG_BB_ROUTE_R2_R2_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_CONFIG_BB_ROUTE_TXROUTE_DEFAULT_GPON_VALUE ( 0x11 )
#define DMA_REGS_CONFIG_BB_ROUTE_TXROUTE_DEFAULT_GPON_VALUE_RESET_VALUE ( 0x11 )
#define DMA_REGS_CONFIG_BB_ROUTE_TXROUTE_DEFAULT_EMAC3_VALUE ( 0x12 )
#define DMA_REGS_CONFIG_BB_ROUTE_TXROUTE_DEFAULT_EMAC3_VALUE_RESET_VALUE ( 0x12 )
#define DMA_REGS_CONFIG_BB_ROUTE_TXROUTE_DEFAULT_EMAC1_VALUE ( 0x16 )
#define DMA_REGS_CONFIG_BB_ROUTE_TXROUTE_DEFAULT_EMAC1_VALUE_RESET_VALUE ( 0x16 )
#define DMA_REGS_CONFIG_BB_ROUTE_TXROUTE_DEFAULT_EMAC4_VALUE ( 0x19 )
#define DMA_REGS_CONFIG_BB_ROUTE_TXROUTE_DEFAULT_EMAC4_VALUE_RESET_VALUE ( 0x19 )
#define DMA_REGS_CONFIG_BB_ROUTE_TXROUTE_DEFAULT_EMAC2_VALUE ( 0x1A )
#define DMA_REGS_CONFIG_BB_ROUTE_TXROUTE_DEFAULT_EMAC2_VALUE_RESET_VALUE ( 0x1A )
#define DMA_REGS_CONFIG_BB_ROUTE_TXROUTE_DEFAULT_EMAC0_VALUE ( 0x1E )
#define DMA_REGS_CONFIG_BB_ROUTE_TXROUTE_DEFAULT_EMAC0_VALUE_RESET_VALUE ( 0x1E )
#define DMA_REGS_CONFIG_BB_ROUTE_R1_R1_VALUE ( 0x0 )
#define DMA_REGS_CONFIG_BB_ROUTE_R1_R1_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_CONFIG_BB_ROUTE_RXROUTE_DEFAULT_GPON_VALUE ( 0x1 )
#define DMA_REGS_CONFIG_BB_ROUTE_RXROUTE_DEFAULT_GPON_VALUE_RESET_VALUE ( 0x1 )
#define DMA_REGS_CONFIG_BB_ROUTE_RXROUTE_DEFAULT_EMAC3_VALUE ( 0x2 )
#define DMA_REGS_CONFIG_BB_ROUTE_RXROUTE_DEFAULT_EMAC3_VALUE_RESET_VALUE ( 0x2 )
#define DMA_REGS_CONFIG_BB_ROUTE_RXROUTE_DEFAULT_EMAC1_VALUE ( 0x6 )
#define DMA_REGS_CONFIG_BB_ROUTE_RXROUTE_DEFAULT_EMAC1_VALUE_RESET_VALUE ( 0x6 )
#define DMA_REGS_CONFIG_BB_ROUTE_RXROUTE_DEFAULT_EMAC4_VALUE ( 0x9 )
#define DMA_REGS_CONFIG_BB_ROUTE_RXROUTE_DEFAULT_EMAC4_VALUE_RESET_VALUE ( 0x9 )
#define DMA_REGS_CONFIG_BB_ROUTE_RXROUTE_DEFAULT_EMAC2_VALUE ( 0xA )
#define DMA_REGS_CONFIG_BB_ROUTE_RXROUTE_DEFAULT_EMAC2_VALUE_RESET_VALUE ( 0xA )
#define DMA_REGS_CONFIG_BB_ROUTE_RXROUTE_DEFAULT_EMAC0_VALUE ( 0xE )
#define DMA_REGS_CONFIG_BB_ROUTE_RXROUTE_DEFAULT_EMAC0_VALUE_RESET_VALUE ( 0xE )


/* Byte offset of BB_ROUTE within each CONFIG function block. */
#define DMA_REGS_CONFIG_BB_ROUTE_OFFSET ( 0x0000007C )

/* Indexed accessors for DMA block 0; 'i' is the per-peripheral entry index. */
#define DMA_REGS_0_CONFIG_BB_ROUTE_ADDRESS ( DMA_REGS_0_CONFIG_ADDRESS + DMA_REGS_CONFIG_BB_ROUTE_OFFSET )
#define DMA_REGS_0_CONFIG_BB_ROUTE_READ_I( r, i ) READ_I_32( ( DMA_REGS_0_CONFIG_BB_ROUTE_ADDRESS ), (i), (r) )
#define DMA_REGS_0_CONFIG_BB_ROUTE_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_0_CONFIG_BB_ROUTE_ADDRESS ), (i), (v) )

/* Indexed accessors for DMA block 1. */
#define DMA_REGS_1_CONFIG_BB_ROUTE_ADDRESS ( DMA_REGS_1_CONFIG_ADDRESS + DMA_REGS_CONFIG_BB_ROUTE_OFFSET )
#define DMA_REGS_1_CONFIG_BB_ROUTE_READ_I( r, i ) READ_I_32( ( DMA_REGS_1_CONFIG_BB_ROUTE_ADDRESS ), (i), (r) )
#define DMA_REGS_1_CONFIG_BB_ROUTE_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_1_CONFIG_BB_ROUTE_ADDRESS ), (i), (v) )


/* Per-block base addresses of the BB_ROUTE register; defined in the
 * corresponding generated .c file. */
extern uint32_t DMA_REGS_CONFIG_BB_ROUTE_ARRAY [ ] ;

#define DMA_REGS_CONFIG_BB_ROUTE_WRITE( i, k, v ) WRITE_I_32( DMA_REGS_CONFIG_BB_ROUTE_ARRAY [ i ], (k), (v) )
#define DMA_REGS_CONFIG_BB_ROUTE_READ( i, k, r ) READ_I_32( DMA_REGS_CONFIG_BB_ROUTE_ARRAY [ i ], (k), (r) )
+
/* Bit layout of BB_ROUTE: 7-bit rx route (bits 6:0), 1 reserved bit,
 * 7-bit tx route (bits 14:8), top 17 bits reserved.  Field order is
 * mirrored between the endian variants so the packed image matches the
 * hardware register on either byte order. */
#ifndef _BYTE_ORDER_LITTLE_ENDIAN_
typedef struct
{
 /* reserved2 */
 uint32_t r2 : 17 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* bb_route_to_tx_side */
 uint32_t txroute : 7 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* reserved1 */
 uint32_t r1 : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* bb_route_to_rx_side */
 uint32_t rxroute : 7 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
}
__PACKING_ATTRIBUTE_STRUCT_END__
DMA_REGS_CONFIG_BB_ROUTE ;
#else
typedef struct
{ uint32_t rxroute : 7 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* bb_route_to_rx_side */
 uint32_t r1 : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* reserved1 */
 uint32_t txroute : 7 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* bb_route_to_tx_side */
 uint32_t r2 : 17 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* reserved2 */
}
__PACKING_ATTRIBUTE_STRUCT_END__
DMA_REGS_CONFIG_BB_ROUTE ;
#endif
+
+/*****************************************************************************************/
+/* POINTERS_RESET */
+/* Resets the pointers of the peripherals FIFOs within the DMA. Bit per peripheral side */
+/* (rx and tx). For rx side resets the data and CD FIFOs. For tx side resets the read */
+/* requests FIFO. */
+/*****************************************************************************************/
+
/* PTRRST field values: one reset bit per peripheral side (rx/tx);
 * writing 1 resets that side's FIFO pointers, 0 is the idle/off value. */
#define DMA_REGS_CONFIG_PTRRST_R1_DEFAULT_VALUE ( 0x0 )
#define DMA_REGS_CONFIG_PTRRST_R1_DEFAULT_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_CONFIG_PTRRST_GPONTX_OFF_VALUE ( 0x0 )
#define DMA_REGS_CONFIG_PTRRST_GPONTX_OFF_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_CONFIG_PTRRST_GPONTX_RESET_VALUE ( 0x1 )
#define DMA_REGS_CONFIG_PTRRST_GPONRX_OFF_VALUE ( 0x0 )
#define DMA_REGS_CONFIG_PTRRST_GPONRX_OFF_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_CONFIG_PTRRST_GPONRX_RESET_VALUE ( 0x1 )
#define DMA_REGS_CONFIG_PTRRST_ETH4TX_OFF_VALUE ( 0x0 )
#define DMA_REGS_CONFIG_PTRRST_ETH4TX_OFF_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_CONFIG_PTRRST_ETH4TX_RESET_VALUE ( 0x1 )
#define DMA_REGS_CONFIG_PTRRST_ETH4RX_OFF_VALUE ( 0x0 )
#define DMA_REGS_CONFIG_PTRRST_ETH4RX_OFF_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_CONFIG_PTRRST_ETH4RX_RESET_VALUE ( 0x1 )
#define DMA_REGS_CONFIG_PTRRST_ETH3TX_OFF_VALUE ( 0x0 )
#define DMA_REGS_CONFIG_PTRRST_ETH3TX_OFF_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_CONFIG_PTRRST_ETH3TX_RESET_VALUE ( 0x1 )
#define DMA_REGS_CONFIG_PTRRST_ETH3RX_OFF_VALUE ( 0x0 )
#define DMA_REGS_CONFIG_PTRRST_ETH3RX_OFF_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_CONFIG_PTRRST_ETH3RX_RESET_VALUE ( 0x1 )
#define DMA_REGS_CONFIG_PTRRST_ETH2TX_OFF_VALUE ( 0x0 )
#define DMA_REGS_CONFIG_PTRRST_ETH2TX_OFF_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_CONFIG_PTRRST_ETH2TX_RESET_VALUE ( 0x1 )
#define DMA_REGS_CONFIG_PTRRST_ETH2RX_OFF_VALUE ( 0x0 )
#define DMA_REGS_CONFIG_PTRRST_ETH2RX_OFF_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_CONFIG_PTRRST_ETH2RX_RESET_VALUE ( 0x1 )
#define DMA_REGS_CONFIG_PTRRST_ETH1TX_OFF_VALUE ( 0x0 )
#define DMA_REGS_CONFIG_PTRRST_ETH1TX_OFF_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_CONFIG_PTRRST_ETH1TX_RESET_VALUE ( 0x1 )
#define DMA_REGS_CONFIG_PTRRST_ETH1RX_OFF_VALUE ( 0x0 )
#define DMA_REGS_CONFIG_PTRRST_ETH1RX_OFF_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_CONFIG_PTRRST_ETH1RX_RESET_VALUE ( 0x1 )
#define DMA_REGS_CONFIG_PTRRST_ETH0TX_OFF_VALUE ( 0x0 )
#define DMA_REGS_CONFIG_PTRRST_ETH0TX_OFF_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_CONFIG_PTRRST_ETH0TX_RESET_VALUE ( 0x1 )
#define DMA_REGS_CONFIG_PTRRST_ETH0RX_OFF_VALUE ( 0x0 )
#define DMA_REGS_CONFIG_PTRRST_ETH0RX_OFF_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_CONFIG_PTRRST_ETH0RX_RESET_VALUE ( 0x1 )


/* Byte offset of PTRRST within each CONFIG function block. */
#define DMA_REGS_CONFIG_PTRRST_OFFSET ( 0x00000094 )

/* Whole-register accessors for DMA blocks 0 and 1. */
#define DMA_REGS_0_CONFIG_PTRRST_ADDRESS ( DMA_REGS_0_CONFIG_ADDRESS + DMA_REGS_CONFIG_PTRRST_OFFSET )
#define DMA_REGS_0_CONFIG_PTRRST_READ( r ) READ_32( ( DMA_REGS_0_CONFIG_PTRRST_ADDRESS ), (r) )
#define DMA_REGS_0_CONFIG_PTRRST_WRITE( v ) WRITE_32( ( DMA_REGS_0_CONFIG_PTRRST_ADDRESS ), (v) )

#define DMA_REGS_1_CONFIG_PTRRST_ADDRESS ( DMA_REGS_1_CONFIG_ADDRESS + DMA_REGS_CONFIG_PTRRST_OFFSET )
#define DMA_REGS_1_CONFIG_PTRRST_READ( r ) READ_32( ( DMA_REGS_1_CONFIG_PTRRST_ADDRESS ), (r) )
#define DMA_REGS_1_CONFIG_PTRRST_WRITE( v ) WRITE_32( ( DMA_REGS_1_CONFIG_PTRRST_ADDRESS ), (v) )


/* Per-block addresses of PTRRST; defined in the generated .c file. */
extern uint32_t DMA_REGS_CONFIG_PTRRST_ARRAY [ ] ;

#define DMA_REGS_CONFIG_PTRRST_WRITE( i, v ) WRITE_32( DMA_REGS_CONFIG_PTRRST_ARRAY [ i ], (v) )
#define DMA_REGS_CONFIG_PTRRST_READ( i, r ) READ_32( DMA_REGS_CONFIG_PTRRST_ARRAY [ i ], (r) )
+
/* Bit layout of PTRRST: twelve 1-bit per-peripheral reset flags in bits
 * 11:0 (eth0rx at bit 0 through gpontx at bit 11), upper 20 bits reserved.
 * Field order is mirrored between the endian variants so the packed image
 * matches the hardware register on either byte order. */
#ifndef _BYTE_ORDER_LITTLE_ENDIAN_
typedef struct
{
 /* reserved */
 uint32_t r1 : 20 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* gpon_tx_reset */
 uint32_t gpontx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* gpon_rx_reset */
 uint32_t gponrx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* ethernet_4_tx_reset */
 uint32_t eth4tx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* ethernet_4_rx_reset */
 uint32_t eth4rx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* ethernet_3_tx_reset */
 uint32_t eth3tx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* ethernet_3_rx_reset */
 uint32_t eth3rx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* ethernet_2_tx_reset */
 uint32_t eth2tx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* ethernet_2_rx_reset */
 uint32_t eth2rx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* ethernet_1_tx_reset */
 uint32_t eth1tx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* ethernet_1_rx_reset */
 uint32_t eth1rx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* ethernet_0_tx_reset */
 uint32_t eth0tx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* ethernet_0_rx_reset */
 uint32_t eth0rx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
}
__PACKING_ATTRIBUTE_STRUCT_END__
DMA_REGS_CONFIG_PTRRST ;
#else
typedef struct
{ uint32_t eth0rx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* ethernet_0_rx_reset */
 uint32_t eth0tx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* ethernet_0_tx_reset */
 uint32_t eth1rx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* ethernet_1_rx_reset */
 uint32_t eth1tx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* ethernet_1_tx_reset */
 uint32_t eth2rx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* ethernet_2_rx_reset */
 uint32_t eth2tx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* ethernet_2_tx_reset */
 uint32_t eth3rx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* ethernet_3_rx_reset */
 uint32_t eth3tx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* ethernet_3_tx_reset */
 uint32_t eth4rx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* ethernet_4_rx_reset */
 uint32_t eth4tx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* ethernet_4_tx_reset */
 uint32_t gponrx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* gpon_rx_reset */
 uint32_t gpontx : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* gpon_tx_reset */
 uint32_t r1 : 20 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* reserved */
}
__PACKING_ATTRIBUTE_STRUCT_END__
DMA_REGS_CONFIG_PTRRST ;
#endif
+
+/*****************************************************************************************/
+/* MEM_SEL */
+/* For DMA, there are 2 data memories for write data (upstream), Each client has a confi */
+/* gurable number of 128 bytes buffers in one of the memories (see MEMORY_ALLOCATION reg */
/* ister). The first memory has total of 64 buffers, while the second has 32 buffers. */
+/* This register configures in which one of the memories the clients buffers are located */
+/* (1 bit per client, 0 first memory, 1 second memory). The CD buffers will also be lo */
+/* cated accordingly. */
+/*****************************************************************************************/
+
/* MEM_SEL field values: one bit per client selecting which of the two
 * write-data memories holds its buffers (0 = first, 1 = second). */
#define DMA_REGS_CONFIG_MEM_SEL_R1_RESERVED_VALUE ( 0x0 )
#define DMA_REGS_CONFIG_MEM_SEL_R1_RESERVED_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_CONFIG_MEM_SEL_MEM_SEL_FIRST_VALUE ( 0x0 )
#define DMA_REGS_CONFIG_MEM_SEL_MEM_SEL_FIRST_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_CONFIG_MEM_SEL_MEM_SEL_SECOND_VALUE ( 0x1 )


/* Byte offset of MEM_SEL within each CONFIG function block. */
#define DMA_REGS_CONFIG_MEM_SEL_OFFSET ( 0x00000098 )

/* Whole-register accessors for DMA blocks 0 and 1. */
#define DMA_REGS_0_CONFIG_MEM_SEL_ADDRESS ( DMA_REGS_0_CONFIG_ADDRESS + DMA_REGS_CONFIG_MEM_SEL_OFFSET )
#define DMA_REGS_0_CONFIG_MEM_SEL_READ( r ) READ_32( ( DMA_REGS_0_CONFIG_MEM_SEL_ADDRESS ), (r) )
#define DMA_REGS_0_CONFIG_MEM_SEL_WRITE( v ) WRITE_32( ( DMA_REGS_0_CONFIG_MEM_SEL_ADDRESS ), (v) )

#define DMA_REGS_1_CONFIG_MEM_SEL_ADDRESS ( DMA_REGS_1_CONFIG_ADDRESS + DMA_REGS_CONFIG_MEM_SEL_OFFSET )
#define DMA_REGS_1_CONFIG_MEM_SEL_READ( r ) READ_32( ( DMA_REGS_1_CONFIG_MEM_SEL_ADDRESS ), (r) )
#define DMA_REGS_1_CONFIG_MEM_SEL_WRITE( v ) WRITE_32( ( DMA_REGS_1_CONFIG_MEM_SEL_ADDRESS ), (v) )


/* Per-block addresses of MEM_SEL; defined in the generated .c file. */
extern uint32_t DMA_REGS_CONFIG_MEM_SEL_ARRAY [ ] ;

#define DMA_REGS_CONFIG_MEM_SEL_WRITE( i, v ) WRITE_32( DMA_REGS_CONFIG_MEM_SEL_ARRAY [ i ], (v) )
#define DMA_REGS_CONFIG_MEM_SEL_READ( i, r ) READ_32( DMA_REGS_CONFIG_MEM_SEL_ARRAY [ i ], (r) )

/* Bit layout of MEM_SEL: 6-bit selection vector in bits 5:0 (one bit per
 * client), upper 26 bits reserved.  Field order is mirrored between the
 * endian variants. */
#ifndef _BYTE_ORDER_LITTLE_ENDIAN_
typedef struct
{
 /* reserved */
 uint32_t r1 : 26 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* mem_sel */
 uint32_t mem_sel : 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
}
__PACKING_ATTRIBUTE_STRUCT_END__
DMA_REGS_CONFIG_MEM_SEL ;
#else
typedef struct
{ uint32_t mem_sel : 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* mem_sel */
 uint32_t r1 : 26 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* reserved */
}
__PACKING_ATTRIBUTE_STRUCT_END__
DMA_REGS_CONFIG_MEM_SEL ;
#endif
+
+/*****************************************************************************************/
+/* NOT_EMPTY_VECTOR */
+/* Each peripheral, according to its source address, is represented in a bit on the not */
+/* empty vector. If the bit is asserted, the requests queue of the relevant peripheral */
+/* is not empty. The not empty vector is used by the DMA scheduler to determine which p */
+/* eripheral is the next to be served. */
+/*****************************************************************************************/
+
/* NEMPTY field values: for each peripheral, 0 = request queue empty,
 * 1 = not empty. */
#define DMA_REGS_DEBUG_NEMPTY_R1_R1_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_NEMPTY_R1_R1_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_NEMPTY_GPONTXNE_EMPTY_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_NEMPTY_GPONTXNE_EMPTY_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_NEMPTY_GPONTXNE_NOT_EMPTY_VALUE ( 0x1 )
#define DMA_REGS_DEBUG_NEMPTY_ETH4TXNE_EMPTY_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_NEMPTY_ETH4TXNE_EMPTY_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_NEMPTY_ETH4TXNE_NOT_EMPTY_VALUE ( 0x1 )
#define DMA_REGS_DEBUG_NEMPTY_ETH3TXNE_EMPTY_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_NEMPTY_ETH3TXNE_EMPTY_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_NEMPTY_ETH3TXNE_NOT_EMPTY_VALUE ( 0x1 )
#define DMA_REGS_DEBUG_NEMPTY_ETH2TXNE_EMPTY_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_NEMPTY_ETH2TXNE_EMPTY_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_NEMPTY_ETH2TXNE_NOT_EMPTY_VALUE ( 0x1 )
#define DMA_REGS_DEBUG_NEMPTY_ETH1TXNE_EMPTY_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_NEMPTY_ETH1TXNE_EMPTY_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_NEMPTY_ETH1TXNE_NOT_EMPTY_VALUE ( 0x1 )
#define DMA_REGS_DEBUG_NEMPTY_ETH0TXNE_EMPTY_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_NEMPTY_ETH0TXNE_EMPTY_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_NEMPTY_ETH0TXNE_NOT_EMPTY_VALUE ( 0x1 )
#define DMA_REGS_DEBUG_NEMPTY_GPONRXNE_EMPTY_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_NEMPTY_GPONRXNE_EMPTY_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_NEMPTY_GPONRXNE_NOT_EMPTY_VALUE ( 0x1 )
#define DMA_REGS_DEBUG_NEMPTY_ETH4RXNE_EMPTY_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_NEMPTY_ETH4RXNE_EMPTY_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_NEMPTY_ETH4RXNE_NOT_EMPTY_VALUE ( 0x1 )
#define DMA_REGS_DEBUG_NEMPTY_ETH3RXNE_EMPTY_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_NEMPTY_ETH3RXNE_EMPTY_VALUE_RESET_VALUE ( 0x0 )
/* Generator typo ("NOR" for "NOT"); kept so existing callers still build. */
#define DMA_REGS_DEBUG_NEMPTY_ETH3RXNE_NOR_EMPTY_VALUE ( 0x1 )
/* Correctly spelled alias, consistent with the other *_NOT_EMPTY_VALUE names. */
#define DMA_REGS_DEBUG_NEMPTY_ETH3RXNE_NOT_EMPTY_VALUE ( 0x1 )
#define DMA_REGS_DEBUG_NEMPTY_ETH2RXNE_EMPTY_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_NEMPTY_ETH2RXNE_EMPTY_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_NEMPTY_ETH2RXNE_NOT_EMPTY_VALUE ( 0x1 )
#define DMA_REGS_DEBUG_NEMPTY_ETH1RXNE_EMPTY_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_NEMPTY_ETH1RXNE_EMPTY_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_NEMPTY_ETH1RXNE_NOT_EMPTY_VALUE ( 0x1 )
#define DMA_REGS_DEBUG_NEMPTY_ETH0RXNE_EMPTY_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_NEMPTY_ETH0RXNE_EMPTY_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_NEMPTY_ETH0RXNE_NOT_EMPTY_VALUE ( 0x1 )
+
+
/* Byte offset of NEMPTY within each DEBUG function block. */
#define DMA_REGS_DEBUG_NEMPTY_OFFSET ( 0x00000000 )

/* Whole-register accessors for DMA blocks 0 and 1. */
#define DMA_REGS_0_DEBUG_NEMPTY_ADDRESS ( DMA_REGS_0_DEBUG_ADDRESS + DMA_REGS_DEBUG_NEMPTY_OFFSET )
#define DMA_REGS_0_DEBUG_NEMPTY_READ( r ) READ_32( ( DMA_REGS_0_DEBUG_NEMPTY_ADDRESS ), (r) )
#define DMA_REGS_0_DEBUG_NEMPTY_WRITE( v ) WRITE_32( ( DMA_REGS_0_DEBUG_NEMPTY_ADDRESS ), (v) )

#define DMA_REGS_1_DEBUG_NEMPTY_ADDRESS ( DMA_REGS_1_DEBUG_ADDRESS + DMA_REGS_DEBUG_NEMPTY_OFFSET )
#define DMA_REGS_1_DEBUG_NEMPTY_READ( r ) READ_32( ( DMA_REGS_1_DEBUG_NEMPTY_ADDRESS ), (r) )
#define DMA_REGS_1_DEBUG_NEMPTY_WRITE( v ) WRITE_32( ( DMA_REGS_1_DEBUG_NEMPTY_ADDRESS ), (v) )


/* Per-block addresses of NEMPTY; defined in the generated .c file. */
extern uint32_t DMA_REGS_DEBUG_NEMPTY_ARRAY [ ] ;

#define DMA_REGS_DEBUG_NEMPTY_WRITE( i, v ) WRITE_32( DMA_REGS_DEBUG_NEMPTY_ARRAY [ i ], (v) )
#define DMA_REGS_DEBUG_NEMPTY_READ( i, r ) READ_32( DMA_REGS_DEBUG_NEMPTY_ARRAY [ i ], (r) )
+
/* Bit layout of NEMPTY: twelve 1-bit per-peripheral not-empty flags in
 * bits 11:0 (eth0rxne at bit 0 through gpontxne at bit 11), upper 20 bits
 * reserved.  Field order is mirrored between the endian variants so the
 * packed image matches the hardware register on either byte order. */
#ifndef _BYTE_ORDER_LITTLE_ENDIAN_
typedef struct
{
 /* reserved1 */
 uint32_t r1 : 20 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* GPON_TX_not_empty_indications */
 uint32_t gpontxne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* Ethernet4_TX_not_empty_indications */
 uint32_t eth4txne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* Ethernet3_TX_not_empty_indications */
 uint32_t eth3txne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* Ethernet2_TX_not_empty_indications */
 uint32_t eth2txne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* Ethernet1_TX_not_empty_indications */
 uint32_t eth1txne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* Ethernet0_TX_not_empty_indications */
 uint32_t eth0txne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* GPON_RX_not_empty_indications */
 uint32_t gponrxne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* Ethernet4_RX_not_empty_indications */
 uint32_t eth4rxne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* Ethernet3_RX_not_empty_indications */
 uint32_t eth3rxne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* Ethernet2_RX_not_empty_indications */
 uint32_t eth2rxne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* Ethernet1_RX_not_empty_indications */
 uint32_t eth1rxne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* Ethernet0_RX_not_empty_indications */
 uint32_t eth0rxne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
}
__PACKING_ATTRIBUTE_STRUCT_END__
DMA_REGS_DEBUG_NEMPTY ;
#else
typedef struct
{ uint32_t eth0rxne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* Ethernet0_RX_not_empty_indications */
 uint32_t eth1rxne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* Ethernet1_RX_not_empty_indications */
 uint32_t eth2rxne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* Ethernet2_RX_not_empty_indications */
 uint32_t eth3rxne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* Ethernet3_RX_not_empty_indications */
 uint32_t eth4rxne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* Ethernet4_RX_not_empty_indications */
 uint32_t gponrxne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* GPON_RX_not_empty_indications */
 uint32_t eth0txne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* Ethernet0_TX_not_empty_indications */
 uint32_t eth1txne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* Ethernet1_TX_not_empty_indications */
 uint32_t eth2txne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* Ethernet2_TX_not_empty_indications */
 uint32_t eth3txne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* Ethernet3_TX_not_empty_indications */
 uint32_t eth4txne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* Ethernet4_TX_not_empty_indications */
 uint32_t gpontxne : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* GPON_TX_not_empty_indications */
 uint32_t r1 : 20 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* reserved1 */
}
__PACKING_ATTRIBUTE_STRUCT_END__
DMA_REGS_DEBUG_NEMPTY ;
#endif
+
+/*****************************************************************************************/
+/* URGENT_VECTOR */
+/* Each peripheral, according to its source address, is represented in a bit on the urge */
+/* nt vector. If the bit is asserted, the requests queue of the relevant peripheral is */
+/* in urgent state. The urgent vector is used by the DMA scheduler to determine which p */
+/* eripheral is the next to be served. */
+/*****************************************************************************************/
+
/* URGNT field values: for each peripheral, 0 = queue not urgent,
 * 1 = queue in urgent state. */
#define DMA_REGS_DEBUG_URGNT_R1_R1_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_URGNT_R1_R1_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_URGNT_GPONTXU_NOT_URGENT_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_URGNT_GPONTXU_NOT_URGENT_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_URGNT_GPONTXU_URGENT_VALUE ( 0x1 )
#define DMA_REGS_DEBUG_URGNT_ETH4TXU_NOT_URGENT_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_URGNT_ETH4TXU_NOT_URGENT_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_URGNT_ETH4TXU_URGENT_VALUE ( 0x1 )
#define DMA_REGS_DEBUG_URGNT_ETH3TXU_NOT_URGENT_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_URGNT_ETH3TXU_NOT_URGENT_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_URGNT_ETH3TXU_URGENT_VALUE ( 0x1 )
#define DMA_REGS_DEBUG_URGNT_ETH2TXU_NOT_URGENT_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_URGNT_ETH2TXU_NOT_URGENT_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_URGNT_ETH2TXU_URGENT_VALUE ( 0x1 )
#define DMA_REGS_DEBUG_URGNT_ETH1TXU_NOT_URGENT_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_URGNT_ETH1TXU_NOT_URGENT_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_URGNT_ETH1TXU_URGENT_VALUE ( 0x1 )
#define DMA_REGS_DEBUG_URGNT_ETH0TXU_NOT_URGENT_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_URGNT_ETH0TXU_NOT_URGENT_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_URGNT_ETH0TXU_URGENT_VALUE ( 0x1 )
#define DMA_REGS_DEBUG_URGNT_GPONRXU_NOT_URGENT_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_URGNT_GPONRXU_NOT_URGENT_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_URGNT_GPONRXU_URGENT_VALUE ( 0x1 )
#define DMA_REGS_DEBUG_URGNT_ETH4RXU_NOT_URGENT_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_URGNT_ETH4RXU_NOT_URGENT_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_URGNT_ETH4RXU_URGENT_VALUE ( 0x1 )
#define DMA_REGS_DEBUG_URGNT_ETH3RXU_NOT_URGENT_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_URGNT_ETH3RXU_NOT_URGENT_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_URGNT_ETH3RXU_URGENT_VALUE ( 0x1 )
#define DMA_REGS_DEBUG_URGNT_ETH2RXU_NOT_URGENT_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_URGNT_ETH2RXU_NOT_URGENT_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_URGNT_ETH2RXU_URGENT_VALUE ( 0x1 )
#define DMA_REGS_DEBUG_URGNT_ETH1RXU_NOT_URGENT_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_URGNT_ETH1RXU_NOT_URGENT_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_URGNT_ETH1RXU_URGENT_VALUE ( 0x1 )
#define DMA_REGS_DEBUG_URGNT_ETH0RXU_NOT_URGENT_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_URGNT_ETH0RXU_NOT_URGENT_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_URGNT_ETH0RXU_URGENT_VALUE ( 0x1 )


/* Byte offset of URGNT within each DEBUG function block. */
#define DMA_REGS_DEBUG_URGNT_OFFSET ( 0x00000004 )

/* Whole-register accessors for DMA blocks 0 and 1. */
#define DMA_REGS_0_DEBUG_URGNT_ADDRESS ( DMA_REGS_0_DEBUG_ADDRESS + DMA_REGS_DEBUG_URGNT_OFFSET )
#define DMA_REGS_0_DEBUG_URGNT_READ( r ) READ_32( ( DMA_REGS_0_DEBUG_URGNT_ADDRESS ), (r) )
#define DMA_REGS_0_DEBUG_URGNT_WRITE( v ) WRITE_32( ( DMA_REGS_0_DEBUG_URGNT_ADDRESS ), (v) )

#define DMA_REGS_1_DEBUG_URGNT_ADDRESS ( DMA_REGS_1_DEBUG_ADDRESS + DMA_REGS_DEBUG_URGNT_OFFSET )
#define DMA_REGS_1_DEBUG_URGNT_READ( r ) READ_32( ( DMA_REGS_1_DEBUG_URGNT_ADDRESS ), (r) )
#define DMA_REGS_1_DEBUG_URGNT_WRITE( v ) WRITE_32( ( DMA_REGS_1_DEBUG_URGNT_ADDRESS ), (v) )


/* Per-block addresses of URGNT; defined in the generated .c file. */
extern uint32_t DMA_REGS_DEBUG_URGNT_ARRAY [ ] ;

#define DMA_REGS_DEBUG_URGNT_WRITE( i, v ) WRITE_32( DMA_REGS_DEBUG_URGNT_ARRAY [ i ], (v) )
#define DMA_REGS_DEBUG_URGNT_READ( i, r ) READ_32( DMA_REGS_DEBUG_URGNT_ARRAY [ i ], (r) )
+
/* Bit layout of URGNT: twelve 1-bit per-peripheral urgency flags in bits
 * 11:0 (eth0rxu at bit 0 through gpontxu at bit 11), upper 20 bits
 * reserved.  Field order is mirrored between the endian variants so the
 * packed image matches the hardware register on either byte order. */
#ifndef _BYTE_ORDER_LITTLE_ENDIAN_
typedef struct
{
 /* reserved1 */
 uint32_t r1 : 20 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* GPON_TX_urgent_indication */
 uint32_t gpontxu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* Ethernet4_TX_urgent_indication */
 uint32_t eth4txu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* Ethernet3_TX_urgent_indication */
 uint32_t eth3txu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* Ethernet2_TX_urgent_indication */
 uint32_t eth2txu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* Ethernet1_TX_urgent_indication */
 uint32_t eth1txu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* Ethernet0_TX_urgent_indication */
 uint32_t eth0txu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* GPON_RX_urgent_indication */
 uint32_t gponrxu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* Ethernet4_RX_urgent_indication */
 uint32_t eth4rxu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* Ethernet3_RX_urgent_indication */
 uint32_t eth3rxu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* Ethernet2_RX_urgent_indication */
 uint32_t eth2rxu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* Ethernet1_RX_urgent_indication */
 uint32_t eth1rxu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* Ethernet0_RX_urgent_indication */
 uint32_t eth0rxu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
}
__PACKING_ATTRIBUTE_STRUCT_END__
DMA_REGS_DEBUG_URGNT ;
#else
typedef struct
{ uint32_t eth0rxu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* Ethernet0_RX_urgent_indication */
 uint32_t eth1rxu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* Ethernet1_RX_urgent_indication */
 uint32_t eth2rxu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* Ethernet2_RX_urgent_indication */
 uint32_t eth3rxu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* Ethernet3_RX_urgent_indication */
 uint32_t eth4rxu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* Ethernet4_RX_urgent_indication */
 uint32_t gponrxu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* GPON_RX_urgent_indication */
 uint32_t eth0txu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* Ethernet0_TX_urgent_indication */
 uint32_t eth1txu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* Ethernet1_TX_urgent_indication */
 uint32_t eth2txu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* Ethernet2_TX_urgent_indication */
 uint32_t eth3txu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* Ethernet3_TX_urgent_indication */
 uint32_t eth4txu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* Ethernet4_TX_urgent_indication */
 uint32_t gpontxu : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* GPON_TX_urgent_indication */
 uint32_t r1 : 20 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* reserved1 */
}
__PACKING_ATTRIBUTE_STRUCT_END__
DMA_REGS_DEBUG_URGNT ;
#endif
+
+/*****************************************************************************************/
+/* SELECTED_SOURCE_NUM */
/* The decision of the DMA scheduler, and the next peripheral to be served, represented */
/* by its source address */
+/*****************************************************************************************/
+
/* SELSRC field values: 5-bit source address of the peripheral the
 * scheduler selected; 0x1F is the "no selection" default. */
#define DMA_REGS_DEBUG_SELSRC_R1_R1_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_SELSRC_R1_R1_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_SELSRC_SEL_SRC_ETH0_RX_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_SELSRC_SEL_SRC_ETH0_RX_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_SELSRC_SEL_SRC_ETH1_RX_VALUE ( 0x1 )
#define DMA_REGS_DEBUG_SELSRC_SEL_SRC_ETH2_RX_VALUE ( 0x2 )
#define DMA_REGS_DEBUG_SELSRC_SEL_SRC_ETH3_RX_VALUE ( 0x3 )
#define DMA_REGS_DEBUG_SELSRC_SEL_SRC_ETH4_RX_VALUE ( 0x4 )
#define DMA_REGS_DEBUG_SELSRC_SEL_SRC_GPON_RX_VALUE ( 0x5 )
#define DMA_REGS_DEBUG_SELSRC_SEL_SRC_ETH0_TX_VALUE ( 0x8 )
#define DMA_REGS_DEBUG_SELSRC_SEL_SRC_ETH1_TX_VALUE ( 0x9 )
#define DMA_REGS_DEBUG_SELSRC_SEL_SRC_ETH2_TX_VALUE ( 0xA )
#define DMA_REGS_DEBUG_SELSRC_SEL_SRC_ETH3_TX_VALUE ( 0xB )
#define DMA_REGS_DEBUG_SELSRC_SEL_SRC_ETH4_TX_VALUE ( 0xC )
#define DMA_REGS_DEBUG_SELSRC_SEL_SRC_GPON_TX_VALUE ( 0xD )
#define DMA_REGS_DEBUG_SELSRC_SEL_SRC_DEFAULT_VALUE ( 0x1F )
#define DMA_REGS_DEBUG_SELSRC_SEL_SRC_DEFAULT_VALUE_RESET_VALUE ( 0x1F )


/* Byte offset of SELSRC within each DEBUG function block. */
#define DMA_REGS_DEBUG_SELSRC_OFFSET ( 0x00000008 )

/* Whole-register accessors for DMA blocks 0 and 1. */
#define DMA_REGS_0_DEBUG_SELSRC_ADDRESS ( DMA_REGS_0_DEBUG_ADDRESS + DMA_REGS_DEBUG_SELSRC_OFFSET )
#define DMA_REGS_0_DEBUG_SELSRC_READ( r ) READ_32( ( DMA_REGS_0_DEBUG_SELSRC_ADDRESS ), (r) )
#define DMA_REGS_0_DEBUG_SELSRC_WRITE( v ) WRITE_32( ( DMA_REGS_0_DEBUG_SELSRC_ADDRESS ), (v) )

#define DMA_REGS_1_DEBUG_SELSRC_ADDRESS ( DMA_REGS_1_DEBUG_ADDRESS + DMA_REGS_DEBUG_SELSRC_OFFSET )
#define DMA_REGS_1_DEBUG_SELSRC_READ( r ) READ_32( ( DMA_REGS_1_DEBUG_SELSRC_ADDRESS ), (r) )
#define DMA_REGS_1_DEBUG_SELSRC_WRITE( v ) WRITE_32( ( DMA_REGS_1_DEBUG_SELSRC_ADDRESS ), (v) )


/* Per-block addresses of SELSRC; defined in the generated .c file. */
extern uint32_t DMA_REGS_DEBUG_SELSRC_ARRAY [ ] ;

#define DMA_REGS_DEBUG_SELSRC_WRITE( i, v ) WRITE_32( DMA_REGS_DEBUG_SELSRC_ARRAY [ i ], (v) )
#define DMA_REGS_DEBUG_SELSRC_READ( i, r ) READ_32( DMA_REGS_DEBUG_SELSRC_ARRAY [ i ], (r) )

/* Bit layout of SELSRC: 5-bit selected source in bits 4:0, upper 27 bits
 * reserved.  Field order is mirrored between the endian variants. */
#ifndef _BYTE_ORDER_LITTLE_ENDIAN_
typedef struct
{
 /* reserved1 */
 uint32_t r1 : 27 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* selected_source */
 uint32_t sel_src : 5 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
}
__PACKING_ATTRIBUTE_STRUCT_END__
DMA_REGS_DEBUG_SELSRC ;
#else
typedef struct
{ uint32_t sel_src : 5 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* selected_source */
 uint32_t r1 : 27 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* reserved1 */
}
__PACKING_ATTRIBUTE_STRUCT_END__
DMA_REGS_DEBUG_SELSRC ;
#endif
+
+/*****************************************************************************************/
+/* REQUEST_COUNTERS_RX */
+/* the number of write requests currently pending for each rx peripheral. */
+/*****************************************************************************************/
+
/* REQ_CNT_RX field values: pending write-request count per rx peripheral,
 * range 0..0x20. */
#define DMA_REGS_DEBUG_REQ_CNT_RX_R1_R1_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_REQ_CNT_RX_R1_R1_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_REQ_CNT_RX_REQ_CNT_MIN_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_REQ_CNT_RX_REQ_CNT_MIN_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_REQ_CNT_RX_REQ_CNT_MAX_VALUE ( 0x20 )


/* Byte offset of REQ_CNT_RX within each DEBUG function block. */
#define DMA_REGS_DEBUG_REQ_CNT_RX_OFFSET ( 0x0000000C )

/* Indexed accessors for DMA blocks 0 and 1; 'i' is the peripheral entry. */
#define DMA_REGS_0_DEBUG_REQ_CNT_RX_ADDRESS ( DMA_REGS_0_DEBUG_ADDRESS + DMA_REGS_DEBUG_REQ_CNT_RX_OFFSET )
#define DMA_REGS_0_DEBUG_REQ_CNT_RX_READ_I( r, i ) READ_I_32( ( DMA_REGS_0_DEBUG_REQ_CNT_RX_ADDRESS ), (i), (r) )
#define DMA_REGS_0_DEBUG_REQ_CNT_RX_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_0_DEBUG_REQ_CNT_RX_ADDRESS ), (i), (v) )

#define DMA_REGS_1_DEBUG_REQ_CNT_RX_ADDRESS ( DMA_REGS_1_DEBUG_ADDRESS + DMA_REGS_DEBUG_REQ_CNT_RX_OFFSET )
#define DMA_REGS_1_DEBUG_REQ_CNT_RX_READ_I( r, i ) READ_I_32( ( DMA_REGS_1_DEBUG_REQ_CNT_RX_ADDRESS ), (i), (r) )
#define DMA_REGS_1_DEBUG_REQ_CNT_RX_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_1_DEBUG_REQ_CNT_RX_ADDRESS ), (i), (v) )


/* Per-block addresses of REQ_CNT_RX; defined in the generated .c file. */
extern uint32_t DMA_REGS_DEBUG_REQ_CNT_RX_ARRAY [ ] ;

#define DMA_REGS_DEBUG_REQ_CNT_RX_WRITE( i, k, v ) WRITE_I_32( DMA_REGS_DEBUG_REQ_CNT_RX_ARRAY [ i ], (k), (v) )
#define DMA_REGS_DEBUG_REQ_CNT_RX_READ( i, k, r ) READ_I_32( DMA_REGS_DEBUG_REQ_CNT_RX_ARRAY [ i ], (k), (r) )

/* Bit layout of REQ_CNT_RX: 6-bit counter in bits 5:0, upper 26 bits
 * reserved.  Field order is mirrored between the endian variants. */
#ifndef _BYTE_ORDER_LITTLE_ENDIAN_
typedef struct
{
 /* reserved1 */
 uint32_t r1 : 26 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* write_requests_counter */
 uint32_t req_cnt : 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
}
__PACKING_ATTRIBUTE_STRUCT_END__
DMA_REGS_DEBUG_REQ_CNT_RX ;
#else
typedef struct
{ uint32_t req_cnt : 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* write_requests_counter */
 uint32_t r1 : 26 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* reserved1 */
}
__PACKING_ATTRIBUTE_STRUCT_END__
DMA_REGS_DEBUG_REQ_CNT_RX ;
#endif
+
+/*****************************************************************************************/
+/* REQUEST_COUNTERS_TX */
/* the number of read requests currently pending for each TX peripheral. */
+/*****************************************************************************************/
+
/* REQ_CNT_TX field values: pending read-request count per tx peripheral,
 * range 0..0x8. */
#define DMA_REGS_DEBUG_REQ_CNT_TX_R1_R3_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_REQ_CNT_TX_R1_R3_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_REQ_CNT_TX_REQ_CNT_MIN_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_REQ_CNT_TX_REQ_CNT_MIN_VALUE_RESET_VALUE ( 0x0 )
#define DMA_REGS_DEBUG_REQ_CNT_TX_REQ_CNT_MAX_VALUE ( 0x8 )


/* Byte offset of REQ_CNT_TX within each DEBUG function block. */
#define DMA_REGS_DEBUG_REQ_CNT_TX_OFFSET ( 0x00000024 )

/* Indexed accessors for DMA blocks 0 and 1; 'i' is the peripheral entry. */
#define DMA_REGS_0_DEBUG_REQ_CNT_TX_ADDRESS ( DMA_REGS_0_DEBUG_ADDRESS + DMA_REGS_DEBUG_REQ_CNT_TX_OFFSET )
#define DMA_REGS_0_DEBUG_REQ_CNT_TX_READ_I( r, i ) READ_I_32( ( DMA_REGS_0_DEBUG_REQ_CNT_TX_ADDRESS ), (i), (r) )
#define DMA_REGS_0_DEBUG_REQ_CNT_TX_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_0_DEBUG_REQ_CNT_TX_ADDRESS ), (i), (v) )

#define DMA_REGS_1_DEBUG_REQ_CNT_TX_ADDRESS ( DMA_REGS_1_DEBUG_ADDRESS + DMA_REGS_DEBUG_REQ_CNT_TX_OFFSET )
#define DMA_REGS_1_DEBUG_REQ_CNT_TX_READ_I( r, i ) READ_I_32( ( DMA_REGS_1_DEBUG_REQ_CNT_TX_ADDRESS ), (i), (r) )
#define DMA_REGS_1_DEBUG_REQ_CNT_TX_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_1_DEBUG_REQ_CNT_TX_ADDRESS ), (i), (v) )


/* Per-block addresses of REQ_CNT_TX; defined in the generated .c file. */
extern uint32_t DMA_REGS_DEBUG_REQ_CNT_TX_ARRAY [ ] ;

#define DMA_REGS_DEBUG_REQ_CNT_TX_WRITE( i, k, v ) WRITE_I_32( DMA_REGS_DEBUG_REQ_CNT_TX_ARRAY [ i ], (k), (v) )
#define DMA_REGS_DEBUG_REQ_CNT_TX_READ( i, k, r ) READ_I_32( DMA_REGS_DEBUG_REQ_CNT_TX_ARRAY [ i ], (k), (r) )

/* Bit layout of REQ_CNT_TX: 4-bit counter in bits 3:0, upper 28 bits
 * reserved.  Field order is mirrored between the endian variants. */
#ifndef _BYTE_ORDER_LITTLE_ENDIAN_
typedef struct
{
 /* reserved */
 uint32_t r1 : 28 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* read_requests_counter */
 uint32_t req_cnt : 4 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
}
__PACKING_ATTRIBUTE_STRUCT_END__
DMA_REGS_DEBUG_REQ_CNT_TX ;
#else
typedef struct
{ uint32_t req_cnt : 4 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* read_requests_counter */
 uint32_t r1 : 28 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;

 /* reserved */
}
__PACKING_ATTRIBUTE_STRUCT_END__
DMA_REGS_DEBUG_REQ_CNT_TX ;
#endif
+
+/*****************************************************************************************/
+/* ACC_REQUEST_COUNTERS_RX */
+/* the accumulated number of write requests served so far for each peripheral. Wrap arou */
+/* nd on max value, not read clear. */
+/*****************************************************************************************/
+
+/* Field constants for the 32-bit accumulated write-request counter. */
+#define DMA_REGS_DEBUG_REQ_CNT_RX_ACC_REQ_CNT_CNT_VALUE ( 0x0 )
+#define DMA_REGS_DEBUG_REQ_CNT_RX_ACC_REQ_CNT_CNT_VALUE_RESET_VALUE ( 0x0 )
+
+
+/* Byte offset of this register array inside the DEBUG function of each DMA module. */
+#define DMA_REGS_DEBUG_REQ_CNT_RX_ACC_OFFSET ( 0x0000003C )
+
+/* DMA module 0 accessors; 'i' is the register entry index, 'r'/'v' the 32-bit data. */
+#define DMA_REGS_0_DEBUG_REQ_CNT_RX_ACC_ADDRESS ( DMA_REGS_0_DEBUG_ADDRESS + DMA_REGS_DEBUG_REQ_CNT_RX_ACC_OFFSET )
+#define DMA_REGS_0_DEBUG_REQ_CNT_RX_ACC_READ_I( r, i ) READ_I_32( ( DMA_REGS_0_DEBUG_REQ_CNT_RX_ACC_ADDRESS ), (i), (r) )
+#define DMA_REGS_0_DEBUG_REQ_CNT_RX_ACC_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_0_DEBUG_REQ_CNT_RX_ACC_ADDRESS ), (i), (v) )
+
+/* DMA module 1 accessors. */
+#define DMA_REGS_1_DEBUG_REQ_CNT_RX_ACC_ADDRESS ( DMA_REGS_1_DEBUG_ADDRESS + DMA_REGS_DEBUG_REQ_CNT_RX_ACC_OFFSET )
+#define DMA_REGS_1_DEBUG_REQ_CNT_RX_ACC_READ_I( r, i ) READ_I_32( ( DMA_REGS_1_DEBUG_REQ_CNT_RX_ACC_ADDRESS ), (i), (r) )
+#define DMA_REGS_1_DEBUG_REQ_CNT_RX_ACC_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_1_DEBUG_REQ_CNT_RX_ACC_ADDRESS ), (i), (v) )
+
+
+/* Per-module base-address table, indexed by DMA module number. */
+extern uint32_t DMA_REGS_DEBUG_REQ_CNT_RX_ACC_ARRAY [ ] ;
+
+/* Module-indexed accessors: 'i' selects the DMA module, 'k' the register entry.
+   The array subscript is parenthesized so expression arguments expand safely. */
+#define DMA_REGS_DEBUG_REQ_CNT_RX_ACC_WRITE( i, k, v ) WRITE_I_32( DMA_REGS_DEBUG_REQ_CNT_RX_ACC_ARRAY [ (i) ], (k), (v) )
+#define DMA_REGS_DEBUG_REQ_CNT_RX_ACC_READ( i, k, r ) READ_I_32( DMA_REGS_DEBUG_REQ_CNT_RX_ACC_ARRAY [ (i) ], (k), (r) )
+
+#ifndef _BYTE_ORDER_LITTLE_ENDIAN_
+/* Single full-word field: endianness does not change the layout. */
+typedef struct
+{
+ /* write_requests_counter */
+ uint32_t req_cnt : 32 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+}
+__PACKING_ATTRIBUTE_STRUCT_END__
+DMA_REGS_DEBUG_REQ_CNT_RX_ACC ;
+#else
+typedef struct
+{ uint32_t req_cnt : 32 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* write_requests_counter */
+}
+__PACKING_ATTRIBUTE_STRUCT_END__
+DMA_REGS_DEBUG_REQ_CNT_RX_ACC ;
+#endif
+
+/*****************************************************************************************/
+/* ACC_REQUEST_COUNTERS_TX */
+/* the accumulated number of read requests served so far for each peripheral. Wrap aroun */
+/* d on max value, not read clear. */
+/*****************************************************************************************/
+
+/* Field constants for the 32-bit accumulated read-request counter. */
+#define DMA_REGS_DEBUG_REQ_CNT_TX_ACC_REQ_CNT_CNT_VALUE ( 0x0 )
+#define DMA_REGS_DEBUG_REQ_CNT_TX_ACC_REQ_CNT_CNT_VALUE_RESET_VALUE ( 0x0 )
+
+
+/* Byte offset of this register array inside the DEBUG function of each DMA module. */
+#define DMA_REGS_DEBUG_REQ_CNT_TX_ACC_OFFSET ( 0x00000054 )
+
+/* DMA module 0 accessors; 'i' is the register entry index, 'r'/'v' the 32-bit data. */
+#define DMA_REGS_0_DEBUG_REQ_CNT_TX_ACC_ADDRESS ( DMA_REGS_0_DEBUG_ADDRESS + DMA_REGS_DEBUG_REQ_CNT_TX_ACC_OFFSET )
+#define DMA_REGS_0_DEBUG_REQ_CNT_TX_ACC_READ_I( r, i ) READ_I_32( ( DMA_REGS_0_DEBUG_REQ_CNT_TX_ACC_ADDRESS ), (i), (r) )
+#define DMA_REGS_0_DEBUG_REQ_CNT_TX_ACC_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_0_DEBUG_REQ_CNT_TX_ACC_ADDRESS ), (i), (v) )
+
+/* DMA module 1 accessors. */
+#define DMA_REGS_1_DEBUG_REQ_CNT_TX_ACC_ADDRESS ( DMA_REGS_1_DEBUG_ADDRESS + DMA_REGS_DEBUG_REQ_CNT_TX_ACC_OFFSET )
+#define DMA_REGS_1_DEBUG_REQ_CNT_TX_ACC_READ_I( r, i ) READ_I_32( ( DMA_REGS_1_DEBUG_REQ_CNT_TX_ACC_ADDRESS ), (i), (r) )
+#define DMA_REGS_1_DEBUG_REQ_CNT_TX_ACC_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_1_DEBUG_REQ_CNT_TX_ACC_ADDRESS ), (i), (v) )
+
+
+/* Per-module base-address table, indexed by DMA module number. */
+extern uint32_t DMA_REGS_DEBUG_REQ_CNT_TX_ACC_ARRAY [ ] ;
+
+/* Module-indexed accessors: 'i' selects the DMA module, 'k' the register entry.
+   The array subscript is parenthesized so expression arguments expand safely. */
+#define DMA_REGS_DEBUG_REQ_CNT_TX_ACC_WRITE( i, k, v ) WRITE_I_32( DMA_REGS_DEBUG_REQ_CNT_TX_ACC_ARRAY [ (i) ], (k), (v) )
+#define DMA_REGS_DEBUG_REQ_CNT_TX_ACC_READ( i, k, r ) READ_I_32( DMA_REGS_DEBUG_REQ_CNT_TX_ACC_ARRAY [ (i) ], (k), (r) )
+
+#ifndef _BYTE_ORDER_LITTLE_ENDIAN_
+/* Single full-word field: endianness does not change the layout. */
+typedef struct
+{
+ /* read_requests_counter (TX side serves read requests; see section banner above) */
+ uint32_t req_cnt : 32 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+}
+__PACKING_ATTRIBUTE_STRUCT_END__
+DMA_REGS_DEBUG_REQ_CNT_TX_ACC ;
+#else
+typedef struct
+{ uint32_t req_cnt : 32 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* read_requests_counter */
+}
+__PACKING_ATTRIBUTE_STRUCT_END__
+DMA_REGS_DEBUG_REQ_CNT_TX_ACC ;
+#endif
+
+/*****************************************************************************************/
+/* RAM_ADDRESS */
+/* the address and cs of the ram the user wishes to read using the indirect access read */
+/* mechanism. */
+/*****************************************************************************************/
+
+/* Chip-select value constants: each *_ON/OFF pair selects (1) or deselects (0) one RAM. */
+#define DMA_REGS_DEBUG_RDADD_R2_DEFAULT_VALUE ( 0x0 )
+#define DMA_REGS_DEBUG_RDADD_R2_DEFAULT_VALUE_RESET_VALUE ( 0x0 )
+#define DMA_REGS_DEBUG_RDADD_CDCS1_OFF_VALUE ( 0x0 )
+#define DMA_REGS_DEBUG_RDADD_CDCS1_OFF_VALUE_RESET_VALUE ( 0x0 )
+#define DMA_REGS_DEBUG_RDADD_CDCS1_ON_VALUE ( 0x1 )
+#define DMA_REGS_DEBUG_RDADD_DATACS1_OFF_VALUE ( 0x0 )
+#define DMA_REGS_DEBUG_RDADD_DATACS1_OFF_VALUE_RESET_VALUE ( 0x0 )
+#define DMA_REGS_DEBUG_RDADD_DATACS1_ON_VALUE ( 0x1 )
+#define DMA_REGS_DEBUG_RDADD_RDCS_OFF_VALUE ( 0x0 )
+#define DMA_REGS_DEBUG_RDADD_RDCS_OFF_VALUE_RESET_VALUE ( 0x0 )
+#define DMA_REGS_DEBUG_RDADD_RDCS_ON_VALUE ( 0x1 )
+#define DMA_REGS_DEBUG_RDADD_RRCS_OFF_VALUE ( 0x0 )
+#define DMA_REGS_DEBUG_RDADD_RRCS_OFF_VALUE_RESET_VALUE ( 0x0 )
+#define DMA_REGS_DEBUG_RDADD_RRCS_ON_VALUE ( 0x1 )
+#define DMA_REGS_DEBUG_RDADD_CDCS_OFF_VALUE ( 0x0 )
+#define DMA_REGS_DEBUG_RDADD_CDCS_OFF_VALUE_RESET_VALUE ( 0x0 )
+#define DMA_REGS_DEBUG_RDADD_CDCS_ON_VALUE ( 0x1 )
+#define DMA_REGS_DEBUG_RDADD_DATACS_OFF_VALUE ( 0x0 )
+#define DMA_REGS_DEBUG_RDADD_DATACS_OFF_VALUE_RESET_VALUE ( 0x0 )
+#define DMA_REGS_DEBUG_RDADD_DATACS_ON_VALUE ( 0x1 )
+#define DMA_REGS_DEBUG_RDADD_R1_DEFAULT_VALUE ( 0x0 )
+#define DMA_REGS_DEBUG_RDADD_R1_DEFAULT_VALUE_RESET_VALUE ( 0x0 )
+#define DMA_REGS_DEBUG_RDADD_ADDRESS_ADD_VALUE ( 0x0 )
+#define DMA_REGS_DEBUG_RDADD_ADDRESS_ADD_VALUE_RESET_VALUE ( 0x0 )
+
+
+/* Byte offset of this register inside the DEBUG function of each DMA module. */
+#define DMA_REGS_DEBUG_RDADD_OFFSET ( 0x00000100 )
+
+/* DMA module 0 accessors; 'r'/'v' carry the 32-bit register value. */
+#define DMA_REGS_0_DEBUG_RDADD_ADDRESS ( DMA_REGS_0_DEBUG_ADDRESS + DMA_REGS_DEBUG_RDADD_OFFSET )
+#define DMA_REGS_0_DEBUG_RDADD_READ( r ) READ_32( ( DMA_REGS_0_DEBUG_RDADD_ADDRESS ), (r) )
+#define DMA_REGS_0_DEBUG_RDADD_WRITE( v ) WRITE_32( ( DMA_REGS_0_DEBUG_RDADD_ADDRESS ), (v) )
+
+/* DMA module 1 accessors. */
+#define DMA_REGS_1_DEBUG_RDADD_ADDRESS ( DMA_REGS_1_DEBUG_ADDRESS + DMA_REGS_DEBUG_RDADD_OFFSET )
+#define DMA_REGS_1_DEBUG_RDADD_READ( r ) READ_32( ( DMA_REGS_1_DEBUG_RDADD_ADDRESS ), (r) )
+#define DMA_REGS_1_DEBUG_RDADD_WRITE( v ) WRITE_32( ( DMA_REGS_1_DEBUG_RDADD_ADDRESS ), (v) )
+
+
+/* Per-module base-address table, indexed by DMA module number. */
+extern uint32_t DMA_REGS_DEBUG_RDADD_ARRAY [ ] ;
+
+/* Module-indexed accessors: 'i' selects the DMA module.
+   The array subscript is parenthesized so expression arguments expand safely. */
+#define DMA_REGS_DEBUG_RDADD_WRITE( i, v ) WRITE_32( DMA_REGS_DEBUG_RDADD_ARRAY [ (i) ], (v) )
+#define DMA_REGS_DEBUG_RDADD_READ( i, r ) READ_32( DMA_REGS_DEBUG_RDADD_ARRAY [ (i) ], (r) )
+
+#ifndef _BYTE_ORDER_LITTLE_ENDIAN_
+/* Big-endian layout: fields declared MSB-first. */
+typedef struct
+{
+ /* reserved */
+ uint32_t r2 : 10 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* cd_ram_cs1 */
+ uint32_t cdcs1 : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* data_ram_cs_1 */
+ uint32_t datacs1 : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* rd_data_cs */
+ uint32_t rdcs : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* rr_ram_cd */
+ uint32_t rrcs : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* cd_ram_cs */
+ uint32_t cdcs : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* data_ram_cs */
+ uint32_t datacs : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* reserved */
+ uint32_t r1 : 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* address (RAM word address to read) */
+ uint32_t address : 10 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+}
+__PACKING_ATTRIBUTE_STRUCT_END__
+DMA_REGS_DEBUG_RDADD ;
+#else
+/* Little-endian layout: same fields, LSB-first mirror of the struct above. */
+typedef struct
+{ uint32_t address : 10 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* address */
+ uint32_t r1 : 6 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* reserved */
+ uint32_t datacs : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* data_ram_cs */
+ uint32_t cdcs : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* cd_ram_cs */
+ uint32_t rrcs : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* rr_ram_cd */
+ uint32_t rdcs : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* rd_data_cs */
+ uint32_t datacs1 : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* data_ram_cs_1 */
+ uint32_t cdcs1 : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* cd_ram_cs1 */
+ uint32_t r2 : 10 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* reserved */
+}
+__PACKING_ATTRIBUTE_STRUCT_END__
+DMA_REGS_DEBUG_RDADD ;
+#endif
+
+/*****************************************************************************************/
+/* INDIRECT_READ_REQUEST_VALID */
+/* After determining the address and cs, the user should assert this bit for indicating */
+/* that the address and cs are valid. */
+/*****************************************************************************************/
+
+/* Field constants: valid = 1 latches the address/cs written to RDADD. */
+#define DMA_REGS_DEBUG_RDVALID_R1_DEFAULT_VALUE ( 0x0 )
+#define DMA_REGS_DEBUG_RDVALID_R1_DEFAULT_VALUE_RESET_VALUE ( 0x0 )
+#define DMA_REGS_DEBUG_RDVALID_VALID_NOT_VALID_VALUE ( 0x0 )
+#define DMA_REGS_DEBUG_RDVALID_VALID_NOT_VALID_VALUE_RESET_VALUE ( 0x0 )
+#define DMA_REGS_DEBUG_RDVALID_VALID_VALID_VALUE ( 0x1 )
+
+
+/* Byte offset of this register inside the DEBUG function of each DMA module. */
+#define DMA_REGS_DEBUG_RDVALID_OFFSET ( 0x00000104 )
+
+/* DMA module 0 accessors; 'r'/'v' carry the 32-bit register value. */
+#define DMA_REGS_0_DEBUG_RDVALID_ADDRESS ( DMA_REGS_0_DEBUG_ADDRESS + DMA_REGS_DEBUG_RDVALID_OFFSET )
+#define DMA_REGS_0_DEBUG_RDVALID_READ( r ) READ_32( ( DMA_REGS_0_DEBUG_RDVALID_ADDRESS ), (r) )
+#define DMA_REGS_0_DEBUG_RDVALID_WRITE( v ) WRITE_32( ( DMA_REGS_0_DEBUG_RDVALID_ADDRESS ), (v) )
+
+/* DMA module 1 accessors. */
+#define DMA_REGS_1_DEBUG_RDVALID_ADDRESS ( DMA_REGS_1_DEBUG_ADDRESS + DMA_REGS_DEBUG_RDVALID_OFFSET )
+#define DMA_REGS_1_DEBUG_RDVALID_READ( r ) READ_32( ( DMA_REGS_1_DEBUG_RDVALID_ADDRESS ), (r) )
+#define DMA_REGS_1_DEBUG_RDVALID_WRITE( v ) WRITE_32( ( DMA_REGS_1_DEBUG_RDVALID_ADDRESS ), (v) )
+
+
+/* Per-module base-address table, indexed by DMA module number. */
+extern uint32_t DMA_REGS_DEBUG_RDVALID_ARRAY [ ] ;
+
+/* Module-indexed accessors: 'i' selects the DMA module.
+   The array subscript is parenthesized so expression arguments expand safely. */
+#define DMA_REGS_DEBUG_RDVALID_WRITE( i, v ) WRITE_32( DMA_REGS_DEBUG_RDVALID_ARRAY [ (i) ], (v) )
+#define DMA_REGS_DEBUG_RDVALID_READ( i, r ) READ_32( DMA_REGS_DEBUG_RDVALID_ARRAY [ (i) ], (r) )
+
+#ifndef _BYTE_ORDER_LITTLE_ENDIAN_
+/* Big-endian layout: fields declared MSB-first. */
+typedef struct
+{
+ /* reserved */
+ uint32_t r1 : 31 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* valid */
+ uint32_t valid : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+}
+__PACKING_ATTRIBUTE_STRUCT_END__
+DMA_REGS_DEBUG_RDVALID ;
+#else
+/* Little-endian layout: same fields, LSB-first mirror of the struct above. */
+typedef struct
+{ uint32_t valid : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* valid */
+ uint32_t r1 : 31 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* reserved */
+}
+__PACKING_ATTRIBUTE_STRUCT_END__
+DMA_REGS_DEBUG_RDVALID ;
+#endif
+
+/*****************************************************************************************/
+/* INDIRECT_READ_DATA */
+/* The returned read data from the selected RAM. Array of 4 registers (128 bits total). */
+/* The width of the different memories is as follows: write data - 128 bits, chunk des */
+/* criptors - 36 bits, read requests - 42 bits, read data - 64 bits. For the memories */
+/* with width smaller than 128, the data will appear in the first registers of the array */
+/* , for example: data from the cd RAM will appear in - {reg1[5:0], reg0[31:0]}. */
+/*****************************************************************************************/
+
+/* Field constants for the 32-bit data word. */
+#define DMA_REGS_DEBUG_RDDATA_DATA_DATA_VALUE ( 0x0 )
+#define DMA_REGS_DEBUG_RDDATA_DATA_DATA_VALUE_RESET_VALUE ( 0x0 )
+
+
+/* Byte offset of this register array inside the DEBUG function of each DMA module. */
+#define DMA_REGS_DEBUG_RDDATA_OFFSET ( 0x00000108 )
+
+/* DMA module 0 accessors; 'i' is the register entry index (0..3), 'r'/'v' the data word. */
+#define DMA_REGS_0_DEBUG_RDDATA_ADDRESS ( DMA_REGS_0_DEBUG_ADDRESS + DMA_REGS_DEBUG_RDDATA_OFFSET )
+#define DMA_REGS_0_DEBUG_RDDATA_READ_I( r, i ) READ_I_32( ( DMA_REGS_0_DEBUG_RDDATA_ADDRESS ), (i), (r) )
+#define DMA_REGS_0_DEBUG_RDDATA_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_0_DEBUG_RDDATA_ADDRESS ), (i), (v) )
+
+/* DMA module 1 accessors. */
+#define DMA_REGS_1_DEBUG_RDDATA_ADDRESS ( DMA_REGS_1_DEBUG_ADDRESS + DMA_REGS_DEBUG_RDDATA_OFFSET )
+#define DMA_REGS_1_DEBUG_RDDATA_READ_I( r, i ) READ_I_32( ( DMA_REGS_1_DEBUG_RDDATA_ADDRESS ), (i), (r) )
+#define DMA_REGS_1_DEBUG_RDDATA_WRITE_I( v, i ) WRITE_I_32( ( DMA_REGS_1_DEBUG_RDDATA_ADDRESS ), (i), (v) )
+
+
+/* Per-module base-address table, indexed by DMA module number. */
+extern uint32_t DMA_REGS_DEBUG_RDDATA_ARRAY [ ] ;
+
+/* Module-indexed accessors: 'i' selects the DMA module, 'k' the register entry.
+   The array subscript is parenthesized so expression arguments expand safely. */
+#define DMA_REGS_DEBUG_RDDATA_WRITE( i, k, v ) WRITE_I_32( DMA_REGS_DEBUG_RDDATA_ARRAY [ (i) ], (k), (v) )
+#define DMA_REGS_DEBUG_RDDATA_READ( i, k, r ) READ_I_32( DMA_REGS_DEBUG_RDDATA_ARRAY [ (i) ], (k), (r) )
+
+#ifndef _BYTE_ORDER_LITTLE_ENDIAN_
+/* Single full-word field: endianness does not change the layout. */
+typedef struct
+{
+ /* data */
+ uint32_t data : 32 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+}
+__PACKING_ATTRIBUTE_STRUCT_END__
+DMA_REGS_DEBUG_RDDATA ;
+#else
+typedef struct
+{ uint32_t data : 32 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* data */
+}
+__PACKING_ATTRIBUTE_STRUCT_END__
+DMA_REGS_DEBUG_RDDATA ;
+#endif
+
+/*****************************************************************************************/
+/* READ_DATA_READY */
+/* When asserted, indicates that the data in the previous array is valid. Will remain */
+/* asserted until the user deasserts the valid bit in register RDVALID. */
+/*****************************************************************************************/
+
+/* Field constants: ready = 1 means the RDDATA array holds valid data. */
+#define DMA_REGS_DEBUG_RDDATARDY_R1_DEFAULT_VALUE ( 0x0 )
+#define DMA_REGS_DEBUG_RDDATARDY_R1_DEFAULT_VALUE_RESET_VALUE ( 0x0 )
+#define DMA_REGS_DEBUG_RDDATARDY_READY_NOT_READY_VALUE ( 0x0 )
+#define DMA_REGS_DEBUG_RDDATARDY_READY_NOT_READY_VALUE_RESET_VALUE ( 0x0 )
+#define DMA_REGS_DEBUG_RDDATARDY_READY_READY_VALUE ( 0x1 )
+
+
+/* Byte offset of this register inside the DEBUG function of each DMA module. */
+#define DMA_REGS_DEBUG_RDDATARDY_OFFSET ( 0x00000118 )
+
+/* DMA module 0 accessors; 'r'/'v' carry the 32-bit register value. */
+#define DMA_REGS_0_DEBUG_RDDATARDY_ADDRESS ( DMA_REGS_0_DEBUG_ADDRESS + DMA_REGS_DEBUG_RDDATARDY_OFFSET )
+#define DMA_REGS_0_DEBUG_RDDATARDY_READ( r ) READ_32( ( DMA_REGS_0_DEBUG_RDDATARDY_ADDRESS ), (r) )
+#define DMA_REGS_0_DEBUG_RDDATARDY_WRITE( v ) WRITE_32( ( DMA_REGS_0_DEBUG_RDDATARDY_ADDRESS ), (v) )
+
+/* DMA module 1 accessors. */
+#define DMA_REGS_1_DEBUG_RDDATARDY_ADDRESS ( DMA_REGS_1_DEBUG_ADDRESS + DMA_REGS_DEBUG_RDDATARDY_OFFSET )
+#define DMA_REGS_1_DEBUG_RDDATARDY_READ( r ) READ_32( ( DMA_REGS_1_DEBUG_RDDATARDY_ADDRESS ), (r) )
+#define DMA_REGS_1_DEBUG_RDDATARDY_WRITE( v ) WRITE_32( ( DMA_REGS_1_DEBUG_RDDATARDY_ADDRESS ), (v) )
+
+
+/* Per-module base-address table, indexed by DMA module number. */
+extern uint32_t DMA_REGS_DEBUG_RDDATARDY_ARRAY [ ] ;
+
+/* Module-indexed accessors: 'i' selects the DMA module.
+   The array subscript is parenthesized so expression arguments expand safely. */
+#define DMA_REGS_DEBUG_RDDATARDY_WRITE( i, v ) WRITE_32( DMA_REGS_DEBUG_RDDATARDY_ARRAY [ (i) ], (v) )
+#define DMA_REGS_DEBUG_RDDATARDY_READ( i, r ) READ_32( DMA_REGS_DEBUG_RDDATARDY_ARRAY [ (i) ], (r) )
+
+#ifndef _BYTE_ORDER_LITTLE_ENDIAN_
+/* Big-endian layout: fields declared MSB-first. */
+typedef struct
+{
+ /* reserved */
+ uint32_t r1 : 31 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* ready */
+ uint32_t ready : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+}
+__PACKING_ATTRIBUTE_STRUCT_END__
+DMA_REGS_DEBUG_RDDATARDY ;
+#else
+/* Little-endian layout: same fields, LSB-first mirror of the struct above. */
+typedef struct
+{ uint32_t ready : 1 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* ready */
+ uint32_t r1 : 31 __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* reserved */
+}
+__PACKING_ATTRIBUTE_STRUCT_END__
+DMA_REGS_DEBUG_RDDATARDY ;
+#endif
+
+/*****************************************************************************************/
+/* The registers in this section allow configuration of the following: 1. memory alloca */
+/* tions 2. priority and weight for arbitration 3. urgent thresholds 4. route address */
+/* es Most of the registers control the configuration of a single peripheral. They ar */
+/* e arranged in arrays according to their configuration topic. The order of peripher */
+/* als within each array is: Ethernet 0 Ethernet 1 Ethernet 2 Ethernet 3 Ethernet 4 */
+/* GPON */
+/*****************************************************************************************/
+
+/*****************************************************************************************/
+/* Registers array numbers */
+/* Each per-peripheral array has 6 entries: Ethernet 0-4 and GPON (see section banner). */
+/*****************************************************************************************/
+#define DMA_REGS_CONFIG_MALLOC_NUMBER ( 6 )
+#define DMA_REGS_CONFIG_READ_BASE_NUMBER ( 6 )
+#define DMA_REGS_CONFIG_U_THRESH_NUMBER ( 6 )
+#define DMA_REGS_CONFIG_PRI_NUMBER ( 6 )
+#define DMA_REGS_CONFIG_WEIGHT_NUMBER ( 6 )
+#define DMA_REGS_CONFIG_BB_ROUTE_NUMBER ( 6 )
+/* Packed image of the CONFIG function register file; field order mirrors the register */
+/* offsets within the block and must not be changed. */
+typedef struct
+{
+ /* BB_SOURCE */
+ DMA_REGS_CONFIG_SOURCE source __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* MEMORY_ALLOCATION */
+ DMA_REGS_CONFIG_MALLOC malloc [ DMA_REGS_CONFIG_MALLOC_NUMBER ] __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* READ_REQ_BASE_ADDRESS */
+ DMA_REGS_CONFIG_READ_BASE read_base [ DMA_REGS_CONFIG_READ_BASE_NUMBER ] __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* URGENT_THRESHOLDS */
+ DMA_REGS_CONFIG_U_THRESH u_thresh [ DMA_REGS_CONFIG_U_THRESH_NUMBER ] __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* STRICT_PRIORITY */
+ DMA_REGS_CONFIG_PRI pri [ DMA_REGS_CONFIG_PRI_NUMBER ] __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* WEIGHT_OF_ROUND_ROBIN */
+ DMA_REGS_CONFIG_WEIGHT weight [ DMA_REGS_CONFIG_WEIGHT_NUMBER ] __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* BB_ROUTE_DMA_PERIPH */
+ DMA_REGS_CONFIG_BB_ROUTE bb_route [ DMA_REGS_CONFIG_BB_ROUTE_NUMBER ] __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* POINTERS_RESET */
+ DMA_REGS_CONFIG_PTRRST ptrrst __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* MEM_SEL */
+ DMA_REGS_CONFIG_MEM_SEL mem_sel __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+}
+ __PACKING_ATTRIBUTE_STRUCT_END__
+DMA_REGS_CONFIG ;
+
+/*****************************************************************************************/
+/* request counters per peripheral */
+/*****************************************************************************************/
+
+/*****************************************************************************************/
+/* Registers array numbers */
+/* The per-peripheral counter arrays have 6 entries; RDDATA has 4 (128 bits total). */
+/*****************************************************************************************/
+#define DMA_REGS_DEBUG_REQ_CNT_RX_NUMBER ( 6 )
+#define DMA_REGS_DEBUG_REQ_CNT_TX_NUMBER ( 6 )
+#define DMA_REGS_DEBUG_REQ_CNT_RX_ACC_NUMBER ( 6 )
+#define DMA_REGS_DEBUG_REQ_CNT_TX_ACC_NUMBER ( 6 )
+#define DMA_REGS_DEBUG_RDDATA_NUMBER ( 4 )
+/* Packed image of the DEBUG function register file; field order mirrors the register */
+/* offsets within the block and must not be changed. */
+typedef struct
+{
+ /* NOT_EMPTY_VECTOR */
+ DMA_REGS_DEBUG_NEMPTY nempty __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* URGENT_VECTOR */
+ DMA_REGS_DEBUG_URGNT urgnt __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* SELECTED_SOURCE_NUM */
+ DMA_REGS_DEBUG_SELSRC selsrc __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* REQUEST_COUNTERS_RX */
+ DMA_REGS_DEBUG_REQ_CNT_RX req_cnt_rx [ DMA_REGS_DEBUG_REQ_CNT_RX_NUMBER ] __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* REQUEST_COUNTERS_TX */
+ DMA_REGS_DEBUG_REQ_CNT_TX req_cnt_tx [ DMA_REGS_DEBUG_REQ_CNT_TX_NUMBER ] __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* ACC_REQUEST_COUNTERS_RX */
+ DMA_REGS_DEBUG_REQ_CNT_RX_ACC req_cnt_rx_acc [ DMA_REGS_DEBUG_REQ_CNT_RX_ACC_NUMBER ] __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* ACC_REQUEST_COUNTERS_TX */
+ DMA_REGS_DEBUG_REQ_CNT_TX_ACC req_cnt_tx_acc [ DMA_REGS_DEBUG_REQ_CNT_TX_ACC_NUMBER ] __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* Reserved: pads from 0x6C (end of req_cnt_tx_acc, offset 0x54 + 6*4) up to the */
+ /* RDADD register at offset 0x100 (0x6C + 148 = 0x100). */
+ uint8_t reserved1 [ 148 ] __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* RAM_ADDRESS */
+ DMA_REGS_DEBUG_RDADD rdadd __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* INDIRECT_READ_REQUEST_VALID */
+ DMA_REGS_DEBUG_RDVALID rdvalid __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* INDIRECT_READ_DATA */
+ DMA_REGS_DEBUG_RDDATA rddata [ DMA_REGS_DEBUG_RDDATA_NUMBER ] __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* READ_DATA_READY */
+ DMA_REGS_DEBUG_RDDATARDY rddatardy __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+}
+ __PACKING_ATTRIBUTE_STRUCT_END__
+DMA_REGS_DEBUG ;
+
+/* Register image of one DMA module: the CONFIG function followed by the DEBUG */
+/* function (DEBUG sits at block offset 0x100; reserved0 supplies the padding between). */
+typedef struct
+{
+ /* config function */
+ DMA_REGS_CONFIG config __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* Reserved: padding up to the DEBUG function offset. */
+ uint8_t reserved0 [ 100 ] __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+
+ /* debug function */
+ DMA_REGS_DEBUG debug __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+}
+ __PACKING_ATTRIBUTE_STRUCT_END__
+DMA_REGS ;
+
+/* Two DMA module instances exist (DMA_REGS_0 / DMA_REGS_1). */
+#define DMA_REGS_NUMBER ( 2 )
+/* Register image covering both DMA modules, indexed by module number. */
+typedef struct
+{
+ /* REGS */
+ DMA_REGS regs [ DMA_REGS_NUMBER ] __PACKING_ATTRIBUTE_FIELD_LEVEL__ ;
+}
+__PACKING_ATTRIBUTE_STRUCT_END__
+DMA_FOR_ALL ;
+#endif /* __DMA_H_INCLUDED */
+