/*
 * ASPEED XDMA Controller
 * Eddie James <eajames@linux.ibm.com>
 *
 * Copyright (C) 2019 IBM Corp
 * SPDX-License-Identifier: GPL-2.0-or-later
 */

#include "qemu/osdep.h"
#include "qemu/error-report.h"
#include "hw/irq.h"
#include "hw/misc/aspeed_xdma.h"
#include "migration/vmstate.h"
#include "qapi/error.h"

#include "trace.h"

#define XDMA_BMC_CMDQ_ADDR         0x10
#define XDMA_BMC_CMDQ_ENDP         0x14
#define XDMA_BMC_CMDQ_WRP          0x18
#define XDMA_BMC_CMDQ_W_MASK       0x0003FFFF
#define XDMA_BMC_CMDQ_RDP          0x1C
#define XDMA_BMC_CMDQ_RDP_MAGIC    0xEE882266
#define XDMA_IRQ_ENG_CTRL          0x20
#define XDMA_IRQ_ENG_CTRL_US_COMP  BIT(4)
#define XDMA_IRQ_ENG_CTRL_DS_COMP  BIT(5)
#define XDMA_IRQ_ENG_CTRL_W_MASK   0xBFEFF07F
#define XDMA_IRQ_ENG_STAT          0x24
#define XDMA_IRQ_ENG_STAT_US_COMP  BIT(4)
#define XDMA_IRQ_ENG_STAT_DS_COMP  BIT(5)
#define XDMA_IRQ_ENG_STAT_RESET    0xF8000000

#define XDMA_AST2600_BMC_CMDQ_ADDR      0x14
#define XDMA_AST2600_BMC_CMDQ_ENDP      0x18
#define XDMA_AST2600_BMC_CMDQ_WRP       0x1c
#define XDMA_AST2600_BMC_CMDQ_RDP       0x20
#define XDMA_AST2600_IRQ_CTRL           0x38
#define XDMA_AST2600_IRQ_CTRL_US_COMP   BIT(16)
#define XDMA_AST2600_IRQ_CTRL_DS_COMP   BIT(17)
#define XDMA_AST2600_IRQ_CTRL_W_MASK    0x017003FF
#define XDMA_AST2600_IRQ_STATUS         0x3c
#define XDMA_AST2600_IRQ_STATUS_US_COMP BIT(16)
#define XDMA_AST2600_IRQ_STATUS_DS_COMP BIT(17)

#define XDMA_MEM_SIZE              0x1000

#define TO_REG(addr) ((addr) / sizeof(uint32_t))

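/*
 * Guest accesses to the XDMA register window are backed by the regs[] array,
 * indexed via TO_REG().  Reads outside ASPEED_XDMA_REG_SIZE return zero.
 */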
static uint64_t aspeed_xdma_read(void *opaque, hwaddr addr, unsigned int size)
{
    AspeedXDMAState *xdma = opaque;
    uint32_t val = 0;

    if (addr < ASPEED_XDMA_REG_SIZE) {
        val = xdma->regs[TO_REG(addr)];
    }

    return (uint64_t)val;
}

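/*
 * BMC command queue handshake modeled by the write handler below: a write to
 * the queue write-pointer register is mirrored into the read-pointer register
 * and normally latches the completion bits in the interrupt status register,
 * raising the IRQ if they are enabled.  The driver can suppress the next
 * completion by first writing XDMA_BMC_CMDQ_RDP_MAGIC to the read-pointer
 * register, which sets bmc_cmdq_readp_set.
 */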
static void aspeed_xdma_write(void *opaque, hwaddr addr, uint64_t val,
                              unsigned int size)
{
    unsigned int idx;
    uint32_t val32 = (uint32_t)val;
    AspeedXDMAState *xdma = opaque;
    AspeedXDMAClass *axc = ASPEED_XDMA_GET_CLASS(xdma);

    /* Ignore accesses outside the register window. */
    if (addr >= ASPEED_XDMA_REG_SIZE) {
        return;
    }

    if (addr == axc->cmdq_endp) {
        xdma->regs[TO_REG(addr)] = val32 & XDMA_BMC_CMDQ_W_MASK;
    } else if (addr == axc->cmdq_wrp) {
        idx = TO_REG(addr);
        xdma->regs[idx] = val32 & XDMA_BMC_CMDQ_W_MASK;
        xdma->regs[TO_REG(axc->cmdq_rdp)] = xdma->regs[idx];

        trace_aspeed_xdma_write(addr, val);

        if (xdma->bmc_cmdq_readp_set) {
            /* Read pointer was pre-set via the magic write: skip one completion. */
            xdma->bmc_cmdq_readp_set = 0;
        } else {
            xdma->regs[TO_REG(axc->intr_status)] |= axc->intr_complete;

            if (xdma->regs[TO_REG(axc->intr_ctrl)] & axc->intr_complete) {
                qemu_irq_raise(xdma->irq);
            }
        }
    } else if (addr == axc->cmdq_rdp) {
        trace_aspeed_xdma_write(addr, val);

        if (val32 == XDMA_BMC_CMDQ_RDP_MAGIC) {
            xdma->bmc_cmdq_readp_set = 1;
        }
    } else if (addr == axc->intr_ctrl) {
        xdma->regs[TO_REG(addr)] = val32 & axc->intr_ctrl_mask;
    } else if (addr == axc->intr_status) {
        trace_aspeed_xdma_write(addr, val);

        idx = TO_REG(addr);
        if (val32 & axc->intr_complete) {
            xdma->regs[idx] &= ~axc->intr_complete;
            qemu_irq_lower(xdma->irq);
        }
    } else {
        xdma->regs[TO_REG(addr)] = val32;
    }
}

static const MemoryRegionOps aspeed_xdma_ops = {
    .read = aspeed_xdma_read,
    .write = aspeed_xdma_write,
    .endianness = DEVICE_NATIVE_ENDIAN,
    .valid.min_access_size = 4,
    .valid.max_access_size = 4,
};

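/*
 * realize() maps a single XDMA_MEM_SIZE (4KiB) MMIO region and one interrupt
 * line onto the system bus.
 */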
static void aspeed_xdma_realize(DeviceState *dev, Error **errp)
{
    SysBusDevice *sbd = SYS_BUS_DEVICE(dev);
    AspeedXDMAState *xdma = ASPEED_XDMA(dev);

    sysbus_init_irq(sbd, &xdma->irq);
    memory_region_init_io(&xdma->iomem, OBJECT(xdma), &aspeed_xdma_ops, xdma,
                          TYPE_ASPEED_XDMA, XDMA_MEM_SIZE);
    sysbus_init_mmio(sbd, &xdma->iomem);
}

static void aspeed_xdma_reset(DeviceState *dev)
{
    AspeedXDMAState *xdma = ASPEED_XDMA(dev);
    AspeedXDMAClass *axc = ASPEED_XDMA_GET_CLASS(xdma);

    xdma->bmc_cmdq_readp_set = 0;
    memset(xdma->regs, 0, ASPEED_XDMA_REG_SIZE);
    xdma->regs[TO_REG(axc->intr_status)] = XDMA_IRQ_ENG_STAT_RESET;

    qemu_irq_lower(xdma->irq);
}

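/* Migration: the raw register array is the state that is saved and restored. */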
static const VMStateDescription aspeed_xdma_vmstate = {
    .name = TYPE_ASPEED_XDMA,
    .version_id = 1,
    .fields = (const VMStateField[]) {
        VMSTATE_UINT32_ARRAY(regs, AspeedXDMAState, ASPEED_XDMA_NUM_REGS),
        VMSTATE_END_OF_LIST(),
    },
};

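/*
 * The per-SoC class_init hooks below fill in the register offsets and masks
 * that differ between the AST2600 layout and the legacy AST2400/AST2500
 * layout.
 */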
static void aspeed_2600_xdma_class_init(ObjectClass *klass, void *data)
{
    DeviceClass *dc = DEVICE_CLASS(klass);
    AspeedXDMAClass *axc = ASPEED_XDMA_CLASS(klass);

    dc->desc = "ASPEED 2600 XDMA Controller";

    axc->cmdq_endp = XDMA_AST2600_BMC_CMDQ_ENDP;
    axc->cmdq_wrp = XDMA_AST2600_BMC_CMDQ_WRP;
    axc->cmdq_rdp = XDMA_AST2600_BMC_CMDQ_RDP;
    axc->intr_ctrl = XDMA_AST2600_IRQ_CTRL;
    axc->intr_ctrl_mask = XDMA_AST2600_IRQ_CTRL_W_MASK;
    axc->intr_status = XDMA_AST2600_IRQ_STATUS;
    axc->intr_complete = XDMA_AST2600_IRQ_STATUS_US_COMP |
        XDMA_AST2600_IRQ_STATUS_DS_COMP;
}

static const TypeInfo aspeed_2600_xdma_info = {
    .name = TYPE_ASPEED_2600_XDMA,
    .parent = TYPE_ASPEED_XDMA,
    .class_init = aspeed_2600_xdma_class_init,
};

static void aspeed_2500_xdma_class_init(ObjectClass *klass, void *data)
{
    DeviceClass *dc = DEVICE_CLASS(klass);
    AspeedXDMAClass *axc = ASPEED_XDMA_CLASS(klass);

    dc->desc = "ASPEED 2500 XDMA Controller";

    axc->cmdq_endp = XDMA_BMC_CMDQ_ENDP;
    axc->cmdq_wrp = XDMA_BMC_CMDQ_WRP;
    axc->cmdq_rdp = XDMA_BMC_CMDQ_RDP;
    axc->intr_ctrl = XDMA_IRQ_ENG_CTRL;
    axc->intr_ctrl_mask = XDMA_IRQ_ENG_CTRL_W_MASK;
    axc->intr_status = XDMA_IRQ_ENG_STAT;
    axc->intr_complete = XDMA_IRQ_ENG_STAT_US_COMP | XDMA_IRQ_ENG_STAT_DS_COMP;
}

static const TypeInfo aspeed_2500_xdma_info = {
    .name = TYPE_ASPEED_2500_XDMA,
    .parent = TYPE_ASPEED_XDMA,
    .class_init = aspeed_2500_xdma_class_init,
};

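/* The AST2400 programming model uses the same register layout as the AST2500. */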
static void aspeed_2400_xdma_class_init(ObjectClass *klass, void *data)
{
    DeviceClass *dc = DEVICE_CLASS(klass);
    AspeedXDMAClass *axc = ASPEED_XDMA_CLASS(klass);

    dc->desc = "ASPEED 2400 XDMA Controller";

    axc->cmdq_endp = XDMA_BMC_CMDQ_ENDP;
    axc->cmdq_wrp = XDMA_BMC_CMDQ_WRP;
    axc->cmdq_rdp = XDMA_BMC_CMDQ_RDP;
    axc->intr_ctrl = XDMA_IRQ_ENG_CTRL;
    axc->intr_ctrl_mask = XDMA_IRQ_ENG_CTRL_W_MASK;
    axc->intr_status = XDMA_IRQ_ENG_STAT;
    axc->intr_complete = XDMA_IRQ_ENG_STAT_US_COMP | XDMA_IRQ_ENG_STAT_DS_COMP;
}

static const TypeInfo aspeed_2400_xdma_info = {
    .name = TYPE_ASPEED_2400_XDMA,
    .parent = TYPE_ASPEED_XDMA,
    .class_init = aspeed_2400_xdma_class_init,
};

static void aspeed_xdma_class_init(ObjectClass *classp, void *data)
{
    DeviceClass *dc = DEVICE_CLASS(classp);

    dc->realize = aspeed_xdma_realize;
    device_class_set_legacy_reset(dc, aspeed_xdma_reset);
    dc->vmsd = &aspeed_xdma_vmstate;
}

static const TypeInfo aspeed_xdma_info = {
    .name = TYPE_ASPEED_XDMA,
    .parent = TYPE_SYS_BUS_DEVICE,
    .instance_size = sizeof(AspeedXDMAState),
    .class_init = aspeed_xdma_class_init,
    .class_size = sizeof(AspeedXDMAClass),
};

static void aspeed_xdma_register_type(void)
{
    type_register_static(&aspeed_xdma_info);
    type_register_static(&aspeed_2400_xdma_info);
    type_register_static(&aspeed_2500_xdma_info);
    type_register_static(&aspeed_2600_xdma_info);
}

type_init(aspeed_xdma_register_type);