typedef struct QEMU_PACKED NvmeBar {
    /* ... controller register fields (cap, vs, intms, ..., pmrmscu) elided ... */
} NvmeBar;

enum NvmeBarRegs {
    NVME_REG_CAP     = offsetof(NvmeBar, cap),
    NVME_REG_VS      = offsetof(NvmeBar, vs),
    NVME_REG_INTMS   = offsetof(NvmeBar, intms),
    NVME_REG_INTMC   = offsetof(NvmeBar, intmc),
    NVME_REG_CC      = offsetof(NvmeBar, cc),
    NVME_REG_CSTS    = offsetof(NvmeBar, csts),
    NVME_REG_NSSR    = offsetof(NvmeBar, nssr),
    NVME_REG_AQA     = offsetof(NvmeBar, aqa),
    NVME_REG_ASQ     = offsetof(NvmeBar, asq),
    NVME_REG_ACQ     = offsetof(NvmeBar, acq),
    NVME_REG_CMBLOC  = offsetof(NvmeBar, cmbloc),
    NVME_REG_CMBSZ   = offsetof(NvmeBar, cmbsz),
    NVME_REG_BPINFO  = offsetof(NvmeBar, bpinfo),
    NVME_REG_BPRSEL  = offsetof(NvmeBar, bprsel),
    NVME_REG_BPMBL   = offsetof(NvmeBar, bpmbl),
    NVME_REG_CMBMSC  = offsetof(NvmeBar, cmbmsc),
    NVME_REG_CMBSTS  = offsetof(NvmeBar, cmbsts),
    NVME_REG_PMRCAP  = offsetof(NvmeBar, pmrcap),
    NVME_REG_PMRCTL  = offsetof(NvmeBar, pmrctl),
    NVME_REG_PMRSTS  = offsetof(NvmeBar, pmrsts),
    NVME_REG_PMREBS  = offsetof(NvmeBar, pmrebs),
    NVME_REG_PMRSWTP = offsetof(NvmeBar, pmrswtp),
    NVME_REG_PMRMSCL = offsetof(NvmeBar, pmrmscl),
    NVME_REG_PMRMSCU = offsetof(NvmeBar, pmrmscu),
};
    CAP_MPSMIN_SHIFT = 48,
    CAP_MPSMAX_SHIFT = 52,

    CAP_MQES_MASK    = 0xffff,
    CAP_MPSMIN_MASK  = 0xf,
    CAP_MPSMAX_MASK  = 0xf,
#define NVME_CAP_MQES(cap)   (((cap) >> CAP_MQES_SHIFT)   & CAP_MQES_MASK)
#define NVME_CAP_CQR(cap)    (((cap) >> CAP_CQR_SHIFT)    & CAP_CQR_MASK)
#define NVME_CAP_AMS(cap)    (((cap) >> CAP_AMS_SHIFT)    & CAP_AMS_MASK)
#define NVME_CAP_TO(cap)     (((cap) >> CAP_TO_SHIFT)     & CAP_TO_MASK)
#define NVME_CAP_DSTRD(cap)  (((cap) >> CAP_DSTRD_SHIFT)  & CAP_DSTRD_MASK)
#define NVME_CAP_NSSRS(cap)  (((cap) >> CAP_NSSRS_SHIFT)  & CAP_NSSRS_MASK)
#define NVME_CAP_CSS(cap)    (((cap) >> CAP_CSS_SHIFT)    & CAP_CSS_MASK)
#define NVME_CAP_MPSMIN(cap) (((cap) >> CAP_MPSMIN_SHIFT) & CAP_MPSMIN_MASK)
#define NVME_CAP_MPSMAX(cap) (((cap) >> CAP_MPSMAX_SHIFT) & CAP_MPSMAX_MASK)
#define NVME_CAP_PMRS(cap)   (((cap) >> CAP_PMRS_SHIFT)   & CAP_PMRS_MASK)
#define NVME_CAP_CMBS(cap)   (((cap) >> CAP_CMBS_SHIFT)   & CAP_CMBS_MASK)
#define NVME_CAP_SET_MQES(cap, val)   \
    ((cap) |= (uint64_t)((val) & CAP_MQES_MASK) << CAP_MQES_SHIFT)
#define NVME_CAP_SET_CQR(cap, val)    \
    ((cap) |= (uint64_t)((val) & CAP_CQR_MASK) << CAP_CQR_SHIFT)
#define NVME_CAP_SET_AMS(cap, val)    \
    ((cap) |= (uint64_t)((val) & CAP_AMS_MASK) << CAP_AMS_SHIFT)
#define NVME_CAP_SET_TO(cap, val)     \
    ((cap) |= (uint64_t)((val) & CAP_TO_MASK) << CAP_TO_SHIFT)
#define NVME_CAP_SET_DSTRD(cap, val)  \
    ((cap) |= (uint64_t)((val) & CAP_DSTRD_MASK) << CAP_DSTRD_SHIFT)
#define NVME_CAP_SET_NSSRS(cap, val)  \
    ((cap) |= (uint64_t)((val) & CAP_NSSRS_MASK) << CAP_NSSRS_SHIFT)
#define NVME_CAP_SET_CSS(cap, val)    \
    ((cap) |= (uint64_t)((val) & CAP_CSS_MASK) << CAP_CSS_SHIFT)
#define NVME_CAP_SET_MPSMIN(cap, val) \
    ((cap) |= (uint64_t)((val) & CAP_MPSMIN_MASK) << CAP_MPSMIN_SHIFT)
#define NVME_CAP_SET_MPSMAX(cap, val) \
    ((cap) |= (uint64_t)((val) & CAP_MPSMAX_MASK) << CAP_MPSMAX_SHIFT)
#define NVME_CAP_SET_PMRS(cap, val)   \
    ((cap) |= (uint64_t)((val) & CAP_PMRS_MASK) << CAP_PMRS_SHIFT)
#define NVME_CAP_SET_CMBS(cap, val)   \
    ((cap) |= (uint64_t)((val) & CAP_CMBS_MASK) << CAP_CMBS_SHIFT)
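
/*
 * Illustrative sketch (not part of the original header): how the
 * NVME_CAP_SET_* accessors above compose a CAP register value.  The helper
 * name and the chosen field values are hypothetical.
 */
static inline uint64_t nvme_example_compose_cap(void)
{
    uint64_t cap = 0;

    NVME_CAP_SET_MQES(cap, 0x7ff);  /* 2048-entry queues (0's based value) */
    NVME_CAP_SET_CQR(cap, 1);       /* physically contiguous queues required */
    NVME_CAP_SET_TO(cap, 0xf);      /* ready timeout, in 500 ms units */
    NVME_CAP_SET_CSS(cap, 1);       /* bit 0: NVM command set supported */
    NVME_CAP_SET_MPSMIN(cap, 0);    /* minimum memory page size 4 KiB */
    NVME_CAP_SET_MPSMAX(cap, 4);    /* maximum memory page size 64 KiB */

    return cap;
}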
    NVME_CAP_CSS_NVM        = 1 << 0,
    NVME_CAP_CSS_CSI_SUPP   = 1 << 6,
    NVME_CAP_CSS_ADMIN_ONLY = 1 << 7,

    CC_IOSQES_SHIFT = 16,
    CC_IOCQES_SHIFT = 20,

    CC_IOSQES_MASK  = 0xf,
    CC_IOCQES_MASK  = 0xf,
#define NVME_CC_EN(cc)     ((cc >> CC_EN_SHIFT)     & CC_EN_MASK)
#define NVME_CC_CSS(cc)    ((cc >> CC_CSS_SHIFT)    & CC_CSS_MASK)
#define NVME_CC_MPS(cc)    ((cc >> CC_MPS_SHIFT)    & CC_MPS_MASK)
#define NVME_CC_AMS(cc)    ((cc >> CC_AMS_SHIFT)    & CC_AMS_MASK)
#define NVME_CC_SHN(cc)    ((cc >> CC_SHN_SHIFT)    & CC_SHN_MASK)
#define NVME_CC_IOSQES(cc) ((cc >> CC_IOSQES_SHIFT) & CC_IOSQES_MASK)
#define NVME_CC_IOCQES(cc) ((cc >> CC_IOCQES_SHIFT) & CC_IOCQES_MASK)

    NVME_CC_CSS_NVM        = 0x0,
    NVME_CC_CSS_CSI        = 0x6,
    NVME_CC_CSS_ADMIN_ONLY = 0x7,
#define NVME_SET_CC_EN(cc, val)     \
    (cc |= (uint32_t)((val) & CC_EN_MASK) << CC_EN_SHIFT)
#define NVME_SET_CC_CSS(cc, val)    \
    (cc |= (uint32_t)((val) & CC_CSS_MASK) << CC_CSS_SHIFT)
#define NVME_SET_CC_MPS(cc, val)    \
    (cc |= (uint32_t)((val) & CC_MPS_MASK) << CC_MPS_SHIFT)
#define NVME_SET_CC_AMS(cc, val)    \
    (cc |= (uint32_t)((val) & CC_AMS_MASK) << CC_AMS_SHIFT)
#define NVME_SET_CC_SHN(cc, val)    \
    (cc |= (uint32_t)((val) & CC_SHN_MASK) << CC_SHN_SHIFT)
#define NVME_SET_CC_IOSQES(cc, val) \
    (cc |= (uint32_t)((val) & CC_IOSQES_MASK) << CC_IOSQES_SHIFT)
#define NVME_SET_CC_IOCQES(cc, val) \
    (cc |= (uint32_t)((val) & CC_IOCQES_MASK) << CC_IOCQES_SHIFT)
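
/*
 * Illustrative sketch (not part of the original header): how a host might
 * build a CC value with the NVME_SET_CC_* helpers above before enabling the
 * controller.  The helper name is hypothetical.
 */
static inline uint32_t nvme_example_compose_cc(void)
{
    uint32_t cc = 0;

    NVME_SET_CC_CSS(cc, NVME_CC_CSS_NVM);   /* select the NVM command set */
    NVME_SET_CC_MPS(cc, 0);                 /* 4 KiB memory page size */
    NVME_SET_CC_IOSQES(cc, 6);              /* 64-byte submission queue entries */
    NVME_SET_CC_IOCQES(cc, 4);              /* 16-byte completion queue entries */
    NVME_SET_CC_EN(cc, 1);                  /* set Enable last */

    return cc;
}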
    CSTS_NSSRO_SHIFT = 4,

    CSTS_SHST_MASK   = 0x3,
    CSTS_NSSRO_MASK  = 0x1,

    NVME_CSTS_READY         = 1 << CSTS_RDY_SHIFT,
    NVME_CSTS_FAILED        = 1 << CSTS_CFS_SHIFT,
    NVME_CSTS_SHST_NORMAL   = 0 << CSTS_SHST_SHIFT,
    NVME_CSTS_SHST_PROGRESS = 1 << CSTS_SHST_SHIFT,
    NVME_CSTS_SHST_COMPLETE = 2 << CSTS_SHST_SHIFT,
    NVME_CSTS_NSSRO         = 1 << CSTS_NSSRO_SHIFT,
#define NVME_CSTS_RDY(csts)   ((csts >> CSTS_RDY_SHIFT)   & CSTS_RDY_MASK)
#define NVME_CSTS_CFS(csts)   ((csts >> CSTS_CFS_SHIFT)   & CSTS_CFS_MASK)
#define NVME_CSTS_SHST(csts)  ((csts >> CSTS_SHST_SHIFT)  & CSTS_SHST_MASK)
#define NVME_CSTS_NSSRO(csts) ((csts >> CSTS_NSSRO_SHIFT) & CSTS_NSSRO_MASK)
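
/*
 * Illustrative sketch (not part of the original header): interpreting a CSTS
 * value with the accessors above.  The helper names are hypothetical.
 */
static inline bool nvme_example_csts_ready(uint32_t csts)
{
    /* ready for commands: RDY set and no controller fatal status */
    return NVME_CSTS_RDY(csts) && !NVME_CSTS_CFS(csts);
}

static inline bool nvme_example_shutdown_complete(uint32_t csts)
{
    /* SHST field equals 10b, i.e. shutdown processing complete */
    return NVME_CSTS_SHST(csts) == 0x2;
}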
    AQA_ASQS_MASK = 0xfff,
    AQA_ACQS_MASK = 0xfff,

#define NVME_AQA_ASQS(aqa) ((aqa >> AQA_ASQS_SHIFT) & AQA_ASQS_MASK)
#define NVME_AQA_ACQS(aqa) ((aqa >> AQA_ACQS_SHIFT) & AQA_ACQS_MASK)
enum NvmeCmblocShift {
    CMBLOC_BIR_SHIFT     = 0,
    CMBLOC_CQMMS_SHIFT   = 3,
    CMBLOC_CQPDS_SHIFT   = 4,
    CMBLOC_CDPMLS_SHIFT  = 5,
    CMBLOC_CDPCILS_SHIFT = 6,
    CMBLOC_CDMMMS_SHIFT  = 7,
    CMBLOC_CQDA_SHIFT    = 8,
    CMBLOC_OFST_SHIFT    = 12,
};

enum NvmeCmblocMask {
    CMBLOC_BIR_MASK     = 0x7,
    CMBLOC_CQMMS_MASK   = 0x1,
    CMBLOC_CQPDS_MASK   = 0x1,
    CMBLOC_CDPMLS_MASK  = 0x1,
    CMBLOC_CDPCILS_MASK = 0x1,
    CMBLOC_CDMMMS_MASK  = 0x1,
    CMBLOC_CQDA_MASK    = 0x1,
    CMBLOC_OFST_MASK    = 0xfffff,
};
#define NVME_CMBLOC_BIR(cmbloc) \
    ((cmbloc >> CMBLOC_BIR_SHIFT) & CMBLOC_BIR_MASK)
#define NVME_CMBLOC_CQMMS(cmbloc) \
    ((cmbloc >> CMBLOC_CQMMS_SHIFT) & CMBLOC_CQMMS_MASK)
#define NVME_CMBLOC_CQPDS(cmbloc) \
    ((cmbloc >> CMBLOC_CQPDS_SHIFT) & CMBLOC_CQPDS_MASK)
#define NVME_CMBLOC_CDPMLS(cmbloc) \
    ((cmbloc >> CMBLOC_CDPMLS_SHIFT) & CMBLOC_CDPMLS_MASK)
#define NVME_CMBLOC_CDPCILS(cmbloc) \
    ((cmbloc >> CMBLOC_CDPCILS_SHIFT) & CMBLOC_CDPCILS_MASK)
#define NVME_CMBLOC_CDMMMS(cmbloc) \
    ((cmbloc >> CMBLOC_CDMMMS_SHIFT) & CMBLOC_CDMMMS_MASK)
#define NVME_CMBLOC_CQDA(cmbloc) \
    ((cmbloc >> CMBLOC_CQDA_SHIFT) & CMBLOC_CQDA_MASK)
#define NVME_CMBLOC_OFST(cmbloc) \
    ((cmbloc >> CMBLOC_OFST_SHIFT) & CMBLOC_OFST_MASK)

#define NVME_CMBLOC_SET_BIR(cmbloc, val) \
    (cmbloc |= (uint64_t)(val & CMBLOC_BIR_MASK) << CMBLOC_BIR_SHIFT)
#define NVME_CMBLOC_SET_CQMMS(cmbloc, val) \
    (cmbloc |= (uint64_t)(val & CMBLOC_CQMMS_MASK) << CMBLOC_CQMMS_SHIFT)
#define NVME_CMBLOC_SET_CQPDS(cmbloc, val) \
    (cmbloc |= (uint64_t)(val & CMBLOC_CQPDS_MASK) << CMBLOC_CQPDS_SHIFT)
#define NVME_CMBLOC_SET_CDPMLS(cmbloc, val) \
    (cmbloc |= (uint64_t)(val & CMBLOC_CDPMLS_MASK) << CMBLOC_CDPMLS_SHIFT)
#define NVME_CMBLOC_SET_CDPCILS(cmbloc, val) \
    (cmbloc |= (uint64_t)(val & CMBLOC_CDPCILS_MASK) << CMBLOC_CDPCILS_SHIFT)
#define NVME_CMBLOC_SET_CDMMMS(cmbloc, val) \
    (cmbloc |= (uint64_t)(val & CMBLOC_CDMMMS_MASK) << CMBLOC_CDMMMS_SHIFT)
#define NVME_CMBLOC_SET_CQDA(cmbloc, val) \
    (cmbloc |= (uint64_t)(val & CMBLOC_CQDA_MASK) << CMBLOC_CQDA_SHIFT)
#define NVME_CMBLOC_SET_OFST(cmbloc, val) \
    (cmbloc |= (uint64_t)(val & CMBLOC_OFST_MASK) << CMBLOC_OFST_SHIFT)
#define NVME_CMBMSMC_SET_CRE(cmbmsc, val) \
    (cmbmsc |= (uint64_t)(val & CMBMSC_CRE_MASK) << CMBMSC_CRE_SHIFT)
enum NvmeCmbszShift {
    /* ... */
    CMBSZ_LISTS_SHIFT = 2,
    /* ... */
};

enum NvmeCmbszMask {
    CMBSZ_SQS_MASK   = 0x1,
    CMBSZ_CQS_MASK   = 0x1,
    CMBSZ_LISTS_MASK = 0x1,
    CMBSZ_RDS_MASK   = 0x1,
    CMBSZ_WDS_MASK   = 0x1,
    CMBSZ_SZU_MASK   = 0xf,
    CMBSZ_SZ_MASK    = 0xfffff,
};
#define NVME_CMBSZ_SQS(cmbsz)   ((cmbsz >> CMBSZ_SQS_SHIFT)   & CMBSZ_SQS_MASK)
#define NVME_CMBSZ_CQS(cmbsz)   ((cmbsz >> CMBSZ_CQS_SHIFT)   & CMBSZ_CQS_MASK)
#define NVME_CMBSZ_LISTS(cmbsz) ((cmbsz >> CMBSZ_LISTS_SHIFT) & CMBSZ_LISTS_MASK)
#define NVME_CMBSZ_RDS(cmbsz)   ((cmbsz >> CMBSZ_RDS_SHIFT)   & CMBSZ_RDS_MASK)
#define NVME_CMBSZ_WDS(cmbsz)   ((cmbsz >> CMBSZ_WDS_SHIFT)   & CMBSZ_WDS_MASK)
#define NVME_CMBSZ_SZU(cmbsz)   ((cmbsz >> CMBSZ_SZU_SHIFT)   & CMBSZ_SZU_MASK)
#define NVME_CMBSZ_SZ(cmbsz)    ((cmbsz >> CMBSZ_SZ_SHIFT)    & CMBSZ_SZ_MASK)

#define NVME_CMBSZ_SET_SQS(cmbsz, val)   \
    (cmbsz |= (uint64_t)(val & CMBSZ_SQS_MASK) << CMBSZ_SQS_SHIFT)
#define NVME_CMBSZ_SET_CQS(cmbsz, val)   \
    (cmbsz |= (uint64_t)(val & CMBSZ_CQS_MASK) << CMBSZ_CQS_SHIFT)
#define NVME_CMBSZ_SET_LISTS(cmbsz, val) \
    (cmbsz |= (uint64_t)(val & CMBSZ_LISTS_MASK) << CMBSZ_LISTS_SHIFT)
#define NVME_CMBSZ_SET_RDS(cmbsz, val)   \
    (cmbsz |= (uint64_t)(val & CMBSZ_RDS_MASK) << CMBSZ_RDS_SHIFT)
#define NVME_CMBSZ_SET_WDS(cmbsz, val)   \
    (cmbsz |= (uint64_t)(val & CMBSZ_WDS_MASK) << CMBSZ_WDS_SHIFT)
#define NVME_CMBSZ_SET_SZU(cmbsz, val)   \
    (cmbsz |= (uint64_t)(val & CMBSZ_SZU_MASK) << CMBSZ_SZU_SHIFT)
#define NVME_CMBSZ_SET_SZ(cmbsz, val)    \
    (cmbsz |= (uint64_t)(val & CMBSZ_SZ_MASK) << CMBSZ_SZ_SHIFT)
#define NVME_CMBSZ_GETSIZE(cmbsz) \
    (NVME_CMBSZ_SZ(cmbsz) * (1 << (12 + 4 * NVME_CMBSZ_SZU(cmbsz))))
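
/*
 * Illustrative sketch (not part of the original header): NVME_CMBSZ_GETSIZE
 * expands SZ (counted in 2^(12 + 4 * SZU)-byte units) into a byte count, so
 * SZU = 0 (4 KiB granularity) and SZ = 0x100 describe a 1 MiB CMB.  The
 * helper name is hypothetical; it mirrors the macro but widens to 64 bits
 * for the SZU values seen in practice.
 */
static inline uint64_t nvme_example_cmb_bytes(uint32_t cmbsz)
{
    return (uint64_t)NVME_CMBSZ_SZ(cmbsz) << (12 + 4 * NVME_CMBSZ_SZU(cmbsz));
}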
enum NvmeCmbmscShift {
    CMBMSC_CRE_SHIFT  = 0,
    CMBMSC_CMSE_SHIFT = 1,
    CMBMSC_CBA_SHIFT  = 12,
};

enum NvmeCmbmscMask {
    CMBMSC_CRE_MASK  = 0x1,
    CMBMSC_CMSE_MASK = 0x1,
    CMBMSC_CBA_MASK  = ((1ULL << 52) - 1),
};

#define NVME_CMBMSC_CRE(cmbmsc) \
    ((cmbmsc >> CMBMSC_CRE_SHIFT) & CMBMSC_CRE_MASK)
#define NVME_CMBMSC_CMSE(cmbmsc) \
    ((cmbmsc >> CMBMSC_CMSE_SHIFT) & CMBMSC_CMSE_MASK)
#define NVME_CMBMSC_CBA(cmbmsc) \
    ((cmbmsc >> CMBMSC_CBA_SHIFT) & CMBMSC_CBA_MASK)

#define NVME_CMBMSC_SET_CRE(cmbmsc, val) \
    (cmbmsc |= (uint64_t)(val & CMBMSC_CRE_MASK) << CMBMSC_CRE_SHIFT)
#define NVME_CMBMSC_SET_CMSE(cmbmsc, val) \
    (cmbmsc |= (uint64_t)(val & CMBMSC_CMSE_MASK) << CMBMSC_CMSE_SHIFT)
#define NVME_CMBMSC_SET_CBA(cmbmsc, val) \
    (cmbmsc |= (uint64_t)(val & CMBMSC_CBA_MASK) << CMBMSC_CBA_SHIFT)
enum NvmeCmbstsShift {
    CMBSTS_CBAI_SHIFT = 0,
};

enum NvmeCmbstsMask {
    CMBSTS_CBAI_MASK = 0x1,
};

#define NVME_CMBSTS_CBAI(cmbsts) \
    ((cmbsts >> CMBSTS_CBAI_SHIFT) & CMBSTS_CBAI_MASK)

#define NVME_CMBSTS_SET_CBAI(cmbsts, val) \
    (cmbsts |= (uint64_t)(val & CMBSTS_CBAI_MASK) << CMBSTS_CBAI_SHIFT)
enum NvmePmrcapShift {
    PMRCAP_RDS_SHIFT    = 3,
    PMRCAP_WDS_SHIFT    = 4,
    PMRCAP_BIR_SHIFT    = 5,
    PMRCAP_PMRTU_SHIFT  = 8,
    PMRCAP_PMRWBM_SHIFT = 10,
    PMRCAP_PMRTO_SHIFT  = 16,
    PMRCAP_CMSS_SHIFT   = 24,
};

enum NvmePmrcapMask {
    PMRCAP_RDS_MASK    = 0x1,
    PMRCAP_WDS_MASK    = 0x1,
    PMRCAP_BIR_MASK    = 0x7,
    PMRCAP_PMRTU_MASK  = 0x3,
    PMRCAP_PMRWBM_MASK = 0xf,
    PMRCAP_PMRTO_MASK  = 0xff,
    PMRCAP_CMSS_MASK   = 0x1,
};

#define NVME_PMRCAP_RDS(pmrcap) \
    ((pmrcap >> PMRCAP_RDS_SHIFT) & PMRCAP_RDS_MASK)
#define NVME_PMRCAP_WDS(pmrcap) \
    ((pmrcap >> PMRCAP_WDS_SHIFT) & PMRCAP_WDS_MASK)
#define NVME_PMRCAP_BIR(pmrcap) \
    ((pmrcap >> PMRCAP_BIR_SHIFT) & PMRCAP_BIR_MASK)
#define NVME_PMRCAP_PMRTU(pmrcap) \
    ((pmrcap >> PMRCAP_PMRTU_SHIFT) & PMRCAP_PMRTU_MASK)
#define NVME_PMRCAP_PMRWBM(pmrcap) \
    ((pmrcap >> PMRCAP_PMRWBM_SHIFT) & PMRCAP_PMRWBM_MASK)
#define NVME_PMRCAP_PMRTO(pmrcap) \
    ((pmrcap >> PMRCAP_PMRTO_SHIFT) & PMRCAP_PMRTO_MASK)
#define NVME_PMRCAP_CMSS(pmrcap) \
    ((pmrcap >> PMRCAP_CMSS_SHIFT) & PMRCAP_CMSS_MASK)

#define NVME_PMRCAP_SET_RDS(pmrcap, val) \
    (pmrcap |= (uint64_t)(val & PMRCAP_RDS_MASK) << PMRCAP_RDS_SHIFT)
#define NVME_PMRCAP_SET_WDS(pmrcap, val) \
    (pmrcap |= (uint64_t)(val & PMRCAP_WDS_MASK) << PMRCAP_WDS_SHIFT)
#define NVME_PMRCAP_SET_BIR(pmrcap, val) \
    (pmrcap |= (uint64_t)(val & PMRCAP_BIR_MASK) << PMRCAP_BIR_SHIFT)
#define NVME_PMRCAP_SET_PMRTU(pmrcap, val) \
    (pmrcap |= (uint64_t)(val & PMRCAP_PMRTU_MASK) << PMRCAP_PMRTU_SHIFT)
#define NVME_PMRCAP_SET_PMRWBM(pmrcap, val) \
    (pmrcap |= (uint64_t)(val & PMRCAP_PMRWBM_MASK) << PMRCAP_PMRWBM_SHIFT)
#define NVME_PMRCAP_SET_PMRTO(pmrcap, val) \
    (pmrcap |= (uint64_t)(val & PMRCAP_PMRTO_MASK) << PMRCAP_PMRTO_SHIFT)
#define NVME_PMRCAP_SET_CMSS(pmrcap, val) \
    (pmrcap |= (uint64_t)(val & PMRCAP_CMSS_MASK) << PMRCAP_CMSS_SHIFT)
enum NvmePmrctlShift {
    PMRCTL_EN_SHIFT = 0,
};

enum NvmePmrctlMask {
    PMRCTL_EN_MASK = 0x1,
};

#define NVME_PMRCTL_EN(pmrctl)  ((pmrctl >> PMRCTL_EN_SHIFT) & PMRCTL_EN_MASK)

#define NVME_PMRCTL_SET_EN(pmrctl, val) \
    (pmrctl |= (uint64_t)(val & PMRCTL_EN_MASK) << PMRCTL_EN_SHIFT)
enum NvmePmrstsShift {
    PMRSTS_ERR_SHIFT  = 0,
    PMRSTS_NRDY_SHIFT = 8,
    PMRSTS_HSTS_SHIFT = 9,
    PMRSTS_CBAI_SHIFT = 12,
};

enum NvmePmrstsMask {
    PMRSTS_ERR_MASK  = 0xff,
    PMRSTS_NRDY_MASK = 0x1,
    PMRSTS_HSTS_MASK = 0x7,
    PMRSTS_CBAI_MASK = 0x1,
};

#define NVME_PMRSTS_ERR(pmrsts) \
    ((pmrsts >> PMRSTS_ERR_SHIFT) & PMRSTS_ERR_MASK)
#define NVME_PMRSTS_NRDY(pmrsts) \
    ((pmrsts >> PMRSTS_NRDY_SHIFT) & PMRSTS_NRDY_MASK)
#define NVME_PMRSTS_HSTS(pmrsts) \
    ((pmrsts >> PMRSTS_HSTS_SHIFT) & PMRSTS_HSTS_MASK)
#define NVME_PMRSTS_CBAI(pmrsts) \
    ((pmrsts >> PMRSTS_CBAI_SHIFT) & PMRSTS_CBAI_MASK)

#define NVME_PMRSTS_SET_ERR(pmrsts, val) \
    (pmrsts |= (uint64_t)(val & PMRSTS_ERR_MASK) << PMRSTS_ERR_SHIFT)
#define NVME_PMRSTS_SET_NRDY(pmrsts, val) \
    (pmrsts |= (uint64_t)(val & PMRSTS_NRDY_MASK) << PMRSTS_NRDY_SHIFT)
#define NVME_PMRSTS_SET_HSTS(pmrsts, val) \
    (pmrsts |= (uint64_t)(val & PMRSTS_HSTS_MASK) << PMRSTS_HSTS_SHIFT)
#define NVME_PMRSTS_SET_CBAI(pmrsts, val) \
    (pmrsts |= (uint64_t)(val & PMRSTS_CBAI_MASK) << PMRSTS_CBAI_SHIFT)
enum NvmePmrebsShift {
    PMREBS_PMRSZU_SHIFT = 0,
    PMREBS_RBB_SHIFT    = 4,
    PMREBS_PMRWBZ_SHIFT = 8,
};

enum NvmePmrebsMask {
    PMREBS_PMRSZU_MASK = 0xf,
    PMREBS_RBB_MASK    = 0x1,
    PMREBS_PMRWBZ_MASK = 0xffffff,
};

#define NVME_PMREBS_PMRSZU(pmrebs) \
    ((pmrebs >> PMREBS_PMRSZU_SHIFT) & PMREBS_PMRSZU_MASK)
#define NVME_PMREBS_RBB(pmrebs) \
    ((pmrebs >> PMREBS_RBB_SHIFT) & PMREBS_RBB_MASK)
#define NVME_PMREBS_PMRWBZ(pmrebs) \
    ((pmrebs >> PMREBS_PMRWBZ_SHIFT) & PMREBS_PMRWBZ_MASK)

#define NVME_PMREBS_SET_PMRSZU(pmrebs, val) \
    (pmrebs |= (uint64_t)(val & PMREBS_PMRSZU_MASK) << PMREBS_PMRSZU_SHIFT)
#define NVME_PMREBS_SET_RBB(pmrebs, val) \
    (pmrebs |= (uint64_t)(val & PMREBS_RBB_MASK) << PMREBS_RBB_SHIFT)
#define NVME_PMREBS_SET_PMRWBZ(pmrebs, val) \
    (pmrebs |= (uint64_t)(val & PMREBS_PMRWBZ_MASK) << PMREBS_PMRWBZ_SHIFT)
enum NvmePmrswtpShift {
    PMRSWTP_PMRSWTU_SHIFT = 0,
    PMRSWTP_PMRSWTV_SHIFT = 8,
};

enum NvmePmrswtpMask {
    PMRSWTP_PMRSWTU_MASK = 0xf,
    PMRSWTP_PMRSWTV_MASK = 0xffffff,
};

#define NVME_PMRSWTP_PMRSWTU(pmrswtp) \
    ((pmrswtp >> PMRSWTP_PMRSWTU_SHIFT) & PMRSWTP_PMRSWTU_MASK)
#define NVME_PMRSWTP_PMRSWTV(pmrswtp) \
    ((pmrswtp >> PMRSWTP_PMRSWTV_SHIFT) & PMRSWTP_PMRSWTV_MASK)

#define NVME_PMRSWTP_SET_PMRSWTU(pmrswtp, val) \
    (pmrswtp |= (uint64_t)(val & PMRSWTP_PMRSWTU_MASK) << PMRSWTP_PMRSWTU_SHIFT)
#define NVME_PMRSWTP_SET_PMRSWTV(pmrswtp, val) \
    (pmrswtp |= (uint64_t)(val & PMRSWTP_PMRSWTV_MASK) << PMRSWTP_PMRSWTV_SHIFT)
enum NvmePmrmsclShift {
    PMRMSCL_CMSE_SHIFT = 1,
    PMRMSCL_CBA_SHIFT  = 12,
};

enum NvmePmrmsclMask {
    PMRMSCL_CMSE_MASK = 0x1,
    PMRMSCL_CBA_MASK  = 0xfffff,
};

#define NVME_PMRMSCL_CMSE(pmrmscl) \
    ((pmrmscl >> PMRMSCL_CMSE_SHIFT) & PMRMSCL_CMSE_MASK)
#define NVME_PMRMSCL_CBA(pmrmscl) \
    ((pmrmscl >> PMRMSCL_CBA_SHIFT) & PMRMSCL_CBA_MASK)

#define NVME_PMRMSCL_SET_CMSE(pmrmscl, val) \
    (pmrmscl |= (uint32_t)(val & PMRMSCL_CMSE_MASK) << PMRMSCL_CMSE_SHIFT)
#define NVME_PMRMSCL_SET_CBA(pmrmscl, val) \
    (pmrmscl |= (uint32_t)(val & PMRMSCL_CBA_MASK) << PMRMSCL_CBA_SHIFT)
enum NvmeSglDescriptorType {
    NVME_SGL_DESCR_TYPE_DATA_BLOCK       = 0x0,
    NVME_SGL_DESCR_TYPE_BIT_BUCKET       = 0x1,
    NVME_SGL_DESCR_TYPE_SEGMENT          = 0x2,
    NVME_SGL_DESCR_TYPE_LAST_SEGMENT     = 0x3,
    NVME_SGL_DESCR_TYPE_KEYED_DATA_BLOCK = 0x4,

    NVME_SGL_DESCR_TYPE_VENDOR_SPECIFIC  = 0xf,
};

enum NvmeSglDescriptorSubtype {
    NVME_SGL_DESCR_SUBTYPE_ADDRESS = 0x0,
};
typedef struct QEMU_PACKED NvmeSglDescriptor {
    /* ... */
} NvmeSglDescriptor;

#define NVME_SGL_TYPE(type)    ((type >> 4) & 0xf)
#define NVME_SGL_SUBTYPE(type) (type & 0xf)
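
/*
 * Illustrative sketch (not part of the original header): the descriptor's
 * type byte packs the type in bits 7:4 and the subtype in bits 3:0, which is
 * what NVME_SGL_TYPE()/NVME_SGL_SUBTYPE() above pull apart.  The helper name
 * is hypothetical.
 */
static inline bool nvme_example_sgl_is_data_block(uint8_t type_byte)
{
    return NVME_SGL_TYPE(type_byte) == NVME_SGL_DESCR_TYPE_DATA_BLOCK &&
           NVME_SGL_SUBTYPE(type_byte) == NVME_SGL_DESCR_SUBTYPE_ADDRESS;
}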
typedef union NvmeCmdDptr {
    /* ... */
    NvmeSglDescriptor sgl;
} NvmeCmdDptr;

    NVME_PSDT_SGL_MPTR_CONTIGUOUS = 0x1,
    NVME_PSDT_SGL_MPTR_SGL        = 0x2,
typedef struct QEMU_PACKED NvmeCmd {
    /* ... */
} NvmeCmd;

#define NVME_CMD_FLAGS_FUSE(flags) (flags & 0x3)
#define NVME_CMD_FLAGS_PSDT(flags) ((flags >> 6) & 0x3)
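
/*
 * Illustrative sketch (not part of the original header): deciding how to
 * interpret a command's data pointer from its flags byte, using the PSDT
 * accessor above and the NVME_PSDT_* values.  The helper name is
 * hypothetical.
 */
static inline bool nvme_example_cmd_uses_sgl(uint8_t flags)
{
    uint8_t psdt = NVME_CMD_FLAGS_PSDT(flags);

    return psdt == NVME_PSDT_SGL_MPTR_CONTIGUOUS ||
           psdt == NVME_PSDT_SGL_MPTR_SGL;
}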
enum NvmeAdminCommands {
    NVME_ADM_CMD_DELETE_SQ     = 0x00,
    NVME_ADM_CMD_CREATE_SQ     = 0x01,
    NVME_ADM_CMD_GET_LOG_PAGE  = 0x02,
    NVME_ADM_CMD_DELETE_CQ     = 0x04,
    NVME_ADM_CMD_CREATE_CQ     = 0x05,
    NVME_ADM_CMD_IDENTIFY      = 0x06,
    NVME_ADM_CMD_ABORT         = 0x08,
    NVME_ADM_CMD_SET_FEATURES  = 0x09,
    NVME_ADM_CMD_GET_FEATURES  = 0x0a,
    NVME_ADM_CMD_ASYNC_EV_REQ  = 0x0c,
    NVME_ADM_CMD_ACTIVATE_FW   = 0x10,
    NVME_ADM_CMD_DOWNLOAD_FW   = 0x11,
    NVME_ADM_CMD_NS_ATTACHMENT = 0x15,
    NVME_ADM_CMD_VIRT_MNGMT    = 0x1c,
    NVME_ADM_CMD_DBBUF_CONFIG  = 0x7c,
    NVME_ADM_CMD_FORMAT_NVM    = 0x80,
    NVME_ADM_CMD_SECURITY_SEND = 0x81,
    NVME_ADM_CMD_SECURITY_RECV = 0x82,
};

enum NvmeIoCommands {
    NVME_CMD_FLUSH          = 0x00,
    NVME_CMD_WRITE          = 0x01,
    NVME_CMD_READ           = 0x02,
    NVME_CMD_WRITE_UNCOR    = 0x04,
    NVME_CMD_COMPARE        = 0x05,
    NVME_CMD_WRITE_ZEROES   = 0x08,
    NVME_CMD_VERIFY         = 0x0c,
    NVME_CMD_COPY           = 0x19,
    NVME_CMD_ZONE_MGMT_SEND = 0x79,
    NVME_CMD_ZONE_MGMT_RECV = 0x7a,
    NVME_CMD_ZONE_APPEND    = 0x7d,
};
typedef struct QEMU_PACKED NvmeDeleteQ {
    /* ... */
} NvmeDeleteQ;

typedef struct QEMU_PACKED NvmeCreateCq {
    /* ... */
} NvmeCreateCq;

#define NVME_CQ_FLAGS_PC(cq_flags)  (cq_flags & 0x1)
#define NVME_CQ_FLAGS_IEN(cq_flags) ((cq_flags >> 1) & 0x1)

typedef struct QEMU_PACKED NvmeCreateSq {
    /* ... */
} NvmeCreateSq;

#define NVME_SQ_FLAGS_PC(sq_flags)    (sq_flags & 0x1)
#define NVME_SQ_FLAGS_QPRIO(sq_flags) ((sq_flags >> 1) & 0x3)

    NVME_SQ_PRIO_URGENT = 0,
    NVME_SQ_PRIO_HIGH   = 1,
    NVME_SQ_PRIO_NORMAL = 2,
    NVME_SQ_PRIO_LOW    = 3,

typedef struct QEMU_PACKED NvmeIdentify {
    /* ... */
} NvmeIdentify;

typedef struct QEMU_PACKED NvmeRwCmd {
    /* ... */
} NvmeRwCmd;
    NVME_RW_LR                 = 1 << 15,
    NVME_RW_FUA                = 1 << 14,
    NVME_RW_DSM_FREQ_UNSPEC    = 0,
    NVME_RW_DSM_FREQ_TYPICAL   = 1,
    NVME_RW_DSM_FREQ_RARE      = 2,
    NVME_RW_DSM_FREQ_READS     = 3,
    NVME_RW_DSM_FREQ_WRITES    = 4,
    NVME_RW_DSM_FREQ_RW        = 5,
    NVME_RW_DSM_FREQ_ONCE      = 6,
    NVME_RW_DSM_FREQ_PREFETCH  = 7,
    NVME_RW_DSM_FREQ_TEMP      = 8,
    NVME_RW_DSM_LATENCY_NONE   = 0 << 4,
    NVME_RW_DSM_LATENCY_IDLE   = 1 << 4,
    NVME_RW_DSM_LATENCY_NORM   = 2 << 4,
    NVME_RW_DSM_LATENCY_LOW    = 3 << 4,
    NVME_RW_DSM_SEQ_REQ        = 1 << 6,
    NVME_RW_DSM_COMPRESSED     = 1 << 7,
    NVME_RW_PIREMAP            = 1 << 9,
    NVME_RW_PRINFO_PRACT       = 1 << 13,
    NVME_RW_PRINFO_PRCHK_GUARD = 1 << 12,
    NVME_RW_PRINFO_PRCHK_APP   = 1 << 11,
    NVME_RW_PRINFO_PRCHK_REF   = 1 << 10,
    NVME_RW_PRINFO_PRCHK_MASK  = 7 << 10,

#define NVME_RW_PRINFO(control) ((control >> 10) & 0xf)
    NVME_PRINFO_PRACT       = 1 << 3,
    NVME_PRINFO_PRCHK_GUARD = 1 << 2,
    NVME_PRINFO_PRCHK_APP   = 1 << 1,
    NVME_PRINFO_PRCHK_REF   = 1 << 0,
    NVME_PRINFO_PRCHK_MASK  = 7 << 0,
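
/*
 * Illustrative sketch (not part of the original header): NVME_RW_PRINFO()
 * above narrows the 16-bit control field down to the 4-bit protection
 * information field, which the NVME_PRINFO_* flags then describe.  The
 * helper name is hypothetical.
 */
static inline bool nvme_example_prinfo_has_pract(uint16_t control)
{
    return NVME_RW_PRINFO(control) & NVME_PRINFO_PRACT;
}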
typedef struct QEMU_PACKED NvmeDsmCmd {
    /* ... */
} NvmeDsmCmd;

    NVME_DSMGMT_IDR = 1 << 0,
    NVME_DSMGMT_IDW = 1 << 1,
    NVME_DSMGMT_AD  = 1 << 2,

typedef struct QEMU_PACKED NvmeDsmRange {
    /* ... */
} NvmeDsmRange;

    NVME_COPY_FORMAT_0 = 0x0,
    NVME_COPY_FORMAT_1 = 0x1,

typedef struct QEMU_PACKED NvmeCopyCmd {
    /* ... */
} NvmeCopyCmd;

typedef struct QEMU_PACKED NvmeCopySourceRangeFormat0 {
    /* ... */
} NvmeCopySourceRangeFormat0;

typedef struct QEMU_PACKED NvmeCopySourceRangeFormat1 {
    /* ... */
} NvmeCopySourceRangeFormat1;
enum NvmeAsyncEventRequest {
    NVME_AER_TYPE_ERROR                   = 0,
    NVME_AER_TYPE_SMART                   = 1,
    NVME_AER_TYPE_NOTICE                  = 2,
    NVME_AER_TYPE_IO_SPECIFIC             = 6,
    NVME_AER_TYPE_VENDOR_SPECIFIC         = 7,
    NVME_AER_INFO_ERR_INVALID_DB_REGISTER = 0,
    NVME_AER_INFO_ERR_INVALID_DB_VALUE    = 1,
    NVME_AER_INFO_ERR_DIAG_FAIL           = 2,
    NVME_AER_INFO_ERR_PERS_INTERNAL_ERR   = 3,
    NVME_AER_INFO_ERR_TRANS_INTERNAL_ERR  = 4,
    NVME_AER_INFO_ERR_FW_IMG_LOAD_ERR     = 5,
    NVME_AER_INFO_SMART_RELIABILITY       = 0,
    NVME_AER_INFO_SMART_TEMP_THRESH       = 1,
    NVME_AER_INFO_SMART_SPARE_THRESH      = 2,
    NVME_AER_INFO_NOTICE_NS_ATTR_CHANGED  = 0,
};
typedef struct QEMU_PACKED NvmeAerResult {
    /* ... */
} NvmeAerResult;

typedef struct QEMU_PACKED NvmeZonedResult {
    /* ... */
} NvmeZonedResult;

typedef struct QEMU_PACKED NvmeCqe {
    /* ... */
} NvmeCqe;
enum NvmeStatusCodes {
    NVME_SUCCESS                = 0x0000,
    NVME_INVALID_OPCODE         = 0x0001,
    NVME_INVALID_FIELD          = 0x0002,
    NVME_CID_CONFLICT           = 0x0003,
    NVME_DATA_TRAS_ERROR        = 0x0004,
    NVME_POWER_LOSS_ABORT       = 0x0005,
    NVME_INTERNAL_DEV_ERROR     = 0x0006,
    NVME_CMD_ABORT_REQ          = 0x0007,
    NVME_CMD_ABORT_SQ_DEL       = 0x0008,
    NVME_CMD_ABORT_FAILED_FUSE  = 0x0009,
    NVME_CMD_ABORT_MISSING_FUSE = 0x000a,
    NVME_INVALID_NSID           = 0x000b,
    NVME_CMD_SEQ_ERROR          = 0x000c,
    NVME_INVALID_SGL_SEG_DESCR  = 0x000d,
    NVME_INVALID_NUM_SGL_DESCRS = 0x000e,
    NVME_DATA_SGL_LEN_INVALID   = 0x000f,
    NVME_MD_SGL_LEN_INVALID     = 0x0010,
    NVME_SGL_DESCR_TYPE_INVALID = 0x0011,
    NVME_INVALID_USE_OF_CMB     = 0x0012,
    NVME_INVALID_PRP_OFFSET     = 0x0013,
    NVME_CMD_SET_CMB_REJECTED   = 0x002b,
    NVME_INVALID_CMD_SET        = 0x002c,
    NVME_LBA_RANGE              = 0x0080,
    NVME_CAP_EXCEEDED           = 0x0081,
    NVME_NS_NOT_READY           = 0x0082,
    NVME_NS_RESV_CONFLICT       = 0x0083,
    NVME_FORMAT_IN_PROGRESS     = 0x0084,
    NVME_INVALID_CQID           = 0x0100,
    NVME_INVALID_QID            = 0x0101,
    NVME_MAX_QSIZE_EXCEEDED     = 0x0102,
    NVME_ACL_EXCEEDED           = 0x0103,
    NVME_RESERVED               = 0x0104,
    NVME_AER_LIMIT_EXCEEDED     = 0x0105,
    NVME_INVALID_FW_SLOT        = 0x0106,
    NVME_INVALID_FW_IMAGE       = 0x0107,
    NVME_INVALID_IRQ_VECTOR     = 0x0108,
    NVME_INVALID_LOG_ID         = 0x0109,
    NVME_INVALID_FORMAT         = 0x010a,
    NVME_FW_REQ_RESET           = 0x010b,
    NVME_INVALID_QUEUE_DEL      = 0x010c,
    NVME_FID_NOT_SAVEABLE       = 0x010d,
    NVME_FEAT_NOT_CHANGEABLE    = 0x010e,
    NVME_FEAT_NOT_NS_SPEC       = 0x010f,
    NVME_FW_REQ_SUSYSTEM_RESET  = 0x0110,
    NVME_NS_ALREADY_ATTACHED    = 0x0118,
    NVME_NS_PRIVATE             = 0x0119,
    NVME_NS_NOT_ATTACHED        = 0x011a,
    NVME_NS_CTRL_LIST_INVALID   = 0x011c,
    NVME_INVALID_CTRL_ID        = 0x011f,
    NVME_INVALID_SEC_CTRL_STATE = 0x0120,
    NVME_INVALID_NUM_RESOURCES  = 0x0121,
    NVME_INVALID_RESOURCE_ID    = 0x0122,
    NVME_CONFLICTING_ATTRS      = 0x0180,
    NVME_INVALID_PROT_INFO      = 0x0181,
    NVME_WRITE_TO_RO            = 0x0182,
    NVME_CMD_SIZE_LIMIT         = 0x0183,
    NVME_INVALID_ZONE_OP        = 0x01b6,
    NVME_NOZRWA                 = 0x01b7,
    NVME_ZONE_BOUNDARY_ERROR    = 0x01b8,
    NVME_ZONE_FULL              = 0x01b9,
    NVME_ZONE_READ_ONLY         = 0x01ba,
    NVME_ZONE_OFFLINE           = 0x01bb,
    NVME_ZONE_INVALID_WRITE     = 0x01bc,
    NVME_ZONE_TOO_MANY_ACTIVE   = 0x01bd,
    NVME_ZONE_TOO_MANY_OPEN     = 0x01be,
    NVME_ZONE_INVAL_TRANSITION  = 0x01bf,
    NVME_WRITE_FAULT            = 0x0280,
    NVME_UNRECOVERED_READ       = 0x0281,
    NVME_E2E_GUARD_ERROR        = 0x0282,
    NVME_E2E_APP_ERROR          = 0x0283,
    NVME_E2E_REF_ERROR          = 0x0284,
    NVME_CMP_FAILURE            = 0x0285,
    NVME_ACCESS_DENIED          = 0x0286,
    NVME_E2E_STORAGE_TAG_ERROR  = 0x0288,
    NVME_NO_COMPLETE            = 0xffff,
};
typedef struct QEMU_PACKED NvmeFwSlotInfoLog {
    /* ... */
    uint8_t     reserved1[7];
    /* ... */
    uint8_t     reserved2[448];
} NvmeFwSlotInfoLog;

typedef struct QEMU_PACKED NvmeErrorLog {
    uint64_t    error_count;
    /* ... */
    uint16_t    status_field;
    uint16_t    param_error_location;
    /* ... */
} NvmeErrorLog;
typedef struct QEMU_PACKED NvmeSmartLog {
    uint8_t     critical_warning;
    uint16_t    temperature;
    uint8_t     available_spare;
    uint8_t     available_spare_threshold;
    uint8_t     percentage_used;
    uint8_t     reserved1[26];
    uint64_t    data_units_read[2];
    uint64_t    data_units_written[2];
    uint64_t    host_read_commands[2];
    uint64_t    host_write_commands[2];
    uint64_t    controller_busy_time[2];
    uint64_t    power_cycles[2];
    uint64_t    power_on_hours[2];
    uint64_t    unsafe_shutdowns[2];
    uint64_t    media_errors[2];
    uint64_t    number_of_error_log_entries[2];
    uint8_t     reserved2[320];
} NvmeSmartLog;
#define NVME_SMART_WARN_MAX 6

    NVME_SMART_SPARE                 = 1 << 0,
    NVME_SMART_TEMPERATURE           = 1 << 1,
    NVME_SMART_RELIABILITY           = 1 << 2,
    NVME_SMART_MEDIA_READ_ONLY       = 1 << 3,
    NVME_SMART_FAILED_VOLATILE_MEDIA = 1 << 4,
    NVME_SMART_PMR_UNRELIABLE        = 1 << 5,
typedef struct NvmeEffectsLog {
    /* ... */
} NvmeEffectsLog;

    NVME_CMD_EFF_CSUPP    = 1 << 0,
    NVME_CMD_EFF_LBCC     = 1 << 1,
    NVME_CMD_EFF_NCC      = 1 << 2,
    NVME_CMD_EFF_NIC      = 1 << 3,
    NVME_CMD_EFF_CCC      = 1 << 4,
    NVME_CMD_EFF_CSE_MASK = 3 << 16,
    NVME_CMD_EFF_UUID_SEL = 1 << 19,
enum NvmeLogIdentifier {
    NVME_LOG_ERROR_INFO     = 0x01,
    NVME_LOG_SMART_INFO     = 0x02,
    NVME_LOG_FW_SLOT_INFO   = 0x03,
    NVME_LOG_CHANGED_NSLIST = 0x04,
    NVME_LOG_CMD_EFFECTS    = 0x05,
};
typedef struct QEMU_PACKED NvmePSD {
    /* ... */
} NvmePSD;

#define NVME_CONTROLLER_LIST_SIZE 2048
#define NVME_IDENTIFY_DATA_SIZE 4096
    NVME_ID_CNS_NS                    = 0x00,
    NVME_ID_CNS_CTRL                  = 0x01,
    NVME_ID_CNS_NS_ACTIVE_LIST        = 0x02,
    NVME_ID_CNS_NS_DESCR_LIST         = 0x03,
    NVME_ID_CNS_CS_NS                 = 0x05,
    NVME_ID_CNS_CS_CTRL               = 0x06,
    NVME_ID_CNS_CS_NS_ACTIVE_LIST     = 0x07,
    NVME_ID_CNS_NS_PRESENT_LIST       = 0x10,
    NVME_ID_CNS_NS_PRESENT            = 0x11,
    NVME_ID_CNS_NS_ATTACHED_CTRL_LIST = 0x12,
    NVME_ID_CNS_CTRL_LIST             = 0x13,
    NVME_ID_CNS_PRIMARY_CTRL_CAP      = 0x14,
    NVME_ID_CNS_SECONDARY_CTRL_LIST   = 0x15,
    NVME_ID_CNS_CS_NS_PRESENT_LIST    = 0x1a,
    NVME_ID_CNS_CS_NS_PRESENT         = 0x1b,
    NVME_ID_CNS_IO_COMMAND_SET        = 0x1c,
typedef struct QEMU_PACKED NvmeIdCtrl {
    /* ... */
    uint8_t     rsvd100[11];
    /* ... */
    uint8_t     rsvd128[128];
    /* ... */
    uint8_t     tnvmcap[16];
    uint8_t     unvmcap[16];
    /* ... */
    uint8_t     rsvd332[180];
    /* ... */
    uint8_t     rsvd540[228];
    uint8_t     subnqn[256];
    uint8_t     rsvd1024[1024];
    /* ... */
} NvmeIdCtrl;

typedef struct NvmeIdCtrlZoned {
    /* ... */
    uint8_t     rsvd1[4095];
} NvmeIdCtrlZoned;

typedef struct NvmeIdCtrlNvm {
    /* ... */
    uint8_t     rsvd16[4080];
} NvmeIdCtrlNvm;
enum NvmeIdCtrlOaes {
    NVME_OAES_NS_ATTR = 1 << 8,
};

enum NvmeIdCtrlCtratt {
    NVME_CTRATT_ELBAS = 1 << 15,
};

enum NvmeIdCtrlOacs {
    NVME_OACS_SECURITY = 1 << 0,
    NVME_OACS_FORMAT   = 1 << 1,
    NVME_OACS_FW       = 1 << 2,
    NVME_OACS_NS_MGMT  = 1 << 3,
    NVME_OACS_DBBUF    = 1 << 8,
};

enum NvmeIdCtrlOncs {
    NVME_ONCS_COMPARE      = 1 << 0,
    NVME_ONCS_WRITE_UNCORR = 1 << 1,
    NVME_ONCS_DSM          = 1 << 2,
    NVME_ONCS_WRITE_ZEROES = 1 << 3,
    NVME_ONCS_FEATURES     = 1 << 4,
    NVME_ONCS_RESRVATIONS  = 1 << 5,
    NVME_ONCS_TIMESTAMP    = 1 << 6,
    NVME_ONCS_VERIFY       = 1 << 7,
    NVME_ONCS_COPY         = 1 << 8,
};

enum NvmeIdCtrlOcfs {
    NVME_OCFS_COPY_FORMAT_0 = 1 << NVME_COPY_FORMAT_0,
    NVME_OCFS_COPY_FORMAT_1 = 1 << NVME_COPY_FORMAT_1,
};

enum NvmeIdctrlVwc {
    NVME_VWC_PRESENT                   = 1 << 0,
    NVME_VWC_NSID_BROADCAST_NO_SUPPORT = 0 << 1,
    NVME_VWC_NSID_BROADCAST_RESERVED   = 1 << 1,
    NVME_VWC_NSID_BROADCAST_CTRL_SPEC  = 2 << 1,
    NVME_VWC_NSID_BROADCAST_SUPPORT    = 3 << 1,
};

enum NvmeIdCtrlFrmw {
    NVME_FRMW_SLOT1_RO = 1 << 0,
};

enum NvmeIdCtrlLpa {
    NVME_LPA_NS_SMART = 1 << 0,
    NVME_LPA_CSE      = 1 << 1,
    NVME_LPA_EXTENDED = 1 << 2,
};

enum NvmeIdCtrlCmic {
    NVME_CMIC_MULTI_CTRL = 1 << 1,
};
enum NvmeNsAttachmentOperation {
    NVME_NS_ATTACHMENT_ATTACH = 0x0,
    NVME_NS_ATTACHMENT_DETACH = 0x1,
};
#define NVME_CTRL_SQES_MIN(sqes) ((sqes) & 0xf)
#define NVME_CTRL_SQES_MAX(sqes) (((sqes) >> 4) & 0xf)
#define NVME_CTRL_CQES_MIN(cqes) ((cqes) & 0xf)
#define NVME_CTRL_CQES_MAX(cqes) (((cqes) >> 4) & 0xf)

#define NVME_CTRL_SGLS_SUPPORT_MASK        (0x3 << 0)
#define NVME_CTRL_SGLS_SUPPORT_NO_ALIGN    (0x1 << 0)
#define NVME_CTRL_SGLS_SUPPORT_DWORD_ALIGN (0x1 << 1)
#define NVME_CTRL_SGLS_KEYED               (0x1 << 2)
#define NVME_CTRL_SGLS_BITBUCKET           (0x1 << 16)
#define NVME_CTRL_SGLS_MPTR_CONTIGUOUS     (0x1 << 17)
#define NVME_CTRL_SGLS_EXCESS_LENGTH       (0x1 << 18)
#define NVME_CTRL_SGLS_MPTR_SGL            (0x1 << 19)
#define NVME_CTRL_SGLS_ADDR_OFFSET         (0x1 << 20)

#define NVME_ARB_AB(arb)    (arb & 0x7)
#define NVME_ARB_AB_NOLIMIT 0x7
#define NVME_ARB_LPW(arb)   ((arb >> 8) & 0xff)
#define NVME_ARB_MPW(arb)   ((arb >> 16) & 0xff)
#define NVME_ARB_HPW(arb)   ((arb >> 24) & 0xff)

#define NVME_INTC_THR(intc)  (intc & 0xff)
#define NVME_INTC_TIME(intc) ((intc >> 8) & 0xff)

#define NVME_INTVC_NOCOALESCING (0x1 << 16)

#define NVME_TEMP_THSEL(temp) ((temp >> 20) & 0x3)
#define NVME_TEMP_THSEL_OVER  0x0
#define NVME_TEMP_THSEL_UNDER 0x1

#define NVME_TEMP_TMPSEL(temp)     ((temp >> 16) & 0xf)
#define NVME_TEMP_TMPSEL_COMPOSITE 0x0

#define NVME_TEMP_TMPTH(temp) (temp & 0xffff)

#define NVME_AEC_SMART(aec)         (aec & 0xff)
#define NVME_AEC_NS_ATTR(aec)       ((aec >> 8) & 0x1)
#define NVME_AEC_FW_ACTIVATION(aec) ((aec >> 9) & 0x1)

#define NVME_ERR_REC_TLER(err_rec)  (err_rec & 0xffff)
#define NVME_ERR_REC_DULBE(err_rec) (err_rec & 0x10000)
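
/*
 * Illustrative sketch (not part of the original header): decoding a
 * Temperature Threshold feature value with the NVME_TEMP_* macros above.
 * The helper name is hypothetical.
 */
static inline bool nvme_example_is_composite_under_thresh(uint32_t dw11)
{
    return NVME_TEMP_TMPSEL(dw11) == NVME_TEMP_TMPSEL_COMPOSITE &&
           NVME_TEMP_THSEL(dw11) == NVME_TEMP_THSEL_UNDER;
}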
enum NvmeFeatureIds {
    NVME_ARBITRATION              = 0x1,
    NVME_POWER_MANAGEMENT         = 0x2,
    NVME_LBA_RANGE_TYPE           = 0x3,
    NVME_TEMPERATURE_THRESHOLD    = 0x4,
    NVME_ERROR_RECOVERY           = 0x5,
    NVME_VOLATILE_WRITE_CACHE     = 0x6,
    NVME_NUMBER_OF_QUEUES         = 0x7,
    NVME_INTERRUPT_COALESCING     = 0x8,
    NVME_INTERRUPT_VECTOR_CONF    = 0x9,
    NVME_WRITE_ATOMICITY          = 0xa,
    NVME_ASYNCHRONOUS_EVENT_CONF  = 0xb,
    NVME_TIMESTAMP                = 0xe,
    NVME_HOST_BEHAVIOR_SUPPORT    = 0x16,
    NVME_COMMAND_SET_PROFILE      = 0x19,
    NVME_SOFTWARE_PROGRESS_MARKER = 0x80,
    NVME_FID_MAX                  = 0x100,
};
typedef enum NvmeFeatureCap {
    NVME_FEAT_CAP_SAVE   = 1 << 0,
    NVME_FEAT_CAP_NS     = 1 << 1,
    NVME_FEAT_CAP_CHANGE = 1 << 2,
} NvmeFeatureCap;

typedef enum NvmeGetFeatureSelect {
    NVME_GETFEAT_SELECT_CURRENT = 0x0,
    NVME_GETFEAT_SELECT_DEFAULT = 0x1,
    NVME_GETFEAT_SELECT_SAVED   = 0x2,
    NVME_GETFEAT_SELECT_CAP     = 0x3,
} NvmeGetFeatureSelect;
#define NVME_GETSETFEAT_FID_MASK 0xff
#define NVME_GETSETFEAT_FID(dw10) (dw10 & NVME_GETSETFEAT_FID_MASK)

#define NVME_GETFEAT_SELECT_SHIFT 8
#define NVME_GETFEAT_SELECT_MASK  0x7
#define NVME_GETFEAT_SELECT(dw10) \
    ((dw10 >> NVME_GETFEAT_SELECT_SHIFT) & NVME_GETFEAT_SELECT_MASK)

#define NVME_SETFEAT_SAVE_SHIFT 31
#define NVME_SETFEAT_SAVE_MASK  0x1
#define NVME_SETFEAT_SAVE(dw10) \
    ((dw10 >> NVME_SETFEAT_SAVE_SHIFT) & NVME_SETFEAT_SAVE_MASK)
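
/*
 * Illustrative sketch (not part of the original header): splitting CDW10 of
 * a Get Features / Set Features command with the macros above.  The helper
 * names are hypothetical.
 */
static inline uint8_t nvme_example_feature_fid(uint32_t dw10)
{
    return NVME_GETSETFEAT_FID(dw10);
}

static inline bool nvme_example_getfeat_wants_default(uint32_t dw10)
{
    return NVME_GETFEAT_SELECT(dw10) == NVME_GETFEAT_SELECT_DEFAULT;
}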
typedef struct QEMU_PACKED NvmeRangeType {
    /* ... */
} NvmeRangeType;

typedef struct NvmeHostBehaviorSupport {
    /* ... */
} NvmeHostBehaviorSupport;

typedef struct QEMU_PACKED NvmeLBAF {
    /* ... */
} NvmeLBAF;

typedef struct QEMU_PACKED NvmeLBAFE {
    /* ... */
} NvmeLBAFE;

#define NVME_NSID_BROADCAST 0xffffffff
#define NVME_MAX_NLBAF 64
typedef struct QEMU_PACKED NvmeIdNs {
    /* ... */
    NvmeLBAF    lbaf[NVME_MAX_NLBAF];
    /* ... */
} NvmeIdNs;

#define NVME_ID_NS_NVM_ELBAF_PIF(elbaf) (((elbaf) >> 7) & 0x3)

typedef struct QEMU_PACKED NvmeIdNsNvm {
    /* ... */
    uint32_t    elbaf[NVME_MAX_NLBAF];
    uint8_t     rsvd268[3828];
} NvmeIdNsNvm;
typedef struct QEMU_PACKED NvmeIdNsDescr {
    /* ... */
} NvmeIdNsDescr;

enum NvmeNsIdentifierLength {
    NVME_NIDL_EUI64 = 8,
    NVME_NIDL_NGUID = 16,
    NVME_NIDL_UUID  = 16,
};

enum NvmeNsIdentifierType {
    NVME_NIDT_EUI64 = 0x01,
    NVME_NIDT_NGUID = 0x02,
    NVME_NIDT_UUID  = 0x03,
    NVME_NIDT_CSI   = 0x04,
};

    NVME_NMIC_NS_SHARED = 1 << 0,

    NVME_CSI_NVM   = 0x00,
    NVME_CSI_ZONED = 0x02,
#define NVME_SET_CSI(vec, csi) (vec |= (uint8_t)(1 << (csi)))
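
/*
 * Illustrative sketch (not part of the original header): building the
 * command set vector returned for NVME_ID_CNS_IO_COMMAND_SET by setting one
 * bit per supported command set with NVME_SET_CSI.  The helper name is
 * hypothetical.
 */
static inline uint8_t nvme_example_iocs_vector(bool zoned)
{
    uint8_t vec = 0;

    NVME_SET_CSI(vec, NVME_CSI_NVM);
    if (zoned) {
        NVME_SET_CSI(vec, NVME_CSI_ZONED);
    }

    return vec;
}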
typedef struct QEMU_PACKED NvmeIdNsZoned {
    /* ... */
    uint8_t     rsvd53[2763];
    NvmeLBAFE   lbafe[16];
    uint8_t     rsvd3072[768];
    /* ... */
} NvmeIdNsZoned;

enum NvmeIdNsZonedOzcs {
    NVME_ID_NS_ZONED_OZCS_RAZB    = 1 << 0,
    NVME_ID_NS_ZONED_OZCS_ZRWASUP = 1 << 1,
};

enum NvmeIdNsZonedZrwacap {
    NVME_ID_NS_ZONED_ZRWACAP_EXPFLUSHSUP = 1 << 0,
};
/* Deallocate Logical Block Features */
#define NVME_ID_NS_DLFEAT_GUARD_CRC(dlfeat)    ((dlfeat) & 0x10)
#define NVME_ID_NS_DLFEAT_WRITE_ZEROES(dlfeat) ((dlfeat) & 0x08)

#define NVME_ID_NS_DLFEAT_READ_BEHAVIOR(dlfeat)   ((dlfeat) & 0x7)
#define NVME_ID_NS_DLFEAT_READ_BEHAVIOR_UNDEFINED 0
#define NVME_ID_NS_DLFEAT_READ_BEHAVIOR_ZEROES    1
#define NVME_ID_NS_DLFEAT_READ_BEHAVIOR_ONES      2

#define NVME_ID_NS_NSFEAT_THIN(nsfeat)   ((nsfeat & 0x1))
#define NVME_ID_NS_NSFEAT_DULBE(nsfeat)  ((nsfeat >> 2) & 0x1)
#define NVME_ID_NS_FLBAS_EXTENDED(flbas) ((flbas >> 4) & 0x1)
#define NVME_ID_NS_FLBAS_INDEX(flbas)    ((flbas & 0xf))
#define NVME_ID_NS_MC_SEPARATE(mc)       ((mc >> 1) & 0x1)
#define NVME_ID_NS_MC_EXTENDED(mc)       ((mc & 0x1))
#define NVME_ID_NS_DPC_LAST_EIGHT(dpc)   ((dpc >> 4) & 0x1)
#define NVME_ID_NS_DPC_FIRST_EIGHT(dpc)  ((dpc >> 3) & 0x1)
#define NVME_ID_NS_DPC_TYPE_3(dpc)       ((dpc >> 2) & 0x1)
#define NVME_ID_NS_DPC_TYPE_2(dpc)       ((dpc >> 1) & 0x1)
#define NVME_ID_NS_DPC_TYPE_1(dpc)       ((dpc & 0x1))
#define NVME_ID_NS_DPC_TYPE_MASK         0x7
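
/*
 * Illustrative sketch (not part of the original header): picking the active
 * LBA format out of an identify namespace structure's flbas field with the
 * macros above.  The helper names are hypothetical.
 */
static inline uint8_t nvme_example_active_lbaf_index(uint8_t flbas)
{
    return NVME_ID_NS_FLBAS_INDEX(flbas);
}

static inline bool nvme_example_metadata_is_extended(uint8_t flbas)
{
    /* metadata transferred at the end of each LBA rather than separately */
    return NVME_ID_NS_FLBAS_EXTENDED(flbas);
}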
    NVME_ID_NS_DPS_TYPE_NONE   = 0,
    NVME_ID_NS_DPS_TYPE_1      = 1,
    NVME_ID_NS_DPS_TYPE_2      = 2,
    NVME_ID_NS_DPS_TYPE_3      = 3,
    NVME_ID_NS_DPS_TYPE_MASK   = 0x7,
    NVME_ID_NS_DPS_FIRST_EIGHT = 8,

enum NvmeIdNsFlbas {
    NVME_ID_NS_FLBAS_EXTENDED = 1 << 4,
};

    NVME_ID_NS_MC_EXTENDED = 1 << 0,
    NVME_ID_NS_MC_SEPARATE = 1 << 1,

#define NVME_ID_NS_DPS_TYPE(dps) (dps & NVME_ID_NS_DPS_TYPE_MASK)
    NVME_PI_GUARD_16 = 0,
    NVME_PI_GUARD_64 = 2,

typedef union NvmeDifTuple {
    /* ... */
} NvmeDifTuple;
    NVME_ZA_FINISHED_BY_CTLR   = 1 << 0,
    NVME_ZA_FINISH_RECOMMENDED = 1 << 1,
    NVME_ZA_RESET_RECOMMENDED  = 1 << 2,
    NVME_ZA_ZRWA_VALID         = 1 << 3,
    NVME_ZA_ZD_EXT_VALID       = 1 << 7,

typedef struct QEMU_PACKED NvmeZoneReportHeader {
    /* ... */
} NvmeZoneReportHeader;
enum NvmeZoneReceiveAction {
    NVME_ZONE_REPORT          = 0,
    NVME_ZONE_REPORT_EXTENDED = 1,
};

enum NvmeZoneReportType {
    NVME_ZONE_REPORT_ALL             = 0,
    NVME_ZONE_REPORT_EMPTY           = 1,
    NVME_ZONE_REPORT_IMPLICITLY_OPEN = 2,
    NVME_ZONE_REPORT_EXPLICITLY_OPEN = 3,
    NVME_ZONE_REPORT_CLOSED          = 4,
    NVME_ZONE_REPORT_FULL            = 5,
    NVME_ZONE_REPORT_READ_ONLY       = 6,
    NVME_ZONE_REPORT_OFFLINE         = 7,
};

    NVME_ZONE_TYPE_RESERVED  = 0x00,
    NVME_ZONE_TYPE_SEQ_WRITE = 0x02,
typedef struct QEMU_PACKED NvmeZoneSendCmd {
    /* ... */
} NvmeZoneSendCmd;

enum NvmeZoneSendAction {
    NVME_ZONE_ACTION_RSD        = 0x00,
    NVME_ZONE_ACTION_CLOSE      = 0x01,
    NVME_ZONE_ACTION_FINISH     = 0x02,
    NVME_ZONE_ACTION_OPEN       = 0x03,
    NVME_ZONE_ACTION_RESET      = 0x04,
    NVME_ZONE_ACTION_OFFLINE    = 0x05,
    NVME_ZONE_ACTION_SET_ZD_EXT = 0x10,
    NVME_ZONE_ACTION_ZRWA_FLUSH = 0x11,
};

    NVME_ZSFLAG_SELECT_ALL = 1 << 0,
    NVME_ZSFLAG_ZRWA_ALLOC = 1 << 1,
typedef struct QEMU_PACKED NvmeZoneDescr {
    /* ... */
} NvmeZoneDescr;

typedef enum NvmeZoneState {
    NVME_ZONE_STATE_RESERVED        = 0x00,
    NVME_ZONE_STATE_EMPTY           = 0x01,
    NVME_ZONE_STATE_IMPLICITLY_OPEN = 0x02,
    NVME_ZONE_STATE_EXPLICITLY_OPEN = 0x03,
    NVME_ZONE_STATE_CLOSED          = 0x04,
    NVME_ZONE_STATE_READ_ONLY       = 0x0d,
    NVME_ZONE_STATE_FULL            = 0x0e,
    NVME_ZONE_STATE_OFFLINE         = 0x0f,
} NvmeZoneState;
typedef struct QEMU_PACKED NvmePriCtrlCap {
    /* ... */
    uint8_t     rsvd80[4016];
} NvmePriCtrlCap;

typedef enum NvmePriCtrlCapCrt {
    NVME_CRT_VQ = 1 << 0,
    NVME_CRT_VI = 1 << 1,
} NvmePriCtrlCapCrt;
typedef struct QEMU_PACKED NvmeSecCtrlEntry {
    /* ... */
} NvmeSecCtrlEntry;

typedef struct QEMU_PACKED NvmeSecCtrlList {
    /* ... */
    NvmeSecCtrlEntry    sec[127];
} NvmeSecCtrlList;
typedef enum NvmeVirtMngmtAction {
    NVME_VIRT_MNGMT_ACTION_PRM_ALLOC   = 0x01,
    NVME_VIRT_MNGMT_ACTION_SEC_OFFLINE = 0x07,
    NVME_VIRT_MNGMT_ACTION_SEC_ASSIGN  = 0x08,
    NVME_VIRT_MNGMT_ACTION_SEC_ONLINE  = 0x09,
} NvmeVirtMngmtAction;

typedef enum NvmeVirtualResourceType {
    NVME_VIRT_RES_QUEUE     = 0x00,
    NVME_VIRT_RES_INTERRUPT = 0x01,
} NvmeVirtualResourceType;
static inline void _nvme_check_size(void)
{
    QEMU_BUILD_BUG_ON(sizeof(NvmeBar) != 4096);
    QEMU_BUILD_BUG_ON(sizeof(NvmeAerResult) != 4);
    QEMU_BUILD_BUG_ON(sizeof(NvmeZonedResult) != 8);
    QEMU_BUILD_BUG_ON(sizeof(NvmeCqe) != 16);
    QEMU_BUILD_BUG_ON(sizeof(NvmeDsmRange) != 16);
    QEMU_BUILD_BUG_ON(sizeof(NvmeCopySourceRangeFormat0) != 32);
    QEMU_BUILD_BUG_ON(sizeof(NvmeCopySourceRangeFormat1) != 40);
    QEMU_BUILD_BUG_ON(sizeof(NvmeCmd) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeDeleteQ) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeCreateCq) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeCreateSq) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeIdentify) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeRwCmd) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeDsmCmd) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeCopyCmd) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeRangeType) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeHostBehaviorSupport) != 512);
    QEMU_BUILD_BUG_ON(sizeof(NvmeErrorLog) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeFwSlotInfoLog) != 512);
    QEMU_BUILD_BUG_ON(sizeof(NvmeSmartLog) != 512);
    QEMU_BUILD_BUG_ON(sizeof(NvmeEffectsLog) != 4096);
    QEMU_BUILD_BUG_ON(sizeof(NvmeIdCtrl) != 4096);
    QEMU_BUILD_BUG_ON(sizeof(NvmeIdCtrlZoned) != 4096);
    QEMU_BUILD_BUG_ON(sizeof(NvmeIdCtrlNvm) != 4096);
    QEMU_BUILD_BUG_ON(sizeof(NvmeLBAF) != 4);
    QEMU_BUILD_BUG_ON(sizeof(NvmeLBAFE) != 16);
    QEMU_BUILD_BUG_ON(sizeof(NvmeIdNs) != 4096);
    QEMU_BUILD_BUG_ON(sizeof(NvmeIdNsNvm) != 4096);
    QEMU_BUILD_BUG_ON(sizeof(NvmeIdNsZoned) != 4096);
    QEMU_BUILD_BUG_ON(sizeof(NvmeSglDescriptor) != 16);
    QEMU_BUILD_BUG_ON(sizeof(NvmeIdNsDescr) != 4);
    QEMU_BUILD_BUG_ON(sizeof(NvmeZoneDescr) != 64);
    QEMU_BUILD_BUG_ON(sizeof(NvmeDifTuple) != 16);
    QEMU_BUILD_BUG_ON(sizeof(NvmePriCtrlCap) != 4096);
    QEMU_BUILD_BUG_ON(sizeof(NvmeSecCtrlEntry) != 32);
    QEMU_BUILD_BUG_ON(sizeof(NvmeSecCtrlList) != 4096);
}