4 typedef struct QEMU_PACKED NvmeBar
{
35 NVME_REG_CAP
= offsetof(NvmeBar
, cap
),
36 NVME_REG_VS
= offsetof(NvmeBar
, vs
),
37 NVME_REG_INTMS
= offsetof(NvmeBar
, intms
),
38 NVME_REG_INTMC
= offsetof(NvmeBar
, intmc
),
39 NVME_REG_CC
= offsetof(NvmeBar
, cc
),
40 NVME_REG_CSTS
= offsetof(NvmeBar
, csts
),
41 NVME_REG_NSSR
= offsetof(NvmeBar
, nssr
),
42 NVME_REG_AQA
= offsetof(NvmeBar
, aqa
),
43 NVME_REG_ASQ
= offsetof(NvmeBar
, asq
),
44 NVME_REG_ACQ
= offsetof(NvmeBar
, acq
),
45 NVME_REG_CMBLOC
= offsetof(NvmeBar
, cmbloc
),
46 NVME_REG_CMBSZ
= offsetof(NvmeBar
, cmbsz
),
47 NVME_REG_BPINFO
= offsetof(NvmeBar
, bpinfo
),
48 NVME_REG_BPRSEL
= offsetof(NvmeBar
, bprsel
),
49 NVME_REG_BPMBL
= offsetof(NvmeBar
, bpmbl
),
50 NVME_REG_CMBMSC
= offsetof(NvmeBar
, cmbmsc
),
51 NVME_REG_CMBSTS
= offsetof(NvmeBar
, cmbsts
),
52 NVME_REG_PMRCAP
= offsetof(NvmeBar
, pmrcap
),
53 NVME_REG_PMRCTL
= offsetof(NvmeBar
, pmrctl
),
54 NVME_REG_PMRSTS
= offsetof(NvmeBar
, pmrsts
),
55 NVME_REG_PMREBS
= offsetof(NvmeBar
, pmrebs
),
56 NVME_REG_PMRSWTP
= offsetof(NvmeBar
, pmrswtp
),
57 NVME_REG_PMRMSCL
= offsetof(NvmeBar
, pmrmscl
),
58 NVME_REG_PMRMSCU
= offsetof(NvmeBar
, pmrmscu
),
69 CAP_MPSMIN_SHIFT
= 48,
70 CAP_MPSMAX_SHIFT
= 52,
76 CAP_MQES_MASK
= 0xffff,
83 CAP_MPSMIN_MASK
= 0xf,
84 CAP_MPSMAX_MASK
= 0xf,
/*
 * CAP (Controller Capabilities) register field extractors.
 * Each macro isolates one field of the 64-bit CAP register value using
 * the CAP_*_SHIFT / CAP_*_MASK constants declared in the enums above
 * (only partially visible here — confirm against the full header).
 */
#define NVME_CAP_MQES(cap) (((cap) >> CAP_MQES_SHIFT) & CAP_MQES_MASK)
#define NVME_CAP_CQR(cap) (((cap) >> CAP_CQR_SHIFT) & CAP_CQR_MASK)
#define NVME_CAP_AMS(cap) (((cap) >> CAP_AMS_SHIFT) & CAP_AMS_MASK)
#define NVME_CAP_TO(cap) (((cap) >> CAP_TO_SHIFT) & CAP_TO_MASK)
#define NVME_CAP_DSTRD(cap) (((cap) >> CAP_DSTRD_SHIFT) & CAP_DSTRD_MASK)
#define NVME_CAP_NSSRS(cap) (((cap) >> CAP_NSSRS_SHIFT) & CAP_NSSRS_MASK)
#define NVME_CAP_CSS(cap) (((cap) >> CAP_CSS_SHIFT) & CAP_CSS_MASK)
#define NVME_CAP_MPSMIN(cap)(((cap) >> CAP_MPSMIN_SHIFT) & CAP_MPSMIN_MASK)
#define NVME_CAP_MPSMAX(cap)(((cap) >> CAP_MPSMAX_SHIFT) & CAP_MPSMAX_MASK)
#define NVME_CAP_PMRS(cap) (((cap) >> CAP_PMRS_SHIFT) & CAP_PMRS_MASK)
#define NVME_CAP_CMBS(cap) (((cap) >> CAP_CMBS_SHIFT) & CAP_CMBS_MASK)
101 #define NVME_CAP_SET_MQES(cap, val) (cap |= (uint64_t)(val & CAP_MQES_MASK) \
103 #define NVME_CAP_SET_CQR(cap, val) (cap |= (uint64_t)(val & CAP_CQR_MASK) \
105 #define NVME_CAP_SET_AMS(cap, val) (cap |= (uint64_t)(val & CAP_AMS_MASK) \
107 #define NVME_CAP_SET_TO(cap, val) (cap |= (uint64_t)(val & CAP_TO_MASK) \
109 #define NVME_CAP_SET_DSTRD(cap, val) (cap |= (uint64_t)(val & CAP_DSTRD_MASK) \
111 #define NVME_CAP_SET_NSSRS(cap, val) (cap |= (uint64_t)(val & CAP_NSSRS_MASK) \
113 #define NVME_CAP_SET_CSS(cap, val) (cap |= (uint64_t)(val & CAP_CSS_MASK) \
115 #define NVME_CAP_SET_MPSMIN(cap, val) (cap |= (uint64_t)(val & CAP_MPSMIN_MASK)\
117 #define NVME_CAP_SET_MPSMAX(cap, val) (cap |= (uint64_t)(val & CAP_MPSMAX_MASK)\
119 #define NVME_CAP_SET_PMRS(cap, val) (cap |= (uint64_t)(val & CAP_PMRS_MASK) \
121 #define NVME_CAP_SET_CMBS(cap, val) (cap |= (uint64_t)(val & CAP_CMBS_MASK) \
125 NVME_CAP_CSS_NVM
= 1 << 0,
126 NVME_CAP_CSS_CSI_SUPP
= 1 << 6,
127 NVME_CAP_CSS_ADMIN_ONLY
= 1 << 7,
136 CC_IOSQES_SHIFT
= 16,
137 CC_IOCQES_SHIFT
= 20,
146 CC_IOSQES_MASK
= 0xf,
147 CC_IOCQES_MASK
= 0xf,
150 #define NVME_CC_EN(cc) ((cc >> CC_EN_SHIFT) & CC_EN_MASK)
151 #define NVME_CC_CSS(cc) ((cc >> CC_CSS_SHIFT) & CC_CSS_MASK)
152 #define NVME_CC_MPS(cc) ((cc >> CC_MPS_SHIFT) & CC_MPS_MASK)
153 #define NVME_CC_AMS(cc) ((cc >> CC_AMS_SHIFT) & CC_AMS_MASK)
154 #define NVME_CC_SHN(cc) ((cc >> CC_SHN_SHIFT) & CC_SHN_MASK)
155 #define NVME_CC_IOSQES(cc) ((cc >> CC_IOSQES_SHIFT) & CC_IOSQES_MASK)
156 #define NVME_CC_IOCQES(cc) ((cc >> CC_IOCQES_SHIFT) & CC_IOCQES_MASK)
159 NVME_CC_CSS_NVM
= 0x0,
160 NVME_CC_CSS_CSI
= 0x6,
161 NVME_CC_CSS_ADMIN_ONLY
= 0x7,
164 #define NVME_SET_CC_EN(cc, val) \
165 (cc |= (uint32_t)((val) & CC_EN_MASK) << CC_EN_SHIFT)
166 #define NVME_SET_CC_CSS(cc, val) \
167 (cc |= (uint32_t)((val) & CC_CSS_MASK) << CC_CSS_SHIFT)
168 #define NVME_SET_CC_MPS(cc, val) \
169 (cc |= (uint32_t)((val) & CC_MPS_MASK) << CC_MPS_SHIFT)
170 #define NVME_SET_CC_AMS(cc, val) \
171 (cc |= (uint32_t)((val) & CC_AMS_MASK) << CC_AMS_SHIFT)
172 #define NVME_SET_CC_SHN(cc, val) \
173 (cc |= (uint32_t)((val) & CC_SHN_MASK) << CC_SHN_SHIFT)
174 #define NVME_SET_CC_IOSQES(cc, val) \
175 (cc |= (uint32_t)((val) & CC_IOSQES_MASK) << CC_IOSQES_SHIFT)
176 #define NVME_SET_CC_IOCQES(cc, val) \
177 (cc |= (uint32_t)((val) & CC_IOCQES_MASK) << CC_IOCQES_SHIFT)
183 CSTS_NSSRO_SHIFT
= 4,
189 CSTS_SHST_MASK
= 0x3,
190 CSTS_NSSRO_MASK
= 0x1,
194 NVME_CSTS_READY
= 1 << CSTS_RDY_SHIFT
,
195 NVME_CSTS_FAILED
= 1 << CSTS_CFS_SHIFT
,
196 NVME_CSTS_SHST_NORMAL
= 0 << CSTS_SHST_SHIFT
,
197 NVME_CSTS_SHST_PROGRESS
= 1 << CSTS_SHST_SHIFT
,
198 NVME_CSTS_SHST_COMPLETE
= 2 << CSTS_SHST_SHIFT
,
199 NVME_CSTS_NSSRO
= 1 << CSTS_NSSRO_SHIFT
,
202 #define NVME_CSTS_RDY(csts) ((csts >> CSTS_RDY_SHIFT) & CSTS_RDY_MASK)
203 #define NVME_CSTS_CFS(csts) ((csts >> CSTS_CFS_SHIFT) & CSTS_CFS_MASK)
204 #define NVME_CSTS_SHST(csts) ((csts >> CSTS_SHST_SHIFT) & CSTS_SHST_MASK)
205 #define NVME_CSTS_NSSRO(csts) ((csts >> CSTS_NSSRO_SHIFT) & CSTS_NSSRO_MASK)
213 AQA_ASQS_MASK
= 0xfff,
214 AQA_ACQS_MASK
= 0xfff,
217 #define NVME_AQA_ASQS(aqa) ((aqa >> AQA_ASQS_SHIFT) & AQA_ASQS_MASK)
218 #define NVME_AQA_ACQS(aqa) ((aqa >> AQA_ACQS_SHIFT) & AQA_ACQS_MASK)
220 enum NvmeCmblocShift
{
221 CMBLOC_BIR_SHIFT
= 0,
222 CMBLOC_CQMMS_SHIFT
= 3,
223 CMBLOC_CQPDS_SHIFT
= 4,
224 CMBLOC_CDPMLS_SHIFT
= 5,
225 CMBLOC_CDPCILS_SHIFT
= 6,
226 CMBLOC_CDMMMS_SHIFT
= 7,
227 CMBLOC_CQDA_SHIFT
= 8,
228 CMBLOC_OFST_SHIFT
= 12,
231 enum NvmeCmblocMask
{
232 CMBLOC_BIR_MASK
= 0x7,
233 CMBLOC_CQMMS_MASK
= 0x1,
234 CMBLOC_CQPDS_MASK
= 0x1,
235 CMBLOC_CDPMLS_MASK
= 0x1,
236 CMBLOC_CDPCILS_MASK
= 0x1,
237 CMBLOC_CDMMMS_MASK
= 0x1,
238 CMBLOC_CQDA_MASK
= 0x1,
239 CMBLOC_OFST_MASK
= 0xfffff,
242 #define NVME_CMBLOC_BIR(cmbloc) \
243 ((cmbloc >> CMBLOC_BIR_SHIFT) & CMBLOC_BIR_MASK)
244 #define NVME_CMBLOC_CQMMS(cmbloc) \
245 ((cmbloc >> CMBLOC_CQMMS_SHIFT) & CMBLOC_CQMMS_MASK)
246 #define NVME_CMBLOC_CQPDS(cmbloc) \
247 ((cmbloc >> CMBLOC_CQPDS_SHIFT) & CMBLOC_CQPDS_MASK)
248 #define NVME_CMBLOC_CDPMLS(cmbloc) \
249 ((cmbloc >> CMBLOC_CDPMLS_SHIFT) & CMBLOC_CDPMLS_MASK)
250 #define NVME_CMBLOC_CDPCILS(cmbloc) \
251 ((cmbloc >> CMBLOC_CDPCILS_SHIFT) & CMBLOC_CDPCILS_MASK)
252 #define NVME_CMBLOC_CDMMMS(cmbloc) \
253 ((cmbloc >> CMBLOC_CDMMMS_SHIFT) & CMBLOC_CDMMMS_MASK)
254 #define NVME_CMBLOC_CQDA(cmbloc) \
255 ((cmbloc >> CMBLOC_CQDA_SHIFT) & CMBLOC_CQDA_MASK)
256 #define NVME_CMBLOC_OFST(cmbloc) \
257 ((cmbloc >> CMBLOC_OFST_SHIFT) & CMBLOC_OFST_MASK)
259 #define NVME_CMBLOC_SET_BIR(cmbloc, val) \
260 (cmbloc |= (uint64_t)(val & CMBLOC_BIR_MASK) << CMBLOC_BIR_SHIFT)
261 #define NVME_CMBLOC_SET_CQMMS(cmbloc, val) \
262 (cmbloc |= (uint64_t)(val & CMBLOC_CQMMS_MASK) << CMBLOC_CQMMS_SHIFT)
263 #define NVME_CMBLOC_SET_CQPDS(cmbloc, val) \
264 (cmbloc |= (uint64_t)(val & CMBLOC_CQPDS_MASK) << CMBLOC_CQPDS_SHIFT)
265 #define NVME_CMBLOC_SET_CDPMLS(cmbloc, val) \
266 (cmbloc |= (uint64_t)(val & CMBLOC_CDPMLS_MASK) << CMBLOC_CDPMLS_SHIFT)
267 #define NVME_CMBLOC_SET_CDPCILS(cmbloc, val) \
268 (cmbloc |= (uint64_t)(val & CMBLOC_CDPCILS_MASK) << CMBLOC_CDPCILS_SHIFT)
269 #define NVME_CMBLOC_SET_CDMMMS(cmbloc, val) \
270 (cmbloc |= (uint64_t)(val & CMBLOC_CDMMMS_MASK) << CMBLOC_CDMMMS_SHIFT)
271 #define NVME_CMBLOC_SET_CQDA(cmbloc, val) \
272 (cmbloc |= (uint64_t)(val & CMBLOC_CQDA_MASK) << CMBLOC_CQDA_SHIFT)
273 #define NVME_CMBLOC_SET_OFST(cmbloc, val) \
274 (cmbloc |= (uint64_t)(val & CMBLOC_OFST_MASK) << CMBLOC_OFST_SHIFT)
/*
 * Set the CRE (Capabilities Registers Enabled) bit in a CMBMSC register
 * value.
 *
 * Fixes two defects in the original definition:
 *  - a stray space before the parameter list made this an OBJECT-like
 *    macro (named NVME_CMBMSMC_SET_CRE, expanding to "(cmbmsc, val) ..."),
 *    so any function-style use expanded to garbage;
 *  - the value was masked with CMBLOC_OFST_MASK instead of
 *    CMBMSC_CRE_MASK (copy/paste error from the CMBLOC setters).
 *
 * NOTE(review): NVME_CMBMSC_SET_CRE (defined further down) is the
 * correctly-spelled equivalent; this misspelled name is kept so any
 * existing callers continue to compile. Consider removing it once
 * callers are migrated.
 */
#define NVME_CMBMSMC_SET_CRE(cmbmsc, val) \
    (cmbmsc |= (uint64_t)((val) & CMBMSC_CRE_MASK) << CMBMSC_CRE_SHIFT)
279 enum NvmeCmbszShift
{
282 CMBSZ_LISTS_SHIFT
= 2,
290 CMBSZ_SQS_MASK
= 0x1,
291 CMBSZ_CQS_MASK
= 0x1,
292 CMBSZ_LISTS_MASK
= 0x1,
293 CMBSZ_RDS_MASK
= 0x1,
294 CMBSZ_WDS_MASK
= 0x1,
295 CMBSZ_SZU_MASK
= 0xf,
296 CMBSZ_SZ_MASK
= 0xfffff,
299 #define NVME_CMBSZ_SQS(cmbsz) ((cmbsz >> CMBSZ_SQS_SHIFT) & CMBSZ_SQS_MASK)
300 #define NVME_CMBSZ_CQS(cmbsz) ((cmbsz >> CMBSZ_CQS_SHIFT) & CMBSZ_CQS_MASK)
301 #define NVME_CMBSZ_LISTS(cmbsz)((cmbsz >> CMBSZ_LISTS_SHIFT) & CMBSZ_LISTS_MASK)
302 #define NVME_CMBSZ_RDS(cmbsz) ((cmbsz >> CMBSZ_RDS_SHIFT) & CMBSZ_RDS_MASK)
303 #define NVME_CMBSZ_WDS(cmbsz) ((cmbsz >> CMBSZ_WDS_SHIFT) & CMBSZ_WDS_MASK)
304 #define NVME_CMBSZ_SZU(cmbsz) ((cmbsz >> CMBSZ_SZU_SHIFT) & CMBSZ_SZU_MASK)
305 #define NVME_CMBSZ_SZ(cmbsz) ((cmbsz >> CMBSZ_SZ_SHIFT) & CMBSZ_SZ_MASK)
307 #define NVME_CMBSZ_SET_SQS(cmbsz, val) \
308 (cmbsz |= (uint64_t)(val & CMBSZ_SQS_MASK) << CMBSZ_SQS_SHIFT)
309 #define NVME_CMBSZ_SET_CQS(cmbsz, val) \
310 (cmbsz |= (uint64_t)(val & CMBSZ_CQS_MASK) << CMBSZ_CQS_SHIFT)
311 #define NVME_CMBSZ_SET_LISTS(cmbsz, val) \
312 (cmbsz |= (uint64_t)(val & CMBSZ_LISTS_MASK) << CMBSZ_LISTS_SHIFT)
313 #define NVME_CMBSZ_SET_RDS(cmbsz, val) \
314 (cmbsz |= (uint64_t)(val & CMBSZ_RDS_MASK) << CMBSZ_RDS_SHIFT)
315 #define NVME_CMBSZ_SET_WDS(cmbsz, val) \
316 (cmbsz |= (uint64_t)(val & CMBSZ_WDS_MASK) << CMBSZ_WDS_SHIFT)
317 #define NVME_CMBSZ_SET_SZU(cmbsz, val) \
318 (cmbsz |= (uint64_t)(val & CMBSZ_SZU_MASK) << CMBSZ_SZU_SHIFT)
319 #define NVME_CMBSZ_SET_SZ(cmbsz, val) \
320 (cmbsz |= (uint64_t)(val & CMBSZ_SZ_MASK) << CMBSZ_SZ_SHIFT)
322 #define NVME_CMBSZ_GETSIZE(cmbsz) \
323 (NVME_CMBSZ_SZ(cmbsz) * (1 << (12 + 4 * NVME_CMBSZ_SZU(cmbsz))))
325 enum NvmeCmbmscShift
{
326 CMBMSC_CRE_SHIFT
= 0,
327 CMBMSC_CMSE_SHIFT
= 1,
328 CMBMSC_CBA_SHIFT
= 12,
331 enum NvmeCmbmscMask
{
332 CMBMSC_CRE_MASK
= 0x1,
333 CMBMSC_CMSE_MASK
= 0x1,
334 CMBMSC_CBA_MASK
= ((1ULL << 52) - 1),
337 #define NVME_CMBMSC_CRE(cmbmsc) \
338 ((cmbmsc >> CMBMSC_CRE_SHIFT) & CMBMSC_CRE_MASK)
339 #define NVME_CMBMSC_CMSE(cmbmsc) \
340 ((cmbmsc >> CMBMSC_CMSE_SHIFT) & CMBMSC_CMSE_MASK)
341 #define NVME_CMBMSC_CBA(cmbmsc) \
342 ((cmbmsc >> CMBMSC_CBA_SHIFT) & CMBMSC_CBA_MASK)
345 #define NVME_CMBMSC_SET_CRE(cmbmsc, val) \
346 (cmbmsc |= (uint64_t)(val & CMBMSC_CRE_MASK) << CMBMSC_CRE_SHIFT)
347 #define NVME_CMBMSC_SET_CMSE(cmbmsc, val) \
348 (cmbmsc |= (uint64_t)(val & CMBMSC_CMSE_MASK) << CMBMSC_CMSE_SHIFT)
349 #define NVME_CMBMSC_SET_CBA(cmbmsc, val) \
350 (cmbmsc |= (uint64_t)(val & CMBMSC_CBA_MASK) << CMBMSC_CBA_SHIFT)
352 enum NvmeCmbstsShift
{
353 CMBSTS_CBAI_SHIFT
= 0,
355 enum NvmeCmbstsMask
{
356 CMBSTS_CBAI_MASK
= 0x1,
359 #define NVME_CMBSTS_CBAI(cmbsts) \
360 ((cmbsts >> CMBSTS_CBAI_SHIFT) & CMBSTS_CBAI_MASK)
362 #define NVME_CMBSTS_SET_CBAI(cmbsts, val) \
363 (cmbsts |= (uint64_t)(val & CMBSTS_CBAI_MASK) << CMBSTS_CBAI_SHIFT)
365 enum NvmePmrcapShift
{
366 PMRCAP_RDS_SHIFT
= 3,
367 PMRCAP_WDS_SHIFT
= 4,
368 PMRCAP_BIR_SHIFT
= 5,
369 PMRCAP_PMRTU_SHIFT
= 8,
370 PMRCAP_PMRWBM_SHIFT
= 10,
371 PMRCAP_PMRTO_SHIFT
= 16,
372 PMRCAP_CMSS_SHIFT
= 24,
375 enum NvmePmrcapMask
{
376 PMRCAP_RDS_MASK
= 0x1,
377 PMRCAP_WDS_MASK
= 0x1,
378 PMRCAP_BIR_MASK
= 0x7,
379 PMRCAP_PMRTU_MASK
= 0x3,
380 PMRCAP_PMRWBM_MASK
= 0xf,
381 PMRCAP_PMRTO_MASK
= 0xff,
382 PMRCAP_CMSS_MASK
= 0x1,
385 #define NVME_PMRCAP_RDS(pmrcap) \
386 ((pmrcap >> PMRCAP_RDS_SHIFT) & PMRCAP_RDS_MASK)
387 #define NVME_PMRCAP_WDS(pmrcap) \
388 ((pmrcap >> PMRCAP_WDS_SHIFT) & PMRCAP_WDS_MASK)
389 #define NVME_PMRCAP_BIR(pmrcap) \
390 ((pmrcap >> PMRCAP_BIR_SHIFT) & PMRCAP_BIR_MASK)
391 #define NVME_PMRCAP_PMRTU(pmrcap) \
392 ((pmrcap >> PMRCAP_PMRTU_SHIFT) & PMRCAP_PMRTU_MASK)
393 #define NVME_PMRCAP_PMRWBM(pmrcap) \
394 ((pmrcap >> PMRCAP_PMRWBM_SHIFT) & PMRCAP_PMRWBM_MASK)
395 #define NVME_PMRCAP_PMRTO(pmrcap) \
396 ((pmrcap >> PMRCAP_PMRTO_SHIFT) & PMRCAP_PMRTO_MASK)
397 #define NVME_PMRCAP_CMSS(pmrcap) \
398 ((pmrcap >> PMRCAP_CMSS_SHIFT) & PMRCAP_CMSS_MASK)
400 #define NVME_PMRCAP_SET_RDS(pmrcap, val) \
401 (pmrcap |= (uint64_t)(val & PMRCAP_RDS_MASK) << PMRCAP_RDS_SHIFT)
402 #define NVME_PMRCAP_SET_WDS(pmrcap, val) \
403 (pmrcap |= (uint64_t)(val & PMRCAP_WDS_MASK) << PMRCAP_WDS_SHIFT)
404 #define NVME_PMRCAP_SET_BIR(pmrcap, val) \
405 (pmrcap |= (uint64_t)(val & PMRCAP_BIR_MASK) << PMRCAP_BIR_SHIFT)
406 #define NVME_PMRCAP_SET_PMRTU(pmrcap, val) \
407 (pmrcap |= (uint64_t)(val & PMRCAP_PMRTU_MASK) << PMRCAP_PMRTU_SHIFT)
408 #define NVME_PMRCAP_SET_PMRWBM(pmrcap, val) \
409 (pmrcap |= (uint64_t)(val & PMRCAP_PMRWBM_MASK) << PMRCAP_PMRWBM_SHIFT)
410 #define NVME_PMRCAP_SET_PMRTO(pmrcap, val) \
411 (pmrcap |= (uint64_t)(val & PMRCAP_PMRTO_MASK) << PMRCAP_PMRTO_SHIFT)
412 #define NVME_PMRCAP_SET_CMSS(pmrcap, val) \
413 (pmrcap |= (uint64_t)(val & PMRCAP_CMSS_MASK) << PMRCAP_CMSS_SHIFT)
415 enum NvmePmrctlShift
{
419 enum NvmePmrctlMask
{
420 PMRCTL_EN_MASK
= 0x1,
423 #define NVME_PMRCTL_EN(pmrctl) ((pmrctl >> PMRCTL_EN_SHIFT) & PMRCTL_EN_MASK)
425 #define NVME_PMRCTL_SET_EN(pmrctl, val) \
426 (pmrctl |= (uint64_t)(val & PMRCTL_EN_MASK) << PMRCTL_EN_SHIFT)
428 enum NvmePmrstsShift
{
429 PMRSTS_ERR_SHIFT
= 0,
430 PMRSTS_NRDY_SHIFT
= 8,
431 PMRSTS_HSTS_SHIFT
= 9,
432 PMRSTS_CBAI_SHIFT
= 12,
435 enum NvmePmrstsMask
{
436 PMRSTS_ERR_MASK
= 0xff,
437 PMRSTS_NRDY_MASK
= 0x1,
438 PMRSTS_HSTS_MASK
= 0x7,
439 PMRSTS_CBAI_MASK
= 0x1,
442 #define NVME_PMRSTS_ERR(pmrsts) \
443 ((pmrsts >> PMRSTS_ERR_SHIFT) & PMRSTS_ERR_MASK)
444 #define NVME_PMRSTS_NRDY(pmrsts) \
445 ((pmrsts >> PMRSTS_NRDY_SHIFT) & PMRSTS_NRDY_MASK)
446 #define NVME_PMRSTS_HSTS(pmrsts) \
447 ((pmrsts >> PMRSTS_HSTS_SHIFT) & PMRSTS_HSTS_MASK)
448 #define NVME_PMRSTS_CBAI(pmrsts) \
449 ((pmrsts >> PMRSTS_CBAI_SHIFT) & PMRSTS_CBAI_MASK)
451 #define NVME_PMRSTS_SET_ERR(pmrsts, val) \
452 (pmrsts |= (uint64_t)(val & PMRSTS_ERR_MASK) << PMRSTS_ERR_SHIFT)
453 #define NVME_PMRSTS_SET_NRDY(pmrsts, val) \
454 (pmrsts |= (uint64_t)(val & PMRSTS_NRDY_MASK) << PMRSTS_NRDY_SHIFT)
455 #define NVME_PMRSTS_SET_HSTS(pmrsts, val) \
456 (pmrsts |= (uint64_t)(val & PMRSTS_HSTS_MASK) << PMRSTS_HSTS_SHIFT)
457 #define NVME_PMRSTS_SET_CBAI(pmrsts, val) \
458 (pmrsts |= (uint64_t)(val & PMRSTS_CBAI_MASK) << PMRSTS_CBAI_SHIFT)
460 enum NvmePmrebsShift
{
461 PMREBS_PMRSZU_SHIFT
= 0,
462 PMREBS_RBB_SHIFT
= 4,
463 PMREBS_PMRWBZ_SHIFT
= 8,
466 enum NvmePmrebsMask
{
467 PMREBS_PMRSZU_MASK
= 0xf,
468 PMREBS_RBB_MASK
= 0x1,
469 PMREBS_PMRWBZ_MASK
= 0xffffff,
472 #define NVME_PMREBS_PMRSZU(pmrebs) \
473 ((pmrebs >> PMREBS_PMRSZU_SHIFT) & PMREBS_PMRSZU_MASK)
474 #define NVME_PMREBS_RBB(pmrebs) \
475 ((pmrebs >> PMREBS_RBB_SHIFT) & PMREBS_RBB_MASK)
476 #define NVME_PMREBS_PMRWBZ(pmrebs) \
477 ((pmrebs >> PMREBS_PMRWBZ_SHIFT) & PMREBS_PMRWBZ_MASK)
479 #define NVME_PMREBS_SET_PMRSZU(pmrebs, val) \
480 (pmrebs |= (uint64_t)(val & PMREBS_PMRSZU_MASK) << PMREBS_PMRSZU_SHIFT)
481 #define NVME_PMREBS_SET_RBB(pmrebs, val) \
482 (pmrebs |= (uint64_t)(val & PMREBS_RBB_MASK) << PMREBS_RBB_SHIFT)
483 #define NVME_PMREBS_SET_PMRWBZ(pmrebs, val) \
484 (pmrebs |= (uint64_t)(val & PMREBS_PMRWBZ_MASK) << PMREBS_PMRWBZ_SHIFT)
486 enum NvmePmrswtpShift
{
487 PMRSWTP_PMRSWTU_SHIFT
= 0,
488 PMRSWTP_PMRSWTV_SHIFT
= 8,
491 enum NvmePmrswtpMask
{
492 PMRSWTP_PMRSWTU_MASK
= 0xf,
493 PMRSWTP_PMRSWTV_MASK
= 0xffffff,
496 #define NVME_PMRSWTP_PMRSWTU(pmrswtp) \
497 ((pmrswtp >> PMRSWTP_PMRSWTU_SHIFT) & PMRSWTP_PMRSWTU_MASK)
498 #define NVME_PMRSWTP_PMRSWTV(pmrswtp) \
499 ((pmrswtp >> PMRSWTP_PMRSWTV_SHIFT) & PMRSWTP_PMRSWTV_MASK)
501 #define NVME_PMRSWTP_SET_PMRSWTU(pmrswtp, val) \
502 (pmrswtp |= (uint64_t)(val & PMRSWTP_PMRSWTU_MASK) << PMRSWTP_PMRSWTU_SHIFT)
503 #define NVME_PMRSWTP_SET_PMRSWTV(pmrswtp, val) \
504 (pmrswtp |= (uint64_t)(val & PMRSWTP_PMRSWTV_MASK) << PMRSWTP_PMRSWTV_SHIFT)
506 enum NvmePmrmsclShift
{
507 PMRMSCL_CMSE_SHIFT
= 1,
508 PMRMSCL_CBA_SHIFT
= 12,
511 enum NvmePmrmsclMask
{
512 PMRMSCL_CMSE_MASK
= 0x1,
513 PMRMSCL_CBA_MASK
= 0xfffff,
516 #define NVME_PMRMSCL_CMSE(pmrmscl) \
517 ((pmrmscl >> PMRMSCL_CMSE_SHIFT) & PMRMSCL_CMSE_MASK)
518 #define NVME_PMRMSCL_CBA(pmrmscl) \
519 ((pmrmscl >> PMRMSCL_CBA_SHIFT) & PMRMSCL_CBA_MASK)
521 #define NVME_PMRMSCL_SET_CMSE(pmrmscl, val) \
522 (pmrmscl |= (uint32_t)(val & PMRMSCL_CMSE_MASK) << PMRMSCL_CMSE_SHIFT)
523 #define NVME_PMRMSCL_SET_CBA(pmrmscl, val) \
524 (pmrmscl |= (uint32_t)(val & PMRMSCL_CBA_MASK) << PMRMSCL_CBA_SHIFT)
526 enum NvmeSglDescriptorType
{
527 NVME_SGL_DESCR_TYPE_DATA_BLOCK
= 0x0,
528 NVME_SGL_DESCR_TYPE_BIT_BUCKET
= 0x1,
529 NVME_SGL_DESCR_TYPE_SEGMENT
= 0x2,
530 NVME_SGL_DESCR_TYPE_LAST_SEGMENT
= 0x3,
531 NVME_SGL_DESCR_TYPE_KEYED_DATA_BLOCK
= 0x4,
533 NVME_SGL_DESCR_TYPE_VENDOR_SPECIFIC
= 0xf,
536 enum NvmeSglDescriptorSubtype
{
537 NVME_SGL_DESCR_SUBTYPE_ADDRESS
= 0x0,
540 typedef struct QEMU_PACKED NvmeSglDescriptor
{
/*
 * Decode an SGL descriptor's type byte: descriptor type lives in the
 * upper nibble, subtype in the lower nibble.
 * The macro argument is now parenthesized so expression arguments
 * (e.g. "a | b") group correctly (CERT PRE01-C).
 */
#define NVME_SGL_TYPE(type)     (((type) >> 4) & 0xf)
#define NVME_SGL_SUBTYPE(type)  ((type) & 0xf)
550 typedef union NvmeCmdDptr
{
556 NvmeSglDescriptor sgl
;
561 NVME_PSDT_SGL_MPTR_CONTIGUOUS
= 0x1,
562 NVME_PSDT_SGL_MPTR_SGL
= 0x2,
565 typedef struct QEMU_PACKED NvmeCmd
{
/*
 * Decode the command dword 0 flags byte: FUSE in bits 1:0, PSDT
 * (PRP vs SGL data transfer) in bits 7:6.
 * Arguments are now parenthesized so expression arguments group
 * correctly (CERT PRE01-C).
 */
#define NVME_CMD_FLAGS_FUSE(flags) ((flags) & 0x3)
#define NVME_CMD_FLAGS_PSDT(flags) (((flags) >> 6) & 0x3)
584 enum NvmeAdminCommands
{
585 NVME_ADM_CMD_DELETE_SQ
= 0x00,
586 NVME_ADM_CMD_CREATE_SQ
= 0x01,
587 NVME_ADM_CMD_GET_LOG_PAGE
= 0x02,
588 NVME_ADM_CMD_DELETE_CQ
= 0x04,
589 NVME_ADM_CMD_CREATE_CQ
= 0x05,
590 NVME_ADM_CMD_IDENTIFY
= 0x06,
591 NVME_ADM_CMD_ABORT
= 0x08,
592 NVME_ADM_CMD_SET_FEATURES
= 0x09,
593 NVME_ADM_CMD_GET_FEATURES
= 0x0a,
594 NVME_ADM_CMD_ASYNC_EV_REQ
= 0x0c,
595 NVME_ADM_CMD_ACTIVATE_FW
= 0x10,
596 NVME_ADM_CMD_DOWNLOAD_FW
= 0x11,
597 NVME_ADM_CMD_NS_ATTACHMENT
= 0x15,
598 NVME_ADM_CMD_FORMAT_NVM
= 0x80,
599 NVME_ADM_CMD_SECURITY_SEND
= 0x81,
600 NVME_ADM_CMD_SECURITY_RECV
= 0x82,
603 enum NvmeIoCommands
{
604 NVME_CMD_FLUSH
= 0x00,
605 NVME_CMD_WRITE
= 0x01,
606 NVME_CMD_READ
= 0x02,
607 NVME_CMD_WRITE_UNCOR
= 0x04,
608 NVME_CMD_COMPARE
= 0x05,
609 NVME_CMD_WRITE_ZEROES
= 0x08,
611 NVME_CMD_VERIFY
= 0x0c,
612 NVME_CMD_COPY
= 0x19,
613 NVME_CMD_ZONE_MGMT_SEND
= 0x79,
614 NVME_CMD_ZONE_MGMT_RECV
= 0x7a,
615 NVME_CMD_ZONE_APPEND
= 0x7d,
618 typedef struct QEMU_PACKED NvmeDeleteQ
{
628 typedef struct QEMU_PACKED NvmeCreateCq
{
/*
 * Create-CQ flags: PC (physically contiguous) in bit 0, IEN
 * (interrupts enabled) in bit 1.
 * Arguments are now parenthesized so expression arguments group
 * correctly (CERT PRE01-C).
 */
#define NVME_CQ_FLAGS_PC(cq_flags)  ((cq_flags) & 0x1)
#define NVME_CQ_FLAGS_IEN(cq_flags) (((cq_flags) >> 1) & 0x1)
650 typedef struct QEMU_PACKED NvmeCreateSq
{
/*
 * Create-SQ flags: PC (physically contiguous) in bit 0, QPRIO
 * (queue priority, see NVME_SQ_PRIO_*) in bits 2:1.
 * Arguments are now parenthesized so expression arguments group
 * correctly (CERT PRE01-C).
 */
#define NVME_SQ_FLAGS_PC(sq_flags)    ((sq_flags) & 0x1)
#define NVME_SQ_FLAGS_QPRIO(sq_flags) (((sq_flags) >> 1) & 0x3)
670 NVME_SQ_PRIO_URGENT
= 0,
671 NVME_SQ_PRIO_HIGH
= 1,
672 NVME_SQ_PRIO_NORMAL
= 2,
673 NVME_SQ_PRIO_LOW
= 3,
676 typedef struct QEMU_PACKED NvmeIdentify
{
693 typedef struct QEMU_PACKED NvmeRwCmd
{
711 NVME_RW_LR
= 1 << 15,
712 NVME_RW_FUA
= 1 << 14,
713 NVME_RW_DSM_FREQ_UNSPEC
= 0,
714 NVME_RW_DSM_FREQ_TYPICAL
= 1,
715 NVME_RW_DSM_FREQ_RARE
= 2,
716 NVME_RW_DSM_FREQ_READS
= 3,
717 NVME_RW_DSM_FREQ_WRITES
= 4,
718 NVME_RW_DSM_FREQ_RW
= 5,
719 NVME_RW_DSM_FREQ_ONCE
= 6,
720 NVME_RW_DSM_FREQ_PREFETCH
= 7,
721 NVME_RW_DSM_FREQ_TEMP
= 8,
722 NVME_RW_DSM_LATENCY_NONE
= 0 << 4,
723 NVME_RW_DSM_LATENCY_IDLE
= 1 << 4,
724 NVME_RW_DSM_LATENCY_NORM
= 2 << 4,
725 NVME_RW_DSM_LATENCY_LOW
= 3 << 4,
726 NVME_RW_DSM_SEQ_REQ
= 1 << 6,
727 NVME_RW_DSM_COMPRESSED
= 1 << 7,
728 NVME_RW_PIREMAP
= 1 << 9,
729 NVME_RW_PRINFO_PRACT
= 1 << 13,
730 NVME_RW_PRINFO_PRCHK_GUARD
= 1 << 12,
731 NVME_RW_PRINFO_PRCHK_APP
= 1 << 11,
732 NVME_RW_PRINFO_PRCHK_REF
= 1 << 10,
733 NVME_RW_PRINFO_PRCHK_MASK
= 7 << 10,
/*
 * Extract the 4-bit PRINFO (protection information) field from the
 * read/write command control word (bits 13:10, see NVME_RW_PRINFO_*).
 * The argument is now parenthesized so expression arguments group
 * correctly (CERT PRE01-C).
 */
#define NVME_RW_PRINFO(control) (((control) >> 10) & 0xf)
740 NVME_PRINFO_PRACT
= 1 << 3,
741 NVME_PRINFO_PRCHK_GUARD
= 1 << 2,
742 NVME_PRINFO_PRCHK_APP
= 1 << 1,
743 NVME_PRINFO_PRCHK_REF
= 1 << 0,
744 NVME_PRINFO_PRCHK_MASK
= 7 << 0,
747 typedef struct QEMU_PACKED NvmeDsmCmd
{
760 NVME_DSMGMT_IDR
= 1 << 0,
761 NVME_DSMGMT_IDW
= 1 << 1,
762 NVME_DSMGMT_AD
= 1 << 2,
765 typedef struct QEMU_PACKED NvmeDsmRange
{
772 NVME_COPY_FORMAT_0
= 0x0,
775 typedef struct QEMU_PACKED NvmeCopyCmd
{
792 typedef struct QEMU_PACKED NvmeCopySourceRange
{
800 } NvmeCopySourceRange
;
802 enum NvmeAsyncEventRequest
{
803 NVME_AER_TYPE_ERROR
= 0,
804 NVME_AER_TYPE_SMART
= 1,
805 NVME_AER_TYPE_NOTICE
= 2,
806 NVME_AER_TYPE_IO_SPECIFIC
= 6,
807 NVME_AER_TYPE_VENDOR_SPECIFIC
= 7,
808 NVME_AER_INFO_ERR_INVALID_DB_REGISTER
= 0,
809 NVME_AER_INFO_ERR_INVALID_DB_VALUE
= 1,
810 NVME_AER_INFO_ERR_DIAG_FAIL
= 2,
811 NVME_AER_INFO_ERR_PERS_INTERNAL_ERR
= 3,
812 NVME_AER_INFO_ERR_TRANS_INTERNAL_ERR
= 4,
813 NVME_AER_INFO_ERR_FW_IMG_LOAD_ERR
= 5,
814 NVME_AER_INFO_SMART_RELIABILITY
= 0,
815 NVME_AER_INFO_SMART_TEMP_THRESH
= 1,
816 NVME_AER_INFO_SMART_SPARE_THRESH
= 2,
817 NVME_AER_INFO_NOTICE_NS_ATTR_CHANGED
= 0,
820 typedef struct QEMU_PACKED NvmeAerResult
{
827 typedef struct QEMU_PACKED NvmeZonedResult
{
831 typedef struct QEMU_PACKED NvmeCqe
{
840 enum NvmeStatusCodes
{
841 NVME_SUCCESS
= 0x0000,
842 NVME_INVALID_OPCODE
= 0x0001,
843 NVME_INVALID_FIELD
= 0x0002,
844 NVME_CID_CONFLICT
= 0x0003,
845 NVME_DATA_TRAS_ERROR
= 0x0004,
846 NVME_POWER_LOSS_ABORT
= 0x0005,
847 NVME_INTERNAL_DEV_ERROR
= 0x0006,
848 NVME_CMD_ABORT_REQ
= 0x0007,
849 NVME_CMD_ABORT_SQ_DEL
= 0x0008,
850 NVME_CMD_ABORT_FAILED_FUSE
= 0x0009,
851 NVME_CMD_ABORT_MISSING_FUSE
= 0x000a,
852 NVME_INVALID_NSID
= 0x000b,
853 NVME_CMD_SEQ_ERROR
= 0x000c,
854 NVME_INVALID_SGL_SEG_DESCR
= 0x000d,
855 NVME_INVALID_NUM_SGL_DESCRS
= 0x000e,
856 NVME_DATA_SGL_LEN_INVALID
= 0x000f,
857 NVME_MD_SGL_LEN_INVALID
= 0x0010,
858 NVME_SGL_DESCR_TYPE_INVALID
= 0x0011,
859 NVME_INVALID_USE_OF_CMB
= 0x0012,
860 NVME_INVALID_PRP_OFFSET
= 0x0013,
861 NVME_CMD_SET_CMB_REJECTED
= 0x002b,
862 NVME_INVALID_CMD_SET
= 0x002c,
863 NVME_LBA_RANGE
= 0x0080,
864 NVME_CAP_EXCEEDED
= 0x0081,
865 NVME_NS_NOT_READY
= 0x0082,
866 NVME_NS_RESV_CONFLICT
= 0x0083,
867 NVME_FORMAT_IN_PROGRESS
= 0x0084,
868 NVME_INVALID_CQID
= 0x0100,
869 NVME_INVALID_QID
= 0x0101,
870 NVME_MAX_QSIZE_EXCEEDED
= 0x0102,
871 NVME_ACL_EXCEEDED
= 0x0103,
872 NVME_RESERVED
= 0x0104,
873 NVME_AER_LIMIT_EXCEEDED
= 0x0105,
874 NVME_INVALID_FW_SLOT
= 0x0106,
875 NVME_INVALID_FW_IMAGE
= 0x0107,
876 NVME_INVALID_IRQ_VECTOR
= 0x0108,
877 NVME_INVALID_LOG_ID
= 0x0109,
878 NVME_INVALID_FORMAT
= 0x010a,
879 NVME_FW_REQ_RESET
= 0x010b,
880 NVME_INVALID_QUEUE_DEL
= 0x010c,
881 NVME_FID_NOT_SAVEABLE
= 0x010d,
882 NVME_FEAT_NOT_CHANGEABLE
= 0x010e,
883 NVME_FEAT_NOT_NS_SPEC
= 0x010f,
884 NVME_FW_REQ_SUSYSTEM_RESET
= 0x0110,
885 NVME_NS_ALREADY_ATTACHED
= 0x0118,
886 NVME_NS_PRIVATE
= 0x0119,
887 NVME_NS_NOT_ATTACHED
= 0x011a,
888 NVME_NS_CTRL_LIST_INVALID
= 0x011c,
889 NVME_CONFLICTING_ATTRS
= 0x0180,
890 NVME_INVALID_PROT_INFO
= 0x0181,
891 NVME_WRITE_TO_RO
= 0x0182,
892 NVME_CMD_SIZE_LIMIT
= 0x0183,
893 NVME_ZONE_BOUNDARY_ERROR
= 0x01b8,
894 NVME_ZONE_FULL
= 0x01b9,
895 NVME_ZONE_READ_ONLY
= 0x01ba,
896 NVME_ZONE_OFFLINE
= 0x01bb,
897 NVME_ZONE_INVALID_WRITE
= 0x01bc,
898 NVME_ZONE_TOO_MANY_ACTIVE
= 0x01bd,
899 NVME_ZONE_TOO_MANY_OPEN
= 0x01be,
900 NVME_ZONE_INVAL_TRANSITION
= 0x01bf,
901 NVME_WRITE_FAULT
= 0x0280,
902 NVME_UNRECOVERED_READ
= 0x0281,
903 NVME_E2E_GUARD_ERROR
= 0x0282,
904 NVME_E2E_APP_ERROR
= 0x0283,
905 NVME_E2E_REF_ERROR
= 0x0284,
906 NVME_CMP_FAILURE
= 0x0285,
907 NVME_ACCESS_DENIED
= 0x0286,
911 NVME_NO_COMPLETE
= 0xffff,
914 typedef struct QEMU_PACKED NvmeFwSlotInfoLog
{
916 uint8_t reserved1
[7];
924 uint8_t reserved2
[448];
927 typedef struct QEMU_PACKED NvmeErrorLog
{
928 uint64_t error_count
;
931 uint16_t status_field
;
932 uint16_t param_error_location
;
939 typedef struct QEMU_PACKED NvmeSmartLog
{
940 uint8_t critical_warning
;
941 uint16_t temperature
;
942 uint8_t available_spare
;
943 uint8_t available_spare_threshold
;
944 uint8_t percentage_used
;
945 uint8_t reserved1
[26];
946 uint64_t data_units_read
[2];
947 uint64_t data_units_written
[2];
948 uint64_t host_read_commands
[2];
949 uint64_t host_write_commands
[2];
950 uint64_t controller_busy_time
[2];
951 uint64_t power_cycles
[2];
952 uint64_t power_on_hours
[2];
953 uint64_t unsafe_shutdowns
[2];
954 uint64_t media_errors
[2];
955 uint64_t number_of_error_log_entries
[2];
956 uint8_t reserved2
[320];
959 #define NVME_SMART_WARN_MAX 6
961 NVME_SMART_SPARE
= 1 << 0,
962 NVME_SMART_TEMPERATURE
= 1 << 1,
963 NVME_SMART_RELIABILITY
= 1 << 2,
964 NVME_SMART_MEDIA_READ_ONLY
= 1 << 3,
965 NVME_SMART_FAILED_VOLATILE_MEDIA
= 1 << 4,
966 NVME_SMART_PMR_UNRELIABLE
= 1 << 5,
969 typedef struct NvmeEffectsLog
{
976 NVME_CMD_EFF_CSUPP
= 1 << 0,
977 NVME_CMD_EFF_LBCC
= 1 << 1,
978 NVME_CMD_EFF_NCC
= 1 << 2,
979 NVME_CMD_EFF_NIC
= 1 << 3,
980 NVME_CMD_EFF_CCC
= 1 << 4,
981 NVME_CMD_EFF_CSE_MASK
= 3 << 16,
982 NVME_CMD_EFF_UUID_SEL
= 1 << 19,
985 enum NvmeLogIdentifier
{
986 NVME_LOG_ERROR_INFO
= 0x01,
987 NVME_LOG_SMART_INFO
= 0x02,
988 NVME_LOG_FW_SLOT_INFO
= 0x03,
989 NVME_LOG_CHANGED_NSLIST
= 0x04,
990 NVME_LOG_CMD_EFFECTS
= 0x05,
993 typedef struct QEMU_PACKED NvmePSD
{
1005 #define NVME_CONTROLLER_LIST_SIZE 2048
1006 #define NVME_IDENTIFY_DATA_SIZE 4096
1009 NVME_ID_CNS_NS
= 0x00,
1010 NVME_ID_CNS_CTRL
= 0x01,
1011 NVME_ID_CNS_NS_ACTIVE_LIST
= 0x02,
1012 NVME_ID_CNS_NS_DESCR_LIST
= 0x03,
1013 NVME_ID_CNS_CS_NS
= 0x05,
1014 NVME_ID_CNS_CS_CTRL
= 0x06,
1015 NVME_ID_CNS_CS_NS_ACTIVE_LIST
= 0x07,
1016 NVME_ID_CNS_NS_PRESENT_LIST
= 0x10,
1017 NVME_ID_CNS_NS_PRESENT
= 0x11,
1018 NVME_ID_CNS_NS_ATTACHED_CTRL_LIST
= 0x12,
1019 NVME_ID_CNS_CTRL_LIST
= 0x13,
1020 NVME_ID_CNS_CS_NS_PRESENT_LIST
= 0x1a,
1021 NVME_ID_CNS_CS_NS_PRESENT
= 0x1b,
1022 NVME_ID_CNS_IO_COMMAND_SET
= 0x1c,
1025 typedef struct QEMU_PACKED NvmeIdCtrl
{
1041 uint8_t rsvd100
[11];
1044 uint8_t rsvd128
[128];
1059 uint8_t tnvmcap
[16];
1060 uint8_t unvmcap
[16];
1070 uint8_t rsvd332
[180];
1086 uint8_t rsvd540
[228];
1087 uint8_t subnqn
[256];
1088 uint8_t rsvd1024
[1024];
1093 typedef struct NvmeIdCtrlZoned
{
1095 uint8_t rsvd1
[4095];
1098 typedef struct NvmeIdCtrlNvm
{
1105 uint8_t rsvd16
[4080];
1108 enum NvmeIdCtrlOaes
{
1109 NVME_OAES_NS_ATTR
= 1 << 8,
1112 enum NvmeIdCtrlOacs
{
1113 NVME_OACS_SECURITY
= 1 << 0,
1114 NVME_OACS_FORMAT
= 1 << 1,
1115 NVME_OACS_FW
= 1 << 2,
1116 NVME_OACS_NS_MGMT
= 1 << 3,
1119 enum NvmeIdCtrlOncs
{
1120 NVME_ONCS_COMPARE
= 1 << 0,
1121 NVME_ONCS_WRITE_UNCORR
= 1 << 1,
1122 NVME_ONCS_DSM
= 1 << 2,
1123 NVME_ONCS_WRITE_ZEROES
= 1 << 3,
1124 NVME_ONCS_FEATURES
= 1 << 4,
1125 NVME_ONCS_RESRVATIONS
= 1 << 5,
1126 NVME_ONCS_TIMESTAMP
= 1 << 6,
1127 NVME_ONCS_VERIFY
= 1 << 7,
1128 NVME_ONCS_COPY
= 1 << 8,
1131 enum NvmeIdCtrlOcfs
{
1132 NVME_OCFS_COPY_FORMAT_0
= 1 << 0,
1135 enum NvmeIdctrlVwc
{
1136 NVME_VWC_PRESENT
= 1 << 0,
1137 NVME_VWC_NSID_BROADCAST_NO_SUPPORT
= 0 << 1,
1138 NVME_VWC_NSID_BROADCAST_RESERVED
= 1 << 1,
1139 NVME_VWC_NSID_BROADCAST_CTRL_SPEC
= 2 << 1,
1140 NVME_VWC_NSID_BROADCAST_SUPPORT
= 3 << 1,
1143 enum NvmeIdCtrlFrmw
{
1144 NVME_FRMW_SLOT1_RO
= 1 << 0,
1147 enum NvmeIdCtrlLpa
{
1148 NVME_LPA_NS_SMART
= 1 << 0,
1149 NVME_LPA_CSE
= 1 << 1,
1150 NVME_LPA_EXTENDED
= 1 << 2,
1153 enum NvmeIdCtrlCmic
{
1154 NVME_CMIC_MULTI_CTRL
= 1 << 1,
1157 enum NvmeNsAttachmentOperation
{
1158 NVME_NS_ATTACHMENT_ATTACH
= 0x0,
1159 NVME_NS_ATTACHMENT_DETACH
= 0x1,
/*
 * SQES/CQES (submission/completion queue entry size) fields from the
 * Identify Controller data: minimum size in the low nibble, maximum
 * size in the high nibble of each byte.
 */
#define NVME_CTRL_SQES_MIN(sqes) ((sqes) & 0xf)
#define NVME_CTRL_SQES_MAX(sqes) (((sqes) >> 4) & 0xf)
#define NVME_CTRL_CQES_MIN(cqes) ((cqes) & 0xf)
#define NVME_CTRL_CQES_MAX(cqes) (((cqes) >> 4) & 0xf)
1167 #define NVME_CTRL_SGLS_SUPPORT_MASK (0x3 << 0)
1168 #define NVME_CTRL_SGLS_SUPPORT_NO_ALIGN (0x1 << 0)
1169 #define NVME_CTRL_SGLS_SUPPORT_DWORD_ALIGN (0x1 << 1)
1170 #define NVME_CTRL_SGLS_KEYED (0x1 << 2)
1171 #define NVME_CTRL_SGLS_BITBUCKET (0x1 << 16)
1172 #define NVME_CTRL_SGLS_MPTR_CONTIGUOUS (0x1 << 17)
1173 #define NVME_CTRL_SGLS_EXCESS_LENGTH (0x1 << 18)
1174 #define NVME_CTRL_SGLS_MPTR_SGL (0x1 << 19)
1175 #define NVME_CTRL_SGLS_ADDR_OFFSET (0x1 << 20)
/*
 * Arbitration feature (FID 0x1): arbitration burst and per-priority
 * weights. Arguments are now parenthesized so expression arguments
 * group correctly (CERT PRE01-C).
 */
#define NVME_ARB_AB(arb)    ((arb) & 0x7)
#define NVME_ARB_AB_NOLIMIT 0x7
#define NVME_ARB_LPW(arb)   (((arb) >> 8) & 0xff)
#define NVME_ARB_MPW(arb)   (((arb) >> 16) & 0xff)
#define NVME_ARB_HPW(arb)   (((arb) >> 24) & 0xff)

/* Interrupt coalescing feature (FID 0x8): threshold and time fields. */
#define NVME_INTC_THR(intc)  ((intc) & 0xff)
#define NVME_INTC_TIME(intc) (((intc) >> 8) & 0xff)

/* Interrupt vector configuration (FID 0x9): coalescing-disable bit. */
#define NVME_INTVC_NOCOALESCING (0x1 << 16)
/*
 * Temperature threshold feature (FID 0x4) field extractors:
 * THSEL (over/under select) in bits 21:20, TMPSEL (sensor select) in
 * bits 19:16, TMPTH (threshold value) in bits 15:0.
 * Arguments are now parenthesized so expression arguments group
 * correctly (CERT PRE01-C).
 */
#define NVME_TEMP_THSEL(temp)      (((temp) >> 20) & 0x3)
#define NVME_TEMP_THSEL_OVER       0x0
#define NVME_TEMP_THSEL_UNDER      0x1

#define NVME_TEMP_TMPSEL(temp)     (((temp) >> 16) & 0xf)
#define NVME_TEMP_TMPSEL_COMPOSITE 0x0

#define NVME_TEMP_TMPTH(temp)      ((temp) & 0xffff)
/*
 * Asynchronous event configuration feature (FID 0xb): SMART warning
 * bits in byte 0, namespace-attribute and firmware-activation notices
 * in bits 8 and 9. Arguments are now parenthesized so expression
 * arguments group correctly (CERT PRE01-C).
 */
#define NVME_AEC_SMART(aec)         ((aec) & 0xff)
#define NVME_AEC_NS_ATTR(aec)       (((aec) >> 8) & 0x1)
#define NVME_AEC_FW_ACTIVATION(aec) (((aec) >> 9) & 0x1)

/*
 * Error recovery feature (FID 0x5): TLER (time limited error recovery)
 * in bits 15:0; DULBE enable tested in place at bit 16 (non-zero when
 * set, not normalized to 0/1).
 */
#define NVME_ERR_REC_TLER(err_rec)  ((err_rec) & 0xffff)
#define NVME_ERR_REC_DULBE(err_rec) ((err_rec) & 0x10000)
1204 enum NvmeFeatureIds
{
1205 NVME_ARBITRATION
= 0x1,
1206 NVME_POWER_MANAGEMENT
= 0x2,
1207 NVME_LBA_RANGE_TYPE
= 0x3,
1208 NVME_TEMPERATURE_THRESHOLD
= 0x4,
1209 NVME_ERROR_RECOVERY
= 0x5,
1210 NVME_VOLATILE_WRITE_CACHE
= 0x6,
1211 NVME_NUMBER_OF_QUEUES
= 0x7,
1212 NVME_INTERRUPT_COALESCING
= 0x8,
1213 NVME_INTERRUPT_VECTOR_CONF
= 0x9,
1214 NVME_WRITE_ATOMICITY
= 0xa,
1215 NVME_ASYNCHRONOUS_EVENT_CONF
= 0xb,
1216 NVME_TIMESTAMP
= 0xe,
1217 NVME_COMMAND_SET_PROFILE
= 0x19,
1218 NVME_SOFTWARE_PROGRESS_MARKER
= 0x80,
1219 NVME_FID_MAX
= 0x100,
1222 typedef enum NvmeFeatureCap
{
1223 NVME_FEAT_CAP_SAVE
= 1 << 0,
1224 NVME_FEAT_CAP_NS
= 1 << 1,
1225 NVME_FEAT_CAP_CHANGE
= 1 << 2,
1228 typedef enum NvmeGetFeatureSelect
{
1229 NVME_GETFEAT_SELECT_CURRENT
= 0x0,
1230 NVME_GETFEAT_SELECT_DEFAULT
= 0x1,
1231 NVME_GETFEAT_SELECT_SAVED
= 0x2,
1232 NVME_GETFEAT_SELECT_CAP
= 0x3,
1233 } NvmeGetFeatureSelect
;
/*
 * Get/Set Features command dword 10 field extractors:
 * FID (feature identifier) in bits 7:0, SEL (select: current /
 * default / saved / capabilities) in bits 10:8 for Get Features,
 * SV (save) in bit 31 for Set Features.
 * Arguments are now parenthesized so expression arguments group
 * correctly (CERT PRE01-C).
 */
#define NVME_GETSETFEAT_FID_MASK 0xff
#define NVME_GETSETFEAT_FID(dw10) ((dw10) & NVME_GETSETFEAT_FID_MASK)

#define NVME_GETFEAT_SELECT_SHIFT 8
#define NVME_GETFEAT_SELECT_MASK  0x7
#define NVME_GETFEAT_SELECT(dw10) \
    (((dw10) >> NVME_GETFEAT_SELECT_SHIFT) & NVME_GETFEAT_SELECT_MASK)

#define NVME_SETFEAT_SAVE_SHIFT 31
#define NVME_SETFEAT_SAVE_MASK  0x1
#define NVME_SETFEAT_SAVE(dw10) \
    (((dw10) >> NVME_SETFEAT_SAVE_SHIFT) & NVME_SETFEAT_SAVE_MASK)
1248 typedef struct QEMU_PACKED NvmeRangeType
{
1258 typedef struct QEMU_PACKED NvmeLBAF
{
1264 typedef struct QEMU_PACKED NvmeLBAFE
{
1270 #define NVME_NSID_BROADCAST 0xffffffff
1272 typedef struct QEMU_PACKED NvmeIdNs
{
1306 uint8_t rsvd192
[192];
1310 typedef struct QEMU_PACKED NvmeIdNsDescr
{
1316 enum NvmeNsIdentifierLength
{
1317 NVME_NIDL_EUI64
= 8,
1318 NVME_NIDL_NGUID
= 16,
1319 NVME_NIDL_UUID
= 16,
1323 enum NvmeNsIdentifierType
{
1324 NVME_NIDT_EUI64
= 0x01,
1325 NVME_NIDT_NGUID
= 0x02,
1326 NVME_NIDT_UUID
= 0x03,
1327 NVME_NIDT_CSI
= 0x04,
1331 NVME_NMIC_NS_SHARED
= 1 << 0,
1335 NVME_CSI_NVM
= 0x00,
1336 NVME_CSI_ZONED
= 0x02,
1339 #define NVME_SET_CSI(vec, csi) (vec |= (uint8_t)(1 << (csi)))
1341 typedef struct QEMU_PACKED NvmeIdNsZoned
{
1348 uint8_t rsvd20
[2796];
1349 NvmeLBAFE lbafe
[16];
1350 uint8_t rsvd3072
[768];
1354 /*Deallocate Logical Block Features*/
1355 #define NVME_ID_NS_DLFEAT_GUARD_CRC(dlfeat) ((dlfeat) & 0x10)
1356 #define NVME_ID_NS_DLFEAT_WRITE_ZEROES(dlfeat) ((dlfeat) & 0x08)
1358 #define NVME_ID_NS_DLFEAT_READ_BEHAVIOR(dlfeat) ((dlfeat) & 0x7)
1359 #define NVME_ID_NS_DLFEAT_READ_BEHAVIOR_UNDEFINED 0
1360 #define NVME_ID_NS_DLFEAT_READ_BEHAVIOR_ZEROES 1
1361 #define NVME_ID_NS_DLFEAT_READ_BEHAVIOR_ONES 2
/*
 * Identify Namespace field extractors: NSFEAT (namespace features),
 * FLBAS (formatted LBA size), MC (metadata capabilities) and DPC
 * (end-to-end data protection capabilities) bit fields.
 * Arguments are now parenthesized so expression arguments group
 * correctly (CERT PRE01-C).
 */
#define NVME_ID_NS_NSFEAT_THIN(nsfeat)   ((nsfeat) & 0x1)
#define NVME_ID_NS_NSFEAT_DULBE(nsfeat)  (((nsfeat) >> 2) & 0x1)
#define NVME_ID_NS_FLBAS_EXTENDED(flbas) (((flbas) >> 4) & 0x1)
#define NVME_ID_NS_FLBAS_INDEX(flbas)    ((flbas) & 0xf)
#define NVME_ID_NS_MC_SEPARATE(mc)       (((mc) >> 1) & 0x1)
#define NVME_ID_NS_MC_EXTENDED(mc)       ((mc) & 0x1)
#define NVME_ID_NS_DPC_LAST_EIGHT(dpc)   (((dpc) >> 4) & 0x1)
#define NVME_ID_NS_DPC_FIRST_EIGHT(dpc)  (((dpc) >> 3) & 0x1)
#define NVME_ID_NS_DPC_TYPE_3(dpc)       (((dpc) >> 2) & 0x1)
#define NVME_ID_NS_DPC_TYPE_2(dpc)       (((dpc) >> 1) & 0x1)
#define NVME_ID_NS_DPC_TYPE_1(dpc)       ((dpc) & 0x1)
#define NVME_ID_NS_DPC_TYPE_MASK         0x7
1378 NVME_ID_NS_DPS_TYPE_NONE
= 0,
1379 NVME_ID_NS_DPS_TYPE_1
= 1,
1380 NVME_ID_NS_DPS_TYPE_2
= 2,
1381 NVME_ID_NS_DPS_TYPE_3
= 3,
1382 NVME_ID_NS_DPS_TYPE_MASK
= 0x7,
1383 NVME_ID_NS_DPS_FIRST_EIGHT
= 8,
1386 enum NvmeIdNsFlbas
{
1387 NVME_ID_NS_FLBAS_EXTENDED
= 1 << 4,
1391 NVME_ID_NS_MC_EXTENDED
= 1 << 0,
1392 NVME_ID_NS_MC_SEPARATE
= 1 << 1,
1395 #define NVME_ID_NS_DPS_TYPE(dps) (dps & NVME_ID_NS_DPS_TYPE_MASK)
1397 typedef struct NvmeDifTuple
{
1404 NVME_ZA_FINISHED_BY_CTLR
= 1 << 0,
1405 NVME_ZA_FINISH_RECOMMENDED
= 1 << 1,
1406 NVME_ZA_RESET_RECOMMENDED
= 1 << 2,
1407 NVME_ZA_ZD_EXT_VALID
= 1 << 7,
1410 typedef struct QEMU_PACKED NvmeZoneReportHeader
{
1413 } NvmeZoneReportHeader
;
1415 enum NvmeZoneReceiveAction
{
1416 NVME_ZONE_REPORT
= 0,
1417 NVME_ZONE_REPORT_EXTENDED
= 1,
1420 enum NvmeZoneReportType
{
1421 NVME_ZONE_REPORT_ALL
= 0,
1422 NVME_ZONE_REPORT_EMPTY
= 1,
1423 NVME_ZONE_REPORT_IMPLICITLY_OPEN
= 2,
1424 NVME_ZONE_REPORT_EXPLICITLY_OPEN
= 3,
1425 NVME_ZONE_REPORT_CLOSED
= 4,
1426 NVME_ZONE_REPORT_FULL
= 5,
1427 NVME_ZONE_REPORT_READ_ONLY
= 6,
1428 NVME_ZONE_REPORT_OFFLINE
= 7,
1432 NVME_ZONE_TYPE_RESERVED
= 0x00,
1433 NVME_ZONE_TYPE_SEQ_WRITE
= 0x02,
1436 enum NvmeZoneSendAction
{
1437 NVME_ZONE_ACTION_RSD
= 0x00,
1438 NVME_ZONE_ACTION_CLOSE
= 0x01,
1439 NVME_ZONE_ACTION_FINISH
= 0x02,
1440 NVME_ZONE_ACTION_OPEN
= 0x03,
1441 NVME_ZONE_ACTION_RESET
= 0x04,
1442 NVME_ZONE_ACTION_OFFLINE
= 0x05,
1443 NVME_ZONE_ACTION_SET_ZD_EXT
= 0x10,
1446 typedef struct QEMU_PACKED NvmeZoneDescr
{
1457 typedef enum NvmeZoneState
{
1458 NVME_ZONE_STATE_RESERVED
= 0x00,
1459 NVME_ZONE_STATE_EMPTY
= 0x01,
1460 NVME_ZONE_STATE_IMPLICITLY_OPEN
= 0x02,
1461 NVME_ZONE_STATE_EXPLICITLY_OPEN
= 0x03,
1462 NVME_ZONE_STATE_CLOSED
= 0x04,
1463 NVME_ZONE_STATE_READ_ONLY
= 0x0d,
1464 NVME_ZONE_STATE_FULL
= 0x0e,
1465 NVME_ZONE_STATE_OFFLINE
= 0x0f,
1468 static inline void _nvme_check_size(void)
1470 QEMU_BUILD_BUG_ON(sizeof(NvmeBar
) != 4096);
1471 QEMU_BUILD_BUG_ON(sizeof(NvmeAerResult
) != 4);
1472 QEMU_BUILD_BUG_ON(sizeof(NvmeZonedResult
) != 8);
1473 QEMU_BUILD_BUG_ON(sizeof(NvmeCqe
) != 16);
1474 QEMU_BUILD_BUG_ON(sizeof(NvmeDsmRange
) != 16);
1475 QEMU_BUILD_BUG_ON(sizeof(NvmeCopySourceRange
) != 32);
1476 QEMU_BUILD_BUG_ON(sizeof(NvmeCmd
) != 64);
1477 QEMU_BUILD_BUG_ON(sizeof(NvmeDeleteQ
) != 64);
1478 QEMU_BUILD_BUG_ON(sizeof(NvmeCreateCq
) != 64);
1479 QEMU_BUILD_BUG_ON(sizeof(NvmeCreateSq
) != 64);
1480 QEMU_BUILD_BUG_ON(sizeof(NvmeIdentify
) != 64);
1481 QEMU_BUILD_BUG_ON(sizeof(NvmeRwCmd
) != 64);
1482 QEMU_BUILD_BUG_ON(sizeof(NvmeDsmCmd
) != 64);
1483 QEMU_BUILD_BUG_ON(sizeof(NvmeCopyCmd
) != 64);
1484 QEMU_BUILD_BUG_ON(sizeof(NvmeRangeType
) != 64);
1485 QEMU_BUILD_BUG_ON(sizeof(NvmeErrorLog
) != 64);
1486 QEMU_BUILD_BUG_ON(sizeof(NvmeFwSlotInfoLog
) != 512);
1487 QEMU_BUILD_BUG_ON(sizeof(NvmeSmartLog
) != 512);
1488 QEMU_BUILD_BUG_ON(sizeof(NvmeEffectsLog
) != 4096);
1489 QEMU_BUILD_BUG_ON(sizeof(NvmeIdCtrl
) != 4096);
1490 QEMU_BUILD_BUG_ON(sizeof(NvmeIdCtrlZoned
) != 4096);
1491 QEMU_BUILD_BUG_ON(sizeof(NvmeIdCtrlNvm
) != 4096);
1492 QEMU_BUILD_BUG_ON(sizeof(NvmeLBAF
) != 4);
1493 QEMU_BUILD_BUG_ON(sizeof(NvmeLBAFE
) != 16);
1494 QEMU_BUILD_BUG_ON(sizeof(NvmeIdNs
) != 4096);
1495 QEMU_BUILD_BUG_ON(sizeof(NvmeIdNsZoned
) != 4096);
1496 QEMU_BUILD_BUG_ON(sizeof(NvmeSglDescriptor
) != 16);
1497 QEMU_BUILD_BUG_ON(sizeof(NvmeIdNsDescr
) != 4);
1498 QEMU_BUILD_BUG_ON(sizeof(NvmeZoneDescr
) != 64);
1499 QEMU_BUILD_BUG_ON(sizeof(NvmeDifTuple
) != 8);