Wrong capacity for drive under usage with nvme-cli 2.8.
sarabjotsoit opened this issue · comments
Steps to recreate:
- Power ON the enclosure and connect to the Ubuntu 24.04 host in non-DS mode.
- Connect the drives and check nvme list
Observation:
Ubuntu24.04-nvme-cli-2.8: Wrong capacity for drive under usage. Drive is coming as 30TB instead of 3.84TB.
# nvme list
Node Generic SN Model Namespace Usage Format FW Rev
--------------------- --------------------- -------------------- ---------------------------------------- ---------- -------------------------- ---------------- --------
/dev/nvme1n1 /dev/ng1n1 23181L900115 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme2n1 /dev/ng2n1 23181L900096 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme3n1 /dev/ng3n1 23181L900065 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme4n1 /dev/ng4n1 23181L900017 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme5n1 /dev/ng5n1 23181L900044 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme6n1 /dev/ng6n1 23181L900126 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme7n1 /dev/ng7n1 23181L900175 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme8n1 /dev/ng8n1 23181L900163 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
Which version of libnvme are you using? It sounds like this one here is missing:
Hi @igaw
Please find the libnvme version:
nvme version
nvme version 2.8 (git 2.8)
libnvme version 1.8 (git 1.8)
Can you post the output from nvme id-ns /dev/nvme1n1? Thanks
Please find the data:
# nvme list
Node Generic SN Model Namespace Usage Format FW Rev
--------------------- --------------------- -------------------- ---------------------------------------- ---------- -------------------------- ---------------- --------
/dev/nvme10n1 /dev/ng10n1 23181L900125 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme11n1 /dev/ng11n1 23181L900181 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme12n1 /dev/ng12n1 23181L900052 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme13n1 /dev/ng13n1 23181L900064 WUS5EA138ESP7E3 0x1 3.83 TB / 3.83 TB 512 B + 0 B RC610004
/dev/nvme14n1 /dev/ng14n1 23181L900184 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme15n1 /dev/ng15n1 23181L900056 WUS5EA138ESP7E3 0x1 3.83 TB / 3.83 TB 512 B + 0 B RC610004
/dev/nvme16n1 /dev/ng16n1 23181L900020 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme17n1 /dev/ng17n1 23181L900115 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme19n1 /dev/ng19n1 23181L900096 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme1n1 /dev/ng1n1 23181L900089 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme20n1 /dev/ng20n1 23181L900065 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme21n1 /dev/ng21n1 23181L900017 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme22n1 /dev/ng22n1 23181L900044 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme23n1 /dev/ng23n1 23181L900126 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme24n1 /dev/ng24n1 23181L900175 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme25n1 /dev/ng25n1 23181L900163 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme2n1 /dev/ng2n1 23181L900081 WUS5EA138ESP7E3 0x1 3.83 TB / 3.83 TB 512 B + 0 B RC610004
/dev/nvme3n1 /dev/ng3n1 23181L900033 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme4n1 /dev/ng4n1 23181L900165 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme5n1 /dev/ng5n1 23181L900173 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme6n1 /dev/ng6n1 23181L900048 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme7n1 /dev/ng7n1 23181L900167 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme8n1 /dev/ng8n1 23181L900127 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme9n1 /dev/ng9n1 23181L900090 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
# nvme id-ns /dev/nvme1n1
NVME Identify Namespace 1:
nsze : 0x37bb0a04
ncap : 0x37bb0a04
nuse : 0x37bb0a04
nsfeat : 0x10
nlbaf : 3
flbas : 0x2
mc : 0x1
dpc : 0x17
dps : 0
nmic : 0x1
rescap : 0
fpi : 0x80
dlfeat : 9
nawun : 7
nawupf : 7
nacwu : 7
nabsn : 0
nabo : 0
nabspf : 0
noiob : 0
nvmcap : 3830037086208
npwg : 7
npwa : 7
npdg : 32767
npda : 0
nows : 0
mssrl : 0
mcl : 0
msrc : 0
nulbaf : 0
anagrpid: 0
nsattr : 0
nvmsetid: 0
endgid : 0
nguid : 45110000000000000014ee84015e1000
eui64 : 0014ee84015e1000
lbaf 0 : ms:0 lbads:9 rp:0
lbaf 1 : ms:8 lbads:9 rp:0
lbaf 2 : ms:0 lbads:12 rp:0 (in use)
lbaf 3 : ms:64 lbads:12 rp:0
As expected the driver reports the correct settings (nsze, lbaf): (0x37bb0a04 << 12)/1024/1024/1024 ~= 3566 GiB
Could you also post the contents of sysfs. On this information libnvme reports the size etc.
# cat /sys/class/nvme/nvme1/nvme1n1/queue/{logical_block_size,physical_block_size}
# cat /sys/class/nvme/nvme1/nvme1n1/size
e.g. for my system I get:
# cat /sys/class/nvme/nvme0/nvme0n1/queue/{logical_block_size,physical_block_size}
512
512
# cat /sys/class/nvme/nvme0/nvme0n1/size
1000215216
Hi, @igaw
Please find the requested data:
# nvme list
Node Generic SN Model Namespace Usage Format FW Rev
--------------------- --------------------- -------------------- ---------------------------------------- ---------- -------------------------- ---------------- --------
/dev/nvme10n1 /dev/ng10n1 23181L900125 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme11n1 /dev/ng11n1 23181L900181 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme12n1 /dev/ng12n1 23181L900052 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme13n1 /dev/ng13n1 23181L900064 WUS5EA138ESP7E3 0x1 3.83 TB / 3.83 TB 512 B + 0 B RC610004
/dev/nvme14n1 /dev/ng14n1 23181L900184 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme15n1 /dev/ng15n1 23181L900056 WUS5EA138ESP7E3 0x1 3.83 TB / 3.83 TB 512 B + 0 B RC610004
/dev/nvme16n1 /dev/ng16n1 23181L900020 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme17n1 /dev/ng17n1 23181L900115 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme19n1 /dev/ng19n1 23181L900096 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme1n1 /dev/ng1n1 23181L900089 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme20n1 /dev/ng20n1 23181L900065 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme21n1 /dev/ng21n1 23181L900017 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme22n1 /dev/ng22n1 23181L900044 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme23n1 /dev/ng23n1 23181L900126 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme24n1 /dev/ng24n1 23181L900175 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme25n1 /dev/ng25n1 23181L900163 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme28n1 /dev/ng28n1 USCOS02823SB0008BC OpenFlex Data24 4213 0x1 68.72 GB / 549.76 GB 4 KiB + 0 B 0.9.224
/dev/nvme2n1 /dev/ng2n1 23181L900081 WUS5EA138ESP7E3 0x1 3.83 TB / 3.83 TB 512 B + 0 B RC610004
/dev/nvme3n1 /dev/ng3n1 23181L900033 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme4n1 /dev/ng4n1 23181L900165 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme54n1 /dev/ng54n1 USCOS02823SB0008AC OpenFlex Data24 4213 0x1 68.72 GB / 549.76 GB 4 KiB + 0 B 0.9.224
/dev/nvme5n1 /dev/ng5n1 23181L900173 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme6n1 /dev/ng6n1 23181L900048 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme7n1 /dev/ng7n1 23181L900167 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme8n1 /dev/ng8n1 23181L900127 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
/dev/nvme9n1 /dev/ng9n1 23181L900090 WUS5EA138ESP7E3 0x1 3.83 TB / 30.64 TB 4 KiB + 0 B RC610004
# nvme id-ns /dev/nvme1n1
NVME Identify Namespace 1:
nsze : 0x37bb0a04
ncap : 0x37bb0a04
nuse : 0x37bb0a04
nsfeat : 0x10
nlbaf : 3
flbas : 0x2
mc : 0x1
dpc : 0x17
dps : 0
nmic : 0x1
rescap : 0
fpi : 0x80
dlfeat : 9
nawun : 7
nawupf : 7
nacwu : 7
nabsn : 0
nabo : 0
nabspf : 0
noiob : 0
nvmcap : 3830037086208
npwg : 7
npwa : 7
npdg : 32767
npda : 0
nows : 0
mssrl : 0
mcl : 0
msrc : 0
nulbaf : 0
anagrpid: 0
nsattr : 0
nvmsetid: 0
endgid : 0
nguid : 45110000000000000014ee84015e1000
eui64 : 0014ee84015e1000
lbaf 0 : ms:0 lbads:9 rp:0
lbaf 1 : ms:8 lbads:9 rp:0
lbaf 2 : ms:0 lbads:12 rp:0 (in use)
lbaf 3 : ms:64 lbads:12 rp:0
# cat /sys/class/nvme/nvme1/nvme1c1n1/queue/{logical_block_size,physical_block_size}
4096
32768
# cat /sys/class/nvme/nvme1/nvme1c1n1/size
7480037408
I think I see the problem. The size
file is in 512-byte units, but libnvme assumes it is in logical_block_size
units.
So we should do something like:
lba_count = size / (logical_block_size/512)
@keithbusch do you agree with this?
The size is converted by the function nvme_lba_to_sect().
https://github.com/torvalds/linux/blob/master/drivers/nvme/host/nvme.h#L673
(Added)
Seems same from the initial NVMe driver.
https://github.com/torvalds/linux/blame/7be50e9/drivers/block/nvme-core.c#L2027