Hi, I have an OMV system with 4 RAID 1 arrays.
md0pX is the system array.
md126 and md127 are used to store data.
Additionally, there is an md1, but it is not available. I think there is a mismatch in the naming: I have two arrays labeled openmediavault:1.
Code
root@openmediavault:~# lsblk
NAME MAJ:MIN RM SIZE RO TYPE MOUNTPOINT
sda 8:0 0 3.6T 0 disk
└─md1 9:1 0 3.6T 0 raid1
sdb 8:16 0 931.5G 0 disk
└─md126 9:126 0 931.4G 0 raid1 /srv/dev-disk-by-uuid-1a443dd8-7827-4043-9bdd-ff4053bdb16a
sdc 8:32 0 3.6T 0 disk
└─md1 9:1 0 3.6T 0 raid1
sdd 8:48 0 931.5G 0 disk
└─md126 9:126 0 931.4G 0 raid1 /srv/dev-disk-by-uuid-1a443dd8-7827-4043-9bdd-ff4053bdb16a
sde 8:64 0 931.5G 0 disk
└─md127 9:127 0 931.4G 0 raid1 /srv/dev-disk-by-uuid-ffe7557f-f3bc-41be-b8ab-c5b71838e53d
sdf 8:80 0 931.5G 0 disk
└─md127 9:127 0 931.4G 0 raid1 /srv/dev-disk-by-uuid-ffe7557f-f3bc-41be-b8ab-c5b71838e53d
sdg 8:96 0 465.8G 0 disk
└─md0 9:0 0 465.8G 0 raid1
├─md0p1 259:0 0 463.8G 0 part /
├─md0p2 259:1 0 1K 0 part
├─md0p3 259:2 0 999M 0 part /boot
└─md0p5 259:3 0 975M 0 part [SWAP]
sdh 8:112 0 465.8G 0 disk
└─md0 9:0 0 465.8G 0 raid1
├─md0p1 259:0 0 463.8G 0 part /
├─md0p2 259:1 0 1K 0 part
├─md0p3 259:2 0 999M 0 part /boot
└─md0p5 259:3 0 975M 0 part [SWAP]
root@openmediavault:~#
Alles anzeigen
Code
root@openmediavault:~# blkid
/dev/sdd: UUID="e8813293-c748-ef06-59ba-08ababec093c" UUID_SUB="36399272-3004-00b8-b74b-ed173af2494d" LABEL="openmediavault:1" TYPE="linux_raid_member"
/dev/sdb: UUID="e8813293-c748-ef06-59ba-08ababec093c" UUID_SUB="a083e658-407e-64b2-7b1a-a45a0f50d763" LABEL="openmediavault:1" TYPE="linux_raid_member"
/dev/sdc: UUID="42742a45-5aa6-b35c-01ad-5d5d84af9dd1" UUID_SUB="f4c85efc-0313-6318-ddae-22ca70d58759" LABEL="openmediavault:1" TYPE="linux_raid_member"
/dev/sda: UUID="42742a45-5aa6-b35c-01ad-5d5d84af9dd1" UUID_SUB="e6600289-e9a3-a339-bca8-1ab38f0c240e" LABEL="openmediavault:1" TYPE="linux_raid_member"
/dev/sdf: UUID="89fa9f64-c5ed-ca5b-f119-26b9b826a1ae" UUID_SUB="b17c3c7d-e2f6-d89a-9870-909bc77ea5fb" LABEL="openmediavault:0" TYPE="linux_raid_member"
/dev/sdh: UUID="e4729b4d-e3a6-82a4-edd8-d016611227ea" TYPE="linux_raid_member"
/dev/sdg: UUID="e4729b4d-e3a6-82a4-edd8-d016611227ea" TYPE="linux_raid_member"
/dev/sde: UUID="89fa9f64-c5ed-ca5b-f119-26b9b826a1ae" UUID_SUB="168812a8-13a9-ae76-e3be-19abd41a97a1" LABEL="openmediavault:0" TYPE="linux_raid_member"
/dev/md0p1: UUID="60bd3f63-67c0-4ed1-86bf-4a3e0608ea6a" BLOCK_SIZE="4096" TYPE="ext4" PARTUUID="bada8aab-01"
/dev/md0p3: UUID="9fe9e1e3-b5c1-41eb-a384-f395b01446bf" BLOCK_SIZE="1024" TYPE="ext2" PARTUUID="bada8aab-03"
/dev/md0p5: UUID="9901d57c-84de-415c-8be9-60d2b1630a36" TYPE="swap" PARTUUID="bada8aab-05"
/dev/md127: UUID="ffe7557f-f3bc-41be-b8ab-c5b71838e53d" BLOCK_SIZE="4096" TYPE="ext4"
/dev/md126: UUID="1a443dd8-7827-4043-9bdd-ff4053bdb16a" BLOCK_SIZE="4096" TYPE="ext4"
root@openmediavault:~#
Alles anzeigen
Code
root@openmediavault:~# cat /etc/fstab
# /etc/fstab: static file system information.
#
# Use 'blkid' to print the universally unique identifier for a
# device; this may be used with UUID= as a more robust way to name devices
# that works even if disks are added and removed. See fstab(5).
#
# systemd generates mount units based on this file, see systemd.mount(5).
# Please run 'systemctl daemon-reload' after making changes here.
#
# <file system> <mount point> <type> <options> <dump> <pass>
# / was on /dev/md0p1 during installation
UUID=60bd3f63-67c0-4ed1-86bf-4a3e0608ea6a / ext4 errors=remount-ro 0 1
# /boot was on /dev/md0p3 during installation
UUID=9fe9e1e3-b5c1-41eb-a384-f395b01446bf /boot ext2 defaults 0 2
# swap was on /dev/md0p5 during installation
UUID=9901d57c-84de-415c-8be9-60d2b1630a36 none swap sw 0 0
# >>> [openmediavault]
/dev/disk/by-uuid/ffe7557f-f3bc-41be-b8ab-c5b71838e53d /srv/dev-disk-by-uuid-ffe7557f-f3bc-41be-b8ab-c5b71838e53d ext4 defaults,nofail,user_xattr,usrjquota=aquota.user,grpjquota=aquota.group,jqfmt=vfsv0,acl 0 2
/dev/disk/by-uuid/1a443dd8-7827-4043-9bdd-ff4053bdb16a /srv/dev-disk-by-uuid-1a443dd8-7827-4043-9bdd-ff4053bdb16a ext4 defaults,nofail,user_xattr,usrjquota=aquota.user,grpjquota=aquota.group,jqfmt=vfsv0,acl 0 2
# <<< [openmediavault]
root@openmediavault:~#
Alles anzeigen
Code
root@openmediavault:~# mdadm --detail /dev/md0
/dev/md0:
Version : 0.90
Creation Time : Mon Nov 6 00:28:19 2023
Raid Level : raid1
Array Size : 488386496 (465.76 GiB 500.11 GB)
Used Dev Size : 488386496 (465.76 GiB 500.11 GB)
Raid Devices : 2
Total Devices : 2
Preferred Minor : 0
Persistence : Superblock is persistent
Intent Bitmap : Internal
Update Time : Fri Nov 24 15:04:50 2023
State : clean
Active Devices : 2
Working Devices : 2
Failed Devices : 0
Spare Devices : 0
Consistency Policy : bitmap
UUID : e4729b4d:e3a682a4:edd8d016:611227ea
Events : 0.1351
Number Major Minor RaidDevice State
0 8 96 0 active sync /dev/sdg
1 8 112 1 active sync /dev/sdh
root@openmediavault:~# mdadm --detail /dev/md1
/dev/md1:
Version : 1.2
Creation Time : Sat Nov 11 21:40:38 2023
Raid Level : raid1
Array Size : 3906886464 (3725.90 GiB 4000.65 GB)
Used Dev Size : 3906886464 (3725.90 GiB 4000.65 GB)
Raid Devices : 2
Total Devices : 2
Persistence : Superblock is persistent
Intent Bitmap : Internal
Update Time : Thu Nov 23 22:59:50 2023
State : clean
Active Devices : 2
Working Devices : 2
Failed Devices : 0
Spare Devices : 0
Consistency Policy : bitmap
Name : openmediavault:1 (local to host openmediavault)
UUID : 42742a45:5aa6b35c:01ad5d5d:84af9dd1
Events : 6212
Number Major Minor RaidDevice State
0 8 0 0 active sync /dev/sda
1 8 32 1 active sync /dev/sdc
root@openmediavault:~# mdadm --detail /dev/md126
/dev/md126:
Version : 1.2
Creation Time : Sun Jun 11 03:27:00 2023
Raid Level : raid1
Array Size : 976630464 (931.39 GiB 1000.07 GB)
Used Dev Size : 976630464 (931.39 GiB 1000.07 GB)
Raid Devices : 2
Total Devices : 2
Persistence : Superblock is persistent
Intent Bitmap : Internal
Update Time : Fri Nov 24 00:17:33 2023
State : clean
Active Devices : 2
Working Devices : 2
Failed Devices : 0
Spare Devices : 0
Consistency Policy : bitmap
Name : openmediavault:1 (local to host openmediavault)
UUID : e8813293:c748ef06:59ba08ab:abec093c
Events : 6340
Number Major Minor RaidDevice State
2 8 16 0 active sync /dev/sdb
1 8 48 1 active sync /dev/sdd
root@openmediavault:~# mdadm --detail /dev/md127
/dev/md127:
Version : 1.2
Creation Time : Sun Jun 11 03:25:36 2023
Raid Level : raid1
Array Size : 976630464 (931.39 GiB 1000.07 GB)
Used Dev Size : 976630464 (931.39 GiB 1000.07 GB)
Raid Devices : 2
Total Devices : 2
Persistence : Superblock is persistent
Intent Bitmap : Internal
Update Time : Fri Nov 24 00:17:33 2023
State : clean
Active Devices : 2
Working Devices : 2
Failed Devices : 0
Spare Devices : 0
Consistency Policy : bitmap
Name : openmediavault:0 (local to host openmediavault)
UUID : 89fa9f64:c5edca5b:f11926b9:b826a1ae
Events : 2936
Number Major Minor RaidDevice State
0 8 64 0 active sync /dev/sde
1 8 80 1 active sync /dev/sdf
root@openmediavault:~#
Alles anzeigen
Code
root@openmediavault:/dev/disk/by-uuid# ls -al
total 0
drwxr-xr-x 2 root root 160 Nov 24 00:15 .
drwxr-xr-x 5 root root 100 Nov 24 00:15 ..
lrwxrwxrwx 1 root root 11 Nov 24 00:15 1a443dd8-7827-4043-9bdd-ff4053bdb16a -> ../../md126
lrwxrwxrwx 1 root root 11 Nov 24 00:15 60bd3f63-67c0-4ed1-86bf-4a3e0608ea6a -> ../../md0p1
lrwxrwxrwx 1 root root 11 Nov 24 00:15 9901d57c-84de-415c-8be9-60d2b1630a36 -> ../../md0p5
lrwxrwxrwx 1 root root 11 Nov 24 00:15 9fe9e1e3-b5c1-41eb-a384-f395b01446bf -> ../../md0p3
lrwxrwxrwx 1 root root 9 Nov 24 00:15 a13b9b2b-2ab7-4b6d-9518-7a1a4aab7460 -> ../../md1
lrwxrwxrwx 1 root root 11 Nov 24 00:15 ffe7557f-f3bc-41be-b8ab-c5b71838e53d -> ../../md127
root@openmediavault:/dev/disk/by-uuid#
Alles anzeigen
What has happened, and how can I fix this?