ZFS Administration 2

ZFS Disk Scrubbing

Disk Scrubbing

Finds latent errors while they're still correctable
 Like ECC memory scrubbing, but for disks
Verifies the integrity of all data
 Traverses pool metadata to read every copy of every block
 All mirror copies, all RAID-Z parity, and all ditto blocks
 Verifies each copy against its 256-bit checksum
 Self-healing as it goes

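A scrub is started manually with the zpool scrub subcommand; the "scrub:" line of zpool status then reports progress and, once finished, the completion time (the output below reflects a scrub that had already completed on pool2). A minimal sketch using this session's pool name:

 zpool scrub pool2          # start a scrub of pool2
 zpool status pool2         # the "scrub:" line shows progress or completion
 zpool scrub -s pool2       # stop an in-progress scrub if needed
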
bash-3.00# zpool clear pool2 
bash-3.00# zpool status
 pool: pool2
 state: ONLINE
 scrub: scrub completed with 0 errors on Thu Jan 17 17:59:25 2008
config:

NAME STATE READ WRITE CKSUM
 pool2 ONLINE 0 0 0
 mirror ONLINE 0 0 0
 c1t5d0 ONLINE 0 0 0
 c1t6d0 ONLINE 0 0 0
 spares
 c1t4d0 AVAIL

errors: No known data errors

bash-3.00# zfs list
NAME USED AVAIL REFER MOUNTPOINT
pool2 69.4M 907M 25.5K /pool2
pool2/home 69.2M 907M 31.5K /pool2/home
pool2/home/profile1 24.5K 907M 24.5K /pool2/home/profile1
pool2/home/profile2 24.5K 907M 24.5K /export/profile5
pool2/home/profile3 49.1M 959K 49.0M /pool2/home/profile3
pool2/home/profile3@jan175.06pm 22.5K - 49.0M -
pool2/home/profile4 10.0M 907M 10.0M /pool2/home/profile4
pool2/home/profile4@17jan5.12pm 0 - 10.0M -
pool2/home/profile417jan5.12pm 10.1M 907M 10.0M /pool2/home/profile417jan5.12pm
pool2/home/profile417jan5.12pm@17jan5.12pm 22.5K - 10.0M -

bash-3.00# cd pool2/home/profile3
bash-3.00# ls -l
total 100389
-rw------T 1 root root 20971520 Jan 17 16:58 file1
-rw------T 1 root root 20971520 Jan 17 16:58 file2
-rw------T 1 root root 9437184 Jan 17 17:03 file4

bash-3.00# cat > file7
zfs working properly :)
^Z
[1]+ Stopped cat >file7

bash-3.00# ls -l
total 100391
-rw------T 1 root root 20971520 Jan 17 16:58 file1
-rw------T 1 root root 20971520 Jan 17 16:58 file2
-rw------T 1 root root 9437184 Jan 17 17:03 file4
-rw-r--r-- 1 root root 24 Jan 17 18:00 file7

bash-3.00# more file7
zfs working properly :)

bash-3.00# df -h | grep pool
pool2 976M 25K 907M 1% /pool2
pool2/home 976M 31K 907M 1% /pool2/home
pool2/home/profile2 976M 24K 907M 1% /export/profile5
pool2/home/profile417jan5.12pm 976M 10M 907M 2% /pool2/home/profile417jan5.12pm
pool2/home/profile1 976M 24K 907M 1% /pool2/home/profile1
pool2/home/profile4 976M 10M 907M 2% /pool2/home/profile4
pool2/home/profile3 50M 49M 957K 99% /pool2/home/profile3

bash-3.00# prtvtoc /dev/rdsk/c1t5d0
* /dev/rdsk/c1t5d0 partition map
*
* Dimensions:
* 512 bytes/sector
* 2097151 sectors
* 2097084 accessible sectors
*
* Flags:
* 1: unmountable
* 10: read-only
*
* First Sector Last
* Partition Tag Flags Sector Count Sector Mount Directory
 0 4 00 34 2080700 2080733
 8 11 00 2080734 16384 2097117

Note: ZFS formats the disk using an EFI label to contain a single, large slice.

bash-3.00# prtvtoc /dev/rdsk/c1t6d0
* /dev/rdsk/c1t6d0 partition map
*
* Dimensions:
* 512 bytes/sector
* 2097151 sectors
* 2097084 accessible sectors
*
* Flags:
* 1: unmountable
* 10: read-only
*
* First Sector Last
* Partition Tag Flags Sector Count Sector Mount Directory
 0 4 00 34 2080700 2080733
 8 11 00 2080734 16384 2097117

bash-3.00# zfs list
NAME USED AVAIL REFER MOUNTPOINT
pool2 69.4M 907M 25.5K /pool2
pool2/home 69.3M 907M 31.5K /pool2/home
pool2/home/profile1 24.5K 907M 24.5K /pool2/home/profile1
pool2/home/profile2 24.5K 907M 24.5K /export/profile5
pool2/home/profile3 49.1M 958K 49.0M /pool2/home/profile3
pool2/home/profile3@jan175.06pm 23.5K - 49.0M -
pool2/home/profile4 10.0M 907M 10.0M /pool2/home/profile4
pool2/home/profile4@17jan5.12pm 0 - 10.0M -
pool2/home/profile417jan5.12pm 10.1M 907M 10.0M /pool2/home/profile417jan5.12pm
pool2/home/profile417jan5.12pm@17jan5.12pm 22.5K - 10.0M -

To rename a file system

bash-3.00# zfs rename pool2/home/profile1 pool2/home/profile6
bash-3.00# zfs list
NAME USED AVAIL REFER MOUNTPOINT
pool2 69.4M 907M 25.5K /pool2
pool2/home 69.3M 907M 31.5K /pool2/home
pool2/home/profile2 24.5K 907M 24.5K /export/profile5
pool2/home/profile3 49.1M 958K 49.0M /pool2/home/profile3
pool2/home/profile3@jan175.06pm 23.5K - 49.0M -
pool2/home/profile4 10.0M 907M 10.0M /pool2/home/profile4
pool2/home/profile4@17jan5.12pm 0 - 10.0M -
pool2/home/profile417jan5.12pm 10.1M 907M 10.0M /pool2/home/profile417jan5.12pm
pool2/home/profile417jan5.12pm@17jan5.12pm 22.5K - 10.0M -
pool2/home/profile6 24.5K 907M 24.5K /pool2/home/profile6 [renamed from profile1]

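zfs rename can also move a dataset under a different parent in the same pool. A hypothetical example, assuming a pool2/archive file system already existed:

 zfs rename pool2/home/profile6 pool2/archive/profile6
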
bash-3.00# zfs create pool2/home/profile8
bash-3.00# zfs list
NAME USED AVAIL REFER MOUNTPOINT
pool2 69.4M 907M 25.5K /pool2
pool2/home 69.3M 907M 32.5K /pool2/home
pool2/home/profile2 24.5K 907M 24.5K /export/profile5
pool2/home/profile3 49.1M 958K 49.0M /pool2/home/profile3
pool2/home/profile3@jan175.06pm 23.5K - 49.0M -
pool2/home/profile4 10.0M 907M 10.0M /pool2/home/profile4
pool2/home/profile4@17jan5.12pm 0 - 10.0M -
pool2/home/profile417jan5.12pm 10.1M 907M 10.0M /pool2/home/profile417jan5.12pm
pool2/home/profile417jan5.12pm@17jan5.12pm 22.5K - 10.0M -
pool2/home/profile6 24.5K 907M 24.5K /pool2/home/profile6
pool2/home/profile8 24.5K 907M 24.5K /pool2/home/profile8

bash-3.00# cd /pool2/home/profile8
bash-3.00# mkfile 2m example1
bash-3.00# mkfile 2m example2
bash-3.00# mkfile 2m example3
bash-3.00# ls -l
total 12303
-rw------T 1 root root 2097152 Jan 17 18:38 example1
-rw------T 1 root root 2097152 Jan 17 18:38 example2
-rw------T 1 root root 2097152 Jan 17 18:38 example3
bash-3.00# cd

bash-3.00# zfs list
NAME USED AVAIL REFER MOUNTPOINT
pool2 75.4M 901M 25.5K /pool2
pool2/home 75.3M 901M 32.5K /pool2/home
pool2/home/profile2 24.5K 901M 24.5K /export/profile5
pool2/home/profile3 49.1M 958K 49.0M /pool2/home/profile3
pool2/home/profile3@jan175.06pm 23.5K - 49.0M -
pool2/home/profile4 10.0M 901M 10.0M /pool2/home/profile4
pool2/home/profile4@17jan5.12pm 0 - 10.0M -
pool2/home/profile417jan5.12pm 10.1M 901M 10.0M /pool2/home/profile417jan5.12pm
pool2/home/profile417jan5.12pm@17jan5.12pm 22.5K - 10.0M -
pool2/home/profile6 24.5K 901M 24.5K /pool2/home/profile6
pool2/home/profile8 6.03M 901M 6.03M /pool2/home/profile8

bash-3.00# zfs snapshot pool2/home/profile8@17jan6.39pm
bash-3.00# zfs list
NAME USED AVAIL REFER MOUNTPOINT
pool2 75.4M 901M 25.5K /pool2
pool2/home 75.3M 901M 32.5K /pool2/home
pool2/home/profile2 24.5K 901M 24.5K /export/profile5
pool2/home/profile3 49.1M 958K 49.0M /pool2/home/profile3
pool2/home/profile3@jan175.06pm 23.5K - 49.0M -
pool2/home/profile4 10.0M 901M 10.0M /pool2/home/profile4
pool2/home/profile4@17jan5.12pm 0 - 10.0M -
pool2/home/profile417jan5.12pm 10.1M 901M 10.0M /pool2/home/profile417jan5.12pm
pool2/home/profile417jan5.12pm@17jan5.12pm 22.5K - 10.0M -
pool2/home/profile6 24.5K 901M 24.5K /pool2/home/profile6
pool2/home/profile8 6.03M 901M 6.03M /pool2/home/profile8
pool2/home/profile8@17jan6.39pm 0 - 6.03M -

ZFS Snapshots
A snapshot is a read-only, point-in-time copy of a file system
 Instantaneous creation, unlimited number
 No additional space used; blocks are copied only when they change
 Accessible through .zfs/snapshot in the root of each file system (see the example below)

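For example, the snapshot created above can be browsed through the hidden .zfs directory, and the file system can be rolled back to it; a sketch using the names from this session:

 ls /pool2/home/profile8/.zfs/snapshot/17jan6.39pm    # read-only view of the snapshot contents
 zfs rollback pool2/home/profile8@17jan6.39pm         # revert the file system to the snapshot

Note: zfs rollback discards any changes made after the snapshot was taken.
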
How To Clone a File System

bash-3.00# zfs clone pool2/home/profile8@17jan6.39pm pool2/home/profile8c
bash-3.00# zfs list
NAME USED AVAIL REFER MOUNTPOINT
pool2 75.4M 901M 25.5K /pool2
pool2/home 75.3M 901M 32.5K /pool2/home
pool2/home/profile2 24.5K 901M 24.5K /export/profile5
pool2/home/profile3 49.1M 958K 49.0M /pool2/home/profile3
pool2/home/profile3@jan175.06pm 23.5K - 49.0M -
pool2/home/profile4 10.0M 901M 10.0M /pool2/home/profile4
pool2/home/profile4@17jan5.12pm 0 - 10.0M -
pool2/home/profile417jan5.12pm 10.1M 901M 10.0M /pool2/home/profile417jan5.12pm
pool2/home/profile417jan5.12pm@17jan5.12pm 22.5K - 10.0M -
pool2/home/profile6 24.5K 901M 24.5K /pool2/home/profile6
pool2/home/profile8 6.03M 901M 6.03M /pool2/home/profile8
pool2/home/profile8@17jan6.39pm 0 - 6.03M -
pool2/home/profile8c 0 901M 6.03M /pool2/home/profile8c [cloned dataset]

Note: A clone can be created only from a snapshot.

ZFS Clones
A clone is a writable copy of a snapshot
 Instantaneous creation, unlimited number
 Ideal for storing many private copies of mostly-shared data:
  Software installations
  Workspaces
  Diskless clients

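A clone remains dependent on its origin snapshot, so the snapshot cannot be destroyed while the clone exists. If the clone is to become the main copy, it can be promoted, which reverses that dependency; a sketch using the dataset names from this session:

 zfs promote pool2/home/profile8c     # the origin snapshot now belongs to the clone
 zfs destroy pool2/home/profile8      # hypothetical cleanup step, possible only after the promote
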
bash-3.00# df -h | grep pool
pool2 976M 25K 901M 1% /pool2
pool2/home 976M 33K 901M 1% /pool2/home
pool2/home/profile417jan5.12pm 976M 10M 901M 2% /pool2/home/profile417jan5.12pm
pool2/home/profile2 976M 24K 901M 1% /export/profile5
pool2/home/profile4 976M 10M 901M 2% /pool2/home/profile4
pool2/home/profile3 50M 49M 957K 99% /pool2/home/profile3
pool2/home/profile6 976M 24K 901M 1% /pool2/home/profile6
pool2/home/profile8 976M 6.0M 901M 1% /pool2/home/profile8
pool2/home/profile8c 976M 6.0M 901M 1% /pool2/home/profile8c

bash-3.00# cd /pool2/home/profile8c
bash-3.00# ls -l
total 12303
-rw------T 1 root root 2097152 Jan 17 18:38 example1
-rw------T 1 root root 2097152 Jan 17 18:38 example2
-rw------T 1 root root 2097152 Jan 17 18:38 example3

bash-3.00#

To unmount / mount a file system temporarily

bash-3.00# zfs umount pool2/home/profile6
bash-3.00# df -h | grep pool
pool2 976M 25K 901M 1% /pool2
pool2/home 976M 33K 901M 1% /pool2/home
pool2/home/profile417jan5.12pm 976M 10M 901M 2% /pool2/home/profile417jan5.12pm
pool2/home/profile2 976M 24K 901M 1% /export/profile5
pool2/home/profile4 976M 10M 901M 2% /pool2/home/profile4
pool2/home/profile3 50M 49M 957K 99% /pool2/home/profile3
pool2/home/profile8 976M 6.0M 901M 1% /pool2/home/profile8
pool2/home/profile8c 976M 6.0M 901M 1% /pool2/home/profile8c

bash-3.00# zfs mount pool2/home/profile6

bash-3.00# df -h | grep pool
pool2 976M 25K 901M 1% /pool2
pool2/home 976M 33K 901M 1% /pool2/home
pool2/home/profile417jan5.12pm 976M 10M 901M 2% /pool2/home/profile417jan5.12pm
pool2/home/profile2 976M 24K 901M 1% /export/profile5
pool2/home/profile4 976M 10M 901M 2% /pool2/home/profile4
pool2/home/profile3 50M 49M 957K 99% /pool2/home/profile3
pool2/home/profile8 976M 6.0M 901M 1% /pool2/home/profile8
pool2/home/profile8c 976M 6.0M 901M 1% /pool2/home/profile8c
pool2/home/profile6 976M 24K 901M 1% /pool2/home/profile6

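ZFS mounts can also be handled in bulk, and zfs mount with no arguments lists what is currently mounted; for example:

 zfs mount           # list currently mounted ZFS file systems
 zfs unmount -a      # unmount all ZFS file systems that are not busy
 zfs mount -a        # mount all ZFS file systems again
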
bash-3.00# zfs get all pool2/home/profile6
NAME PROPERTY VALUE SOURCE
pool2/home/profile6 type filesystem - 
pool2/home/profile6 creation Thu Jan 17 16:13 2008 - 
pool2/home/profile6 used 24.5K - 
pool2/home/profile6 available 901M - 
pool2/home/profile6 referenced 24.5K - 
pool2/home/profile6 compressratio 1.00x - 
pool2/home/profile6 mounted yes - 
pool2/home/profile6 quota none default 
pool2/home/profile6 reservation none default 
pool2/home/profile6 recordsize 128K default 
pool2/home/profile6 mountpoint /pool2/home/profile6 default 
pool2/home/profile6 sharenfs off default 
pool2/home/profile6 checksum on default 
pool2/home/profile6 compression off default 
pool2/home/profile6 atime on default 
pool2/home/profile6 devices on default 
pool2/home/profile6 exec on default 
pool2/home/profile6 setuid on default 
pool2/home/profile6 readonly off default 
pool2/home/profile6 zoned off default 
pool2/home/profile6 snapdir hidden default 
pool2/home/profile6 aclmode groupmask default 
pool2/home/profile6 aclinherit secure default

How To Set a ZFS Property

bash-3.00# zfs get sharenfs pool2/home/profile6
NAME PROPERTY VALUE SOURCE
pool2/home/profile6 sharenfs off default

bash-3.00# zfs set sharenfs=on pool2/home/profile6

bash-3.00# zfs get sharenfs pool2/home/profile6
NAME PROPERTY VALUE SOURCE
pool2/home/profile6 sharenfs on local

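The same zfs set / zfs get pattern applies to other properties such as quota, reservation and compression, and zfs inherit reverts a locally set value; a few illustrative examples on the same dataset:

 zfs set quota=100m pool2/home/profile6           # cap the space this dataset may use
 zfs set compression=on pool2/home/profile6       # compress newly written blocks
 zfs get quota,compression pool2/home/profile6    # confirm the values and their SOURCE
 zfs inherit sharenfs pool2/home/profile6         # revert sharenfs to its default value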

bash-3.00# zpool status
 pool: pool2
 state: ONLINE
 scrub: scrub completed with 0 errors on Thu Jan 17 17:59:25 2008
config:

NAME STATE READ WRITE CKSUM
 pool2 ONLINE 0 0 0
 mirror ONLINE 0 0 0
 c1t5d0 ONLINE 0 0 0
 c1t6d0 ONLINE 0 0 0
 spares
 c1t4d0 AVAIL

errors: No known data errors

Replacing a Disk in a Pool

bash-3.00# zpool replace pool2 c1t5d0 c1t4d0
bash-3.00# zpool status
 pool: pool2
 state: ONLINE
status: One or more devices is currently being resilvered. The pool will
 continue to function, possibly in a degraded state.
action: Wait for the resilver to complete.
 scrub: resilver in progress, 78.64% done, 0h0m to go
config:

NAME STATE READ WRITE CKSUM
 pool2 ONLINE 0 0 0
 mirror ONLINE 0 0 0
 spare ONLINE 0 0 0
 c1t5d0 ONLINE 0 0 0
 c1t4d0 ONLINE 0 0 0
 c1t6d0 ONLINE 0 0 0
 spares
 c1t4d0 INUSE currently in use

errors: No known data errors

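Because the hot spare c1t4d0 was given as the replacement device, it stays marked INUSE. Once the resilver completes, detaching the replaced disk makes the change permanent; a sketch using the device names above:

 zpool detach pool2 c1t5d0     # remove the replaced disk; the spare becomes a permanent mirror member
 zpool status pool2            # verify the mirror is healthy
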
ZPOOL Striping / Mirroring

bash-3.00# zpool create pool10 c1t3d0 c1t4d0
bash-3.00# zpool status
 pool: pool10
 state: ONLINE
 scrub: none requested
config:

NAME STATE READ WRITE CKSUM
 pool10 ONLINE 0 0 0
 c1t3d0 ONLINE 0 0 0
 c1t4d0 ONLINE 0 0 0

errors: No known data errors
bash-3.00# zpool list
NAME SIZE USED AVAIL CAP HEALTH ALTROOT
pool10 1.97G 58.5K 1.97G 0% ONLINE -
bash-3.00# zfs list
NAME USED AVAIL REFER MOUNTPOINT
pool10 54K 1.94G 1.50K /pool10

bash-3.00# zpool create pool5 mirror c1t3d0 c1t4d0
bash-3.00# zpool status
 pool: pool5
 state: ONLINE
 scrub: none requested
config:

NAME STATE READ WRITE CKSUM
 pool5 ONLINE 0 0 0
 mirror ONLINE 0 0 0
 c1t3d0 ONLINE 0 0 0
 c1t4d0 ONLINE 0 0 0

errors: No known data errors
bash-3.00# zpool add pool5 mirror c1t5d0 c1t6d0
bash-3.00# zpool status
 pool: pool5
 state: ONLINE
 scrub: none requested
config:

NAME STATE READ WRITE CKSUM
 pool5 ONLINE 0 0 0
 mirror ONLINE 0 0 0
 c1t3d0 ONLINE 0 0 0
 c1t4d0 ONLINE 0 0 0
 mirror ONLINE 0 0 0
 c1t5d0 ONLINE 0 0 0
 c1t6d0 ONLINE 0 0 0

errors: No known data errors
bash-3.00# zfs list
NAME USED AVAIL REFER MOUNTPOINT
pool5 170K 1.94G 24.5K /pool5
bash-3.00# zpool list
NAME SIZE USED AVAIL CAP HEALTH ALTROOT
pool5 1.97G 307K 1.97G 0% ONLINE -

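Note that these examples reuse the same disks (c1t3d0 through c1t6d0), so each test pool has to be destroyed before its disks can be used to create the next one; for example, before re-creating pool10 below:

 zpool destroy pool5           # destroys the pool and all data in it
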
Converting from Stripe to Mirror

bash-3.00# zpool create pool10 c1t3d0 c1t4d0
bash-3.00# zpool status
 pool: pool10
 state: ONLINE
 scrub: none requested
config:

NAME STATE READ WRITE CKSUM
 pool10 ONLINE 0 0 0
 c1t3d0 ONLINE 0 0 0
 c1t4d0 ONLINE 0 0 0

errors: No known data errors

bash-3.00# zpool attach pool10 c1t3d0 c1t5d0
bash-3.00# zpool status
 pool: pool10
 state: ONLINE
 scrub: resilver completed with 0 errors on Mon Jan 21 21:37:17 2008
config:

NAME STATE READ WRITE CKSUM
 pool10 ONLINE 0 0 0
 mirror ONLINE 0 0 0
 c1t3d0 ONLINE 0 0 0
 c1t5d0 ONLINE 0 0 0
 c1t4d0 ONLINE 0 0 0

errors: No known data errors

bash-3.00# zpool list
NAME SIZE USED AVAIL CAP HEALTH ALTROOT
pool10 1.97G 117K 1.97G 0% ONLINE -
bash-3.00# zfs list
NAME USED AVAIL REFER MOUNTPOINT
pool10 78K 1.94G 24.5K /pool10
bash-3.00# zpool attach pool10 c1t4d0 c1t6d0
bash-3.00# zpool status
 pool: pool10
 state: ONLINE
 scrub: resilver completed with 0 errors on Mon Jan 21 21:37:41 2008
config:

NAME STATE READ WRITE CKSUM
 pool10 ONLINE 0 0 0
 mirror ONLINE 0 0 0
 c1t3d0 ONLINE 0 0 0
 c1t5d0 ONLINE 0 0 0
 mirror ONLINE 0 0 0
 c1t4d0 ONLINE 0 0 0
 c1t6d0 ONLINE 0 0 0

errors: No known data errors
bash-3.00# zpool list
NAME SIZE USED AVAIL CAP HEALTH ALTROOT
pool10 1.97G 188K 1.97G 0% ONLINE -
bash-3.00# zfs list
NAME USED AVAIL REFER MOUNTPOINT
pool10 79K 1.94G 24.5K /pool10

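zpool attach resilvers the new disk from the existing one, turning each single-disk vdev into a two-way mirror. The reverse, shrinking a mirror back to a single disk, uses zpool detach; a sketch with the devices above:

 zpool detach pool10 c1t5d0    # drop one side of the first mirror, leaving c1t3d0 on its own
 zpool detach pool10 c1t6d0    # drop one side of the second mirror, leaving c1t4d0 on its own
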
How To Mount a Legacy Mountpoint

bash-3.00# zfs create pool2/home/profile50
bash-3.00# zfs list
NAME USED AVAIL REFER MOUNTPOINT
pool1 140K 1.94G 25.5K /pool1
pool1/home 50K 1.94G 25.5K /pool1/home
pool1/home/user1 24.5K 1.94G 24.5K /pool1/home/user1
pool2 69.5M 907M 25.5K /pool2
pool2/home 69.3M 907M 30.5K /pool2/home
pool2/home/profile1 24.5K 907M 24.5K /pool2/home/profile1
pool2/home/profile3 49.0M 982K 49.0M /pool2/home/profile3
pool2/home/profile3@jan175.06pm 0 - 49.0M -
pool2/home/profile4 10.1M 907M 10.0M /pool2/home/profile4
pool2/home/profile4@17jan5.12pm 22.5K - 10.0M -
pool2/home/profile417jan 10.1M 907M 10.0M /pool2/home/profile417jan
pool2/home/profile417jan@17jan5.12pm 22.5K - 10.0M -
pool2/home/profile2 24.5K 907M 24.5K /export/profile5
pool2/home/profile50 24.5K 907M 24.5K /pool2/home/profile50

bash-3.00# zfs set mountpoint=legacy pool2/home/profile50

If the mountpoint is set to legacy, an entry must be added to /etc/vfstab to mount the file system.

ZFS no longer manages the mount automatically once the mountpoint is set to legacy.

bash-3.00# zfs list

NAME USED AVAIL REFER MOUNTPOINT
pool1 140K 1.94G 25.5K /pool1
pool1/home 50K 1.94G 25.5K /pool1/home
pool1/home/user1 24.5K 1.94G 24.5K /pool1/home/user1
pool2 69.5M 907M 25.5K /pool2
pool2/home 69.3M 907M 30.5K /pool2/home
pool2/home/profile1 24.5K 907M 24.5K /pool2/home/profile1
pool2/home/profile3 49.0M 982K 49.0M /pool2/home/profile3
pool2/home/profile3@jan175.06pm 0 - 49.0M -
pool2/home/profile4 10.1M 907M 10.0M /pool2/home/profile4
pool2/home/profile4@17jan5.12pm 22.5K - 10.0M -
pool2/home/profile417jan 10.1M 907M 10.0M /pool2/home/profile417jan
pool2/home/profile417jan@17jan5.12pm 22.5K - 10.0M -
pool2/home/profile2 24.5K 907M 24.5K /export/profile5
pool2/home/profile50 24.5K 907M 24.5K legacy

bash-3.00# vi /etc/vfstab
#device device mount FS fsck mount mount
#to mount to fsck point type pass at boot options
#
fd - /dev/fd fd - no -
/proc - /proc proc - no -
/dev/dsk/c1t0d0s1 - - swap - no -
/dev/dsk/c1t0d0s0 /dev/rdsk/c1t0d0s0 / ufs 1 no -
/dev/dsk/c1t0d0s4 /dev/rdsk/c1t0d0s4 /var ufs 1 no -
/dev/dsk/c1t0d0s7 /dev/rdsk/c1t0d0s7 /export/home ufs 2 yes -
/devices - /devices devfs - no -
ctfs - /system/contract ctfs - no -
objfs - /system/object objfs - no -
swap - /tmp tmpfs - yes -
pool2/home/profile50 - /profilemount zfs - yes -

bash-3.00# mkdir /profilemount

bash-3.00# mountall

bash-3.00# df -h | grep profilemount
pool2/home/profile50 976M 24K 907M 1% /profilemount
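
With a legacy mountpoint the file system can also be mounted and unmounted by hand with the standard Solaris mount/umount commands instead of mountall; a sketch, including how to hand control back to ZFS afterwards:

 umount /profilemount
 mount -F zfs pool2/home/profile50 /profilemount                  # manual legacy mount
 zfs set mountpoint=/pool2/home/profile50 pool2/home/profile50    # let ZFS manage the mount again

If the mountpoint is returned to ZFS control, the /etc/vfstab entry should also be removed.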