# Quick re-run on large datasets using different ranking criteria on second run:
rmlint $my_dir
rmlint --replay rmlint.json $my_dir -S MaD
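# (With the default outputs, the first run writes its findings to ./rmlint.json;
# the --replay run then re-ranks those cached results with -S MaD instead of
# scanning and hashing everything again.)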

# Merge together previous runs, but prefer the originals to be from b.json and
# make sure that no files are deleted from b.json:
rmlint --replay a.json // b.json -k

# Search only for duplicates and duplicate directories
rmlint -T "df,dd" .
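# (df and dd are shorthand for the "duplicates" and "duplicatedirs" lint types;
# -T is the short form of --types.)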

# Compare files byte-by-byte in current directory:
rmlint -pp .

# Find duplicates with same basename (excluding extension):
rmlint -i
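# (When no paths are given, rmlint searches the current working directory.)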

# Do more complex traversal using find(1).
# Find all duplicate .so files:
find /usr/lib -iname '*.so' -type f | rmlint -
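# (A trailing '-' makes rmlint read newline-separated paths from stdin.)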

# As above, but handles filenames with newline characters in them:
find /usr/lib -iname '*.so' -type f -print0 | rmlint -0
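# (find -print0 and rmlint -0 exchange NUL-separated paths, which is safe for
# any legal filename.)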

# Compare PNG files only:
find ~/pics -iname '*.png' | rmlint -

# Limit file size range to investigate:
rmlint -s 2GB    # Find everything >= 2GB
rmlint -s 0-2GB  # Find everything <  2GB
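# A bounded range should work the same way, e.g. only mid-sized files:
rmlint -s 100KB-2GB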

# Only find writable and executable files:
rmlint --perms wx
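# (--perms accepts any combination of the letters r, w and x.)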

# Reflink on btrfs, otherwise try to hardlink duplicates to the original. If
# that does not work, replace the duplicate with a symbolic link:
rmlint -c sh:link
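# The sh output writes a script (./rmlint.sh by default); review it, then run
# it to apply the chosen handlers:
./rmlint.sh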

# Inject user-defined command into shell script output:
rmlint -o sh -c sh:cmd='echo "original:" "$2" "is the same as" "$1"'
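# (Judging by the placeholders above, the generated script passes the duplicate
# as "$1" and its original as "$2" to the injected command.)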

# Use data as master directory. Find only duplicates in backup that are also in
# data. Do not delete any files in data:
rmlint backup // data --keep-all-tagged --must-match-tagged
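# The short options -k (--keep-all-tagged) and -m (--must-match-tagged) should
# be equivalent:
rmlint backup // data -k -m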

# Check whether the directories a, b and c are equal:
rmlint --equal a b c && echo "Files are equal" || echo "Files are not equal"

# Test if two files are reflinks
rmlint --is-reflink a b && echo "Files are reflinks" || echo "Files are not reflinks"