From 4baf1a3da1c3a459845e49fa05ba7c95dfc7dc4d Mon Sep 17 00:00:00 2001
From: Rob Swindell <rob@synchro.net>
Date: Wed, 7 Apr 2021 00:09:10 -0700
Subject: [PATCH] Use the new file_size_str() arguments for prettier sizes-in-bytes

---
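A minimal sketch of the updated call pattern, assuming file_size_str(bytes, unit, precision)
behaves as the inline argument comments in the first hunk suggest; the byte total and the
log message below are hypothetical, for illustration only:

	// Format a byte total for log output; unit=1 and precision=1 mirror
	// the arguments passed in the hunks below.
	var total_bytes = 123456789;	// hypothetical value
	log("Total: " + file_size_str(total_bytes, /* unit */1, /* precision */1) + " bytes");
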
 exec/dupefind.js | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/exec/dupefind.js b/exec/dupefind.js
index e27923f4d0..e8916be594 100755
--- a/exec/dupefind.js
+++ b/exec/dupefind.js
@@ -110,7 +110,7 @@ for(var i in dir_list) {
 }
 
 log("Searching for duplicates in " + total_files + " files (" 
-	+ file_size_str(total_bytes) + " bytes) ...");
+	+ file_size_str(total_bytes, /* unit */1, /* precision */1) + " bytes) ...");
 var dupe = { name: [], hash: []};
 var name_bytes = 0;
 var hash_bytes = 0;
@@ -133,7 +133,7 @@ for(var n in hash) {
 }
 
 if(options.names) {
-	log(dupe.name.length + " duplicate file names (" + file_size_str(name_bytes) + " bytes)");
+	log(dupe.name.length + " duplicate file names (" + file_size_str(name_bytes, /* unit */1, /* precision */1) + " bytes)");
 	if(options.json)
 		writeln(JSON.stringify(dupe.name, null, 4));
 	else
@@ -141,7 +141,7 @@ if(options.names) {
 }
 if(hash_type) {
 	log(dupe.hash.length + " duplicate file " + hash_type.toUpperCase() + " sums of at least " 
-		+ min_size + " bytes (" + file_size_str(hash_bytes) + " bytes)");
+		+ min_size + " bytes (" + file_size_str(hash_bytes, /* unit */1, /* precision */1) + " bytes)");
 	if(options.json)
 		writeln(JSON.stringify(dupe.hash, null, 4));
 	else
-- 
GitLab