From ff34f5ee9df8449f200b68b9e62d302e87651cd3 Mon Sep 17 00:00:00 2001
From: "Ira W. Snyder"
Date: Wed, 27 Dec 2006 23:28:34 -0800
Subject: [PATCH] [RARSLAVE] Eliminate duplicates when finding deleteable files

Use a set() to eliminate duplicates when searching for deletable files.
This fixes the OSError that happens when a file cannot be found because
it was already deleted.

Signed-off-by: Ira W. Snyder
---
 rarslave.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/rarslave.py b/rarslave.py
index bd79779..6ffe57f 100644
--- a/rarslave.py
+++ b/rarslave.py
@@ -340,7 +340,9 @@ def find_deleteable_files (dir, p2file):
 	DELETE_REGEX = config.get_value ('regular expressions', 'delete_regex')
 	dregex = re.compile (DELETE_REGEX, re.IGNORECASE)
 
-	return [f for f in likely if dregex.match (f)]
+	dfiles = [f for f in likely if dregex.match (f)]
+	dset = set(dfiles) # to eliminate dupes
+	return list(dset)
 
 def printlist (li):
 	for f in li:
-- 
2.25.1
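
Editor's note (not part of the patch): a minimal standalone sketch of the pattern the commit describes. Duplicate entries in a delete list cause the second unlink of the same path to raise OSError; deduplicating with set() before deleting avoids it. The function and file names below are hypothetical, not taken from rarslave.py.

    import os
    import tempfile

    def delete_files(paths):
        # Deduplicate first so each path is unlinked at most once; a second
        # os.remove() on the same, already-deleted path would raise OSError.
        for path in set(paths):
            os.remove(path)

    # Hypothetical usage: the duplicated entry is removed only once.
    tmp = tempfile.NamedTemporaryFile(delete=False)
    tmp.close()
    delete_files([tmp.name, tmp.name])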