0,0 → 1,392 |
#!/usr/bin/env python |
|
# Copyright: Ira W. Snyder (devel@irasnyder.com) |
# Start Date: 2005-10-13 |
# End Date: |
# License: GNU General Public License v2 (or at your option, any later version) |
# |
# Changelog Follows: |
# - 2005-10-13 |
# - Added get_par2_filenames() to parse par2 files |
# - Added the parset object to represent each parset. |
# |
# - 2005-10-14 |
# - Finished the parset object. It will now verify and extract parsets. |
# - Small changes to the parset object. This makes the parjoin part |
# much more reliable. |
# - Added the OptionParser to make this nice to run at the command line. |
# - Made recursiveness an option. |
# - Made start directory an option. |
# - Check for appropriate programs before starting. |
# |
|
################################################################################ |
# REQUIREMENTS: |
# |
# This code requires the programs cfv, par2repair, lxsplit, and rar to be able |
# to function properly. I will attempt to check that these are in your path. |
################################################################################ |
|
################################################################################ |
# Global Variables |
################################################################################ |
WORK_DIR = '~/downloads/usenet'  # default start directory; override with -d/--start-dir
################################################################################ |
|
################################################################################ |
# The PAR2 Parser |
# |
# This was stolen from cfv (see http://cfv.sourceforge.net/ for a copy) |
################################################################################ |
|
import errno
import hashlib
import struct
|
# We always want to do crc checks |
docrcchecks = True |
|
def chompnulls(line):
    """Return 'line' truncated at the first NUL character (unchanged if none)."""
    return line.split('\0', 1)[0]
|
def get_par2_filenames(filename, docrcchecks=True):
    """Return the list of filenames protected by the par2 file 'filename'.

    Parses the PAR2 packet stream: every FileDesc packet contributes one
    protected filename, and the Main packet supplies the list of file IDs
    we expect to see.  Raises EnvironmentError (EINVAL) on a corrupt or
    unsupported file; returns [] if the file cannot be opened.

    docrcchecks: verify each packet's MD5 before trusting it (default True;
    previously controlled by a module-level global of the same name).
    """

    try:
        parfile = open(filename, 'rb')
    except (IOError, OSError):
        # narrowed from a bare except: only I/O failures mean "skip this file"
        print('Could not open %s' % (filename, ))
        return []

    # Packet layouts from the PAR2 specification (all little-endian)
    pkt_header_fmt = '< 8s Q 16s 16s 16s'   # magic, length, md5, set id, type
    pkt_header_size = struct.calcsize(pkt_header_fmt)
    file_pkt_fmt = '< 16s 16s 16s Q'        # file id, md5, md5-16k, size
    file_pkt_size = struct.calcsize(file_pkt_fmt)
    main_pkt_fmt = '< Q I'                  # slice size, number of files
    main_pkt_size = struct.calcsize(main_pkt_fmt)

    seen_file_ids = {}
    expected_file_ids = None
    filenames = []

    try:
        while True:
            d = parfile.read(pkt_header_size)
            if not d:
                break  # clean EOF: no more packets

            magic, pkt_len, pkt_md5, set_id, pkt_type = struct.unpack(pkt_header_fmt, d)

            if docrcchecks:
                # The packet hash covers everything after the md5 field,
                # i.e. set id + type (header bytes 0x20..) plus the body.
                # hashlib replaces the long-deprecated md5 module, and the
                # import no longer happens once per packet.
                control_md5 = hashlib.md5()
                control_md5.update(d[0x20:])
                d = parfile.read(pkt_len - pkt_header_size)
                control_md5.update(d)

                if control_md5.digest() != pkt_md5:
                    raise EnvironmentError(errno.EINVAL,
                        "corrupt par2 file - bad packet hash")

            if pkt_type == b'PAR 2.0\0FileDesc':
                if not docrcchecks:
                    d = parfile.read(pkt_len - pkt_header_size)

                file_id, file_md5, file_md5_16k, file_size = \
                    struct.unpack(file_pkt_fmt, d[:file_pkt_size])

                # A file may be described by several packets; record it once
                if file_id not in seen_file_ids:
                    seen_file_ids[file_id] = 1
                    # the name is NUL-padded to the packet boundary
                    filenames.append(d[file_pkt_size:].split(b'\0', 1)[0])

            elif pkt_type == b"PAR 2.0\0Main\0\0\0\0":
                if not docrcchecks:
                    d = parfile.read(pkt_len - pkt_header_size)

                if expected_file_ids is None:
                    expected_file_ids = []
                    slice_size, num_files = struct.unpack(main_pkt_fmt, d[:main_pkt_size])
                    # floor division (was '/'): this is a count, not a ratio
                    num_nonrecovery = (len(d) - main_pkt_size) // 16 - num_files

                    # file IDs follow the fixed fields, 16 bytes apiece
                    for i in range(main_pkt_size,
                                   main_pkt_size + (num_files + num_nonrecovery) * 16,
                                   16):
                        expected_file_ids.append(d[i:i + 16])

            else:
                # unknown packet type: skip its body without reading it
                if not docrcchecks:
                    parfile.seek(pkt_len - pkt_header_size, 1)
    finally:
        parfile.close()

    if expected_file_ids is None:
        raise EnvironmentError(errno.EINVAL,
            "corrupt or unsupported par2 file - no main packet found")

    for file_id in expected_file_ids:
        if file_id not in seen_file_ids:
            raise EnvironmentError(errno.EINVAL,
                "corrupt or unsupported par2 file - "
                "expected file description packet not found")

    return filenames
|
################################################################################ |
# The parset object |
# |
# This is an object based representation of a parset, and will verify itself |
# and extract itself, if possible. |
################################################################################ |
|
import os, glob |
|
class parset:
    """Object representation of a single PAR2 set.

    A parset knows which files it protects, can verify them (falling back
    to repair, or joining split files when needed) and can extract and
    clean up the set.  The 'files' attribute is filled in by the caller
    (see get_parsets()).
    """

    def __init__(self, par_filename):
        self.parfile = par_filename   # main .par2 file for this set
        self.extra_pars = []          # additional par2 volumes of the same set
        self.files = []               # protected filenames (was False; a list is iterable)
        self.used_parjoin = False     # True if lxsplit was used to join the file
        self.verified = False         # result of the last verify()
        self.extracted = False        # result of the last extract()

    def get_filenames(self):
        """Return the filenames protected by this set's main par2 file."""
        # BUG FIX: the original referenced the undefined name 'parfile'
        # (NameError at runtime); the instance attribute was intended.
        return get_par2_filenames(self.parfile)

    def all_there(self):
        """Return True when every protected file is present on disk.

        Used to decide which par2 checker to try first."""
        for f in self.files:
            if not os.path.isfile(f):
                return False

        # The files were all there
        return True

    def verify(self):
        """Verify the parset, fastest method first, falling back as needed."""
        retval = False  # not verified yet

        if self.all_there():
            # Everything present: a plain checksum pass is the cheapest
            retval = self.__fast_verify()

            if retval == False:
                # Fast check failed; par2repair may be able to fix the set
                retval = self.__slow_verify()

        elif self.__has_video_file():
            # Files missing but a video is expected: try joining split parts
            retval = self.__parjoin()

        else:
            # Not all there; par2repair may still recover the set
            retval = self.__slow_verify()

        self.verified = retval
        return self.verified

    def __fast_verify(self):
        """Checksum the set with cfv; True on success."""
        return os.system('cfv -v -f "%s"' % (self.parfile, )) == 0

    def __slow_verify(self):
        """Check (and possibly repair) the set with par2repair; True on success."""
        return os.system('par2repair "%s"' % (self.parfile, )) == 0

    def __parjoin(self):
        """Join split parts (file.001, ...) with lxsplit, then verify."""
        # lxsplit's exit status is ignored (the original discarded it too);
        # the verification below is what decides success.
        os.system('lxsplit -j "%s.001"' % (self.files[0], ))

        retval = self.__fast_verify()

        if retval == False:
            # Failed to verify fast, so try it slow, maybe it needs repair
            retval = self.__slow_verify()

        if retval == False:
            # Verification failed: drop the joined file lxsplit created
            # (guarded so a failed join doesn't raise OSError here)
            if os.path.isfile(self.files[0]):
                os.remove(self.files[0])

        self.used_parjoin = retval
        self.verified = retval
        return self.verified

    def __has_video_file(self):
        """Return True if the set protects a video file worth parjoining."""
        for f in self.files:
            if os.path.splitext(f)[1] in ('.avi', '.ogm', '.mkv'):
                return True

        return False

    def __remove_currentset(self):
        """Remove the pars, rars and parjoin leftovers; keep everything else."""
        if not self.extracted:
            print('Did not extract yet, not removing currentset')
            return

        # remove the main par
        os.remove(self.parfile)

        # remove all of the extra pars
        for i in self.extra_pars:
            os.remove(i)

        # Remove associated rar archives only.  BUG FIX: the original
        # tested i[-3:] == 'rar', which also matched names merely ending
        # in "rar" (e.g. "foorar"); match the '.rar' extension like
        # __get_extract_file does.
        for i in self.files:
            if os.path.splitext(i)[1] == '.rar':
                os.remove(i)

        # remove any .0?? split parts left over from parjoin
        if self.used_parjoin:
            for i in os.listdir(os.getcwd()):
                if i != self.files[0] and self.files[0] in i:
                    os.remove(i)

        # remove any temp repair files
        for i in glob.glob('*.1'):
            os.remove(i)

    def __get_extract_file(self):
        """Return the first .rar of the set, or None when nothing is extractable."""
        for i in self.files:
            if os.path.splitext(i)[1] == '.rar':
                return i

        return None

    def extract(self):
        """Extract the set's rar archive (if any), then clean up the set."""
        if not self.verified:
            self.extracted = False
            print('Not (successfully) verified, not extracting')
            return False  # failed to extract

        extract_file = self.__get_extract_file()

        if extract_file is not None:
            if os.system('rar e -o+ "%s"' % (extract_file, )) != 0:
                print('Failed to extract')
                self.extracted = False
                return self.extracted

        # NOTE: a verified set with no rar counts as "extracted" so the
        # pars still get cleaned up (original behavior, kept on purpose)
        self.extracted = True
        self.__remove_currentset()

        return self.extracted
|
|
################################################################################ |
# The rarslave program itself |
################################################################################ |
|
import os, sys, glob |
from optparse import OptionParser |
|
def check_required_progs():
    """Exit with an error if any required external program is missing.

    rarslave shells out to cfv, par2repair, lxsplit and rar; probe each
    one and abort (sys.exit(1)) with a message naming whatever is not in
    $PATH.
    """

    # Exit status os.system() reports when /bin/sh cannot find the command
    shell_not_found = 32512
    needed = []

    # BUG FIX: the original reported the misspelled name 'lxpsplit' when
    # lxsplit was missing.  Looping over the real program names fixes the
    # message and removes the copy-pasted probes.
    for prog in ('cfv', 'par2repair', 'lxsplit', 'rar'):
        if os.system('%s --help > /dev/null 2>&1' % (prog, )) == shell_not_found:
            needed.append(prog)

    if needed:
        for n in needed:
            print('Needed program "%s" not found in $PATH' % (n, ))

        sys.exit(1)
|
def get_parsets():
    """Scan the current directory for par2 files and group them into parsets.

    Par files protecting the same set of filenames are merged: the first
    one found becomes the parset's main par, the rest go into extra_pars.
    Returns a list of parset instances."""

    candidates = glob.glob('*.par2') + glob.glob('*.PAR2')

    parsets = []

    for parfile in candidates:
        protected = get_par2_filenames(parfile)

        # look for an existing parset covering the same files
        owner = None
        for ps in parsets:
            if ps.files == protected:
                owner = ps
                break

        if owner is not None:
            # same set, another par volume
            owner.extra_pars.append(parfile)
        else:
            # first par seen for this set
            fresh = parset(parfile)
            fresh.files = protected
            parsets.append(fresh)

    return parsets
|
def directory_worker(dir):
    """Find, verify, and extract every parset in the directory 'dir'.

    Temporarily changes the working directory (the parset machinery works
    relative to the current directory) and restores it afterwards."""

    saved_cwd = os.getcwd()
    os.chdir(dir)

    found = get_parsets()

    # first pass: verify (and possibly repair/join) each set
    for ps in found:
        ps.verify()

    # second pass: extract whatever verified successfully
    for ps in found:
        ps.extract()

    os.chdir(saved_cwd)
|
def main():
    """Command-line entry point: parse options and run over the work dir."""

    # Command-line interface
    opts = OptionParser()
    opts.add_option('-n', '--not-recursive', action='store_false',
                    dest='recursive', default=True,
                    help="don't run recursively")
    opts.add_option('-d', '--start-dir', dest='work_dir', default=WORK_DIR,
                    help='start running at DIR', metavar='DIR')

    (options, args) = opts.parse_args()

    # Expand ~ and normalize to an absolute path
    options.work_dir = os.path.abspath(os.path.expanduser(options.work_dir))

    # Bail out early if a helper program is missing
    check_required_progs()

    # Run rarslave!
    if not options.recursive:
        directory_worker(options.work_dir)
    else:
        for root, dirs, files in os.walk(options.work_dir):
            directory_worker(root)
|
if __name__ == '__main__': |
main() |
|
Property changes: |
Added: svn:executable |
## -0,0 +1 ## |
+* |
\ No newline at end of property |
Index: animesorter.dict |
=================================================================== |
--- animesorter.dict (revision 128) |
+++ animesorter.dict (revision 129) |
@@ -1,6 +1,9 @@ |
\[A-Future_-_XD\]_Amaenaide_Yo\!_-_.+_\[.+\].avi = Amaenaideyo |
\[Froth-Bite\]Amaenaideyo_.+\[.+\].avi = Amaenaideyo |
+\[Oyasumi\]_Black_Cat_.+_\[.+\].avi = Black Cat |
\[Lunar\]_Bleach_-_.+_\[.+\].avi = Bleach |
+\[Live-eviL\]_Blood\+_Ep_.+\[.+\].avi = Blood+ |
+\[Lunar\]_Canvas_2_-_.+_\[.+\]\.avi = Canvas 2 |
\[Shinsen-Subs\]_Da_Capo_Second_Season_-_.+_\[.+\].avi = Da Capo Second Season |
\[ah\]_Eien_no_Aseria_-_Aseria_the_Eternal-_.+_\[.+\].avi = Eien no Aseria |
\[Nanashi\]Eureka_[sS]eve[nN]_-_.+_\[.+\].avi = Eureka Seven |
@@ -17,14 +20,19 @@ |
\[ICE_CREAM\]_Ichigo_Mashimaro_-_.+.mkv = Ichigo Mashimaro |
Ichigo_Mashimaro_-_.+.mkv = Ichigo Mashimaro |
Ichigo_Mashimaro-.+_HDTV_720p_\[ICE-CANDY\].mkv = Ichigo Mashimaro |
+\[manhole\]_Ichigo_Mashimaro_.+.mkv = Ichigo Mashimaro/DVD_Versions |
Kaleido_Star_-_.+_\[Freedom-Rise\]_\[.+\].avi = Kaleido Star |
Kamichu\!-.+_HDTV_720p_\[.+\].mkv = Kamichu |
Kamichu\!-.+_FAKEHDTV_720p_\[.+\]\.mkv = Kamichu |
+\[Triad\]_Magical_Girl_Lyrical_Nanoha_A's_-_.+\.avi = Magical Girl Lyrical Nanoha A's |
\[Froth-Bite\]Mahoraba_Heartful_Days_-_.+\[.+\].avi = Mahoraba Heartful Days |
\[AnCo\]Mahou_Sensei_Negima_-_.+_\[.+\].avi = Mahou Sensei Negima |
+\[Doremi-OTOME\].Mai.Otome..+.avi = Mai Otome |
+\[SS\]_Mai-Otome_-_.+_HQ_\[.+\].avi = Mai Otome |
\[DB\]_Naruto_.+_\[.+\].avi = Naruto |
\[yesy\]_Okusama_wa_Joshikousei_-_.+_\[.+\].avi = Okusama wa Joshikosei |
\[K-F\]_One_Piece_.+_\[.+\].avi = One Piece |
+\[K-F\]_One_Piece_.+_\[.+\].mp4 = One Piece |
\[Oyasumi\]_Pani_Poni_Dash_-_.+_\[.+\].avi = Pani Poni Dash |
\[Oyasumi\]_Pani_Poni_Dash\!_.+_\[.+\].avi = Pani Poni Dash |
\[PF\]_Pani_Poni_Dash_-_.+_\[.+\].avi = Pani Poni Dash |
@@ -34,6 +42,7 @@ |
\[WF\]_School_Rumble_-_.+_\[.+\].avi = School Rumble |
\[AnimeU\]_SHUFFLE\!_.+_\[.+\].avi = Shuffle |
\[AonE_A-Kingdom\]_Sousei_no_Aquarion_-_.+_\[.+\].avi = Sousei no Aquarion |
+\[mahou\]_Sousei_no_Aquarion_-_.+_\[.+\]\.avi = Sousei no Aquarion |
\[Ani-Kraze\]_Speed_Grapher_-_.+_\[.+\].avi = Speed Grapher |
\[Shinsen-Subs\]_Speed_Grapher_.+_\[.+\].avi = Speed Grapher |
\[SS\&Y\]_Stratos4_Advance_-_OVA_.+_\[.+\].avi = Stratos4 Advance |
@@ -40,6 +49,7 @@ |
\[Lunar\]_Suzuka_-_.+_\[.+\].avi = Suzuka |
\[Arienai\]_TIDE-LINE_Blue_-_.+_\[.+\].avi = Tide Line Blue |
\[Hell-Fansubs\]_Tide-Line_Blue_.+\[.+\].avi = Tide Line Blue |
+To_Heart_2_-_.+_\[Shinobu\].avi = To Heart 2 |
\[Conclave-Faith\]_To_Heart_Remember_my_Memories_.+_\[.+\].avi = To Heart - Remember my Memories |
\[Keep-Faith\]_To_Heart_Remember_my_Memories_.+_\[.+\].avi = To Heart - Remember my Memories |
Tokyo_Mew_Mew_-_.+_\[h-b\]\[.+\].avi = Tokyo Mew Mew |
@@ -48,4 +58,4 @@ |
\[A-E\]_Yakitate_Japan_.+_\[.+\].avi = Yakitate Japan |
\[Triad\]_Zettai_Shonen_-_.+.avi = Zettai Shonen |
reno\.911\.3.+\.dsr-.+\.avi = /data/Downloads/Reno 911 |
-family\.guy\.5..\.pdtv\.avi = /data/Downloads/Family Guy/Season 5 |
+family\.guy\.5.+\.avi = /data/Downloads/Family Guy/Season 5 |