Commit

Upgrade to Python 3, fix tpwrules#3
roccodev committed Jun 26, 2023
1 parent 84ea025 commit 6e98321
Showing 2 changed files with 30 additions and 30 deletions.
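
The diff below is the mechanical part of a Python 2 to 3 port: print statements become calls to the print() function (with end=' ' standing in for the old trailing comma), "except OSError, e" becomes "except OSError as e", and data read from files opened in binary mode is compared against bytes literals or decoded before use. A condensed Python 3 illustration of those three patterns — not an excerpt from ARCTool.py; the file and directory names here are made up:

    import os

    # print statement -> print() function; end=' ' replaces the old trailing comma
    print("Dumping", "example.bin", " 0%", end=' ')
    print("")

    # "except OSError, e:" -> "except OSError as e:"
    dirname = "extracted"  # hypothetical directory name
    try:
        os.mkdir(dirname)
    except OSError as e:
        print("WARNING: Directory", dirname, "already exists!")

    # binary-mode reads return bytes in Python 3, so magic values are bytes literals
    with open("example.arc", "rb") as f:  # hypothetical input file
        if f.read(4) == b"Yaz0":
            print("Yaz0 compressed archive")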
56 changes: 28 additions & 28 deletions ARCTool.py
@@ -7,16 +7,16 @@ def openOutput(f):
try:
return open(f, "wb")
except IOError:
-print "Output file could not be opened!"
+print("Output file could not be opened!")
exit()

def makedir(dirname):
global quiet
try:
os.mkdir(dirname)
-except OSError, e:
+except OSError as e:
if quiet == False:
-print "WARNING: Directory", dirname, "already exists!"
+print("WARNING: Directory", dirname, "already exists!")

class rarc_header_class:
_structformat = ">I4xI16xI8xI4xI8x"
@@ -106,15 +106,15 @@ def unyaz(input, output):
#shamelessly stolen^W borrowed from yagcd
data_size, = struct.unpack_from(">I", input.read(4)) #uncompressed data size
if list:
-print "Uncompressed size:", data_size, "bytes"
+print("Uncompressed size:", data_size, "bytes")
return
t = input.read(8) #dummy
srcplace = 0
dstplace = 0
bitsleft = 0
currbyte = 0
if quiet == False:
-print "Reading input"
+print("Reading input")
src = input.read()
dst = [" "]*data_size
#print len(dst), len(src)
@@ -160,7 +160,7 @@ def unyaz(input, output):
sys.stdout.flush()
percent = calcpercent
if quiet == False:
-print "\nWriting output"
+print("\nWriting output")
output.write("".join(dst))

def getNode(index, f, h):
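
The unyaz hunks above only show the header handling and the final write; the decode loop between them is collapsed in this view. For reference, Yaz0 is the usual group-header scheme: each bit of a header byte selects either a literal byte or a back-reference (distance/length pair) into the output produced so far. A minimal standalone Python 3 sketch of that documented scheme, written from the format description rather than taken from ARCTool.py (the function name yaz0_decompress is mine):

    import struct

    def yaz0_decompress(data):
        # Header: 4-byte "Yaz0" magic, big-endian uncompressed size, 8 reserved bytes
        assert data[:4] == b"Yaz0"
        out_size, = struct.unpack_from(">I", data, 4)
        src = 16
        out = bytearray()
        group = 0   # current group-header byte
        bits = 0    # header bits not yet consumed
        while len(out) < out_size:
            if bits == 0:
                group = data[src]; src += 1
                bits = 8
            if group & 0x80:
                # bit set: copy one literal byte
                out.append(data[src]); src += 1
            else:
                # bit clear: back-reference into already-decoded output
                b1, b2 = data[src], data[src + 1]; src += 2
                dist = (((b1 & 0x0F) << 8) | b2) + 1
                count = b1 >> 4
                if count == 0:
                    count = data[src] + 0x12; src += 1   # long run: extra length byte
                else:
                    count += 2
                for _ in range(count):
                    out.append(out[-dist])
            group = (group << 1) & 0xFF
            bits -= 1
        return bytes(out)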
@@ -194,11 +194,11 @@ def processNode(node, h, f):
nodename = getString(node.filenameOffset + h.stringTableOffset + 0x20, f)
if list == False:
if quiet == False:
-print "Processing node", nodename
+print("Processing node", nodename)
makedir(nodename)
os.chdir(nodename)
else:
-print (" "*depthnum) + nodename + "/"
+print((" "*depthnum) + nodename + "/")
depthnum += 1
for i in range(0, node.numFileEntries):
currfile = getFileEntry(node.firstFileEntryOffset + i, h, f)
@@ -208,10 +208,10 @@ def processNode(node, h, f):
processNode(getNode(currfile.dataOffset, f, h), h, f)
else:
if list:
-print (" "*depthnum) + currname, "-", currfile.dataSize
+print((" "*depthnum) + currname, "-", currfile.dataSize)
continue
if quiet == False:
-print "Dumping", nodename + "/" + currname, " 0%",
+print("Dumping", nodename + "/" + currname, " 0%", end=' ')
try:
percent = 0
dest = open(currname, "wb")
@@ -233,10 +233,10 @@ def processNode(node, h, f):
if percent > 9:
sys.stdout.write("\b")
sys.stdout.write("\b\b100%")
-print ""
+print("")
dest.close()
except IOError:
-print "OMG SOMETHING WENT WRONG!!!!1111!!!!!"
+print("OMG SOMETHING WENT WRONG!!!!1111!!!!!")
exit()
if list == False:
os.chdir("..")
@@ -260,9 +260,9 @@ def get_u8_name(i, g, node):
i.seek(g.string_table + node.name_offset-1)
while True:
t = i.read(1)
-if t == "\0":
+if t == b"\0":
break
-retval.append(t)
+retval.append(t.decode("utf8"))
return "".join(retval)

def get_u8_node(i, g, index):
@@ -295,14 +295,14 @@ def unu8(i, o):
name = get_u8_name(i, g, node)
if list:
if node.type == 0:
-print (" "*depthnum) + name, "-", node.fsize, "bytes"
+print((" "*depthnum) + name, "-", node.fsize, "bytes")
elif node.type == 0x0100:
-print (" "*depthnum) + name + "/"
+print((" "*depthnum) + name + "/")
depthnum += 1
depth.append(node.fsize)
elif node.type == 0:
if quiet == False:
-print "Dumping file node", name, " 0%",
+print("Dumping file node", name, " 0%", end=' ')
i.seek(node.data_offset)
try:
dest = open(name, "wb")
@@ -317,19 +317,19 @@ def unu8(i, o):
sys.stdout.write("\b\b" + str(calcpercent) + "%")
sys.stdout.flush()
percent = calcpercent
-dest.write(f.read(size))
+dest.write(i.read(size))
size -= 1024
if quiet == False:
if percent > 9:
sys.stdout.write("\b")
sys.stdout.write("\b\b100%\n")
dest.close()
except IOError:
-print "OMG SOMETHING WENT WRONG!!!!!!!111111111!!!!!!!!"
+print("OMG SOMETHING WENT WRONG!!!!!!!111111111!!!!!!!!")
exit()
elif node.type == 0x0100:
if quiet == False:
-print "Processing node", name
+print("Processing node", name)
makedir(name)
os.chdir(name)
depth.append(node.fsize)
@@ -373,23 +373,23 @@ def main():
try:
f = open(file, "rb")
except IOError:
-print "Input file could not be opened!"
+print("Input file could not be opened!")
exit()
type = f.read(4)
-if type == "Yaz0":
+if type == b"Yaz0":
if quiet == False:
-print "Yaz0 compressed archive"
+print("Yaz0 compressed archive")
unyaz(f, openOutput(of))
-elif type == "RARC":
+elif type == b"RARC":
if quiet == False:
-print "RARC archive"
+print("RARC archive")
unrarc(f, of)
-elif type == "U\xAA8-":
+elif type == b"U\xAA8-":
if quiet == False:
-print "U8 archive"
+print("U8 archive")
unu8(f, of)
else:
-print "Unknown archive type!"
+print("Unknown archive type!")
exit()
f.close()

4 changes: 2 additions & 2 deletions readme.txt
@@ -3,7 +3,7 @@ By tpw_rules

ARCTool is a Python script that can extract the multitude of different formats found in .arc game files. It has support for Yaz0, U8, and RARC, which are all that I have found.
The inspiration for this tool came about when I wrote a RARC extractor and realized that all the files I wanted to extract were U8, but they still had the arc extension.
-I have tested it on Mac OS X 10.5.8 with Python 2.5 and 2.6. It should work on other platforms (Windows and Linux) provided Python is properly installed.
+It should work on all platforms provided Python is properly installed.
I have confirmed Yaz0 and U8 support to be 100% working.
If you have any trouble with it, message me on IRC (nick is tpw_rules) or leave a note on the talk page.
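
As the readme says, the three supported containers are told apart purely by their four-byte magic, which is what the updated main() above now checks with bytes literals. A small sketch of that dispatch for reference — the helper name detect_arc_format is mine; the magic values are the ones in the diff:

    def detect_arc_format(path):
        # Map the 4-byte magic at the start of the file to the format name ARCTool uses
        with open(path, "rb") as f:
            magic = f.read(4)
        if magic == b"Yaz0":
            return "Yaz0"   # Yaz0-compressed archive
        if magic == b"RARC":
            return "RARC"
        if magic == b"U\xAA8-":
            return "U8"
        return None         # unknown archive type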

@@ -21,7 +21,7 @@ Options:
archive (ignores -q)

Requirements:
-Python 2.5 or higher (not Python 3.x however). Get Python for your OS at http://python.org/download/
+Python 3.x. Get Python for your OS at http://python.org/download/

THANKS TO

