# • ▌ ▄ ·. ▄▄▄▄· ▄▄▄ .·▄▄▄▄ ▪ ▄▄▄▄▄ ▄▄▄ ▄▄▄·▄▄▄
# ·██ ▐███▪▐█ ▀█▪ ▀▄.▀·██▪ ██ ██ •██ ▪ ▀▄ █· ▐█ ▄█▀▄ █·▪
# ▐█ ▌▐▌▐█·▐█▀▀█▄ ▐▀▀▪▄▐█· ▐█▌▐█· ▐█.▪ ▄█▀▄ ▐▀▀▄ ██▀·▐▀▀▄ ▄█▀▄
# ██ ██▌▐█▌██▄▪▐█ ▐█▄▄▌██. ██ ▐█▌ ▐█▌·▐█▌.▐▌▐█•█▌ ▐█▪·•▐█•█▌▐█▌.▐▌
# ▀▀ █▪▀▀▀·▀▀▀▀ ▀▀▀ ▀▀▀▀▀• ▀▀▀ ▀▀▀ ▀█▄▀▪.▀ ▀ .▀ .▀ ▀ ▀█▄▀▪
# Magicbane Emulator Project © 2013 - 2022
# www.magicbane.com
#
import glob
import io
import json
import os
import sys
import wave
from tqdm import tqdm
from arcane.ArcImage import *
from arcane.ArcMesh import *
from arcane.ArcMotion import *
from arcane.ArcRender import *
from arcane.ArcSkeleton import *
from arcane.ArcSound import *
from arcane.ArcTile import *
from arcane.ArcVisual import *
from arcane.enums.arc_object import *
from arcane.objects import *
from arcane.objects import ArcObj, ArcDoorObject, ArcStaticObject, ArcStructureObject, ArcAssetStructureObject, \
    ArcDungeonUnitObject, ArcDungeonExitObject, ArcDungeonStairObject, ArcItem, ArcCharacter
from arcane.util import *
from arcane.zones import *
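# Command-line asset editor for the Magicbane client caches: each unpack_*
# function dumps a *.cache archive into editable files (JSON, TGA, or WAV)
# under ARCANE_DUMP/<TYPE>, and each pack_* function rebuilds an archive from
# those files as '<name>.cache.new'.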
DUMP_DIRECTORY = 'ARCANE_DUMP'
WORKING_DIRECTORY = os.path.dirname(__file__)
TARGET_DIRECTORY = os.path.join(WORKING_DIRECTORY, DUMP_DIRECTORY)
COBJECTS_DIRECTORY = os.path.join(TARGET_DIRECTORY, 'COBJECTS')
CZONE_DIRECTORY = os.path.join(TARGET_DIRECTORY, 'CZONE')
SOUND_DIRECTORY = os.path.join(TARGET_DIRECTORY, 'SOUND')
TEXTURE_DIRECTORY = os.path.join(TARGET_DIRECTORY, 'TEXTURE')
TERRAIN_DIRECTORY = os.path.join(TARGET_DIRECTORY, 'TERRAIN')
MESH_DIRECTORY = os.path.join(TARGET_DIRECTORY, 'MESH')
VISUAL_DIRECTORY = os.path.join(TARGET_DIRECTORY, 'VISUAL')
MOTION_DIRECTORY = os.path.join(TARGET_DIRECTORY, 'MOTION')
TILE_DIRECTORY = os.path.join(TARGET_DIRECTORY, 'TILE')
SKELETON_DIRECTORY = os.path.join(TARGET_DIRECTORY, 'SKELETON')
RENDER_DIRECTORY = os.path.join(TARGET_DIRECTORY, 'RENDER')
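# 0x434c4e54 is the ASCII dword b'CLNT': the header magic written ahead of
# every record in CObjects.cache.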
COBJECTS_MAGIC = 0x434c4e54
COBJECTS_MAP = {
    OBJECT_TYPE_LIGHT: ArcObj,
    OBJECT_TYPE_DOOR: ArcDoorObject,
    OBJECT_TYPE_STATIC: ArcStaticObject,
    OBJECT_TYPE_STRUCTURE: ArcStructureObject,
    OBJECT_TYPE_ASSETSTRUCTURE: ArcAssetStructureObject,
    OBJECT_TYPE_DUNGEONUNIT: ArcDungeonUnitObject,
    OBJECT_TYPE_DUNGEONEXIT: ArcDungeonExitObject,
    OBJECT_TYPE_DUNGEONSTAIR: ArcDungeonStairObject,
    OBJECT_TYPE_ITEM: ArcItem,
    OBJECT_TYPE_PLAYER: ArcCharacter,
    OBJECT_TYPE_MOBILE: ArcCharacter,
    OBJECT_TYPE_RUNE: ArcRune,
    OBJECT_TYPE_CONTAINER: ArcContainerObject,
    OBJECT_TYPE_DEED: ArcDeed,
    OBJECT_TYPE_KEY: ArcKey,
    OBJECT_TYPE_ASSET: ArcCityAssetTemplate,
    OBJECT_TYPE_OBJECT: ArcObj,
}
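# The second dword of every CObjects record is its object type; this map
# selects the Arc* class used to parse and serialize that record.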
EDITOR_COMMANDS = [
    'pack',
    'unpack',
    'test'
]
EDITOR_TARGETS = [
    'cobjects',
    'czones',
    'sound',
    'texture',
    'mesh',
    'visual',
    'motion',
    'tile',
    'skeleton',
    'terrain',
    'render',
    'all'
]
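# main() joins a command and a target into a function name such as
# 'unpack_texture' and dispatches to it with exec(); 'all' must stay last so
# the *_all helpers can iterate EDITOR_TARGETS[:-1] without recursing.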
def init_cobjects():
    for obj_type in COBJECTS_MAP:
        directory_path = os.path.join(COBJECTS_DIRECTORY, OBJECT_TYPE_TO_STRING[obj_type])
        os.makedirs(directory_path, exist_ok=True)
def init_czones():
    os.makedirs(CZONE_DIRECTORY, exist_ok=True)
def init_sound():
    os.makedirs(SOUND_DIRECTORY, exist_ok=True)
def init_texture():
    os.makedirs(TEXTURE_DIRECTORY, exist_ok=True)
def init_terrain():
    os.makedirs(TERRAIN_DIRECTORY, exist_ok=True)
def init_mesh():
    os.makedirs(MESH_DIRECTORY, exist_ok=True)
def init_visual():
    os.makedirs(VISUAL_DIRECTORY, exist_ok=True)
def init_motion():
    os.makedirs(MOTION_DIRECTORY, exist_ok=True)
def init_tile():
    os.makedirs(TILE_DIRECTORY, exist_ok=True)
def init_skeleton():
    os.makedirs(SKELETON_DIRECTORY, exist_ok=True)
def init_render():
    os.makedirs(RENDER_DIRECTORY, exist_ok=True)
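# Each asset type gets the same trio of entry points: unpack_* dumps a cache
# to editable files, pack_* rebuilds a '.cache.new' archive from those files,
# and test_* round-trips every record in memory and prints the id and stream
# positions of any record that does not survive byte-for-byte.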
def unpack_cobjects():
    init_cobjects()
    resources = load_cache_file('CObjects.cache')
    with tqdm(total=len(resources)) as pBar:
        for res_id, data in resources:
            in_stream = ResStream(data)
            magic = in_stream.read_dword()
            obj_type = in_stream.read_dword()
            arc_in = COBJECTS_MAP[obj_type]()
            filepath = os.path.join(
                COBJECTS_DIRECTORY,
                OBJECT_TYPE_TO_STRING[obj_type],
                f'{res_id:d}.json'
            )
            arc_in.load_binary(in_stream)
            parsed = arc_in.save_json()
            with open(filepath, 'w') as fp:
                json.dump(parsed, fp, indent=2)
            pBar.update(1)
def pack_cobjects():
    init_cobjects()
    resources = []
    for obj_type in COBJECTS_MAP:
        directory = os.path.join(
            COBJECTS_DIRECTORY,
            OBJECT_TYPE_TO_STRING[obj_type]
        )
        for filepath in glob.glob(os.path.join(directory, '*.json')):
            filename = os.path.basename(filepath)
            res_id = int(filename.split('.json')[0])
            with open(filepath) as fp:
                json_data = json.load(fp)
            out_stream = ResStream()
            out_stream.write_dword(COBJECTS_MAGIC)
            out_stream.write_dword(obj_type)
            arc_in = COBJECTS_MAP[obj_type]()
            arc_in.load_json(json_data)
            arc_in.save_binary(out_stream)
            resources.append([res_id, out_stream.get_bytes()])
    save_cache_file('CObjects.cache.new', resources)
def test_cobjects():
    resources = load_cache_file('CObjects.cache')
    for res_id, data in resources:
        in_stream = ResStream(data)
        out_stream = ResStream()
        magic = in_stream.read_dword()
        o_type = in_stream.read_dword()
        out_stream.write_dword(magic)
        out_stream.write_dword(o_type)
        arc_in = COBJECTS_MAP[o_type]()
        arc_out = COBJECTS_MAP[o_type]()
        arc_in.load_binary(in_stream)
        parsed = arc_in.save_json()
        arc_out.load_json(parsed)
        arc_out.save_binary(out_stream)
        try:
            assert in_stream.get_bytes() == out_stream.get_bytes()
        except AssertionError:
            print(res_id, in_stream.buffer.tell(), out_stream.buffer.tell())
def unpack_czones():
    init_czones()
    resources = load_cache_file('CZone.cache')
    with tqdm(total=len(resources)) as pBar:
        for res_id, data in resources:
            arc_zone = ArcZone()
            in_stream = ResStream(data)
            filepath = os.path.join(
                CZONE_DIRECTORY,
                f'{res_id:d}.json'
            )
            arc_zone.load_binary(in_stream)
            parsed = arc_zone.save_json()
            with open(filepath, 'w') as fp:
                json.dump(parsed, fp, indent=2)
            pBar.update(1)
def pack_czones():
    init_czones()
    resources = []
    directory = CZONE_DIRECTORY
    for filepath in glob.glob(os.path.join(directory, '*.json')):
        filename = os.path.basename(filepath)
        res_id = int(filename.split('.json')[0])
        with open(filepath) as fp:
            json_data = json.load(fp)
        out_stream = ResStream()
        arc_zone = ArcZone()
        arc_zone.load_json(json_data)
        arc_zone.save_binary(out_stream)
        resources.append([res_id, out_stream.get_bytes()])
    save_cache_file('CZone.cache.new', resources)
def test_czones():
    resources = load_cache_file('CZone.cache')
    for res_id, data in resources:
        in_stream = ResStream(data)
        out_stream = ResStream()
        arc_in = ArcZone()
        arc_out = ArcZone()
        arc_in.load_binary(in_stream)
        parsed = arc_in.save_json()
        arc_out.load_json(parsed)
        arc_out.save_binary(out_stream)
        try:
            assert in_stream.get_bytes() == out_stream.get_bytes()
        except AssertionError:
            print(res_id, in_stream.buffer.tell(), out_stream.buffer.tell())
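# Sound resources round-trip through standard RIFF/WAV files via the stdlib
# wave module, so the unpacked output is editable with ordinary audio tools.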
def unpack_sound():
    init_sound()
    resources = load_cache_file('Sound.cache')
    with tqdm(total=len(resources)) as pBar:
        for res_id, data in resources:
            arc_sound = ArcSound()
            in_stream = ResStream(data)
            filepath = os.path.join(
                SOUND_DIRECTORY,
                f'{res_id:d}.wav'
            )
            arc_sound.load_binary(in_stream)
            with open(filepath, 'wb') as fp:
                wave_writer = wave.Wave_write(fp)
                arc_sound.save_wav(wave_writer)
                wave_writer.close()
            pBar.update(1)
def pack_sound():
    init_sound()
    resources = []
    directory = SOUND_DIRECTORY
    for filepath in glob.glob(os.path.join(directory, '*.wav')):
        filename = os.path.basename(filepath)
        res_id = int(filename.split('.wav')[0])
        out_stream = ResStream()
        arc_sound = ArcSound()
        with open(filepath, 'rb') as fp:
            wav_reader = wave.Wave_read(fp)
            arc_sound.load_wav(wav_reader)
            wav_reader.close()
        arc_sound.save_binary(out_stream)
        resources.append([res_id, out_stream.get_bytes()])
    save_cache_file('Sound.cache.new', resources, False)
def test_sound():
    resources = load_cache_file('Sound.cache')
    for res_id, data in resources:
        in_stream = ResStream(data)
        out_stream = ResStream()
        arc_in = ArcSound()
        arc_out = ArcSound()
        arc_in.load_binary(in_stream)
        temp = io.BytesIO()
        writer = wave.Wave_write(temp)
        arc_in.save_wav(writer)
        temp.seek(0, 0)
        reader = wave.Wave_read(temp)
        arc_out.load_wav(reader)
        reader.close()
        arc_out.save_binary(out_stream)
        try:
            assert in_stream.get_bytes() == out_stream.get_bytes()
        except AssertionError:
            print(res_id, in_stream.buffer.tell(), out_stream.buffer.tell())
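# Textures and terrain alpha maps unpack to TGA images; any editor that
# writes the same TGA layout back out can be used before repacking.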
def unpack_texture():
    init_texture()
    print('Decompressing Texture Cache...')
    resources = load_cache_file('Textures.cache')
    print('Writing images')
    with tqdm(total=len(resources)) as pBar:
        for res_id, data in resources:
            filepath = os.path.join(
                TEXTURE_DIRECTORY,
                f'{res_id:d}.tga'
            )
            arc_texture = ArcTexture()
            in_stream = ResStream(data)
            arc_texture.load_binary(in_stream)
            arc_texture.save_img(filepath)
            pBar.update(1)
def pack_texture():
    init_texture()
    resources = []
    directory = TEXTURE_DIRECTORY
    for filepath in glob.glob(os.path.join(directory, '*.tga')):
        filename = os.path.basename(filepath)
        res_id = int(filename.split('.tga')[0])
        arc_texture = ArcTexture()
        out_stream = ResStream()
        arc_texture.load_img(filepath)
        arc_texture.save_binary(out_stream)
        resources.append([res_id, out_stream.get_bytes()])
    save_cache_file('Textures.cache.new', resources)
def test_texture():
    resources = load_cache_file('Textures.cache')
    temp = 'temp.tga'
    for res_id, data in resources:
        in_stream = ResStream(data)
        out_stream = ResStream()
        arc_in = ArcTexture()
        arc_out = ArcTexture()
        arc_in.load_binary(in_stream)
        arc_in.save_img(temp)
        arc_out.load_img(temp)
        arc_out.save_binary(out_stream)
        try:
            assert in_stream.get_bytes() == out_stream.get_bytes()
        except AssertionError:
            print(res_id, in_stream.buffer.tell(), out_stream.buffer.tell())
    try:
        os.unlink(temp)
    except OSError:
        pass
def unpack_terrain():
    init_terrain()
    resources = load_cache_file('TerrainAlpha.cache')
    with tqdm(total=len(resources)) as pBar:
        for res_id, data in resources:
            filepath = os.path.join(
                TERRAIN_DIRECTORY,
                f'{res_id:d}.tga'
            )
            arc_terrain = ArcTerrain()
            in_stream = ResStream(data)
            arc_terrain.load_binary(in_stream)
            arc_terrain.save_img(filepath)
            pBar.update(1)
def pack_terrain():
    init_terrain()
    resources = []
    directory = TERRAIN_DIRECTORY
    for filepath in glob.glob(os.path.join(directory, '*.tga')):
        filename = os.path.basename(filepath)
        res_id = int(filename.split('.tga')[0])
        arc_terrain = ArcTerrain()
        out_stream = ResStream()
        arc_terrain.load_img(filepath)
        arc_terrain.save_binary(out_stream)
        resources.append([res_id, out_stream.get_bytes()])
    save_cache_file('TerrainAlpha.cache.new', resources)
def test_terrain():
    resources = load_cache_file('TerrainAlpha.cache')
    temp = 'temp.tga'
    for res_id, data in resources:
        in_stream = ResStream(data)
        out_stream = ResStream()
        arc_in = ArcTerrain()
        arc_out = ArcTerrain()
        arc_in.load_binary(in_stream)
        arc_in.save_img(temp)
        arc_out.load_img(temp)
        arc_out.save_binary(out_stream)
        try:
            assert in_stream.get_bytes() == out_stream.get_bytes()
        except AssertionError:
            print(res_id, in_stream.buffer.tell(), out_stream.buffer.tell())
            print()
            break  # stop at the first mismatching terrain record
    try:
        os.unlink(temp)
    except OSError:
        pass
def unpack_mesh():
    init_mesh()
    resources = load_cache_file('Mesh.cache')
    with tqdm(total=len(resources)) as pBar:
        for res_id, data in resources:
            arc_mesh = ArcMesh()
            in_stream = ResStream(data)
            filepath = os.path.join(
                MESH_DIRECTORY,
                f'{res_id:d}.json'
            )
            arc_mesh.load_binary(in_stream)
            parsed = arc_mesh.save_json()
            with open(filepath, 'w') as fp:
                json.dump(parsed, fp, indent=2)
            pBar.update(1)
def pack_mesh():
    init_mesh()
    resources = []
    directory = MESH_DIRECTORY
    for filepath in glob.glob(os.path.join(directory, '*.json')):
        filename = os.path.basename(filepath)
        res_id = int(filename.split('.json')[0])
        with open(filepath) as fp:
            json_data = json.load(fp)
        out_stream = ResStream()
        arc_mesh = ArcMesh()
        arc_mesh.load_json(json_data)
        arc_mesh.save_binary(out_stream)
        resources.append([res_id, out_stream.get_bytes()])
    save_cache_file('Mesh.cache.new', resources)
def test_mesh():
    resources = load_cache_file('Mesh.cache')
    for res_id, data in resources:
        in_stream = ResStream(data)
        out_stream = ResStream()
        arc_in = ArcMesh()
        arc_out = ArcMesh()
        arc_in.load_binary(in_stream)
        parsed = arc_in.save_json()
        arc_out.load_json(parsed)
        arc_out.save_binary(out_stream)
        try:
            assert in_stream.get_bytes() == out_stream.get_bytes()
        except AssertionError:
            print(res_id, in_stream.buffer.tell(), out_stream.buffer.tell())
def unpack_visual():
    init_visual()
    resources = load_cache_file('Visual.cache')
    with tqdm(total=len(resources)) as pBar:
        for res_id, data in resources:
            arc_visual = ArcVisual()
            in_stream = ResStream(data)
            filepath = os.path.join(
                VISUAL_DIRECTORY,
                f'{res_id:d}.json'
            )
            arc_visual.load_binary(in_stream)
            parsed = arc_visual.save_json()
            with open(filepath, 'w') as fp:
                json.dump(parsed, fp, indent=2)
            pBar.update(1)
def pack_visual():
    init_visual()
    resources = []
    directory = VISUAL_DIRECTORY
    for filepath in glob.glob(os.path.join(directory, '*.json')):
        filename = os.path.basename(filepath)
        res_id = int(filename.split('.json')[0])
        with open(filepath) as fp:
            json_data = json.load(fp)
        out_stream = ResStream()
        arc_visual = ArcVisual()
        arc_visual.load_json(json_data)
        arc_visual.save_binary(out_stream)
        resources.append([res_id, out_stream.get_bytes()])
    save_cache_file('Visual.cache.new', resources)
def test_visual():
    resources = load_cache_file('Visual.cache')
    for res_id, data in resources:
        in_stream = ResStream(data)
        out_stream = ResStream()
        arc_in = ArcVisual()
        arc_out = ArcVisual()
        arc_in.load_binary(in_stream)
        parsed = arc_in.save_json()
        arc_out.load_json(parsed)
        arc_out.save_binary(out_stream)
        try:
            assert in_stream.get_bytes() == out_stream.get_bytes()
        except AssertionError:
            print(res_id, in_stream.buffer.tell(), out_stream.buffer.tell())
def unpack_motion():
    init_motion()
    resources = load_cache_file('Motion.cache')
    with tqdm(total=len(resources)) as pBar:
        for res_id, data in resources:
            arc_motion = ArcMotion()
            in_stream = ResStream(data)
            filepath = os.path.join(
                MOTION_DIRECTORY,
                f'{res_id:d}.json'
            )
            arc_motion.load_binary(in_stream)
            parsed = arc_motion.save_json()
            with open(filepath, 'w') as fp:
                json.dump(parsed, fp, indent=2)
            pBar.update(1)
def pack_motion():
    init_motion()
    resources = []
    directory = MOTION_DIRECTORY
    for filepath in glob.glob(os.path.join(directory, '*.json')):
        filename = os.path.basename(filepath)
        res_id = int(filename.split('.json')[0])
        with open(filepath) as fp:
            json_data = json.load(fp)
        out_stream = ResStream()
        arc_motion = ArcMotion()
        arc_motion.load_json(json_data)
        arc_motion.save_binary(out_stream)
        resources.append([res_id, out_stream.get_bytes()])
    save_cache_file('Motion.cache.new', resources)
def test_motion():
    resources = load_cache_file('Motion.cache')
    for res_id, data in resources:
        in_stream = ResStream(data)
        out_stream = ResStream()
        arc_in = ArcMotion()
        arc_out = ArcMotion()
        arc_in.load_binary(in_stream)
        parsed = arc_in.save_json()
        arc_out.load_json(parsed)
        arc_out.save_binary(out_stream)
        try:
            assert in_stream.get_bytes() == out_stream.get_bytes()
        except AssertionError:
            print(res_id, in_stream.buffer.tell(), out_stream.buffer.tell())
def unpack_tile():
    init_tile()
    resources = load_cache_file('Tile.cache')
    with tqdm(total=len(resources)) as pBar:
        for res_id, data in resources:
            arc_tile = ArcTileManager()
            in_stream = ResStream(data)
            filepath = os.path.join(
                TILE_DIRECTORY,
                f'{res_id:d}.json'
            )
            arc_tile.load_binary(in_stream)
            parsed = arc_tile.save_json()
            with open(filepath, 'w') as fp:
                json.dump(parsed, fp, indent=2)
            pBar.update(1)
def pack_tile():
    init_tile()
    resources = []
    directory = TILE_DIRECTORY
    for filepath in glob.glob(os.path.join(directory, '*.json')):
        filename = os.path.basename(filepath)
        res_id = int(filename.split('.json')[0])
        with open(filepath) as fp:
            json_data = json.load(fp)
        out_stream = ResStream()
        arc_tile = ArcTileManager()
        arc_tile.load_json(json_data)
        arc_tile.save_binary(out_stream)
        resources.append([res_id, out_stream.get_bytes()])
    save_cache_file('Tile.cache.new', resources)
def test_tile():
    resources = load_cache_file('Tile.cache')
    for res_id, data in resources:
        in_stream = ResStream(data)
        out_stream = ResStream()
        arc_in = ArcTileManager()
        arc_out = ArcTileManager()
        arc_in.load_binary(in_stream)
        parsed = arc_in.save_json()
        arc_out.load_json(parsed)
        arc_out.save_binary(out_stream)
        try:
            assert in_stream.get_bytes() == out_stream.get_bytes()
        except AssertionError:
            print(res_id, in_stream.buffer.tell(), out_stream.buffer.tell())
def unpack_skeleton():
    init_skeleton()
    resources = load_cache_file('Skeleton.cache')
    with tqdm(total=len(resources)) as pBar:
        for res_id, data in resources:
            arc_skeleton = ArcSkeleton()
            in_stream = ResStream(data)
            filepath = os.path.join(
                SKELETON_DIRECTORY,
                f'{res_id:d}.json'
            )
            arc_skeleton.load_binary(in_stream)
            parsed = arc_skeleton.save_json()
            with open(filepath, 'w') as fp:
                json.dump(parsed, fp, indent=2)
            pBar.update(1)
def pack_skeleton():
    init_skeleton()
    resources = []
    directory = SKELETON_DIRECTORY
    for filepath in glob.glob(os.path.join(directory, '*.json')):
        filename = os.path.basename(filepath)
        res_id = int(filename.split('.json')[0])
        with open(filepath) as fp:
            json_data = json.load(fp)
        out_stream = ResStream()
        arc_skeleton = ArcSkeleton()
        arc_skeleton.load_json(json_data)
        arc_skeleton.save_binary(out_stream)
        resources.append([res_id, out_stream.get_bytes()])
    save_cache_file('Skeleton.cache.new', resources)
def test_skeleton():
    resources = load_cache_file('Skeleton.cache')
    for res_id, data in resources:
        in_stream = ResStream(data)
        out_stream = ResStream()
        arc_in = ArcSkeleton()
        arc_out = ArcSkeleton()
        arc_in.load_binary(in_stream)
        parsed = arc_in.save_json()
        arc_out.load_json(parsed)
        arc_out.save_binary(out_stream)
        try:
            assert in_stream.get_bytes() == out_stream.get_bytes()
        except AssertionError:
            print(res_id, in_stream.buffer.tell(), out_stream.buffer.tell())
def unpack_render():
    init_render()
    resources = load_cache_file('Render.cache')
    with tqdm(total=len(resources)) as pBar:
        for res_id, data in resources:
            arc_render = ArcRender()
            in_stream = ResStream(data)
            filepath = os.path.join(
                RENDER_DIRECTORY,
                f'{res_id:d}.json'
            )
            arc_render.load_binary(in_stream)
            parsed = arc_render.save_json()
            with open(filepath, 'w') as fp:
                json.dump(parsed, fp, indent=2)
            pBar.update(1)
def pack_render():
    init_render()
    resources = []
    directory = RENDER_DIRECTORY
    for filepath in glob.glob(os.path.join(directory, '*.json')):
        filename = os.path.basename(filepath)
        res_id = int(filename.split('.json')[0])
        with open(filepath) as fp:
            json_data = json.load(fp)
        out_stream = ResStream()
        arc_render = ArcRender()
        arc_render.load_json(json_data)
        arc_render.save_binary(out_stream)
        resources.append([res_id, out_stream.get_bytes()])
    save_cache_file('Render.cache.new', resources)
def test_render():
    resources = load_cache_file('Render.cache')
    for res_id, data in resources:
        in_stream = ResStream(data)
        out_stream = ResStream()
        arc_in = ArcRender()
        arc_out = ArcRender()
        arc_in.load_binary(in_stream)
        parsed = arc_in.save_json()
        arc_out.load_json(parsed)
        arc_out.save_binary(out_stream)
        try:
            assert in_stream.get_bytes() == out_stream.get_bytes()
        except AssertionError:
            print(res_id, in_stream.buffer.tell(), out_stream.buffer.tell())
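# The *_all helpers walk every concrete target (EDITOR_TARGETS[:-1] excludes
# 'all' itself) and dispatch by building the function name as a string, so a
# missing or unreadable cache only skips that one target.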
def unpack_all():
    for cache_type in EDITOR_TARGETS[:-1]:
        method = 'unpack_' + cache_type + '()'
        print(method)
        try:
            exec(method)
        except Exception:
            print('***Error: Missing or corrupt ' + cache_type + ' cache.')
def pack_all():
    for cache_type in EDITOR_TARGETS[:-1]:
        method = 'pack_' + cache_type + '()'
        print(method)
        try:
            exec(method)
        except Exception:
            print('***Error: Missing or corrupt ' + cache_type + ' cache.')
def test_all():
    for cache_type in EDITOR_TARGETS[:-1]:
        method = 'test_' + cache_type + '()'
        print(method)
        try:
            exec(method)
        except Exception:
            print('***Error: Missing or corrupt ' + cache_type + ' cache.')
def usage():
    print('_____________mbEditorPro_2.0_______________')
    print(' www.magicbane.com | repo.magicbane.com')
    print('')
    print('Commands: ')
    print(*EDITOR_COMMANDS)
    print('Targets: ')
    print(*EDITOR_TARGETS)
    print('')
    print('example: unpack texture')
    print('example: pack texture')
    sys.exit(1)
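# Example session (script filename assumed here; substitute the actual name):
#   python mbeditor.py unpack texture   # dump Textures.cache to ARCANE_DUMP/TEXTURE/*.tga
#   python mbeditor.py pack texture     # rebuild Textures.cache.new from those TGA files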
def main():
    if not sys.argv[2:]:
        usage()
    if sys.argv[1] not in EDITOR_COMMANDS:
        usage()
    if sys.argv[2] not in EDITOR_TARGETS:
        usage()
    method = '_'.join(sys.argv[1:3]) + '()'
    print(method)
    exec(method)
if __name__ == '__main__':
    main()