The script now checks whether the skin images have the same pixel sizes as the unit images themselves.
(It also handles the "OVERLAPPERS" folder — by ignoring it: those files are not size-checked.)
Code: Select all
from PIL import Image
from os import listdir
from os.path import isfile, join
import json
import os
# Fallback app identity and version; both are overwritten from the
# Android manifest (fname_manif) further below when it exists.
app_prefix = "AOS"
app_ver = "101"
def get_num_pixels_string(filepath):
    """Return the pixel dimensions of the image at *filepath* as "[WxH]".

    Used to compare unit images against their skin-pack counterparts:
    two files match only when this string is identical for both.
    """
    # Use a context manager: PIL's Image.open is lazy and would otherwise
    # keep the underlying file handle open after we grabbed the size.
    with Image.open(filepath) as img:
        width, height = img.size
    # NOTE(review): an earlier plan special-cased "OVERLAPPERS" paths with a
    # forced height here; that was wrong and overlappers are now excluded
    # entirely by the callers instead.
    return "[" + str(width) + "x" + str(height) + "]"
# Separator used when extracting a bare file name from an image path.
my_os_sep="/"
# Input directory with the effect JSON definitions.
path_work="effects/"
# Generated: effect id/name/filename registry.
fname_output = "id/gen_effect_id.txt"
# Input directory with the unit JSON definitions.
path_units="units/"
# Input directory with the cosmetic (skin pack) images.
path_skin_packs="gfx/cosm_units/"
# Root of all game graphics (unit image paths are relative to this).
path_gfx="gfx/"
# Generated output files.
fname_unit_id_filename_rel = "id/gen_unit_id.txt"
fname_unit_categories = "id/gen_unit_categories.txt"
fname_unit_builders = "id/gen_unit_builders.json"
fname_unit_directories = "id/gen_dirs.txt"
# Input: shop configuration to validate for duplicate item ids.
fname_shopitems = "others/shopconfig.json"
# Input: Android manifest; output: detected app prefix and version.
fname_manif = "id/gen_manif.txt"
fname_app = "id/gen_app.txt"
# JSON keys that must appear at most once in each unit file. json.load
# silently keeps only the LAST duplicate, so duplicates are detected by
# scanning the raw file text for these quoted key names.
never_duplicate_in_unit_json = [
"unitTypeName","uiDefinition",
"isTechnology","costTurn","hpMax","power","rangeAttack","rangeAttackMin","armorPierce","armorNormal",
"sight","unitSizeCol","unitSizeRow","isUnderConstruction","isFactory","isWalkableThrough",
"powerAccuracyPenalty","powerRange","powerRangePenaltyPercent","attackTypeBurningRock","attackTypeArrow",
"rangeWalk","trnWalkTerrain","convertRange","convertResistance","convertPossibility",
"rangeHeal","healRate","rangeMend","mendRate","canBuild","bonusMendingWhenConstructing","bonusMending",
"bonusHealing","carryCapacity","canBeCarried","isCarriedUnitSurvivesIfIDie",
"isCarrierCanCarryUnderConstruction","isCarrierHidesCarriedUnits","isCarrierLetCarriedUnitsToShoot",
"isCarrierRemovesTerrainDrawbacks","maxActionCount","canOccupyBuilding",
"trnEffectAffectTurnsLeft","trnWeaponEffects","trnTransformTo2","trnTransformOnDie","trnRaces",
"trnCategories","trnSpecUnitActions","trnBuilders","trnBonusList","trnBestAgainstMeWater","trnBestAgainstMeGround"
]
# Same as never_duplicate_in_unit_json, but for the effect JSON files.
never_duplicate_in_effect_json = [
"effectNameString","effectDefID","effectDefIDRelated",
"trnActionUnitID","trnActionUnitIDForCaster",
"trnIndicatorTextureID","trnButtonTextureID","trnProjectileSpriteID","trnActionTextureID","trnSpotAnimationSpriteID",
"indicatorTextureID","buttonTextureID","projectileSpriteID","actionTextureID","spotAnimationSpriteID",
"chancePercent","lastsTurnNr","effectBehaviour","effectTargets","trnSystemSpecialEffectID","auraRadius",
"isOneTimeSpell","castRangeModifier","cooldownTurnNr",
"specCostNr","specCost","priority"
]
print("Reading App info")
if os.path.exists(fname_manif):
    # Detect which game this asset tree belongs to (app prefix) and its
    # versionCode by scanning the Android manifest, then persist both.
    with open(fname_manif, 'r') as f_manifest:
        app_lines = f_manifest.readlines()
    for app_line in app_lines:
        if "android:versionCode=" in app_line:
            app_ver = app_line.replace("android:versionCode=", '')
            app_ver = app_ver.replace("\"", '')
            app_ver = app_ver.replace(" ", '')
            # Fix: also drop the trailing newline so the version does not
            # carry a line break into the generated id/gen_app.txt file.
            app_ver = app_ver.strip()
        if "package=" in app_line:
            if "com.zts.ageofmodernwars" in app_line:
                app_prefix = "AOMW"
            elif "com.zts.ageofstrategy" in app_line:
                app_prefix = "AOS"
            elif "com.zts.ageoffantasy" in app_line:
                app_prefix = "AOF"
            elif "com.zts.ageofgalaxy" in app_line:
                app_prefix = "AOG"
            elif "com.zts.ageofworldwars" in app_line:
                app_prefix = "AOW"
            elif "com.zts.ageofalder" in app_line:
                app_prefix = "AOA"
            else:
                app_prefix = "UNKNOWN_PREFIX"
    # Write the result once parsing is done (with-block closes the file,
    # which the original code never did).
    with open(fname_app, 'w') as f_app:
        f_app.write(app_prefix + ":" + app_ver)
    print("Reading App info:" + app_prefix + ":" + app_ver)
else:
    print("ERROR " + fname_manif + " not exists")
    exit()
print("Effects start")
# Accumulators for every validation problem found while scanning.
collected_errors = ""
collected_errors_of_duplicated_effect_TAGs = ""
# Comma-delimited registries (sentinel "," at both ends) that allow
# fast ","+value+"," membership tests for names and ids.
used_effect_names = ","
used_effect_ids = ","
used_effect_id_max = 0
onlyfiles = [f for f in listdir(path_work) if isfile(join(path_work, f)) and f.endswith(".json")]
cnt = 0
total = len(onlyfiles)  # renamed from `sum` to stop shadowing the builtin
f_workfile = open(fname_output, 'w')
for filename in onlyfiles:
    # First pass on the raw text: json.load silently keeps only the last
    # of duplicated keys, so duplicates must be detected textually.
    with open(path_work + filename) as f_dupl:
        file_as_text = f_dupl.read()
    for never_duplicate_this in never_duplicate_in_effect_json:
        if file_as_text.count('"' + never_duplicate_this + '"') > 1:
            collected_errors_of_duplicated_effect_TAGs = collected_errors_of_duplicated_effect_TAGs + "!! Duplicate EFFECT TAG:" + never_duplicate_this + " in:" + filename + "\n"
    # Second pass: parse the JSON and validate ids/names.
    with open(path_work + filename) as f:
        cnt = cnt + 1
        print(str(cnt) + "/" + str(total) + " effect processing:" + filename)
        data = json.load(f)
    effect_id = str(data['effectDefID'])
    effect_name_string = data['effectNameString']
    if 'effectDefIDRelated' in data and data['effectDefIDRelated'] is not None and data['effectDefIDRelated'] != 0:
        collected_errors = collected_errors + "ERROR effectDefIDRelated must not be used!:" + effect_name_string + "\n"
    if "," + effect_name_string + "," in used_effect_names:
        collected_errors = collected_errors + "duplicate effect name:" + effect_name_string + "\n"
    else:
        used_effect_names = used_effect_names + effect_name_string + ","
    if "," + effect_id + "," in used_effect_ids:
        collected_errors = collected_errors + "duplicate effect id:" + effect_id + "\n"
    else:
        used_effect_ids = used_effect_ids + effect_id + ","
    if used_effect_id_max < int(effect_id):
        used_effect_id_max = int(effect_id)
    f_workfile.write(effect_id + "," + effect_name_string + "," + filename + ";" + "\n")
# Fix: flush/close the id registry before announcing it as written.
f_workfile.close()
print("\n\n")
print(fname_output + " written\n")
print("Cosmetic pack checks, caching all filenames\n\n")
# Registries of every skin-pack file name (and name+"[WxH]" size) found
# under path_skin_packs. Unit processing later removes each matched
# entry, so anything still left at the end is reported as a missing or
# wrongly-sized skin file.
skin_filenames = ","
skin_filenames_n_sizes = ","
for root, dirs, files in os.walk(path_skin_packs):
    # Skip files directly in the top-level folder; only pack sub-folders count.
    if path_skin_packs != root:
        for f in files:
            if ".meta" not in f:
                skin_filenames = skin_filenames + f + ","
                filepath = os.path.join(root, f)
                # OVERLAPPERS images intentionally differ in size, so
                # they are excluded from the size comparison.
                if "OVERLAPPERS" not in filepath:
                    skin_filenames_n_sizes = skin_filenames_n_sizes + f + get_num_pixels_string(filepath) + ","
print("Units start")
onlyfiles = [f for f in listdir(path_units) if isfile(join(path_units, f)) and f.endswith(".json")]
# Output files: unit registry, category map, and a builders JSON document.
f_unit_id_filename_rel = open(fname_unit_id_filename_rel, 'w')
f_unit_categories = open(fname_unit_categories, 'w')
f_unit_builders = open(fname_unit_builders, 'w')
f_unit_builders.write("{\"list\":[" + "\n")
# Comma-delimited registries (sentinel "," at both ends) for fast
# ","+value+"," membership tests.
used_ids=","
used_id_max=0
used_unit_type_names=","
# Effects referenced by units but known to have no JSON definition.
collected_non_jsoned=","
collected_errors_of_duplicatedTAGs=""
cnt=0
# NOTE(review): `sum` shadows the builtin of the same name.
sum=len(onlyfiles)
# Hard-coded allow-list of legacy effects that are defined in code, not JSON.
known_non_jsoneds=",EFFECT_INSTANT_SWING_AROUND_EFFECT,EFFECT_ENCHANT_VANISHING,EFFECT_FIRE_ARROWS,EFFECT_FIRE_ARROWS_INSTANT,EFFECT_FIRE_ARROWS_ON_EVERYONE,EFFECT_ENCHANT_VANISHING_SILENT,EFFECT_ENCHANT_POISONED_WEAPON,EFFECT_TRAMPLE,EFFECT_ENCHANT_FIRE_WEAPON,EFFECT_ENCHANT_LIGHTNING_WEAPON,EFFECT_INSTANT_THUNDER_STRIKE,EFFECT_INSTANT_FIREBALL_UNDEAD,EFFECT_INSTANT_FIREBALL_EFFECT,EFFECT_INSTANT_THUNDER_STORM,EFFECT_ENCHANTMENT_CURSED,EFFECT_INSTANT_DRAGON_FIRE,EFFECT_INSTANT_DRAGON_BREATH,EFFECT_INSTANT_BANSHEE_SCREAM,EFFECT_ENCHANT_FEAR,EFFECT_IMMEDIATE_CONVERSION,EFFECT_INSTANT_SACRI_HP_DAMAGE,EFFECT_SLOWING,EFFECT_SLOWING_1,EFFECT_INSTANT_TROLL_ROT,EFFECT_DISARMOR,EFFECT_INSTANT_PAIN_DAMAGER,EFFECT_INSTANT_CASTER_AND_TARGET_DIES,EFFECT_INSTANT_SUMMON_SKELETON,EFFECT_ANIMATE_DEAD,EFFECT_ANIMATE_DEAD_CLOSE,EFFECT_INSTANT_INCREMENT_VANISHING,EFFECT_INSTANT_MAGIC_ARROW,EFFECT_AURA_LIFELINK,EFFECT_ENCHANT_VISION,EFFECT_AURA_VISION_ULIMITED,EFFECT_VISION,EFFECT_ENCHANT_STRENGTHEN,EFFECT_ENCHANT_STRENGTHEN_MYSELF,EFFECT_INSTANT_TRAMPLE,EFFECT_INSTANT_HEAL_3,EFFECT_INSTANT_EAT_CORPSE_TO_HEAL,EFFECT_STRENGTH_1ATTACK,EFFECT_STRENGTH_2ATTACK,EFFECT_STRENGTH_3ATTACK,EFFECT_AURA_STRENGTH_1ATTACK_ULIMITED,EFFECT_AURA_STRENGTH_2ATTACK_ULIMITED,EFFECT_AURA_STRENGTH_3ATTACK_ULIMITED,EFFECT_CAST_AURA_STRENGTH_1ATTACK,EFFECT_AURA_STRENGTH_1ATTACK,EFFECT_CAST_FIRE_WEAPON,EFFECT_CAST_LIGHTNING_WEAPON,EFFECT_ENCHANT_FORESTWALK,"
# Fix: track whether any builders entry has been written so the JSON
# separator commas can be emitted correctly (see comment in the loop).
builders_entry_written = False
for filename in onlyfiles:
    # (removed a stray no-op bare `never_duplicate_in_unit_json` expression
    # that was here in the original)
    # Textual duplicate-key scan: json.load keeps only the last duplicate,
    # so duplicated tags must be caught on the raw file text.
    with open(path_units + filename) as f_dupl:
        file_as_text = f_dupl.read()
    for never_duplicate_this in never_duplicate_in_unit_json:
        if file_as_text.count('"' + never_duplicate_this + '"') > 1:
            collected_errors_of_duplicatedTAGs = collected_errors_of_duplicatedTAGs + "!! Duplicate UNIT TAG:" + never_duplicate_this + " in:" + filename + "\n"
    with open(path_units + filename) as f:
        cnt = cnt + 1
        print(str(cnt) + "/" + str(sum) + " unit processing:" + filename)
        data = json.load(f)
    # Per-unit fields gathered for the registry line written below.
    specs = ""
    races = ""
    is_fact = "N"
    is_tech = "N"
    default_image = ""
    default_image_imgcolumns = "1"
    unit_cost = "0"
    unit_typeid = str(data['unitType'])
    unit_typename = data['unitTypeName']
    if 'unitStatSheet' in data and 'unit' in data['unitStatSheet']:
        unit_stats = data['unitStatSheet']['unit']
        # Lists are flattened to bare comma-separated strings for the registry.
        if 'trnRaces' in unit_stats and unit_stats['trnRaces'] is not None:
            races = str(unit_stats['trnRaces']).replace("'", "").replace(" ", "").replace("[", "").replace("]", "")
        if 'trnSpecUnitActions' in unit_stats and unit_stats['trnSpecUnitActions'] is not None:
            specs = str(unit_stats['trnSpecUnitActions']).replace("'", "").replace(" ", "").replace("[", "").replace("]", "")
        if 'trnWeaponEffects' in unit_stats:
            we = unit_stats['trnWeaponEffects']
            if we is not None:
                for weaponeffect in we:
                    if 'effOptions' in weaponeffect and weaponeffect['effOptions'] is not None:
                        effects = str(weaponeffect['effOptions']).replace("'", "").replace(" ", "").replace("[", "").replace("]", "")
                        effectssplit = effects.split(sep=",")
                        if 'effDefault' not in weaponeffect:
                            collected_errors = collected_errors + ".........................MISSING trnWeaponEffects/effDefault in:" + unit_typename + "\n"
                        elif weaponeffect['effDefault'] is None:
                            collected_errors = collected_errors + ".........................EMPTY trnWeaponEffects/effDefault in:" + unit_typename + "\n"
                        if ('effDefault' in weaponeffect) and (weaponeffect['effDefault'] is not None):
                            effect_default_option = str(weaponeffect['effDefault']).replace("'", "").replace(" ", "").replace("[", "").replace("]", "")
                            effectssplit.append(effect_default_option)
                        # Every referenced effect must either have a JSON
                        # definition or be on the legacy allow-list.
                        for effectitem in effectssplit:
                            if "," + effectitem + "," not in used_effect_names:
                                if "," + effectitem + "," in known_non_jsoneds:
                                    if "," + effectitem + "," not in collected_non_jsoned:
                                        collected_non_jsoned = collected_non_jsoned + effectitem + ","
                                else:
                                    collected_errors = collected_errors + "MISSING effect definition:" + effectitem + " in:" + unit_typename + "\n"
        if 'isTechnology' in unit_stats and unit_stats['isTechnology'] == True:
            is_tech = "Y"
        if 'isFactory' in unit_stats and unit_stats['isFactory'] == True:
            is_fact = "Y"
        if 'costTurn' in unit_stats:
            unit_cost = str(unit_stats['costTurn'])
    if 'uiDefinition' in data and 'visuals' in data['uiDefinition']:
        visuals = data['uiDefinition']['visuals']
        if len(visuals) > 0 and 'imgName' in visuals[0]:
            default_image = visuals[0]['imgName']
            if visuals[0]['type'] != "DEFAULT":
                collected_errors = collected_errors + "ERROR: 0th visual is not default:" + filename + "\n"
            # Strip the directory part to get the bare file name, then tick
            # this image off the skin-pack registries; whatever remains in
            # them at the end is reported as missing/mismatched.
            image_only_file_name = my_os_sep + default_image
            image_only_file_name = image_only_file_name[image_only_file_name.rfind(my_os_sep) + 1:]
            if "," + image_only_file_name + "," in skin_filenames:
                skin_filenames = skin_filenames.replace(image_only_file_name + ",", '')
            filewithsize = image_only_file_name + get_num_pixels_string(path_gfx + default_image)
            if ("," + filewithsize + ",") in skin_filenames_n_sizes:
                skin_filenames_n_sizes = skin_filenames_n_sizes.replace(filewithsize + ",", '')
            default_image_imgcolumns = str(visuals[0]['imgColumns'])
        if len(visuals) > 1 and 'imgName' in visuals[1]:
            # Same matching for the secondary visual image.
            default_image1 = visuals[1]['imgName']
            image_only_file_name = my_os_sep + default_image1
            image_only_file_name = image_only_file_name[image_only_file_name.rfind(my_os_sep) + 1:]
            if "," + image_only_file_name + "," in skin_filenames:
                skin_filenames = skin_filenames.replace(image_only_file_name + ",", '')
            filewithsize = image_only_file_name + get_num_pixels_string(path_gfx + default_image1)
            if ("," + filewithsize + ",") in skin_filenames_n_sizes:
                skin_filenames_n_sizes = skin_filenames_n_sizes.replace(filewithsize + ",", '')
    if "," + unit_typeid + "," in used_ids:
        collected_errors = collected_errors + "duplicate id:" + unit_typeid + "\n"
    else:
        used_ids = used_ids + unit_typeid + ","
    if used_id_max < int(unit_typeid):
        used_id_max = int(unit_typeid)
    if "," + unit_typename + "," in used_unit_type_names:
        collected_errors = collected_errors + "duplicate type name:" + unit_typename + "\n"
    else:
        used_unit_type_names = used_unit_type_names + unit_typename + ","
    f_unit_id_filename_rel.write( unit_typeid + "|" + unit_typename + "|" + filename + "|" + unit_cost + "|" + default_image + "|" + default_image_imgcolumns + "|" + specs + "|" + races + "|" + is_tech + "|" + is_fact + ";" + "\n")
    if 'unitStatSheet' in data and 'unit' in data['unitStatSheet']:
        unit_stats = data['unitStatSheet']['unit']
        if 'trnCategories' in unit_stats and unit_stats['trnCategories'] is not None:
            if str(unit_stats['trnCategories']) != '[]':
                f_unit_categories.write(data['unitTypeName'] + ":" + str(unit_stats['trnCategories']) + ";" + "\n")
        if 'trnBuilders' in unit_stats and unit_stats['trnBuilders'] is not None:
            if str(unit_stats['trnBuilders']) != '[]' and str(unit_stats['trnBuilders']) != '{}':
                # Fix: emit the separating comma BEFORE every entry after
                # the first. The original decided the separator by testing
                # whether the current file was the last one in the list,
                # which produced a trailing comma (invalid JSON) whenever
                # the last unit file had no builders entry.
                if builders_entry_written:
                    f_unit_builders.write("," + "\n")
                f_unit_builders.write("{\"unitIdString\":" + "\"" + data['unitTypeName'] + "\"," + "\"trnBuilders\":" + str(unit_stats['trnBuilders']).replace("'", "\"") + "}")
                builders_entry_written = True
if builders_entry_written:
    f_unit_builders.write("\n")
f_unit_builders.write("]}" + "\n")
# Fix: close the output files so they are flushed before being reported.
f_unit_builders.close()
f_unit_categories.close()
f_unit_id_filename_rel.close()
# Anything longer than the bare sentinel commas means some unit image was
# never matched by a skin-pack file of the same name (or same name+size).
if len(skin_filenames) > 3:
    collected_errors = collected_errors + "MISSING SKINPACK files!:" + skin_filenames + "\n"
if len(skin_filenames_n_sizes) > 3:
    collected_errors = collected_errors + "WRONG SKINPACK file sizes!:" + skin_filenames_n_sizes + "\n"
collected_errors = collected_errors + collected_errors_of_duplicated_effect_TAGs + collected_errors_of_duplicatedTAGs
print("Directory tree start")
# Dump the whole directory tree (directories and non-.meta files) into
# the generated listing; the with-block closes the file (the original
# left it open).
with open(fname_unit_directories, 'w') as f_unit_directories:
    startpath = "."
    for root, dirs, files in os.walk(startpath):
        # Cosmetic/wallpaper folders get a section marker so consumers
        # can split the listing per pack.
        if "cosm_units" in root or "wallpaper" in root or "cosmetics" in root:
            f_unit_directories.write("DIRBREAK:" + "\n")
        f_unit_directories.write("DIR:" + root + "\n")
        for d in dirs:
            f_unit_directories.write(d + "\n")
        for f in files:
            if ".meta" not in f:
                f_unit_directories.write(f + "\n")
print("Shopitems start")
# Validate the shop configuration: every shopItemId must be unique.
used_shopids = ","
used_shopid_max = 0
with open(fname_shopitems) as f:
    data = json.load(f)
for shopitem in data['shopConfigItems']:
    currid = str(shopitem['shopItemId'])
    if used_shopid_max < int(currid):
        used_shopid_max = int(currid)
    if "," + currid + "," not in used_shopids:
        used_shopids = used_shopids + currid + ","
    else:
        collected_errors = collected_errors + "DUPLICATED SHOP_ID:" + currid + "\n"
# Final summary: which files were generated, the highest ids seen, and
# every collected validation error.
print("\n\n")
print(fname_unit_directories + " written\n")
print(fname_unit_id_filename_rel + " written\n")
print(fname_unit_categories + " written\n")
print(fname_unit_builders + " written\n")
print("Last Shop id:" + str(used_shopid_max))
print("Last effect id:" + str(used_effect_id_max))
print("Last unit id:" + str(used_id_max) + "\n(warning! in many alternatives the biggest ID-s are spec ids - so not the real last one among units...)\n")
if collected_non_jsoned != ",":
    # Subtract 1 because the registry starts with a sentinel comma.
    print("Error " + str(collected_non_jsoned.count(",") - 1) + " NON JSONED EFFECTS:\n" + collected_non_jsoned + "\n")
print("Errors:\n" + collected_errors)
# Keep the console window open so the results can be read before exit.
input("Press Enter to continue...")
(On PC, the latest Python setup additionally requires the Pillow (PIL) package, since the script now performs image pixel-size checking.)