import os
import shutil
import hashlib

ROOT_DIR = os.getcwd()
# Extensions (lowercase) treated as images to be consolidated.
IMAGE_EXTS = ('.png', '.jpg', '.jpeg', '.gif', '.bmp', '.svg')
# Direct children of the root whose name starts with this prefix are themes.
THEME_PREFIX = "documentacao "


def get_file_hash(filepath):
    """Return the MD5 hex digest of *filepath*, reading in 4 KiB chunks."""
    hash_md5 = hashlib.md5()
    with open(filepath, "rb") as f:
        for chunk in iter(lambda: f.read(4096), b""):
            hash_md5.update(chunk)
    return hash_md5.hexdigest()


def _resolve_collision(src_path, dest_path, assets_dir, file):
    """Resolve a name collision between *src_path* and the existing *dest_path*.

    Identical content: delete the source duplicate. Different content: move the
    source to a ``<base>_unique_<hash8><ext>`` name inside *assets_dir*. If that
    unique name already exists (e.g. a previous run), the file is already
    archived, so the source is deleted instead of overwriting — shutil.move
    onto an existing path is platform-dependent.
    """
    src_hash = get_file_hash(src_path)
    if src_hash == get_file_hash(dest_path):
        print(f" Duplicate (Identical): Deleting {src_path}")
        os.remove(src_path)
        return
    base, ext = os.path.splitext(file)
    new_name = f"{base}_unique_{src_hash[:8]}{ext}"
    new_dest = os.path.join(assets_dir, new_name)
    if os.path.exists(new_dest) and get_file_hash(new_dest) == src_hash:
        # Already archived under the unique name by an earlier run.
        print(f" Duplicate (Identical): Deleting {src_path}")
        os.remove(src_path)
        return
    shutil.move(src_path, new_dest)
    print(f" Duplicate (Different): Renamed to {new_name}")
    # TODO: ideally we would need to grep the MD files and update the link
    # But for now let's just save the file.


def consolidate_assets(root_dir=ROOT_DIR, theme_prefix=THEME_PREFIX):
    """Move every image in each theme directory into its ``assets/`` subdir.

    A theme is any direct child of *root_dir* whose name starts with
    *theme_prefix*. Images found anywhere under a theme (outside ``assets/``)
    are moved into ``<theme>/assets``; name collisions are resolved by
    content hash (identical → delete duplicate, different → rename with a
    hash suffix). Errors on individual files are reported but do not abort
    the run.

    Both parameters default to the original hard-coded values, so a
    zero-argument call behaves exactly as before.
    """
    print("Starting Asset Consolidation...")
    for item in os.listdir(root_dir):
        theme_dir = os.path.join(root_dir, item)
        if not (os.path.isdir(theme_dir) and item.startswith(theme_prefix)):
            continue
        print(f"Processing Theme: {item}")
        assets_dir = os.path.join(theme_dir, "assets")
        if not os.path.exists(assets_dir):
            os.makedirs(assets_dir)
            print(f" Created assets dir: {assets_dir}")
        abs_assets = os.path.abspath(assets_dir)
        for root, dirs, files in os.walk(theme_dir):
            # Prune assets/ from the walk (topdown in-place edit of dirs):
            # avoids descending into it and re-examining files just moved there.
            dirs[:] = [
                d for d in dirs
                if os.path.abspath(os.path.join(root, d)) != abs_assets
            ]
            if os.path.abspath(root) == abs_assets:
                continue  # defensive: only possible if walk started inside assets
            for file in files:
                if not file.lower().endswith(IMAGE_EXTS):
                    continue
                src_path = os.path.join(root, file)
                dest_path = os.path.join(assets_dir, file)
                try:
                    if not os.path.exists(dest_path):
                        shutil.move(src_path, dest_path)
                        print(f" Moved: {file} -> assets/")
                    else:
                        _resolve_collision(src_path, dest_path, assets_dir, file)
                except OSError as e:
                    # Keep going: one unreadable/locked file must not abort the run.
                    print(f" Error processing {file}: {e}")
    print("Asset Consolidation Complete.")


if __name__ == "__main__":
    consolidate_assets()