
Source Code for Module horizons.engine.generate_atlases

#!/usr/bin/env python3

# ###################################################
# Copyright (C) 2008-2017 The Unknown Horizons Team
# team@unknown-horizons.org
# This file is part of Unknown Horizons.
#
# Unknown Horizons is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
# ###################################################

import glob
import json
import logging
import math
import multiprocessing
import os
import os.path
import pickle
import sys
import traceback

# add paths for Mac OS X app container (Unknown Horizons.app)
app_python_lib_path = os.path.join(os.getcwd(), 'lib', 'python3.4')
if os.path.exists(app_python_lib_path):
    # horizons path: Unknown Horizons.app/Contents/Resources/lib/python3.4/horizons
    sys.path.append(app_python_lib_path)
    # PIL path: Unknown Horizons.app/Contents/Resources/lib/python3.4/lib-dynload/PIL
    sys.path.append(os.path.join(app_python_lib_path, 'lib-dynload'))

try:
    from PIL import Image
except ImportError:
    # Logging is not set up at this point.
    print('The Python Imaging Library (PIL / Pillow) package'
          ' is needed to run the atlas generator.')
    sys.exit(1)


# make this script work both when started inside development and in the uh root dir
if not os.path.exists('content'):
    os.chdir('..')
assert os.path.exists('content'), 'Content dir not found.'

sys.path.append('.')


class DummyFife:
    use_atlases = False


# TODO We can probably remove the type ignore in the next release of typeshed/mypy
# See https://github.com/python/typeshed/commit/08ac3b7742f1fd55f801ac66d7517cf60aa471d6

import horizons.globals  # isort:skip
horizons.globals.fife = DummyFife()  # type: ignore

from horizons.constants import PATHS  # isort:skip
from horizons.util.dbreader import DbReader  # isort:skip
from horizons.util.loaders.actionsetloader import ActionSetLoader  # isort:skip
from horizons.util.loaders.tilesetloader import TileSetLoader  # isort:skip


class AtlasEntry:
    def __init__(self, x, y, width, height, last_modified):
        self.x = x
        self.y = y
        self.width = width
        self.height = height
        self.last_modified = last_modified


class AtlasBook:
    log = logging.getLogger("generate_atlases")

    def __init__(self, id, max_size):
        self.id = id
        self.path = os.path.join(PATHS.ATLAS_FILES_DIR, '{0:03d}.png'.format(id))
        self.max_size = max_size
        self._clear()

    def _clear(self):
        self.location = {}
        self.cur_x = 0
        self.cur_y = 0
        self.cur_h = 0

    def add(self, w, h, path):
        """Return true if and only if the image was added."""
        if self.cur_x + w <= self.max_size and self.cur_y + h <= self.max_size:
            # add to the end of the current row
            self.location[path] = AtlasEntry(self.cur_x, self.cur_y, w, h, int(os.path.getmtime(path)))
            self.cur_x += w
            self.cur_h = max(self.cur_h, h)
            return True

        if w <= self.max_size and self.cur_y + self.cur_h + h <= self.max_size:
            # add to the beginning of the next row
            self.cur_x = w
            self.cur_y += self.cur_h
            self.cur_h = h
            self.location[path] = AtlasEntry(0, self.cur_y, w, h, int(os.path.getmtime(path)))
            return True

        # unable to fit in the given space with the current algorithm
        return False

    def save(self):
        """Write the entire image to a file with the given path."""
        if not os.path.exists(PATHS.ATLAS_FILES_DIR):
            # Make sure atlas directory is available
            os.mkdir(PATHS.ATLAS_FILES_DIR)

        im = Image.new('RGBA', (self.max_size, self.max_size), (255, 0, 255, 255))

        # place the sub-images in the right places
        for path, entry in self.location.items():
            with open(path, 'rb') as png_file:
                sub_image = Image.open(png_file)
                im.paste(sub_image, (entry.x, entry.y))

        # write the entire image to the file
        with open(self.path, 'wb') as out_file:
            im.save(out_file, 'png')

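For reference, AtlasBook.add implements a simple first-fit "shelf" packer: images are appended to the current row until it is full, then a new row is started below it, and a failed add tells the caller to open a new atlas page. The standalone sketch below mirrors those placement rules without the filesystem bookkeeping; the function name and the example sizes are illustrative and not part of the module.

    def shelf_pack(sizes, max_size):
        """Place (w, h) rectangles row by row; return {index: (x, y)} or None if one does not fit."""
        placements = {}
        cur_x = cur_y = cur_h = 0
        for i, (w, h) in enumerate(sizes):
            if cur_x + w <= max_size and cur_y + h <= max_size:
                # append to the end of the current row
                placements[i] = (cur_x, cur_y)
                cur_x += w
                cur_h = max(cur_h, h)
            elif w <= max_size and cur_y + cur_h + h <= max_size:
                # start a new row directly below the current one
                cur_x, cur_y, cur_h = w, cur_y + cur_h, h
                placements[i] = (0, cur_y)
            else:
                return None  # would require another atlas page
        return placements

    # Example: three images on a 128x128 page.
    # shelf_pack([(100, 40), (60, 30), (50, 50)], 128)
    # -> {0: (0, 0), 1: (0, 40), 2: (60, 40)}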

def save_atlas_book(book):
    book.save()


class ImageSetManager:
    def __init__(self, initial_data, path):
        self._data = {}
        self._path = path
        self._initial_data = initial_data

        self.files = []
        for set_id in initial_data:
            for action_id in initial_data[set_id]:
                for rotation in sorted(initial_data[set_id][action_id]):
                    for path in sorted(initial_data[set_id][action_id][rotation]):
                        self.files.append(path)

    def _add_entry(self, set_id, action_id, rotation, path, row):
        if set_id not in self._data:
            self._data[set_id] = {}
        if action_id not in self._data[set_id]:
            self._data[set_id][action_id] = {}
        if rotation not in self._data[set_id][action_id]:
            self._data[set_id][action_id][rotation] = {}
        self._data[set_id][action_id][rotation][path.replace(os.sep, '/')] = row

    def save(self, generator):
        for set_id in self._initial_data:
            for action_id in self._initial_data[set_id]:
                for rotation in sorted(self._initial_data[set_id][action_id]):
                    for path in sorted(self._initial_data[set_id][action_id][rotation]):
                        book = generator.atlas_book_lookup[path]
                        book_entry = book.location[path]

                        row = []
                        row.append(self._initial_data[set_id][action_id][rotation][path])
                        row.append(book.id)
                        row.append(book_entry.x)
                        row.append(book_entry.y)
                        row.append(book_entry.width)
                        row.append(book_entry.height)
                        self._add_entry(set_id, action_id, rotation, path, row)

        with open(self._path, 'w') as json_file:
            json.dump(self._data, json_file, indent=1)

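For orientation, ImageSetManager.save() serializes a nested mapping of set id, action id, rotation and image path; each leaf is a list holding the original loader value followed by the atlas id and the entry's position and size. The snippet below only illustrates that shape; the keys and numbers are made up rather than taken from a real action set.

    # set_id -> action_id -> rotation -> image path -> [loader value, atlas id, x, y, width, height]
    example = {
        "some_set": {
            "idle": {
                "45": {
                    "content/gfx/some_image.png": [0, 3, 128, 256, 64, 64],
                },
            },
        },
    }
    # save() writes this structure with json.dump(..., indent=1).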

class AtlasGenerator:
    log = logging.getLogger("generate_atlases")
    # increment this when the structure of the atlases changes
    current_version = 1

    def __init__(self, max_size):
        self.version = self.current_version
        self.max_size = max_size
        self.books = []
        self.num_books = 0
        self.atlas_book_lookup = {}

    def _init_sets(self):
        self.sets = [
            ImageSetManager(TileSetLoader.get_sets(), PATHS.TILE_SETS_JSON_FILE),
            ImageSetManager(ActionSetLoader.get_sets(), PATHS.ACTION_SETS_JSON_FILE),
        ]

    def _save_sets(self):
        for set in self.sets:
            set.save(self)

    @classmethod
    def _save_books(cls, books):
        processes = max(1, min(len(books), multiprocessing.cpu_count() - 1))
        pool = multiprocessing.Pool(processes=processes)
        for book in books:
            pool.apply_async(save_atlas_book, [book])
        pool.close()
        pool.join()

    def save(self):
        with open(PATHS.ATLAS_DB_PATH, 'w') as atlas_db_file:
            atlas_db_file.write("CREATE TABLE atlas('atlas_id' INTEGER NOT NULL PRIMARY KEY, 'atlas_path' TEXT NOT NULL);\n")
            for book in self.books:
                atlas_db_file.write("INSERT INTO atlas VALUES({0:d}, "
                                    "'{1!s}');\n".format(book.id, book.path))

        self._save_sets()
        self._save_books(self.books)
        self._save_metadata()

    def _add_atlas_book(self):
        self.books.append(AtlasBook(len(self.books), self.max_size))

    def _add_image(self, w, h, path):
        if not self.books:
            self._add_atlas_book()

        if not self.books[-1].add(w, h, path):
            self._add_atlas_book()
            assert self.books[-1].add(w, h, path)

        self.atlas_book_lookup[path] = self.books[-1]

    @classmethod
    def _get_dimensions(cls, path):
        with open(path, 'rb') as png_file:
            return Image.open(png_file).size

    def _get_paths(self):
        paths = []
        for set in self.sets:
            for path in set.files:
                paths.append(path)
        return paths

    def recreate(self):
        print('Recreating all atlases')

        self._init_sets()
        paths = self._get_paths()
        data = []
        for path in paths:
            w, h = self._get_dimensions(path)
            data.append((w * h, h, w, path))

        assert data, 'No files found.'
        # Pack the largest images first: this packs more tightly and makes the
        # size check below apply to the largest image rather than an arbitrary one.
        data.sort(reverse=True)
        assert (data[0][1] <= self.max_size and data[0][2] <= self.max_size), 'Image too large: ' + str(data[0][1:])

        for _, h, w, path in data:
            self._add_image(w, h, path)
        self.save()

    def _update_selected_books(self, update_books):
        print('Updating some of the atlases:')
        for book in sorted(update_books, key=lambda book: int(book.id)):
            print(book.path)
        print()

        self._save_sets()
        self._save_books(update_books)

    def update(self):
        self._init_sets()
        paths = self._get_paths()

        # if the sizes don't match then something has been deleted or added
        recreate_all = False
        if len(set(paths)) != len(self.atlas_book_lookup):
            recreate_all = True
            self.log.info("The old number of images (%d) doesn't match the new (%d)",
                          len(self.atlas_book_lookup), len(set(paths)))

        recreate_books = set()
        if not recreate_all:
            for path in paths:
                if path not in self.atlas_book_lookup:
                    self.log.info('A new image has been added: %s', path)
                    recreate_all = True
                    break

                last_modified = int(os.path.getmtime(path))
                book = self.atlas_book_lookup[path]
                entry = book.location[path]
                if last_modified == entry.last_modified:
                    continue

                self.log.info('An image has been modified: %s', path)
                w, h = self._get_dimensions(path)
                if w > entry.width or h > entry.height:
                    self.log.info('An image is larger than before: %s', path)
                    recreate_all = True
                    break

                if book not in recreate_books:
                    self.log.info('Need to recreate %s', book.path)
                    recreate_books.add(book)

                # update the entry
                entry.width = w
                entry.height = h
                entry.last_modified = last_modified

        if recreate_all:
            self.log.info('Forced to recreate the entire atlas.')
            return False

        if recreate_books:
            self.log.info('Updated selected books')
            self._update_selected_books(recreate_books)
            self._save_metadata()
        else:
            # the sets have to always be saved because the tm_N files are not otherwise taken into account
            self._save_sets()
        return True

    def __getstate__(self):
        # avoid saving self.sets
        return {'version': self.version, 'max_size': self.max_size, 'books': self.books,
                'num_books': self.num_books, 'atlas_book_lookup': self.atlas_book_lookup}

    def _save_metadata(self):
        self.log.info('Saving metadata')
        path = PATHS.ATLAS_METADATA_PATH
        if not os.path.exists(os.path.dirname(path)):
            os.makedirs(os.path.dirname(path))
        with open(path, 'wb') as file:
            pickle.dump(self, file)
        self.log.info('Finished saving metadata')

    @classmethod
    def check_files(cls):
        """Check that the required atlas files exist."""
        paths = [
            PATHS.ACTION_SETS_JSON_FILE,
            PATHS.ATLAS_DB_PATH,
            PATHS.TILE_SETS_JSON_FILE,
        ]
        for path in paths:
            if not os.path.exists(path):
                return False

        # verify that the combined images exist
        db = DbReader(':memory:')
        with open(PATHS.ATLAS_DB_PATH) as f:
            db.execute_script(f.read())
        for db_row in db("SELECT atlas_path FROM atlas"):
            if not os.path.exists(db_row[0]):
                return False
        return True

    @classmethod
    def load(cls, max_size):
        if not cls.check_files():
            cls.log.info('Some required atlas file missing.')
            return None

        if not os.path.exists(PATHS.ATLAS_METADATA_PATH):
            cls.log.info('Old atlas metadata cache not found.')
            return None

        cls.log.info('Loading the metadata cache')
        with open(PATHS.ATLAS_METADATA_PATH, 'rb') as file:
            data = pickle.load(file)

        if data.version != cls.current_version:
            cls.log.info('Old metadata version %d (current %d)', data.version, cls.current_version)
            return None

        if data.max_size != max_size:
            cls.log.info('The desired max_size has changed from %d to %d', data.max_size, max_size)
            return None

        cls.log.info('Successfully loaded the metadata cache')
        return data

    @classmethod
    def clear_everything(cls):
        """Delete all known atlas-related files."""
        paths = []
        paths.append(PATHS.ATLAS_METADATA_PATH)
        paths.append(PATHS.ATLAS_DB_PATH)
        paths.append(PATHS.ACTION_SETS_JSON_FILE)
        paths.append(PATHS.TILE_SETS_JSON_FILE)
        paths.extend(glob.glob(os.path.join(PATHS.ATLAS_FILES_DIR, '*.png')))

        # delete everything
        for path in paths:
            if not os.path.exists(path):
                continue
            cls.log.info('Deleting %s', path)
            os.unlink(path)


if __name__ == '__main__':
    args = sys.argv[1:]
    if len(args) != 1:
        print('Usage: python3 generate_atlases.py max_size')
        exit(1)

    max_size = int(math.pow(2, int(math.log(int(args[0]), 2))))

    updated = False
    try:
        generator = AtlasGenerator.load(max_size)
        if generator is not None:
            updated = generator.update()
    except Exception:
        traceback.print_exc()

    if not updated:
        AtlasGenerator.clear_everything()
        generator = AtlasGenerator(max_size)
        generator.recreate()
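A note on the size argument: the expression in the __main__ block rounds the requested size down to the nearest power of two before building the atlases. The helper below is only an illustration of that behaviour and is not part of the module.

    import math

    def rounded_max_size(arg):
        # mirrors the expression used above: round down to the nearest power of two
        return int(math.pow(2, int(math.log(int(arg), 2))))

    # rounded_max_size('3000') == 2048, since 2**11 = 2048 <= 3000 < 4096 = 2**12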
408 409 410 if __name__ == '__main__': 411 args = sys.argv[1:] 412 if len(args) != 1: 413 print('Usage: python3 generate_atlases.py max_size') 414 exit(1) 415 416 max_size = int(math.pow(2, int(math.log(int(args[0]), 2)))) 417 418 updated = False 419 try: 420 generator = AtlasGenerator.load(max_size) 421 if generator is not None: 422 updated = generator.update() 423 except Exception: 424 traceback.print_exc() 425 426 if not updated: 427 AtlasGenerator.clear_everything() 428 generator = AtlasGenerator(max_size) 429 generator.recreate() 430