2011-05-05 13:04:40 +02:00
|
|
|
import os
|
|
|
|
import os.path
|
2011-05-13 03:38:14 +02:00
|
|
|
import sys
|
2011-05-05 14:42:04 +02:00
|
|
|
from datetime import datetime
|
2011-05-07 03:12:51 +02:00
|
|
|
from PhotoAlbum import Photo, Album, PhotoAlbumEncoder
|
2011-05-23 11:25:45 +02:00
|
|
|
from CachePath import *
|
2011-05-07 03:12:51 +02:00
|
|
|
import json
|
2011-05-05 13:04:40 +02:00
|
|
|
|
|
|
|
class TreeWalker:
    """Walk an album directory tree and keep its JSON cache up to date.

    Scanning builds Album/Photo objects (see PhotoAlbum), writes one JSON
    file per non-empty album into the cache directory, then removes cache
    entries that no longer correspond to anything in the album tree.

    NOTE(review): this code targets Python 2 -- directory entries and paths
    arrive as byte strings and are decoded with the filesystem encoding.
    """

    def __init__(self, album_path, cache_path):
        """Scan album_path, refreshing the cache rooted at cache_path.

        album_path -- root directory of the photo albums (byte string path)
        cache_path -- directory receiving JSON/image/video cache files
        """
        # Normalize both roots to absolute unicode paths (Python 2 bytes -> unicode).
        self.album_path = os.path.abspath(album_path).decode(sys.getfilesystemencoding())
        self.cache_path = os.path.abspath(cache_path).decode(sys.getfilesystemencoding())
        # CachePath helpers compute cache file names relative to the album root.
        set_cache_path_base(self.album_path)
        self.all_albums = list()
        self.all_photos = list()
        self.walk(self.album_path)
        self.big_lists()
        self.remove_stale()
        message("complete", "")

    def walk(self, path):
        """Recursively scan one directory.

        Returns the directory's Album, or None when the directory is not
        readable/traversable. Reuses the per-album JSON cache when neither
        the directory nor the individual files changed since it was written.
        """
        next_level()
        if not os.access(path, os.R_OK | os.X_OK):
            message("access denied", os.path.basename(path))
            back_level()
            return None
        message("walking", os.path.basename(path))
        cache = os.path.join(self.cache_path, json_cache(path))
        cached = False
        cached_album = None
        if os.path.exists(cache):
            try:
                cached_album = Album.from_cache(cache)
                if file_mtime(path) <= file_mtime(cache):
                    # Directory unchanged since the cache was written: reuse it wholesale.
                    message("full cache", os.path.basename(path))
                    cached = True
                    album = cached_album
                    for photo in album.photos:
                        self.all_photos.append(photo)
                else:
                    # Directory is newer; keep cached_album only for per-photo reuse below.
                    message("partial cache", os.path.basename(path))
            except KeyboardInterrupt:
                raise
            # Narrowed from a bare except: a corrupt/unreadable cache file
            # triggers a rebuild, but SystemExit and friends still propagate.
            except Exception:
                message("corrupt cache", os.path.basename(path))
                cached_album = None
        if not cached:
            album = Album(path)
        # Subdirectories are always walked, even on a full cache hit, so the
        # whole tree gets scanned; files are only (re)processed when not cached.
        for entry in os.listdir(path):
            if entry.startswith('.'):
                # Skip hidden files and directories.
                continue
            try:
                entry = entry.decode(sys.getfilesystemencoding())
            except KeyboardInterrupt:
                raise
            except Exception:
                next_level()
                message("unicode error", entry.decode(sys.getfilesystemencoding(), "replace"))
                back_level()
                continue
            entry = os.path.join(path, entry)
            if os.path.isdir(entry):
                next_walked_album = self.walk(entry)
                if next_walked_album is not None:
                    album.add_album(next_walked_album)
            elif not cached and os.path.isfile(entry):
                next_level()
                cache_hit = False
                if cached_album:
                    cached_photo = cached_album.photo_from_path(entry)
                    if cached_photo and file_mtime(entry) <= cached_photo.attributes["dateTimeFile"]:
                        # Source file unchanged; pick the cache artifact to verify.
                        # Entries without a "mediaType" attribute are images.
                        if ("mediaType" in cached_photo.attributes and
                                cached_photo.attributes["mediaType"] == "video"):
                            cache_file = os.path.join(self.cache_path, video_cache(entry))
                        else:
                            cache_file = os.path.join(self.cache_path, image_cache(entry, 1024, False))
                        # Only a hit if the cached image/video actually exists on disk.
                        if os.path.exists(cache_file):
                            message("cache hit", os.path.basename(entry))
                            cache_hit = True
                            photo = cached_photo
                if not cache_hit:
                    message("metainfo", os.path.basename(entry))
                    photo = Photo(entry, self.cache_path)
                if photo.is_valid:
                    self.all_photos.append(photo)
                    album.add_photo(photo)
                else:
                    message("unreadable", os.path.basename(entry))
                back_level()
        if not album.empty:
            message("caching", os.path.basename(path))
            album.cache(self.cache_path)
            self.all_albums.append(album)
        else:
            message("empty", os.path.basename(path))
        back_level()
        return album

    def big_lists(self):
        """Write the flat, sorted list of every photo path to all_photos.json."""
        self.all_photos.sort()
        photo_list = [photo.path for photo in self.all_photos]
        message("caching", "all photos path list")
        # 'with' guarantees the file is closed even if json.dump raises.
        with open(os.path.join(self.cache_path, "all_photos.json"), 'w') as fp:
            json.dump(photo_list, fp, cls=PhotoAlbumEncoder)

    def remove_stale(self):
        """Delete cache files that no longer belong to any album or photo."""
        message("cleanup", "building stale list")
        # Everything a fresh walk would (re)create; anything else is stale.
        all_cache_entries = set(["all_photos.json", "latest_photos.json"])
        for album in self.all_albums:
            all_cache_entries.add(album.cache_path)
        for photo in self.all_photos:
            for entry in photo.image_caches:
                all_cache_entries.add(entry)
        message("cleanup", "searching for stale cache entries")
        for cache in os.listdir(self.cache_path):
            try:
                cache = cache.decode(sys.getfilesystemencoding())
            except KeyboardInterrupt:
                raise
            except Exception:
                # Undecodable name: fall through and compare it as-is.
                pass
            if cache not in all_cache_entries:
                message("cleanup", os.path.basename(cache))
                os.unlink(os.path.join(self.cache_path, cache))