pep8'd what I could, including tabs to spaces

parent a45ecc76b0
commit 48bf325d77

.gitignore (vendored)
@@ -5,3 +5,4 @@ scanner/floatapp/app.cfg
*.pyc
*.min.css
*.min.js
.vscode
@@ -1,49 +1,90 @@
import os.path
from datetime import datetime


def message(category, text):
if message.level <= 0:
sep = " "
else:
sep = "--"
print "%s %s%s[%s]%s%s" % (datetime.now().isoformat(), max(0, message.level) * " |", sep, category, max(1, (14 - len(category))) * " ", text)
print "%s %s%s[%s]%s%s" % (
datetime.now().isoformat(),
max(0, message.level) * " |",
sep,
category,
max(1, (14 - len(category))) * " ",
text)
message.level = -1


def next_level():
message.level += 1


def back_level():
message.level -= 1


def set_cache_path_base(base):
trim_base.base = base


def untrim_base(path):
return os.path.join(trim_base.base, path)


def trim_base_custom(path, base):
if path.startswith(base):
path = path[len(base):]
if path.startswith('/'):
path = path[1:]
return path


def trim_base(path):
return trim_base_custom(path, trim_base.base)


def cache_base(path, filepath=False):
if len(path) == 0:
return "root"
elif filepath and len(path.split(os.sep)) < 2:
path = "root-" + path
path = trim_base(path).replace('/', '-').replace(' ', '_').replace('(', '').replace('&', '').replace(',', '').replace(')', '').replace('#', '').replace('[', '').replace(']', '').replace('"', '').replace("'", '').replace('_-_', '-').lower()
path = trim_base(path).replace(
'/', '-').replace(
' ', '_').replace(
'(', '').replace(
'&', '').replace(
',', '').replace(
')', '').replace(
'#', '').replace(
'[', '').replace(
']', '').replace(
'"', '').replace(
"'", '').replace(
'_-_', '-').lower()
while path.find("--") != -1:
path = path.replace("--", "-")
while path.find("__") != -1:
path = path.replace("__", "_")
return path


def json_cache(path):
return cache_base(path) + ".json"


def image_cache(path, size, square=False):
if square:
suffix = str(size) + "s"
else:
suffix = str(size)
return cache_base(path, True) + "_" + suffix + ".jpg"


def video_cache(path):
return cache_base(path, True) + ".mp4"


def file_mtime(path):
return datetime.fromtimestamp(int(os.path.getmtime(path)))
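A side note on the cache_base() sanitisation above (illustration only, not part of the commit): the long chain of .replace() calls can also be kept under the line-length limit with a loop over (old, new) pairs applied in the same order; the root/filepath special-casing of the real function is omitted here, and the helper name is made up.

def sanitize_path_sketch(path):
    path = trim_base(path)
    for old, new in (('/', '-'), (' ', '_'), ('(', ''), ('&', ''),
                     (',', ''), (')', ''), ('#', ''), ('[', ''),
                     (']', ''), ('"', ''), ("'", ''), ('_-_', '-')):
        path = path.replace(old, new)
    path = path.lower()
    while path.find("--") != -1:
        path = path.replace("--", "-")
    while path.find("__") != -1:
        path = path.replace("__", "_")
    return path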
@@ -10,6 +10,7 @@ import gc
import tempfile
from VideoToolWrapper import *


def make_photo_thumbs(self, original_path, thumb_path, size):
# The pool methods use a queue.Queue to pass tasks to the worker processes.
# Everything that goes through the queue.Queue must be pickable, and since
@@ -17,6 +18,7 @@ def make_photo_thumbs(self, original_path, thumb_path, size):
# This is why we have this "dummy" function, so that it's pickable.
self._photo_thumbnail(original_path, thumb_path, size[0], size[1])


class Album(object):
def __init__(self, path):
self._path = trim_base(path)
@@ -24,20 +26,26 @@ class Album(object):
self._albums = list()
self._photos_sorted = True
self._albums_sorted = True

@property
def photos(self):
return self._photos

@property
def albums(self):
return self._albums

@property
def path(self):
return self._path

def __str__(self):
return self.path

@property
def cache_path(self):
return json_cache(self.path)

@property
def date(self):
self._sort()
@@ -48,14 +56,18 @@ class Album(object):
elif len(self._albums) == 0:
return self._photos[-1].date
return max(self._photos[-1].date, self._albums[-1].date)

def __cmp__(self, other):
return cmp(self.date, other.date)

def add_photo(self, photo):
self._photos.append(photo)
self._photos_sorted = False

def add_album(self, album):
self._albums.append(album)
self._albums_sorted = False

def _sort(self):
if not self._photos_sorted:
self._photos.sort()
@@ -63,6 +75,7 @@ class Album(object):
if not self._albums_sorted:
self._albums.sort()
self._albums_sorted = True

@property
def empty(self):
if len(self._photos) != 0:
@@ -79,12 +92,14 @@ class Album(object):
fp = open(os.path.join(base_dir, self.cache_path), 'w')
json.dump(self, fp, cls=PhotoAlbumEncoder)
fp.close()

@staticmethod
def from_cache(path):
fp = open(path, "r")
dictionary = json.load(fp)
fp.close()
return Album.from_dict(dictionary)

@staticmethod
def from_dict(dictionary, cripple=True):
album = Album(dictionary["path"])
@@ -95,26 +110,39 @@ class Album(object):
album.add_album(Album.from_dict(subalbum), cripple)
album._sort()
return album

def to_dict(self, cripple=True):
self._sort()
subalbums = []
if cripple:
for sub in self._albums:
if not sub.empty:
subalbums.append({ "path": trim_base_custom(sub.path, self._path), "date": sub.date })
subalbums.append({
"path": trim_base_custom(sub.path, self._path),
"date": sub.date
})
else:
for sub in self._albums:
if not sub.empty:
subalbums.append(sub)
return { "path": self.path, "date": self.date, "albums": subalbums, "photos": self._photos }
return {
"path": self.path,
"date": self.date,
"albums": subalbums,
"photos": self._photos
}

def photo_from_path(self, path):
for photo in self._photos:
if trim_base(path) == photo._path:
return photo
return None


class Photo(object):
thumb_sizes = [ (75, True), (150, True), (640, False), (1024, False), (1600, False) ]
thumb_sizes = [
(75, True), (150, True), (640, False), (1024, False), (1600, False)]

def __init__(self, path, thumb_path=None, attributes=None):
self._path = trim_base(path)
self.is_valid = True
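For orientation (not part of the diff; every value below is invented): an album cache file produced by Album.to_dict() above and serialised through PhotoAlbumEncoder comes out roughly as sketched here, with sub-albums "crippled" down to a relative path and a date, and photos carrying their attribute dictionaries. The date rendering is an assumption: it uses the "%a %b %d %H:%M:%S %Y" form that Photo.from_dict() parses further down in this diff.

example_album_cache = {
    "path": "holidays/2012",
    "date": "Sat May 04 16:20:00 2013",
    "albums": [{"path": "rome", "date": "Sun May 05 10:00:00 2013"}],
    "photos": [{"name": "img_001.jpg", "date": "Sat May 04 16:20:00 2013"}],
}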
@@ -165,7 +193,11 @@ class Photo(object):
exif = {}
for tag, value in info.items():
decoded = TAGS.get(tag, tag)
if (isinstance(value, tuple) or isinstance(value, list)) and (isinstance(decoded, str) or isinstance(decoded, unicode)) and decoded.startswith("DateTime") and len(value) >= 1:
if ((isinstance(value, tuple) or isinstance(value, list)) and
(isinstance(decoded, str) or
isinstance(decoded, unicode)) and
decoded.startswith("DateTime") and
len(value) >= 1):
value = value[0]
if isinstance(value, str) or isinstance(value, unicode):
value = value.strip().partition("\x00")[0]
@@ -179,11 +211,15 @@ class Photo(object):
exif[decoded] = value

if "Orientation" in exif:
self._orientation = exif["Orientation"];
self._orientation = exif["Orientation"]
if self._orientation in range(5, 9):
self._attributes["size"] = (self._attributes["size"][1], self._attributes["size"][0])
if self._orientation - 1 < len(self._photo_metadata.orientation_list):
self._attributes["orientation"] = self._photo_metadata.orientation_list[self._orientation - 1]
self._attributes["size"] = (
self._attributes["size"][1], self._attributes["size"][0])
if self._orientation - 1 < len(
self._photo_metadata.orientation_list):
self._attributes["orientation"] = (
self._photo_metadata.orientation_list[
self._orientation - 1])
if "Make" in exif:
self._attributes["make"] = exif["Make"]
if "Model" in exif:
@@ -247,19 +283,120 @@ class Photo(object):
except TypeError:
self._attributes["dateTime"] = exif["DateTime"]

_photo_metadata.flash_dictionary = {0x0: "No Flash", 0x1: "Fired",0x5: "Fired, Return not detected",0x7: "Fired, Return detected",0x8: "On, Did not fire",0x9: "On, Fired",0xd: "On, Return not detected",0xf: "On, Return detected",0x10: "Off, Did not fire",0x14: "Off, Did not fire, Return not detected",0x18: "Auto, Did not fire",0x19: "Auto, Fired",0x1d: "Auto, Fired, Return not detected",0x1f: "Auto, Fired, Return detected",0x20: "No flash function",0x30: "Off, No flash function",0x41: "Fired, Red-eye reduction",0x45: "Fired, Red-eye reduction, Return not detected",0x47: "Fired, Red-eye reduction, Return detected",0x49: "On, Red-eye reduction",0x4d: "On, Red-eye reduction, Return not detected",0x4f: "On, Red-eye reduction, Return detected",0x50: "Off, Red-eye reduction",0x58: "Auto, Did not fire, Red-eye reduction",0x59: "Auto, Fired, Red-eye reduction",0x5d: "Auto, Fired, Red-eye reduction, Return not detected",0x5f: "Auto, Fired, Red-eye reduction, Return detected"}
_photo_metadata.light_source_dictionary = {0: "Unknown", 1: "Daylight", 2: "Fluorescent", 3: "Tungsten (incandescent light)", 4: "Flash", 9: "Fine weather", 10: "Cloudy weather", 11: "Shade", 12: "Daylight fluorescent (D 5700 - 7100K)", 13: "Day white fluorescent (N 4600 - 5400K)", 14: "Cool white fluorescent (W 3900 - 4500K)", 15: "White fluorescent (WW 3200 - 3700K)", 17: "Standard light A", 18: "Standard light B", 19: "Standard light C", 20: "D55", 21: "D65", 22: "D75", 23: "D50", 24: "ISO studio tungsten"}
_photo_metadata.metering_list = ["Unknown", "Average", "Center-weighted average", "Spot", "Multi-spot", "Multi-segment", "Partial"]
_photo_metadata.exposure_list = ["Not Defined", "Manual", "Program AE", "Aperture-priority AE", "Shutter speed priority AE", "Creative (Slow speed)", "Action (High speed)", "Portrait", "Landscape", "Bulb"]
_photo_metadata.orientation_list = ["Horizontal (normal)", "Mirror horizontal", "Rotate 180", "Mirror vertical", "Mirror horizontal and rotate 270 CW", "Rotate 90 CW", "Mirror horizontal and rotate 90 CW", "Rotate 270 CW"]
_photo_metadata.sensing_method_list = ["Not defined", "One-chip color area sensor", "Two-chip color area sensor", "Three-chip color area sensor", "Color sequential area sensor", "Trilinear sensor", "Color sequential linear sensor"]
_photo_metadata.scene_capture_type_list = ["Standard", "Landscape", "Portrait", "Night scene"]
_photo_metadata.subject_distance_range_list = ["Unknown", "Macro", "Close view", "Distant view"]
_photo_metadata.flash_dictionary = {
0x0: "No Flash",
0x1: "Fired",
0x5: "Fired, Return not detected",
0x7: "Fired, Return detected",
0x8: "On, Did not fire",
0x9: "On, Fired",
0xd: "On, Return not detected",
0xf: "On, Return detected",
0x10: "Off, Did not fire",
0x14: "Off, Did not fire, Return not detected",
0x18: "Auto, Did not fire",
0x19: "Auto, Fired",
0x1d: "Auto, Fired, Return not detected",
0x1f: "Auto, Fired, Return detected",
0x20: "No flash function",
0x30: "Off, No flash function",
0x41: "Fired, Red-eye reduction",
0x45: "Fired, Red-eye reduction, Return not detected",
0x47: "Fired, Red-eye reduction, Return detected",
0x49: "On, Red-eye reduction",
0x4d: "On, Red-eye reduction, Return not detected",
0x4f: "On, Red-eye reduction, Return detected",
0x50: "Off, Red-eye reduction",
0x58: "Auto, Did not fire, Red-eye reduction",
0x59: "Auto, Fired, Red-eye reduction",
0x5d: "Auto, Fired, Red-eye reduction, Return not detected",
0x5f: "Auto, Fired, Red-eye reduction, Return detected"
}
_photo_metadata.light_source_dictionary = {
0: "Unknown",
1: "Daylight",
2: "Fluorescent",
3: "Tungsten (incandescent light)",
4: "Flash",
9: "Fine weather",
10: "Cloudy weather",
11: "Shade",
12: "Daylight fluorescent (D 5700 - 7100K)",
13: "Day white fluorescent (N 4600 - 5400K)",
14: "Cool white fluorescent (W 3900 - 4500K)",
15: "White fluorescent (WW 3200 - 3700K)",
17: "Standard light A",
18: "Standard light B",
19: "Standard light C",
20: "D55",
21: "D65",
22: "D75",
23: "D50",
24: "ISO studio tungsten"
}
_photo_metadata.metering_list = [
"Unknown",
"Average",
"Center-weighted average",
"Spot",
"Multi-spot",
"Multi-segment",
"Partial"
]
_photo_metadata.exposure_list = [
"Not Defined",
"Manual",
"Program AE",
"Aperture-priority AE",
"Shutter speed priority AE",
"Creative (Slow speed)",
"Action (High speed)",
"Portrait",
"Landscape",
"Bulb"
]
_photo_metadata.orientation_list = [
"Horizontal (normal)",
"Mirror horizontal",
"Rotate 180",
"Mirror vertical",
"Mirror horizontal and rotate 270 CW",
"Rotate 90 CW",
"Mirror horizontal and rotate 90 CW",
"Rotate 270 CW"
]
_photo_metadata.sensing_method_list = [
"Not defined",
"One-chip color area sensor",
"Two-chip color area sensor",
"Three-chip color area sensor",
"Color sequential area sensor",
"Trilinear sensor",
"Color sequential linear sensor"
]
_photo_metadata.scene_capture_type_list = [
"Standard",
"Landscape",
"Portrait",
"Night scene"
]
_photo_metadata.subject_distance_range_list = [
"Unknown",
"Macro",
"Close view",
"Distant view"
]
def _video_metadata(self, path, original=True):
p = VideoProbeWrapper().call('-show_format', '-show_streams', '-of', 'json', '-loglevel', '0', path)
if p == False:
p = VideoProbeWrapper().call(
'-show_format',
'-show_streams',
'-of',
'json',
'-loglevel',
'0',
path)
if p is False:
self.is_valid = False
return
info = json.loads(p)
@@ -272,7 +409,8 @@ class Photo(object):
if "tags" in s and "rotate" in s["tags"]:
self._attributes["rotate"] = s["tags"]["rotate"]
if original:
self._attributes["originalSize"] = (int(s["width"]), int(s["height"]))
self._attributes["originalSize"] = (
int(s["width"]), int(s["height"]))
# we break, because a video can contain several streams
# this way we only get/use values from the first stream
break
@@ -287,7 +425,9 @@ class Photo(object):
# lets use this

try:
self._attributes["dateTimeVideo"] = datetime.strptime(info['format']['tags']['creation_time'], '%Y-%m-%d %H:%M:%S')
self._attributes["dateTimeVideo"] = datetime.strptime(
info['format']['tags']['creation_time'],
'%Y-%m-%d %H:%M:%S')
except KeyboardInterrupt:
raise
except TypeError:
@@ -314,13 +454,15 @@ class Photo(object):
mirror = image.transpose(Image.FLIP_TOP_BOTTOM)
elif self._orientation == 5:
# Horizontal Mirror + Rotation 270
mirror = image.transpose(Image.FLIP_TOP_BOTTOM).transpose(Image.ROTATE_270)
mirror = image.transpose(
Image.FLIP_TOP_BOTTOM).transpose(Image.ROTATE_270)
elif self._orientation == 6:
# Rotation 270
mirror = image.transpose(Image.ROTATE_270)
elif self._orientation == 7:
# Vertical Mirror + Rotation 270
mirror = image.transpose(Image.FLIP_LEFT_RIGHT).transpose(Image.ROTATE_270)
mirror = image.transpose(
Image.FLIP_LEFT_RIGHT).transpose(Image.ROTATE_270)
elif self._orientation == 8:
# Rotation 90
mirror = image.transpose(Image.ROTATE_90)
@@ -329,12 +471,16 @@ class Photo(object):
self._thumbnail(image, original_path, thumb_path, size, square)

def _thumbnail(self, image, original_path, thumb_path, size, square):
thumb_path = os.path.join(thumb_path, image_cache(self._path, size, square))
info_string = "%s -> %spx" % (os.path.basename(original_path), str(size))
thumb_path = os.path.join(
thumb_path, image_cache(self._path, size, square))
info_string = "%s -> %spx" % (
os.path.basename(original_path),
str(size))
if square:
info_string += ", square"
message("thumbing", info_string)
if os.path.exists(thumb_path) and file_mtime(thumb_path) >= self._attributes["dateTimeFile"]:
if os.path.exists(thumb_path) and file_mtime(
thumb_path) >= self._attributes["dateTimeFile"]:
return
gc.collect()
try:
@@ -386,7 +532,8 @@ class Photo(object):

try:
for size in Photo.thumb_sizes:
pool.apply_async(make_photo_thumbs, args = (self, original_path, thumb_path, size))
pool.apply_async(make_photo_thumbs, args=(
self, original_path, thumb_path, size))
except:
pool.terminate()
@@ -394,7 +541,7 @@ class Photo(object):
pool.join()

def _video_thumbnails(self, thumb_path, original_path):
(tfd, tfn) = tempfile.mkstemp();
(tfd, tfn) = tempfile.mkstemp()
p = VideoTranscodeWrapper().call(
'-i', original_path, # original file to extract thumbs from
'-f', 'image2', # extract image
@@ -405,8 +552,10 @@ class Photo(object):
'-y', # don't prompt for overwrite
tfn # temporary file to store extracted image
)
if p == False:
message("couldn't extract video frame", os.path.basename(original_path))
if p is False:
message(
"couldn't extract video frame",
os.path.basename(original_path))
try:
os.unlink(tfn)
except:
@@ -439,7 +588,8 @@ class Photo(object):
mirror = image.transpose(Image.ROTATE_90)
for size in Photo.thumb_sizes:
if size[1]:
self._thumbnail(mirror, original_path, thumb_path, size[0], size[1])
self._thumbnail(
mirror, original_path, thumb_path, size[0], size[1])
try:
os.unlink(tfn)
except:
@@ -452,7 +602,9 @@ class Photo(object):
transcode_cmd = [
'-i', original_path, # original file to be encoded
'-c:v', 'libx264', # set h264 as videocodec
'-preset', 'slow', # set specific preset that provides a certain encoding speed to compression ratio
# set specific preset that provides a certain encoding speed to
# compression ratio
'-preset', 'slow',
'-profile:v', 'baseline', # set output to specific h264 profile
'-level', '3.0', # sets highest compatibility with target devices
'-crf', '20', # set quality
@@ -461,7 +613,8 @@ class Photo(object):
'-c:a', 'aac', # set aac as audiocodec
'-ac', '2', # force two audiochannels
'-ab', '160k', # set audiobitrate to 160Kbps
'-maxrate', '10000000', # limits max rate, will degrade CRF if needed
# limits max rate, will degrade CRF if needed
'-maxrate', '10000000',
'-bufsize', '10000000', # define how much the client should buffer
'-f', 'mp4', # fileformat mp4
'-threads', str(num_of_cores), # number of cores (all minus one)
@@ -471,7 +624,9 @@ class Photo(object):
filters = []
info_string = "%s -> mp4, h264" % (os.path.basename(original_path))
message("transcoding", info_string)
if os.path.exists(transcode_path) and file_mtime(transcode_path) >= self._attributes["dateTimeFile"]:
if (os.path.exists(transcode_path) and
file_mtime(
transcode_path) >= self._attributes["dateTimeFile"]):
self._video_metadata(transcode_path, False)
return
if "originalSize" in self._attributes:
@@ -501,17 +656,19 @@ class Photo(object):
tmp_transcode_cmd = transcode_cmd[:]
transcode_cmd.append(transcode_path)
p = VideoTranscodeWrapper().call(*transcode_cmd)
if p == False:
if p is False:
# add another option, try transcoding again
# done to avoid this error;
# x264 [error]: baseline profile doesn't support 4:2:2
message("transcoding failure, trying yuv420p", os.path.basename(original_path))
message(
"transcoding failure, trying yuv420p",
os.path.basename(original_path))
tmp_transcode_cmd.append('-pix_fmt')
tmp_transcode_cmd.append('yuv420p')
tmp_transcode_cmd.append(transcode_path)
p = VideoTranscodeWrapper().call(*tmp_transcode_cmd)

if p == False:
if p is False:
message("transcoding failure", os.path.basename(original_path))
try:
os.unlink(transcode_path)
@@ -524,25 +681,33 @@ class Photo(object):
@property
def name(self):
return os.path.basename(self._path)

def __str__(self):
return self.name

@property
def path(self):
return self._path

@property
def image_caches(self):
caches = []
if "mediaType" in self._attributes and self._attributes["mediaType"] == "video":
if ("mediaType" in self._attributes and
self._attributes["mediaType"] == "video"):
for size in Photo.thumb_sizes:
if size[1]:
caches.append(image_cache(self._path, size[0], size[1]))
caches.append(video_cache(self._path))
else:
caches = [image_cache(self._path, size[0], size[1]) for size in Photo.thumb_sizes]
caches = [
image_cache(self._path, size[0], size[1])
for size in Photo.thumb_sizes
]
return caches

@property
def date(self):
correct_date = None;
correct_date = None
if not self.is_valid:
correct_date = datetime(1900, 1, 1)
if "dateTimeVideo" in self._attributes:
@@ -565,6 +730,7 @@ class Photo(object):
@property
def attributes(self):
return self._attributes

@staticmethod
def from_dict(dictionary, basepath):
del dictionary["date"]
@@ -573,17 +739,21 @@ class Photo(object):
for key, value in dictionary.items():
if key.startswith("dateTime"):
try:
dictionary[key] = datetime.strptime(dictionary[key], "%a %b %d %H:%M:%S %Y")
dictionary[key] = datetime.strptime(
dictionary[key],
"%a %b %d %H:%M:%S %Y")
except KeyboardInterrupt:
raise
except:
pass
return Photo(path, None, dictionary)

def to_dict(self):
photo = { "name": self.name, "date": self.date }
photo = {"name": self.name, "date": self.date}
photo.update(self.attributes)
return photo


class PhotoAlbumEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, datetime):
@@ -591,4 +761,3 @@ class PhotoAlbumEncoder(json.JSONEncoder):
if isinstance(obj, Album) or isinstance(obj, Photo):
return obj.to_dict()
return json.JSONEncoder.default(self, obj)
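A detail worth spelling out (sketch; the datetime branch of PhotoAlbumEncoder is outside the hunk above): Photo.from_dict() parses cached timestamps with "%a %b %d %H:%M:%S %Y", so the encoder is expected to emit the matching strftime. The round trip, assuming that format:

from datetime import datetime

stamp = datetime(2013, 5, 4, 16, 20, 0).strftime("%a %b %d %H:%M:%S %Y")
# -> "Sat May 04 16:20:00 2013"
parsed = datetime.strptime(stamp, "%a %b %d %H:%M:%S %Y")
assert parsed == datetime(2013, 5, 4, 16, 20, 0)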
@@ -6,10 +6,13 @@ from PhotoAlbum import Photo, Album, PhotoAlbumEncoder
from CachePath import *
import json


class TreeWalker:
def __init__(self, album_path, cache_path):
self.album_path = os.path.abspath(album_path).decode(sys.getfilesystemencoding())
self.cache_path = os.path.abspath(cache_path).decode(sys.getfilesystemencoding())
self.album_path = os.path.abspath(
album_path).decode(sys.getfilesystemencoding())
self.cache_path = os.path.abspath(
cache_path).decode(sys.getfilesystemencoding())
set_cache_path_base(self.album_path)
self.all_albums = list()
self.all_photos = list()
@@ -17,6 +20,7 @@ class TreeWalker:
self.big_lists()
self.remove_stale()
message("complete", "")

def walk(self, path):
next_level()
if not os.access(path, os.R_OK | os.X_OK):
@@ -54,7 +58,8 @@ class TreeWalker:
raise
except:
next_level()
message("unicode error", entry.decode(sys.getfilesystemencoding(), "replace"))
message("unicode error", entry.decode(
sys.getfilesystemencoding(), "replace"))
back_level()
continue
entry = os.path.join(path, entry)
@@ -67,18 +72,24 @@ class TreeWalker:
cache_hit = False
if cached_album:
cached_photo = cached_album.photo_from_path(entry)
if cached_photo and file_mtime(entry) <= cached_photo.attributes["dateTimeFile"]:
if (cached_photo and file_mtime(
entry) <= cached_photo.attributes["dateTimeFile"]):
cache_file = None
if "mediaType" in cached_photo.attributes:
if cached_photo.attributes["mediaType"] == "video":
# if video
cache_file = os.path.join(self.cache_path, video_cache(entry))
cache_file = os.path.join(
self.cache_path, video_cache(entry))
else:
# if image
cache_file = os.path.join(self.cache_path, image_cache(entry, 1024, False))
cache_file = os.path.join(
self.cache_path,
image_cache(entry, 1024, False))
else:
# if image
cache_file = os.path.join(self.cache_path, image_cache(entry, 1024, False))
cache_file = os.path.join(
self.cache_path,
image_cache(entry, 1024, False))

# at this point we have full path to cache image/video
# check if it actually exists
@@ -104,6 +115,7 @@ class TreeWalker:
message("empty", os.path.basename(path))
back_level()
return album

def big_lists(self):
photo_list = []
self.all_photos.sort()
@@ -113,9 +125,11 @@ class TreeWalker:
fp = open(os.path.join(self.cache_path, "all_photos.json"), 'w')
json.dump(photo_list, fp, cls=PhotoAlbumEncoder)
fp.close()

def remove_stale(self):
message("cleanup", "building stale list")
all_cache_entries = { "all_photos.json": True, "latest_photos.json": True }
all_cache_entries = {"all_photos.json": True,
"latest_photos.json": True}
for album in self.all_albums:
all_cache_entries[album.cache_path] = True
for photo in self.all_photos:
@@ -2,6 +2,7 @@ from CachePath import message
import os
import subprocess


class VideoToolWrapper(object):
def call(self, *args):
path = args[-1]
@@ -34,12 +35,14 @@ class VideoToolWrapper(object):
except:
pass


class VideoTranscodeWrapper(VideoToolWrapper):
def __init__(self):
self.wrappers = ['avconv', 'ffmpeg']
self.check_output = False
self.cleanup = True


class VideoProbeWrapper(VideoToolWrapper):
def __init__(self):
self.wrappers = ['avprobe', 'ffprobe']
@@ -3,7 +3,9 @@ from flask_login import LoginManager
import os.path

app = Flask(__name__)
app.config.from_pyfile(os.path.join(os.path.dirname(os.path.abspath(__file__)), "app.cfg"))
app.config.from_pyfile(
os.path.join(os.path.dirname(os.path.abspath(__file__)), "app.cfg"))

login_manager = LoginManager()
import login
login_manager.setup_app(app)
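For context (illustration; every value below is a placeholder): scanner/floatapp/app.cfg is excluded by the .gitignore hunk at the top of this commit, so a local copy has to supply the settings the Flask code reads. The keys are the ones referenced in the views and login modules further down; SECRET_KEY is an extra assumption for Flask's session cookie.

# scanner/floatapp/app.cfg, loaded by app.config.from_pyfile() above
SECRET_KEY = "change-me"                # assumed, not shown in the diff
ALBUM_PATH = "/srv/photofloat/albums"
CACHE_PATH = "/srv/photofloat/cache"
ALBUM_ACCEL = "/internal/albums"
CACHE_ACCEL = "/internal/cache"
PHOTO_USERNAME = "photos"
PHOTO_PASSWORD = "photos-password"
ADMIN_USERNAME = "admin"
ADMIN_PASSWORD = "admin-password"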
@@ -1,55 +1,89 @@
from floatapp import app
from floatapp.login import admin_required, login_required, is_authenticated, query_is_photo_user, query_is_admin_user, photo_user, admin_user
from floatapp.jsonp import jsonp
from process import send_process
from TreeWalker import TreeWalker
from flask import Response, abort, json, request, jsonify
from flask_login import login_user, current_user
from random import shuffle
import os
from mimetypes import guess_type
from random import shuffle

from flask import Response, abort, json, jsonify, request
from flask_login import current_user, login_user

from floatapp import app
from floatapp.jsonp import jsonp
from floatapp.login import (admin_required, admin_user, is_authenticated,
login_required, photo_user, query_is_admin_user,
query_is_photo_user)
from process import send_process
from TreeWalker import TreeWalker

cwd = os.path.dirname(os.path.abspath(__file__))


@app.route("/scan")
@admin_required
def scan_photos():
global cwd
response = send_process([ "stdbuf", "-oL", os.path.abspath(os.path.join(cwd, "../main.py")),
os.path.abspath(app.config["ALBUM_PATH"]), os.path.abspath(app.config["CACHE_PATH"]) ],
response = send_process([
"stdbuf",
"-oL",
os.path.abspath(os.path.join(cwd, "../venv/bin/python")),
os.path.abspath(os.path.join(cwd, "../main.py")),
os.path.abspath(app.config["ALBUM_PATH"]),
os.path.abspath(app.config["CACHE_PATH"])
],
os.path.join(cwd, "scanner.pid"))
response.headers.add("X-Accel-Buffering", "no")
response.cache_control.no_cache = True
return response


@app.route("/auth")
def login():
success = False
if current_user.is_authenticated():
if current_user.is_authenticated:
success = True
elif query_is_photo_user(request.form) or query_is_photo_user(request.args):
elif (query_is_photo_user(request.form) or
query_is_photo_user(request.args)):
success = login_user(photo_user, remember=True)
elif query_is_admin_user(request.form) or query_is_admin_user(request.args):
elif (query_is_admin_user(request.form) or
query_is_admin_user(request.args)):
success = login_user(admin_user, remember=True)
if not success:
abort(403)
return ""


def cache_base(path):
path = path.replace('/', '-').replace(' ', '_').replace('(', '').replace('&', '').replace(',', '').replace(')', '').replace('#', '').replace('[', '').replace(']', '').replace('"', '').replace("'", '').replace('_-_', '-').lower()
path = path.replace(
'/', '-').replace(
' ', '_').replace(
'(', '').replace(
'&', '').replace(
',', '').replace(
')', '').replace(
'#', '').replace(
'[', '').replace(
']', '').replace(
'"', '').replace(
"'", '').replace(
'_-_', '-').lower()

while path.find("--") != -1:
path = path.replace("--", "-")

while path.find("__") != -1:
path = path.replace("__", "_")

if len(path) == 0:
path = "root"

return path

auth_list = [ ]
auth_list = []


def read_auth_list():
global auth_list, cwd
f = open(os.path.join(cwd, "auth.txt"), "r")
paths = [ ]
paths = []
for path in f:
path = path.strip()
paths.append(path)
@@ -57,24 +91,31 @@ def read_auth_list():
f.close()
auth_list = paths


# TODO: Make this run via inotify
read_auth_list()


def check_permissions(path):
if not is_authenticated():
for auth_path in auth_list:
if path.startswith(auth_path):
abort(403)


@app.route("/albums/<path:path>")
def albums(path):
check_permissions(path)
return accel_redirect(app.config["ALBUM_ACCEL"], app.config["ALBUM_PATH"], path)
return accel_redirect(
app.config["ALBUM_ACCEL"], app.config["ALBUM_PATH"], path)


@app.route("/cache/<path:path>")
def cache(path):
check_permissions(path)
return accel_redirect(app.config["CACHE_ACCEL"], app.config["CACHE_PATH"], path)
return accel_redirect(
app.config["CACHE_ACCEL"], app.config["CACHE_PATH"], path)


def accel_redirect(internal, real, relative_name):
real_path = os.path.join(real, relative_name)
@@ -94,6 +135,7 @@ def accel_redirect(internal, real, relative_name):
response.cache_control.max_age = 29030400
return response


@app.route("/photos")
@jsonp
def photos():
@@ -5,14 +5,16 @@ import re

jsonp_validator = re.compile("^[a-zA-Z0-9_\-.]{1,128}$")


def jsonp(f):
"""Wraps JSONified output for JSONP"""
@wraps(f)
def decorated_function(*args, **kwargs):
callback = request.args.get('callback', False)
if callback and jsonp_validator.match(callback):
content = str(callback) + '(' + str(f(*args,**kwargs).data) + ')'
return current_app.response_class(content, mimetype='application/javascript')
content = str(callback) + '(' + str(f(*args, **kwargs).data) + ')'
return current_app.response_class(
content, mimetype='application/javascript')
else:
return f(*args, **kwargs)
return decorated_function
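A usage sketch for the decorator above (the /ping route is made up for illustration): a view wrapped with @jsonp returns its JSON body unchanged, and wraps it in the callback when a valid ?callback= parameter is supplied.

from flask import jsonify

from floatapp import app
from floatapp.jsonp import jsonp


@app.route("/ping")
@jsonp
def ping():
    return jsonify(ok=True)

# GET /ping              ->  {"ok": true}
# GET /ping?callback=cb  ->  cb({"ok": true})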
@@ -3,6 +3,7 @@ from flask import request, abort
from flask_login import current_user, UserMixin
from functools import wraps


class User(UserMixin):
def __init__(self, id, admin=False):
self.admin = admin
@@ -11,6 +12,7 @@ class User(UserMixin):
photo_user = User("user")
admin_user = User("admin", True)


@login_manager.user_loader
def load_user(id):
if id == "user":
@@ -19,35 +21,48 @@ def load_user(id):
return admin_user
return None


@login_manager.unauthorized_handler
def unauthorized():
return abort(403)


def login_required(fn):
@wraps(fn)
def decorated_view(*args, **kwargs):
if query_is_admin_user(request.args) or query_is_photo_user(request.args) or current_user.is_authenticated():
if (query_is_admin_user(request.args) or
query_is_photo_user(request.args) or
current_user.is_authenticated):
return fn(*args, **kwargs)
return app.login_manager.unauthorized()
return decorated_view


def admin_required(fn):
@wraps(fn)
def decorated_view(*args, **kwargs):
if query_is_admin_user(request.args) or (current_user.is_authenticated() and current_user.admin):
if (query_is_admin_user(request.args) or
(current_user.is_authenticated and current_user.admin)):
return fn(*args, **kwargs)
return app.login_manager.unauthorized()
return decorated_view


def query_is_photo_user(query):
username = query.get("username", None)
password = query.get("password", None)
return username == app.config["PHOTO_USERNAME"] and password == app.config["PHOTO_PASSWORD"]
return username == (app.config["PHOTO_USERNAME"] and
|
||||
password == app.config["PHOTO_PASSWORD"])
|
||||
|
||||
|
||||
def query_is_admin_user(query):
|
||||
username = query.get("username", None)
|
||||
password = query.get("password", None)
|
||||
return username == app.config["ADMIN_USERNAME"] and password == app.config["ADMIN_PASSWORD"]
|
||||
return username == (app.config["ADMIN_USERNAME"] and
|
||||
password == app.config["ADMIN_PASSWORD"])


def is_authenticated():
return query_is_admin_user(request.args) or query_is_photo_user(request.args) or current_user.is_authenticated()
return (query_is_admin_user(request.args) or
query_is_photo_user(request.args) or
current_user.is_authenticated)
@@ -3,10 +3,12 @@ import subprocess
import os
import sys


class ProcessWrapper(object):
def __init__(self, process, done):
self.process = process
self.done = done

def close(self):
self.done()
if self.process.returncode is not None:
@@ -14,10 +16,13 @@ class ProcessWrapper(object):
self.process.stdout.close()
self.process.terminate()
self.process.wait()

def __iter__(self):
return self

def __del__(self):
self.close()

def next(self):
try:
data = self.process.stdout.readline()
@@ -29,6 +34,7 @@ class ProcessWrapper(object):
self.close()
raise StopIteration()


def send_process(args, pid_file):
def setup_proc():
f = open(pid_file, "w")
@@ -36,17 +42,22 @@ def send_process(args, pid_file):
f.close()
os.close(0)
os.dup2(1, 2)

def tear_down_proc():
try:
os.unlink(pid_file)
except:
pass

if os.path.exists(pid_file):
f = open(pid_file, "r")
pid = f.read()
f.close()
if os.path.exists("/proc/%s/status" % pid):
return Response("Scanner is already running.\n", mimetype="text/plain")
process = subprocess.Popen(args, close_fds=True, stdout=subprocess.PIPE, preexec_fn=setup_proc)
return Response(
"Scanner is already running.\n", mimetype="text/plain")

process = subprocess.Popen(
args, close_fds=True, stdout=subprocess.PIPE, preexec_fn=setup_proc)
response = ProcessWrapper(process, tear_down_proc)
return Response(response, direct_passthrough=True, mimetype="text/plain")
@@ -5,6 +5,7 @@ from CachePath import message
import sys
import os


def main():
reload(sys)
sys.setdefaultencoding("UTF-8")
@@ -12,6 +13,7 @@ def main():
if len(sys.argv) != 3:
print "usage: %s ALBUM_PATH CACHE_PATH" % sys.argv[0]
return

try:
os.umask(022)
TreeWalker(sys.argv[1], sys.argv[2])