Merge pull request #237 from ShaneIsrael/develop
Configurable Thumbnail Location
ShaneIsrael authored Dec 27, 2023
2 parents 284fc2c + 3bcdc0d commit 7512018
Showing 7 changed files with 32 additions and 9 deletions.
1 change: 1 addition & 0 deletions .env.dev
@@ -1,6 +1,7 @@
export FLASK_APP="app/server/fireshare:create_app()"
export FLASK_DEBUG=1
export ENVIRONMENT=dev
export THUMBNAIL_VIDEO_LOCATION=50
export SECRET_KEY=dev-test-key
export DATA_DIRECTORY=$(pwd)/dev_root/dev_data/
export VIDEO_DIRECTORY=$(pwd)/dev_root/dev_videos/
1 change: 1 addition & 0 deletions .env.prod
@@ -4,5 +4,6 @@ export VIDEO_DIRECTORY=/videos/
export PROCESSED_DIRECTORY=/processed/
export TEMPLATE_PATH=/app/server/fireshare/templates
export ENVIRONMENT=production
export THUMBNAIL_VIDEO_LOCATION=0
export ADMIN_PASSWORD=admin
export ADMIN_USERNAME=admin
2 changes: 1 addition & 1 deletion app/client/package.json
@@ -1,6 +1,6 @@
{
"name": "fireshare",
"version": "1.2.16",
"version": "1.2.17",
"private": true,
"dependencies": {
"@emotion/react": "^11.9.0",
4 changes: 2 additions & 2 deletions app/client/src/components/admin/UploadCard.js
@@ -60,8 +60,8 @@ const UploadCard = ({ authenticated, feedView = false, publicUpload = false, fet
}
handleAlert({
type: 'success',
message: 'Your upload will be available shortly',
autohideDuration: 2500,
message: 'Your upload will be in a few seconds.',
autohideDuration: 3500,
open: true,
onClose: () => fetchVideos(),
})
1 change: 1 addition & 0 deletions app/server/fireshare/__init__.py
@@ -62,6 +62,7 @@ def create_app(init_schedule=False):
raise Exception("DATA_DIRECTORY not found in environment")

app.config['ENVIRONMENT'] = os.getenv('ENVIRONMENT')
app.config['THUMBNAIL_VIDEO_LOCATION'] = int(os.getenv('THUMBNAIL_VIDEO_LOCATION'))
app.config['SECRET_KEY'] = os.getenv('SECRET_KEY', secrets.token_hex(32))
app.config['DATA_DIRECTORY'] = os.getenv('DATA_DIRECTORY')
app.config['VIDEO_DIRECTORY'] = os.getenv('VIDEO_DIRECTORY')
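
The new config line calls int(os.getenv('THUMBNAIL_VIDEO_LOCATION')), which assumes the variable is always present; .env.dev, .env.prod, and docker-compose.yml all set it in this commit. Below is a minimal sketch of a more defensive read with a fallback of 0 (first frame) — read_thumbnail_location is a hypothetical helper for illustration, not part of this change:

import os

def read_thumbnail_location(default=0):
    """Hypothetical helper: parse THUMBNAIL_VIDEO_LOCATION with a safe fallback."""
    raw = os.getenv('THUMBNAIL_VIDEO_LOCATION')
    if raw is None:
        return default  # unset variable -> first frame, instead of int(None) raising
    try:
        value = int(raw)
    except ValueError:
        return default  # non-numeric value -> first frame
    # Out-of-range values fall back to the default, similar to how the CLI
    # resets anything outside the 0-100 range to 0.
    return value if 0 <= value <= 100 else default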
30 changes: 24 additions & 6 deletions app/server/fireshare/cli.py
@@ -114,12 +114,18 @@ def scan_videos(root):
db.session.commit()

@cli.command()
@click.pass_context
@click.option("--path", "-p", help="path to video to scan", required=False)
def scan_video(path):
def scan_video(ctx, path):
with create_app().app_context():
paths = current_app.config['PATHS']
videos_path = paths["video"]
video_links = paths["processed"] / "video_links"
thumbnail_skip = current_app.config['THUMBNAIL_VIDEO_LOCATION'] or 0
if thumbnail_skip > 0 and thumbnail_skip <= 100:
thumbnail_skip = thumbnail_skip / 100
else:
thumbnail_skip = 0

config_file = open(paths["data"] / "config.json")
video_config = json.load(config_file)["app_config"]["video_defaults"]
@@ -169,7 +175,12 @@ def scan_video(path):
logger.info(f"{dst} exists already")
info = VideoInfo(video_id=v.video_id, title=Path(v.path).stem, private=video_config["private"])
db.session.add(info)

db.session.commit()

logger.info("Syncing metadata")
ctx.invoke(sync_metadata, video=video_id)
info = VideoInfo.query.filter(VideoInfo.video_id==video_id).one()

processed_root = Path(current_app.config['PROCESSED_DIRECTORY'])
logger.info(f"Checking for videos with missing posters...")
derived_path = Path(processed_root, "derived", info.video_id)
@@ -180,7 +191,7 @@
if should_create_poster:
if not derived_path.exists():
derived_path.mkdir(parents=True)
poster_time = 0
poster_time = int(info.duration * thumbnail_skip)
util.create_poster(video_path, derived_path / "poster.jpg", poster_time)
else:
logger.debug(f"Skipping creation of poster for video {info.video_id} because it exists at {str(poster_path)}")
@@ -216,10 +227,11 @@ def repair_symlinks():
logger.info(f"{dst} exists already")

@cli.command()
def sync_metadata():
@click.option("--video", "-v", help="The video to sync metadata from", default=None)
def sync_metadata(video):
with create_app().app_context():
paths = current_app.config['PATHS']
videos = VideoInfo.query.filter(VideoInfo.info==None).all()
videos = VideoInfo.query.filter(VideoInfo.video_id==video).all() if video else VideoInfo.query.filter(VideoInfo.info==None).all()
logger.info(f'Found {len(videos):,} videos without metadata')
for v in videos:
vpath = paths["processed"] / "video_links" / str(v.video_id + v.video.extension)
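
With the new --video option, sync_metadata can target a single record, which is what scan_video relies on via ctx.invoke(sync_metadata, video=video_id). A minimal sketch of that click pattern, using placeholder command bodies rather than Fireshare's real ones:

import click

@click.group()
def cli():
    pass

@cli.command()
@click.option("--video", "-v", default=None, help="Sync a single video by id")
def sync_metadata(video):
    target = video if video else "all videos missing metadata"
    click.echo(f"syncing metadata for {target}")

@cli.command()
@click.pass_context
def scan_video(ctx):
    # ctx.invoke runs another command's callback in-process, so scan_video
    # can sync one video's metadata before computing its poster time.
    ctx.invoke(sync_metadata, video="abc123")

if __name__ == "__main__":
    cli()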
@@ -352,6 +364,12 @@ def bulk_import(ctx, root):
return
util.create_lock(paths["data"])

thumbnail_skip = current_app.config['THUMBNAIL_VIDEO_LOCATION'] or 0
if thumbnail_skip > 0 and thumbnail_skip <= 100:
thumbnail_skip = thumbnail_skip / 100
else:
thumbnail_skip = 0

timing = {}
s = time.time()
ctx.invoke(scan_videos, root=root)
@@ -360,7 +378,7 @@
ctx.invoke(sync_metadata)
timing['sync_metadata'] = time.time() - s
s = time.time()
ctx.invoke(create_posters)
ctx.invoke(create_posters, skip=thumbnail_skip)
timing['create_posters'] = time.time() - s

logger.info(f"Finished bulk import. Timing info: {json.dumps(timing)}")
2 changes: 2 additions & 0 deletions docker-compose.yml
@@ -14,5 +14,7 @@ services:
- ADMIN_PASSWORD=admin
- SECRET_KEY=replace_this_with_some_random_string
- MINUTES_BETWEEN_VIDEO_SCANS=5
# The location video thumbnails are generated. A value between 0-100 where 50 would be the frame in the middle of the video file and 0 would be the first frame of the video.
- THUMBNAIL_VIDEO_LOCATION=0
- PUID=1000
- PGID=1000
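
Per the comment above, THUMBNAIL_VIDEO_LOCATION is a percentage of the video's duration. The arithmetic used in scan_video and bulk_import reduces to the small function below, assuming the duration is in seconds:

def poster_offset_seconds(thumbnail_location, duration_seconds):
    """Map a 0-100 percentage to the poster frame's offset in seconds."""
    skip = thumbnail_location or 0
    # Only values in (0, 100] are treated as a percentage; anything else
    # falls back to the first frame, matching the CLI's clamping.
    fraction = skip / 100 if 0 < skip <= 100 else 0
    return int(duration_seconds * fraction)

# THUMBNAIL_VIDEO_LOCATION=50 on a 120-second video -> poster at 60s;
# the default of 0 keeps the old behaviour of using the first frame.
assert poster_offset_seconds(50, 120) == 60
assert poster_offset_seconds(0, 120) == 0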
