#!/usr/bin/env bash
# Desc: Download YouTube videos
# Usage: $ ./bkytpldl-generic
# Version: 4.0.0

# Adjust me
dir_out="$HOME/Videos/"; # use $HOME: tilde does not expand inside double quotes
url_playlist1="https://www.youtube.com/playlist?list=PLxxx";
url_playlist2="https://www.youtube.com/playlist?list=PLxxx";
url_playlist3="https://www.youtube.com/playlist?list=PLxxx";

declare -a args; # array for yt-dlp arguments
declare -a urls urls_rand; # array for YouTube playlist URLs

yell() { echo "$0: $*" >&2; } # print script path and all args to stderr
die() { yell "$*"; exit 111; } # same as yell() but non-zero exit status
must() { "$@" || die "cannot $*"; } # runs args as command, reports args if command fails

# check dependencies
# Discard output to /dev/null (not /dev/random, which is the entropy device).
if ! command -v yt-dlp 1>/dev/null 2>&1; then die "FATAL:yt-dlp not found."; fi;

# Don't run multiple yt-dlp instances
if pgrep "^yt-dlp$" 1>/dev/null 2>&1; then die "FATAL:yt-dlp already running."; fi;

# Check directories (abort via must/die if creation fails)
if [[ ! -d $dir_out ]]; then must mkdir -p "$dir_out"; fi;

# == Download Video ==
# yt-dlp output options
## Restrict file name character set
#args+=("--restrict-filenames"); # Remove non-ASCII characters
args+=("--trim-filenames=120"); # Use in tandem with `%(title).120B`

## Request to write accompanying files
args+=("--write-subs"); # Write subtitles file
args+=("--write-auto-subs"); # Write subtitles file
#args+=("--all-subs"); # Download all available subtitles (causes many requests)
#subLangs="en.*,ja.*,id.*,es.*,zh-Hans.*,zh-Hant.*,sv.*,el.*,hi.*,ru.*,bn.*,fr.*,ko.*,ar.*,nv.*"; # custom language list
subLangs="en,en-orig,en.*"; # custom language list
args+=("--sub-langs" "$subLangs");
args+=("--write-info-json"); # Write accompanying json file
args+=("--no-overwrites"); # Don't overwrite files
args+=("--write-thumbnail"); # Write thumbnail

## Only download metadata
#args+=("--no-download"); # Don't download video file.
## Save meta-data
args+=("--write-comments"); # Get comments
### Limit comments
### comment_sort values:
###   top : use YouTube top comment algorithm
###   new : get newest comments (default)
### max_comments values:
###   max-comments : max number of parent comments or replies
###   max-parents : max number of comment threads
###   max-replies : max number of replies across all threads
###   max-replies-per-thread : max number of replies per thread
args+=("--extractor-args" "youtube:comment_sort=top;max_comments=10000,100,10000,100");

## Randomize order in which playlist items are downloaded
args+=("--playlist-random");

## Delay between downloads
minSleep="30";
# (RANDOM+RANDOM+RANDOM)/1200 averages ~41 (max ~81), so maxSleep averages
# roughly 70 seconds and never exceeds ~111.
maxSleep="$(( minSleep + (RANDOM + RANDOM + RANDOM) / ( 3 * 400) ))";
args+=("--min-sleep-interval" "$minSleep");
args+=("--max-sleep-interval" "$maxSleep");
args+=("--sleep-requests" "2"); # delay on metadata requests
args+=("--sleep-subtitles" "10"); # delay for subtitles

## Remember downloaded videos to avoid redownload attempts
pathDA="$dir_out"/.bkytpldl_history.txt;
args+=("--download-archive" "$pathDA");

## Use firefox 'default-release' profile cookies
## Example: Linux: from ~/.mozilla/firefox/deadbeef.default-release/
#args+=("--cookies-from-browser");
#args+=("firefox:deadbeef.default-release"); # Default Firefox profile name

## Specify output filename format
## Note: `%(title).120B` shortens title to 120 bytes (useful for
## titles with UTF-8 characters).
args+=("-o");
args+=("%(playlist)s/%(upload_date)s.%(channel)s.%(channel_id)s.%(title).120B.%(id)s.%(ext)s");

## Limit download resolution to 1080p
args+=("-S" "res:1080");

## Specify playlist URLs to download
urls+=("$url_playlist1");
urls+=("$url_playlist2");
urls+=("$url_playlist3");

### Shuffle playlist download order
mapfile -t urls_rand < <(printf "%s\n" "${urls[@]}" | shuf);
# Append the shuffled URLs as the trailing positional yt-dlp arguments.
args+=("${urls_rand[@]}");

# Change working directory to output dir
pushd "$dir_out" || die "FATAL:Failed to change pwd to:dir_out:$dir_out";

# Download videos
#yell "DEBUG:args:$(declare -p args)"; # debug command
must yt-dlp "${args[@]}"; # execute command
popd || die "FATAL:Failed to return from dir_out:$dir_out";

# Author: Steven Baltakatei Sandoval
# License: GPLv3+