diff --git a/README.md b/README.md index 7690cce..f7e0d6a 100644 --- a/README.md +++ b/README.md @@ -32,10 +32,12 @@ The default enabled libraries included in the `ffmpeg` build are: - libx264 - libx265 - libwebp +- libvpx +- libvorbis - libmp3lame The user-overridable compile options are: -- `ENABLE`: configure what ffmpeg enables (default: libsvtav1_psy libopus libdav1d libaom librav1e libvmaf libx264 libx265 libwebp libmp3lame) +- `ENABLE`: configure what ffmpeg enables (default: libsvtav1_psy libopus libdav1d libaom librav1e libvmaf libx264 libx265 libwebp libvpx libvorbis libmp3lame) - `PREFIX`: prefix to install to, default is local install in ./gitignore/sysroot (default: local) - `STATIC`: static or shared build (default: ON) - `LTO`: enable link time optimization (default: ON) diff --git a/lib/0-utils.sh b/lib/0-utils.sh new file mode 100644 index 0000000..c1cac61 --- /dev/null +++ b/lib/0-utils.sh @@ -0,0 +1,414 @@ +#!/usr/bin/env bash + +# shellcheck disable=SC2034 + +# ANSI colors +RED='\e[0;31m' +CYAN='\e[0;36m' +GREEN='\e[0;32m' +YELLOW='\e[0;33m' +NC='\e[0m' + +# echo wrappers +echo_wrapper() { + local args + if [[ $1 == '-n' ]]; then + args=("$1") + shift + fi + # COLOR is override for using ${color} + # shellcheck disable=SC2153 + if [[ ${COLOR} == 'OFF' ]]; then + color='' + endColor='' + else + endColor="${NC}" + fi + + echo -e "${args[@]}" "${color}${word:-''}${endColor}" "$@" +} +echo_fail() { color="${RED}" word="FAIL" echo_wrapper "$@"; } +echo_info() { color="${CYAN}" word="INFO" echo_wrapper "$@"; } +echo_pass() { color="${GREEN}" word="PASS" echo_wrapper "$@"; } +echo_warn() { color="${YELLOW}" word="WARN" echo_wrapper "$@"; } +echo_exit() { + echo_fail "$@" + exit 1 +} +void() { echo "$@" >/dev/null; } + +echo_if_fail() { + local cmd=("$@") + local logName="${LOGNAME:-${RANDOM}}-" + local out="${TMP_DIR}/${logName}stdout" + local err="${TMP_DIR}/${logName}stderr" + + # set trace to the cmdEvalTrace and open file descriptor + local cmdEvalTrace="${TMP_DIR}/${logName}cmdEvalTrace" + exec 5>"${cmdEvalTrace}" + export BASH_XTRACEFD=5 + + set -x + "${cmd[@]}" >"${out}" 2>"${err}" + local retval=$? + + # unset and close file descriptor + set +x + exec 5>&- + + # parse out relevant part of the trace + local cmdEvalLines=() + while IFS= read -r line; do + line="${line/${PS4}/}" + test "${line}" == 'set +x' && continue + test "${line}" == '' && continue + cmdEvalLines+=("${line}") + done <"${cmdEvalTrace}" + + if ! test ${retval} -eq 0; then + echo + echo_fail "command failed with ${retval}:" + printf "%s\n" "${cmdEvalLines[@]}" + echo_warn "command stdout:" + tail -n 32 "${out}" + echo_warn "command stderr:" + tail -n 32 "${err}" + echo + fi + if [[ -z ${LOGNAME} ]]; then + rm "${out}" "${err}" "${cmdEvalTrace}" + fi + return ${retval} +} + +is_root_owned() { + local path=$1 + local uid + + if stat --version >/dev/null 2>&1; then + # GNU coreutils (Linux) + uid=$(stat -c '%u' "$path") + else + # BSD/macOS + uid=$(stat -f '%u' "$path") + fi + + test "$uid" -eq 0 +} + +dump_arr() { + local arrayNames=("$@") + for arrayName in "${arrayNames[@]}"; do + declare -n array="${arrayName}" + arrayExpanded=("${array[@]}") + + # skip showing single element arrays by default + if [[ ! 
${#arrayExpanded[@]} -gt 1 ]]; then + if [[ ${SHOW_SINGLE} == true ]]; then + echo_info "${arrayName}='${arrayExpanded[*]}'" + else + continue + fi + fi + + echo + # don't care that the variable has "ARR" + echo_info "${arrayName//"_ARR"/}" + printf "\t%s\n" "${arrayExpanded[@]}" + done +} + +has_cmd() { + local cmds=("$@") + local rv=0 + for cmd in "${cmds[@]}"; do + command -v "${cmd}" >/dev/null 2>&1 || rv=1 + done + + return ${rv} +} + +missing_cmd() { + local cmds=("$@") + local rv=1 + for cmd in "${cmds[@]}"; do + if ! has_cmd "${cmd}"; then + echo_warn "missing ${cmd}" + rv=0 + fi + done + + return ${rv} +} + +bash_dirname() { + local tmp=${1:-.} + + [[ $tmp != *[!/]* ]] && { + printf '/\n' + return + } + + tmp=${tmp%%"${tmp##*[!/]}"} + + [[ $tmp != */* ]] && { + printf '.\n' + return + } + + tmp=${tmp%/*} + tmp=${tmp%%"${tmp##*[!/]}"} + + printf '%s\n' "${tmp:-/}" +} + +bash_basename() { + local tmp + path="$1" + suffix="${2:-''}" + + tmp=${path%"${path##*[!/]}"} + tmp=${tmp##*/} + tmp=${tmp%"${suffix/"$tmp"/}"} + + printf '%s\n' "${tmp:-/}" +} + +bash_realpath() { + local file=$1 + local dir + + # If the file is already absolute + [[ $file == /* ]] && { + printf '%s\n' "$file" + return + } + + # Otherwise: split into directory + basename + dir="$(bash_dirname "${file}")" + file="$(bash_basename "${file}")" + + # If no directory component, use current directory + if [[ $dir == "$file" ]]; then + dir="$PWD" + else + # Save current dir, move into target dir, capture $PWD, then return + local oldpwd="$PWD" + cd "$dir" || return 1 + dir="$PWD" + cd "$oldpwd" || return 1 + fi + + printf '%s/%s\n' "$dir" "$file" +} + +line_contains() { + local line="$1" + local substr="$2" + if [[ $line == *"${substr}"* ]]; then + return 0 + else + return 1 + fi +} + +line_starts_with() { + local line="$1" + local substr="$2" + if [[ $line == "${substr}"* ]]; then + return 0 + else + return 1 + fi +} + +is_linux() { + line_contains "${OSTYPE}" 'linux' +} + +is_darwin() { + line_contains "$(print_os)" darwin +} + +is_windows() { + line_contains "$(print_os)" windows +} + +is_android() { + line_contains "$(print_os)" android +} + +print_os() { + # cached response + if [[ -n ${FB_OS} ]]; then + echo "${FB_OS}" + return 0 + fi + + unset FB_OS + if [[ -f /etc/os-release ]]; then + source /etc/os-release + FB_OS="${ID}" + if [[ ${VERSION_ID} != '' ]]; then + FB_OS+="-${VERSION_ID}" + fi + if line_starts_with "${FB_OS}" 'arch'; then + FB_OS='archlinux' + fi + else + FB_OS="$(uname -o)" + fi + + # lowercase + FB_OS="${FB_OS,,}" + + # special treatment for windows + if line_contains "${FB_OS}" 'windows' || line_contains "${FB_OS}" 'msys'; then + FB_OS='windows' + fi + + echo "${FB_OS}" +} + +is_positive_integer() { + local input="$1" + if [[ ${input} != ?(-)+([[:digit:]]) || ${input} -lt 0 ]]; then + echo_fail "${input} is not a positive integer" + return 1 + fi + return 0 +} + +replace_line() { + local file="$1" + local search="$2" + local newLine="$3" + local newFile="${TMP_DIR}/$(bash_basename "${file}")" + + test -f "${newFile}" && rm "${newFile}" + while read -r line; do + if line_contains "${line}" "${search}"; then + echo -en "${newLine}" >>"${newFile}" + continue + fi + echo "${line}" >>"${newFile}" + done <"${file}" + + cp "${newFile}" "${file}" +} + +remove_line() { + local file="$1" + local search="$2" + replace_line "${file}" "${search}" '' +} + +bash_sort() { + local arr=("$@") + local n=${#arr[@]} + local i j val1 val2 + + # Bubble sort, numeric comparison + for ((i = 0; i < n; i++)); do + for ((j = 
0; j < n - i - 1; j++)); do + read -r val1 _ <<<"${arr[j]}" + read -r val2 _ <<<"${arr[j + 1]}" + if (("${val1}" > "${val2}")); then + local tmp=${arr[j]} + arr[j]=${arr[j + 1]} + arr[j + 1]=$tmp + fi + done + done + + printf '%s\n' "${arr[@]}" +} + +_start_spinner() { + local spinChars=( + "-" + '\' + "|" + "/" + ) + + sleep 1 + + while true; do + for ((ind = 0; ind < "${#spinChars[@]}"; ind++)); do + echo -ne "${spinChars[${ind}]}" '\b\b' + sleep .25 + done + done +} + +spinner() { + local action="$1" + local spinPidFile="${TMP_DIR}/.spinner-pid" + case "${action}" in + start) + test -f "${spinPidFile}" && rm "${spinPidFile}" + + # don't want to clutter logs if running headless + test "${HEADLESS}" == '1' && return + + _start_spinner & + echo $! >"${spinPidFile}" + ;; + stop) + test -f "${spinPidFile}" && kill "$(<"${spinPidFile}")" + echo -ne ' \n' + ;; + esac +} + +get_pkgconfig_version() { + local pkg="$1" + pkg-config --modversion "${pkg}" +} + +using_cmake_4() { + local cmakeVersion + IFS=$' \t' read -r _ _ cmakeVersion <<<"$(command cmake --version)" + line_starts_with "${cmakeVersion}" 4 +} + +recreate_dir() { + local dirs=("$@") + for dir in "${dirs[@]}"; do + test -d "${dir}" && rm -rf "${dir}" + mkdir -p "${dir}" || return 1 + done +} + +ensure_dir() { + local dirs=("$@") + for dir in "${dirs[@]}"; do + test -d "${dir}" || mkdir -p "${dir}" || return 1 + done +} + +get_remote_head() { + local url="$1" + local remoteHEAD='' + IFS=$' \t' read -r remoteHEAD _ <<< \ + "$(git ls-remote "${url}" HEAD)" + echo "${remoteHEAD}" +} + +fb_max() { + local a="$1" + local b="$2" + test "${a}" -gt "${b}" && + echo "${a}" || + echo "${b}" +} + +print_padded() { + local str="$1" + local padding="$2" + echo -n "${str}" + for ((i = 0; i < padding - ${#str}; i++)); do + echo -n ' ' + done +} diff --git a/lib/build.sh b/lib/build.sh index d229365..99516d8 100644 --- a/lib/build.sh +++ b/lib/build.sh @@ -1,495 +1,514 @@ #!/usr/bin/env bash set_compile_opts() { - test "${FB_COMPILE_OPTS_SET}" == 1 && return 0 + test "${FB_COMPILE_OPTS_SET}" == 1 && return 0 - EXPORTED_ENV_NAMES=( - CC - CFLAGS - CXX - CXXFLAGS - CPPFLAGS - LDFLAGS - RUSTFLAGS - PKG_CONFIG_PATH - ) - BUILD_ENV_NAMES=( - "${EXPORTED_ENV_NAMES[@]}" - CFLAGS_ARR - CPPFLAGS_ARR - LDFLAGS_ARR - USE_LD - RUSTFLAGS_ARR - CONFIGURE_FLAGS - MESON_FLAGS - CMAKE_FLAGS - FFMPEG_EXTRA_FLAGS - CARGO_CINSTALL_FLAGS - LTO_FLAG - PGO_FLAG - LIB_SUFF - BUILD_TYPE - ) - unset "${BUILD_ENV_NAMES[@]}" - export "${EXPORTED_ENV_NAMES[@]}" + EXPORTED_ENV_NAMES=( + CC + CFLAGS + CXX + CXXFLAGS + CPPFLAGS + LDFLAGS + RUSTFLAGS + PKG_CONFIG_PATH + ) + BUILD_ENV_NAMES=( + "${EXPORTED_ENV_NAMES[@]}" + CFLAGS_ARR + CPPFLAGS_ARR + LDFLAGS_ARR + USE_LD + RUSTFLAGS_ARR + CONFIGURE_FLAGS + MESON_FLAGS + CMAKE_FLAGS + FFMPEG_EXTRA_FLAGS + CARGO_CINSTALL_FLAGS + LTO_FLAG + PGO_FLAG + LIB_SUFF + BUILD_TYPE + ) + unset "${BUILD_ENV_NAMES[@]}" + export "${EXPORTED_ENV_NAMES[@]}" - # set job count for all builds - JOBS="$(nproc)" - # local vs system prefix - test "${PREFIX}" == 'local' && PREFIX="${LOCAL_PREFIX}" + # set job count for all builds + JOBS="$(nproc)" + # local vs system prefix + test "${PREFIX}" == 'local' && PREFIX="${LOCAL_PREFIX}" - # check if we need to handle PREFIX with sudo - local testfile='' - if [[ -d ${PREFIX} ]]; then - testfile="${PREFIX}/ffmpeg-build-testfile" - else - # try creating in parent path - testfile="$(bash_dirname "${PREFIX}")/ffmpeg-build-testfile" - fi - unset SUDO_MODIFY - if touch "${testfile}" 2>/dev/null; then - SUDO_MODIFY='' - 
else - SUDO_MODIFY="${SUDO}" - echo_warn "using ${SUDO}to install" - ${SUDO_MODIFY} mkdir -p "${PREFIX}/bin/" || return 1 - fi - test -f "${testfile}" && ${SUDO_MODIFY} rm "${testfile}" - test -d "${PREFIX}" || { ${SUDO_MODIFY} mkdir -p "${PREFIX}" || return 1; } + # check if we need to handle PREFIX with sudo + local testfile='' + if [[ -d ${PREFIX} ]]; then + testfile="${PREFIX}/ffmpeg-build-testfile" + else + # try creating in parent path + testfile="$(bash_dirname "${PREFIX}")/ffmpeg-build-testfile" + fi + unset SUDO_MODIFY + if touch "${testfile}" 2>/dev/null; then + SUDO_MODIFY='' + else + SUDO_MODIFY="${SUDO}" + echo_warn "using ${SUDO}to install" + ${SUDO_MODIFY} mkdir -p "${PREFIX}/bin/" || return 1 + fi + test -f "${testfile}" && ${SUDO_MODIFY} rm "${testfile}" + test -d "${PREFIX}" || { ${SUDO_MODIFY} mkdir -p "${PREFIX}" || return 1; } - # set library/pkgconfig directory - LIBDIR="${PREFIX}/lib" - LDFLAGS_ARR=("-L${LIBDIR}") + # set library/pkgconfig directory + LIBDIR="${PREFIX}/lib" + LDFLAGS_ARR=("-L${LIBDIR}") - # android has different library location/names - # cannot build static due to missing liblog - if is_android; then - if [[ ${STATIC} == 'ON' ]]; then - echo_warn "$(print_os) does not support STATIC=${STATIC}" - STATIC=OFF - echo_warn "setting STATIC=${STATIC}" - fi - LDFLAGS_ARR+=( - "-L/system/lib64" - "-lm" - "-landroid-shmem" - "-landroid-posix-semaphore" - ) - fi + # android has different library location/names + # cannot build static due to missing liblog + if is_android; then + if [[ ${STATIC} == 'ON' ]]; then + echo_warn "$(print_os) does not support STATIC=${STATIC}" + STATIC=OFF + echo_warn "setting STATIC=${STATIC}" + fi + LDFLAGS_ARR+=( + "-L/system/lib64" + "-lm" + "-landroid-shmem" + "-landroid-posix-semaphore" + ) + fi - # use clang - CC=clang - CXX=clang++ - CMAKE_FLAGS+=( - "-DCMAKE_C_COMPILER=${CC}" - "-DCMAKE_CXX_COMPILER=${CXX}" - ) - FFMPEG_EXTRA_FLAGS+=( - "--cc=${CC}" - "--cxx=${CXX}" - ) + # use clang + CC=clang + CXX=clang++ + CMAKE_FLAGS+=( + "-DCMAKE_C_COMPILER=${CC}" + "-DCMAKE_CXX_COMPILER=${CXX}" + ) + FFMPEG_EXTRA_FLAGS+=( + "--cc=${CC}" + "--cxx=${CXX}" + ) - # hack PATH to inject use of lld as linker - # PATH cc/c++ may be hardcoded as gcc - # which breaks when trying to use clang/lld - # not supported on darwin - if ! is_darwin; then - USE_LD=lld - LDFLAGS_ARR+=("-fuse-ld=${USE_LD}") - # android does not like LINKER_TYPE despite only using lld - if ! is_android; then - CMAKE_FLAGS+=("-DCMAKE_LINKER_TYPE=${USE_LD^^}") - fi - CMAKE_FLAGS+=("-DCMAKE_LINKER=${USE_LD}") - local compilerDir="${LOCAL_PREFIX}/compiler-tools" - recreate_dir "${compilerDir}" || return 1 - # real:gnu:clang:generic - local compilerMap="\ + # hack PATH to inject use of lld as linker + # PATH cc/c++ may be hardcoded as gcc + # which breaks when trying to use clang/lld + # not supported on darwin + if ! is_darwin; then + USE_LD=lld + LDFLAGS_ARR+=("-fuse-ld=${USE_LD}") + # android does not like LINKER_TYPE despite only using lld + if ! 
is_android; then + CMAKE_FLAGS+=("-DCMAKE_LINKER_TYPE=${USE_LD^^}") + fi + CMAKE_FLAGS+=("-DCMAKE_LINKER=${USE_LD}") + local compilerDir="${LOCAL_PREFIX}/compiler-tools" + recreate_dir "${compilerDir}" || return 1 + # real:gnu:clang:generic + local compilerMap="\ ${CC}:gcc:clang:cc ${CXX}:g++:clang++:c++ ld.lld:ld:lld:ld" - local realT gnuT clangT genericT - while read -r line; do - IFS=: read -r realT gnuT clangT genericT <<<"${line}" - # full path to the real tool - realT="$(command -v "${realT}")" + local realT gnuT clangT genericT + while read -r line; do + IFS=: read -r realT gnuT clangT genericT <<<"${line}" + # full path to the real tool + realT="$(command -v "${realT}")" - # add fuse-ld for the compiler - local addFlag='-v' - if line_contains "${realT}" clang; then addFlag+=" -fuse-ld=${USE_LD}"; fi + # add fuse-ld for the compiler + local addFlag='-v' + if line_contains "${realT}" clang; then addFlag+=" -fuse-ld=${USE_LD}"; fi - # create generic tool version - echo "#!/usr/bin/env bash + # create generic tool version + echo "#!/usr/bin/env bash echo \$@ > ${compilerDir}/${genericT}.last-command exec \"${realT}\" ${addFlag} \"\$@\"" >"${compilerDir}/${genericT}" - chmod +x "${compilerDir}/${genericT}" - echo_if_fail "${compilerDir}/${genericT}" --version || return 1 + chmod +x "${compilerDir}/${genericT}" + echo_if_fail "${compilerDir}/${genericT}" --version || return 1 - # copy generic to gnu/clang variants - # cp "${compilerDir}/${genericT}" "${compilerDir}/${gnuT}" 2>/dev/null - # cp "${compilerDir}/${genericT}" "${compilerDir}/${clangT}" 2>/dev/null - done <<<"${compilerMap}" + # copy generic to gnu/clang variants + # cp "${compilerDir}/${genericT}" "${compilerDir}/${gnuT}" 2>/dev/null + # cp "${compilerDir}/${genericT}" "${compilerDir}/${clangT}" 2>/dev/null + done <<<"${compilerMap}" - # also add fake which command in case one does not exist - # shellcheck disable=SC2016 - echo '#!/usr/bin/env bash + # also add fake which command in case one does not exist + # shellcheck disable=SC2016 + echo '#!/usr/bin/env bash which="" test -f /bin/which && which=/bin/which test -f /usr/bin/which && which=/usr/bin/which if [[ ${which} == "" ]]; then - command -v "$@" + command -v "$@" else - ${which} "$@" + ${which} "$@" fi' >"${compilerDir}/which" - chmod +x "${compilerDir}/which" - export PATH="${compilerDir}:${PATH}" - fi + chmod +x "${compilerDir}/which" + export PATH="${compilerDir}:${PATH}" + fi - # set prefix flags and basic flags - CONFIGURE_FLAGS+=( - "--prefix=${PREFIX}" - "--libdir=${LIBDIR}" - "--disable-debug" - ) - MESON_FLAGS+=( - "--prefix" "${PREFIX}" - "--libdir" "lib" - "--bindir" "bin" - "--buildtype" "release" - ) - CMAKE_FLAGS+=( - "-DCMAKE_PREFIX_PATH=${PREFIX}" - "-DCMAKE_INSTALL_PREFIX=${PREFIX}" - "-DCMAKE_INSTALL_LIBDIR=lib" - "-DCMAKE_BUILD_TYPE=Release" - "-DCMAKE_C_COMPILER_LAUNCHER=ccache" - "-DCMAKE_CXX_COMPILER_LAUNCHER=ccache" - "-DCMAKE_VERBOSE_MAKEFILE=ON" - "-G" "Ninja" - ) - CARGO_CINSTALL_FLAGS=( - "--release" - "--verbose" - "--prefix" "${PREFIX}" - "--libdir" "${LIBDIR}" - ) - PKG_CONFIG_PATH="${LIBDIR}/pkgconfig" + # set prefix flags and basic flags + CONFIGURE_FLAGS+=( + "--prefix=${PREFIX}" + "--libdir=${LIBDIR}" + "--disable-debug" + ) + MESON_FLAGS+=( + "--prefix" "${PREFIX}" + "--libdir" "lib" + "--bindir" "bin" + "--buildtype" "release" + ) + CMAKE_FLAGS+=( + "-DCMAKE_PREFIX_PATH=${PREFIX}" + "-DCMAKE_INSTALL_PREFIX=${PREFIX}" + "-DCMAKE_INSTALL_LIBDIR=lib" + "-DCMAKE_BUILD_TYPE=Release" + "-DCMAKE_C_COMPILER_LAUNCHER=ccache" + 
"-DCMAKE_CXX_COMPILER_LAUNCHER=ccache" + "-DCMAKE_VERBOSE_MAKEFILE=ON" + "-G" "Ninja" + ) + CARGO_CINSTALL_FLAGS=( + "--release" + "--verbose" + "--prefix" "${PREFIX}" + "--libdir" "${LIBDIR}" + ) + PKG_CONFIG_PATH="${LIBDIR}/pkgconfig" - # cmake version 4 breaks some builds - if using_cmake_4; then - CMAKE_FLAGS+=("-DCMAKE_POLICY_VERSION_MINIMUM=3.5") - fi + # cmake version 4 breaks some builds + if using_cmake_4; then + CMAKE_FLAGS+=("-DCMAKE_POLICY_VERSION_MINIMUM=3.5") + fi - # add prefix include - # TODO use cygpath for windows - CPPFLAGS_ARR+=("-I${PREFIX}/include") + # add prefix include + # TODO use cygpath for windows + CPPFLAGS_ARR+=("-I${PREFIX}/include") - # if PGO is enabled, first build run will be to generate - # second run will be to use generated profdata - if [[ ${PGO} == 'ON' ]]; then - if [[ ${PGO_RUN} == 'generate' ]]; then - PGO_FLAG="-fprofile-generate" - CFLAGS_ARR+=("${PGO_FLAG}") - LDFLAGS_ARR+=("${PGO_FLAG}") - else - PGO_FLAG="-fprofile-use=${PGO_PROFDATA}" - CFLAGS_ARR+=("${PGO_FLAG}") - LDFLAGS_ARR+=("${PGO_FLAG}") - fi - fi + # if PGO is enabled, first build run will be to generate + # second run will be to use generated profdata + if [[ ${PGO} == 'ON' ]]; then + if [[ ${PGO_RUN} == 'generate' ]]; then + PGO_FLAG="-fprofile-generate" + CFLAGS_ARR+=("${PGO_FLAG}") + LDFLAGS_ARR+=("${PGO_FLAG}") + else + PGO_FLAG="-fprofile-use=${PGO_PROFDATA}" + CFLAGS_ARR+=("${PGO_FLAG}") + LDFLAGS_ARR+=("${PGO_FLAG}") + fi + fi - # enabling link-time optimization - if [[ ${LTO} == 'ON' ]]; then - LTO_FLAG='-flto=full' - CFLAGS_ARR+=("${LTO_FLAG}") - LDFLAGS_ARR+=("${LTO_FLAG}") - CONFIGURE_FLAGS+=('--enable-lto') - MESON_FLAGS+=("-Db_lto=true") - else - LTO_FLAG='unreachable-flag' - MESON_FLAGS+=("-Db_lto=false") - fi - # setting optimization level - if [[ ${OPT} == '' ]]; then - OPT='0' - fi - CFLAGS_ARR+=("-O${OPT}") - MESON_FLAGS+=("--optimization=${OPT}") + # enabling link-time optimization + if [[ ${LTO} == 'ON' ]]; then + LTO_FLAG='-flto=full' + CFLAGS_ARR+=("${LTO_FLAG}") + LDFLAGS_ARR+=("${LTO_FLAG}") + CONFIGURE_FLAGS+=('--enable-lto') + MESON_FLAGS+=("-Db_lto=true") + else + LTO_FLAG='unreachable-flag' + MESON_FLAGS+=("-Db_lto=false") + fi + # setting optimization level + if [[ ${OPT} == '' ]]; then + OPT='0' + fi + CFLAGS_ARR+=("-O${OPT}") + MESON_FLAGS+=("--optimization=${OPT}") - STATIC_LIB_SUFF='a' - # darwin has different suffix for dynamic libraries - if is_darwin; then - SHARED_LIB_SUFF='dylib' - else - SHARED_LIB_SUFF='so' - fi + STATIC_LIB_SUFF='a' + # darwin has different suffix for dynamic libraries + if is_darwin; then + SHARED_LIB_SUFF='dylib' + else + SHARED_LIB_SUFF='so' + fi - # static/shared linking - if [[ ${STATIC} == 'ON' ]]; then - BUILD_TYPE=static - CONFIGURE_FLAGS+=( - '--enable-static' - '--disable-shared' - ) - MESON_FLAGS+=('--default-library=static') - CMAKE_FLAGS+=( - "-DENABLE_STATIC=${STATIC}" - "-DENABLE_SHARED=OFF" - "-DBUILD_SHARED_LIBS=OFF" - ) - # darwin does not support -static - if is_darwin; then - FFMPEG_EXTRA_FLAGS+=("--extra-ldflags=${LDFLAGS_ARR[*]}") - CMAKE_FLAGS+=("-DCMAKE_EXE_LINKER_FLAGS=${LDFLAGS_ARR[*]}") - else - FFMPEG_EXTRA_FLAGS+=("--extra-ldflags=${LDFLAGS_ARR[*]} -static") - CMAKE_FLAGS+=("-DCMAKE_EXE_LINKER_FLAGS=${LDFLAGS_ARR[*]} -static") - fi - FFMPEG_EXTRA_FLAGS+=("--pkg-config-flags=--static") - # remove shared libraries for static builds - USE_LIB_SUFF="${STATIC_LIB_SUFF}" - DEL_LIB_SUFF="${SHARED_LIB_SUFF}" - else - BUILD_TYPE=shared - CMAKE_FLAGS+=( - "-DENABLE_STATIC=${STATIC}" - "-DENABLE_SHARED=ON" 
- "-DBUILD_SHARED_LIBS=ON" - "-DCMAKE_INSTALL_RPATH=${LIBDIR}" - "-DCMAKE_BUILD_WITH_INSTALL_RPATH=ON" - "-DCMAKE_EXE_LINKER_FLAGS=${LDFLAGS_ARR[*]}" - ) - if is_darwin; then - CMAKE_FLAGS+=( - "-DCMAKE_MACOSX_RPATH=ON" - "-DCMAKE_INSTALL_NAME_DIR=@rpath" - ) - fi - FFMPEG_EXTRA_FLAGS+=("--extra-ldflags=${LDFLAGS_ARR[*]}") - LDFLAGS_ARR+=("-Wl,-rpath,${LIBDIR}") - CONFIGURE_FLAGS+=( - '--enable-shared' - '--disable-static' - ) - FFMPEG_EXTRA_FLAGS+=('--enable-rpath') - # remove static libraries for shared builds - USE_LIB_SUFF="${SHARED_LIB_SUFF}" - DEL_LIB_SUFF="${STATIC_LIB_SUFF}" - fi + # static/shared linking + if [[ ${STATIC} == 'ON' ]]; then + BUILD_TYPE=static + CONFIGURE_FLAGS+=( + '--enable-static' + '--disable-shared' + ) + MESON_FLAGS+=('--default-library=static') + CMAKE_FLAGS+=( + "-DENABLE_STATIC=${STATIC}" + "-DENABLE_SHARED=OFF" + "-DBUILD_SHARED_LIBS=OFF" + ) + # darwin does not support -static + if is_darwin; then + FFMPEG_EXTRA_FLAGS+=("--extra-ldflags=${LDFLAGS_ARR[*]}") + CMAKE_FLAGS+=("-DCMAKE_EXE_LINKER_FLAGS=${LDFLAGS_ARR[*]}") + else + FFMPEG_EXTRA_FLAGS+=("--extra-ldflags=${LDFLAGS_ARR[*]} -static") + CMAKE_FLAGS+=("-DCMAKE_EXE_LINKER_FLAGS=${LDFLAGS_ARR[*]} -static") + fi + FFMPEG_EXTRA_FLAGS+=("--pkg-config-flags=--static") + # remove shared libraries for static builds + USE_LIB_SUFF="${STATIC_LIB_SUFF}" + DEL_LIB_SUFF="${SHARED_LIB_SUFF}" + else + BUILD_TYPE=shared + CMAKE_FLAGS+=( + "-DENABLE_STATIC=${STATIC}" + "-DENABLE_SHARED=ON" + "-DBUILD_SHARED_LIBS=ON" + "-DCMAKE_INSTALL_RPATH=${LIBDIR}" + "-DCMAKE_BUILD_WITH_INSTALL_RPATH=ON" + "-DCMAKE_EXE_LINKER_FLAGS=${LDFLAGS_ARR[*]}" + ) + if is_darwin; then + CMAKE_FLAGS+=( + "-DCMAKE_MACOSX_RPATH=ON" + "-DCMAKE_INSTALL_NAME_DIR=@rpath" + ) + fi + FFMPEG_EXTRA_FLAGS+=("--extra-ldflags=${LDFLAGS_ARR[*]}") + LDFLAGS_ARR+=("-Wl,-rpath,${LIBDIR}") + CONFIGURE_FLAGS+=( + '--enable-shared' + '--disable-static' + ) + FFMPEG_EXTRA_FLAGS+=('--enable-rpath') + # remove static libraries for shared builds + USE_LIB_SUFF="${SHARED_LIB_SUFF}" + DEL_LIB_SUFF="${STATIC_LIB_SUFF}" + fi - # architecture/cpu compile flags - # arm prefers -mcpu over -march for native builds - # https://community.arm.com/arm-community-blogs/b/tools-software-ides-blog/posts/compiler-flags-across-architectures-march-mtune-and-mcpu - local arch_flags=() - if [[ ${HOSTTYPE} == "aarch64" && ${ARCH} == 'native' ]]; then - arch_flags+=("-mcpu=${ARCH}") - else - arch_flags+=("-march=${ARCH}") - fi + # architecture/cpu compile flags + # arm prefers -mcpu over -march for native builds + # https://community.arm.com/arm-community-blogs/b/tools-software-ides-blog/posts/compiler-flags-across-architectures-march-mtune-and-mcpu + local arch_flags=() + if [[ ${HOSTTYPE} == "aarch64" && ${ARCH} == 'native' ]]; then + arch_flags+=("-mcpu=${ARCH}") + else + arch_flags+=("-march=${ARCH}") + fi - # can fail static builds with -fpic - # warning: too many GOT entries for -fpic, please recompile with -fPIC - CFLAGS_ARR+=("${arch_flags[@]}" "-fPIC") - RUSTFLAGS_ARR+=("-C target-cpu=${ARCH}") + # can fail static builds with -fpic + # warning: too many GOT entries for -fpic, please recompile with -fPIC + CFLAGS_ARR+=("${arch_flags[@]}" "-fPIC") + RUSTFLAGS_ARR+=("-C target-cpu=${ARCH}") - # set exported env names to stringified arrays - CPPFLAGS="${CPPFLAGS_ARR[*]}" - CFLAGS="${CFLAGS_ARR[*]} ${CPPFLAGS}" - CXXFLAGS="${CFLAGS}" - LDFLAGS="${LDFLAGS_ARR[*]}" - RUSTFLAGS="${RUSTFLAGS_ARR[*]}" + # set exported env names to stringified arrays + CPPFLAGS="${CPPFLAGS_ARR[*]}" + 
CFLAGS="${CFLAGS_ARR[*]} ${CPPFLAGS}" + CXXFLAGS="${CFLAGS}" + LDFLAGS="${LDFLAGS_ARR[*]}" + RUSTFLAGS="${RUSTFLAGS_ARR[*]}" - CMAKE_FLAGS+=( - "-DCMAKE_CFLAGS=${CFLAGS}" - "-DCMAKE_CXX_FLAGS=${CFLAGS}" - ) - MESON_FLAGS+=( - "-Dc_args=${CFLAGS}" - "-Dcpp_args=${CFLAGS}" - "-Dc_link_args=${LDFLAGS}" - "-Dcpp_link_args=${LDFLAGS}" - ) + CMAKE_FLAGS+=( + "-DCMAKE_CFLAGS=${CFLAGS}" + "-DCMAKE_CXX_FLAGS=${CFLAGS}" + ) + MESON_FLAGS+=( + "-Dc_args=${CFLAGS}" + "-Dcpp_args=${CFLAGS}" + "-Dc_link_args=${LDFLAGS}" + "-Dcpp_link_args=${LDFLAGS}" + ) - # extra ffmpeg flags - FFMPEG_EXTRA_FLAGS+=( - "--extra-cflags=${CFLAGS}" - "--extra-cxxflags=${CFLAGS}" - '--pkg-config=pkg-config' - ) + # extra ffmpeg flags + FFMPEG_EXTRA_FLAGS+=( + "--extra-cflags=${CFLAGS}" + "--extra-cxxflags=${CFLAGS}" + '--pkg-config=pkg-config' + ) - dump_arr "${BUILD_ENV_NAMES[@]}" + dump_arr "${BUILD_ENV_NAMES[@]}" - FB_COMPILE_OPTS_SET=1 - echo -} - -get_remote_head() { - local url="$1" - local remoteHEAD='' - IFS=$' \t' read -r remoteHEAD _ <<< \ - "$(git ls-remote "${url}" HEAD)" - echo "${remoteHEAD}" + FB_COMPILE_OPTS_SET=1 + echo } get_build_conf() { - local getBuild="${1}" + local getBuild="${1}" - # name version file-extension url dep1,dep2 - # shellcheck disable=SC2016 - local BUILDS_CONF=' -ffmpeg 8.0 tar.gz https://github.com/FFmpeg/FFmpeg/archive/refs/tags/n${ver}.${ext} + local longestBuild=0 + local longestVer=0 + local longestExt=0 + local padding=4 -libsvtav1_psy 3.0.2-B tar.gz https://github.com/BlueSwordM/svt-av1-psyex/archive/refs/tags/v${ver}.${ext} dovi_tool,hdr10plus_tool,cpuinfo -hdr10plus_tool 1.7.1 tar.gz https://github.com/quietvoid/hdr10plus_tool/archive/refs/tags/${ver}.${ext} -dovi_tool 2.3.1 tar.gz https://github.com/quietvoid/dovi_tool/archive/refs/tags/${ver}.${ext} -cpuinfo latest git https://github.com/pytorch/cpuinfo/ + # name version file-extension url dep1,dep2 + # shellcheck disable=SC2016 + local BUILDS_CONF=' +ffmpeg 8.0.1 tar.gz https://github.com/FFmpeg/FFmpeg/archive/refs/tags/n${ver}.${ext} -libsvtav1 3.1.2 tar.gz https://gitlab.com/AOMediaCodec/SVT-AV1/-/archive/v${ver}/SVT-AV1-v${ver}.${ext} -librav1e 0.8.1 tar.gz https://github.com/xiph/rav1e/archive/refs/tags/v${ver}.${ext} -libaom 3.13.1 tar.gz https://storage.googleapis.com/aom-releases/libaom-${ver}.${ext} -libvmaf 3.0.0 tar.gz https://github.com/Netflix/vmaf/archive/refs/tags/v${ver}.${ext} -libopus 1.5.2 tar.gz https://github.com/xiph/opus/releases/download/v${ver}/opus-${ver}.${ext} -libdav1d 1.5.1 tar.xz http://downloads.videolan.org/videolan/dav1d/${ver}/dav1d-${ver}.${ext} -libx264 latest git https://code.videolan.org/videolan/x264.git -libmp3lame 3.100 tar.gz https://pilotfiber.dl.sourceforge.net/project/lame/lame/${ver}/lame-${ver}.${ext} -libvpx 1.15.2 tar.gz https://github.com/webmproject/libvpx/archive/refs/tags/v${ver}.${ext} +libsvtav1_psy 3.0.2-B tar.gz https://github.com/BlueSwordM/svt-av1-psyex/archive/refs/tags/v${ver}.${ext} dovi_tool,hdr10plus_tool,cpuinfo +hdr10plus_tool 1.7.2 tar.gz https://github.com/quietvoid/hdr10plus_tool/archive/refs/tags/${ver}.${ext} +dovi_tool 2.3.1 tar.gz https://github.com/quietvoid/dovi_tool/archive/refs/tags/${ver}.${ext} +cpuinfo latest git https://github.com/pytorch/cpuinfo/ -libvorbis 1.3.7 tar.xz https://github.com/xiph/vorbis/releases/download/v${ver}/libvorbis-${ver}.${ext} libogg -libogg 1.3.6 tar.xz https://github.com/xiph/ogg/releases/download/v${ver}/libogg-${ver}.${ext} +libsvtav1 3.1.2 tar.gz 
https://gitlab.com/AOMediaCodec/SVT-AV1/-/archive/v${ver}/SVT-AV1-v${ver}.${ext} +librav1e 0.8.1 tar.gz https://github.com/xiph/rav1e/archive/refs/tags/v${ver}.${ext} +libaom 3.13.1 tar.gz https://storage.googleapis.com/aom-releases/libaom-${ver}.${ext} +libvmaf 3.0.0 tar.gz https://github.com/Netflix/vmaf/archive/refs/tags/v${ver}.${ext} +libopus 1.6 tar.gz https://github.com/xiph/opus/archive/refs/tags/v${ver}.${ext} +libdav1d 1.5.3 tar.xz https://downloads.videolan.org/videolan/dav1d/${ver}/dav1d-${ver}.${ext} +libx264 latest git https://code.videolan.org/videolan/x264.git +libmp3lame 3.100 tar.gz https://pilotfiber.dl.sourceforge.net/project/lame/lame/${ver}/lame-${ver}.${ext} +libvpx 1.15.2 tar.gz https://github.com/webmproject/libvpx/archive/refs/tags/v${ver}.${ext} -libwebp 1.6.0 tar.gz https://github.com/webmproject/libwebp/archive/refs/tags/v${ver}.${ext} libpng,libjpeg -libjpeg 3.0.3 tar.gz https://github.com/winlibs/libjpeg/archive/refs/tags/libjpeg-turbo-${ver}.${ext} -libpng 1.6.50 tar.gz https://github.com/pnggroup/libpng/archive/refs/tags/v${ver}.${ext} zlib -zlib 1.3.1 tar.gz https://github.com/madler/zlib/archive/refs/tags/v${ver}.${ext} +libvorbis 1.3.7 tar.xz https://github.com/xiph/vorbis/releases/download/v${ver}/libvorbis-${ver}.${ext} libogg +libogg 1.3.6 tar.xz https://github.com/xiph/ogg/releases/download/v${ver}/libogg-${ver}.${ext} -libplacebo 7.351.0 tar.gz https://github.com/haasn/libplacebo/archive/refs/tags/v${ver}.${ext} glslang,vulkan_loader,glad -glslang 16.0.0 tar.gz https://github.com/KhronosGroup/glslang/archive/refs/tags/${ver}.${ext} spirv_tools -spirv_tools 2025.4 tar.gz https://github.com/KhronosGroup/SPIRV-Tools/archive/refs/tags/v${ver}.${ext} spirv_headers -spirv_headers 1.4.328.1 tar.gz https://github.com/KhronosGroup/SPIRV-Headers/archive/refs/tags/vulkan-sdk-${ver}.${ext} -glad 2.0.8 tar.gz https://github.com/Dav1dde/glad/archive/refs/tags/v${ver}.${ext} +libwebp 1.6.0 tar.gz https://github.com/webmproject/libwebp/archive/refs/tags/v${ver}.${ext} libpng,libjpeg +libjpeg 3.0.3 tar.gz https://github.com/winlibs/libjpeg/archive/refs/tags/libjpeg-turbo-${ver}.${ext} +libpng 1.6.53 tar.gz https://github.com/pnggroup/libpng/archive/refs/tags/v${ver}.${ext} zlib +zlib 1.3.1 tar.gz https://github.com/madler/zlib/archive/refs/tags/v${ver}.${ext} -libx265 4.1 tar.gz https://bitbucket.org/multicoreware/x265_git/downloads/x265_${ver}.${ext} libnuma -libnuma 2.0.19 tar.gz https://github.com/numactl/numactl/archive/refs/tags/v${ver}.${ext} +libplacebo 7.351.0 tar.gz https://github.com/haasn/libplacebo/archive/refs/tags/v${ver}.${ext} glslang,vulkan_loader,glad +glslang 16.0.0 tar.gz https://github.com/KhronosGroup/glslang/archive/refs/tags/${ver}.${ext} spirv_tools +spirv_tools 2025.4 tar.gz https://github.com/KhronosGroup/SPIRV-Tools/archive/refs/tags/v${ver}.${ext} spirv_headers +spirv_headers 1.4.328.1 tar.gz https://github.com/KhronosGroup/SPIRV-Headers/archive/refs/tags/vulkan-sdk-${ver}.${ext} +glad 2.0.8 tar.gz https://github.com/Dav1dde/glad/archive/refs/tags/v${ver}.${ext} + +libx265 4.1 tar.gz https://bitbucket.org/multicoreware/x265_git/downloads/x265_${ver}.${ext} libnuma +libnuma 2.0.19 tar.gz https://github.com/numactl/numactl/archive/refs/tags/v${ver}.${ext} ' - local supported_builds=() - unset ver ext url deps extractedDir - while read -r line; do - test "${line}" == '' && continue - IFS=$' \t' read -r build ver ext url deps <<<"${line}" - supported_builds+=("${build}") - if [[ ${getBuild} != "${build}" ]]; then - build='' - continue - fi - 
break - done <<<"${BUILDS_CONF}" + local supported_builds=() + unset ver ext url deps extractedDir + while read -r line; do + test "${line}" == '' && continue + IFS=$' \t' read -r build ver ext url deps <<<"${line}" + supported_builds+=("${build}") - if [[ ${getBuild} == 'supported' ]]; then - echo "${supported_builds[@]}" - return 0 - fi + # padding support + longestBuild="$(fb_max "${#build}" "${longestBuild}")" + longestVer="$(fb_max "${#ver}" "${longestVer}")" + longestExt="$(fb_max "${#ext}" "${longestExt}")" - if [[ ${build} == '' ]]; then - echo_fail "build ${getBuild} is not supported" - return 1 - fi + if [[ ${getBuild} != "${build}" ]]; then + build='' + continue + fi + break + done <<<"${BUILDS_CONF}" - # url uses ver and extension - eval "url=\"$url\"" - # set dependencies array - # shellcheck disable=SC2206 - deps=(${deps//,/ }) - # set version based off of remote head - # and set extracted directory - if [[ ${ext} == 'git' ]]; then - ver="$(get_remote_head "${url}")" - extractedDir="${BUILD_DIR}/${build}-${ext}" - else - extractedDir="${BUILD_DIR}/${build}-v${ver}" - fi + # special arg to print supported builds only + if [[ ${getBuild} == 'supported' ]]; then + echo "${supported_builds[@]}" + return 0 + fi - return 0 + # special arg to print BUILDS_CONF but formatted with spaces + if [[ ${getBuild} == 'format-builds-conf' ]]; then + echo "local BUILDS_CONF='" + while read -r line; do + IFS=$' \t' read -r build ver ext url deps <<<"${line}" + print_padded "${build}" $((padding + longestBuild)) + print_padded "${ver}" $((padding + longestVer)) + print_padded "${ext}" $((padding + longestExt)) + print_padded "${url}" "${padding}" + echo " ${deps}" + done <<<"${BUILDS_CONF}" + echo "'" + return 0 + fi + + if [[ ${build} == '' ]]; then + echo_fail "build ${getBuild} is not supported" + return 1 + fi + + # url uses ver and extension + eval "url=\"$url\"" + # set dependencies array + # shellcheck disable=SC2206 + deps=(${deps//,/ }) + # set version based off of remote head + # and set extracted directory + if [[ ${ext} == 'git' ]]; then + ver="$(get_remote_head "${url}")" + extractedDir="${BUILD_DIR}/${build}-${ext}" + else + extractedDir="${BUILD_DIR}/${build}-v${ver}" + fi + + return 0 } download_release() { - local basename="$(bash_basename "${extractedDir}")" - local download="${DL_DIR}/${basename}" + local basename="$(bash_basename "${extractedDir}")" + local download="${DL_DIR}/${basename}" - # remove other versions of a download - for alreadyDownloaded in "${DL_DIR}/${build}-"*; do - if line_contains "${alreadyDownloaded}" "${basename}"; then - continue - fi - if [[ ! -d ${alreadyDownloaded} && ! -f ${alreadyDownloaded} ]]; then - continue - fi - echo_warn "removing wrong version: ${alreadyDownloaded}" - rm -rf "${alreadyDownloaded}" - done - # remove other versions of a build - for alreadyBuilt in "${BUILD_DIR}/${build}-"*; do - if line_contains "${alreadyBuilt}" "${basename}"; then - continue - fi - test -d "${alreadyBuilt}" || continue - echo_warn "removing wrong version: ${extractedDir}" - rm -rf "${alreadyBuilt}" - done + # remove other versions of a download + for alreadyDownloaded in "${DL_DIR}/${build}-"*; do + if line_contains "${alreadyDownloaded}" "${basename}"; then + continue + fi + if [[ ! -d ${alreadyDownloaded} && ! 
-f ${alreadyDownloaded} ]]; then + continue + fi + echo_warn "removing wrong version: ${alreadyDownloaded}" + rm -rf "${alreadyDownloaded}" + done + # remove other versions of a build + for alreadyBuilt in "${BUILD_DIR}/${build}-"*; do + if line_contains "${alreadyBuilt}" "${basename}"; then + continue + fi + test -d "${alreadyBuilt}" || continue + echo_warn "removing wrong version: ${extractedDir}" + rm -rf "${alreadyBuilt}" + done - # enabling a clean build - if [[ ${CLEAN} == 'ON' ]]; then - DO_CLEAN="rm -rf" - else - DO_CLEAN='void' - fi + # enabling a clean build + if [[ ${CLEAN} == 'ON' ]]; then + DO_CLEAN="rm -rf" + else + DO_CLEAN='void' + fi - # create new build dir for clean builds - test -d "${extractedDir}" && - { ${DO_CLEAN} "${extractedDir}" || return 1; } + # create new build dir for clean builds + test -d "${extractedDir}" && + { ${DO_CLEAN} "${extractedDir}" || return 1; } - if test "${ext}" != "git"; then - wgetOut="${download}.${ext}" + if test "${ext}" != "git"; then + wgetOut="${download}.${ext}" - # download archive if not present - if ! test -f "${wgetOut}"; then - echo_info "downloading ${build}" - echo_if_fail wget "${url}" -O "${wgetOut}" - fi + # download archive if not present + if ! test -f "${wgetOut}"; then + echo_info "downloading ${build}" + echo_if_fail wget "${url}" -O "${wgetOut}" + fi - # create new build directory - test -d "${extractedDir}" || - { - mkdir "${extractedDir}" - tar -xf "${wgetOut}" \ - --strip-components=1 \ - --no-same-permissions \ - -C "${extractedDir}" || { rm "${wgetOut}" && return 1; } - } - else - # for git downloads - test -d "${download}" || - git clone --recursive "${url}" "${download}" || return 1 - ( - cd "${download}" || exit 1 - local localHEAD remoteHEAD - localHEAD="$(git rev-parse HEAD)" - remoteHEAD="$(get_remote_head "$(git config --get remote.origin.url)")" - if [[ ${localHEAD} != "${remoteHEAD}" ]]; then - git stash - git pull --ff-only - git submodule update --init --recursive - fi - localHEAD="$(git rev-parse HEAD)" - if [[ ${localHEAD} != "${remoteHEAD}" ]]; then - echo_exit "could not update git for ${build}" - fi - ) || return 1 + # create new build directory + test -d "${extractedDir}" || + { + mkdir "${extractedDir}" + tar -xf "${wgetOut}" \ + --strip-components=1 \ + --no-same-permissions \ + -C "${extractedDir}" || { rm "${wgetOut}" && return 1; } + } + else + # for git downloads + test -d "${download}" || + git clone --recursive "${url}" "${download}" || return 1 + ( + cd "${download}" || exit 1 + local localHEAD remoteHEAD + localHEAD="$(git rev-parse HEAD)" + remoteHEAD="$(get_remote_head "$(git config --get remote.origin.url)")" + if [[ ${localHEAD} != "${remoteHEAD}" ]]; then + git stash + git pull --ff-only + git submodule update --init --recursive + fi + localHEAD="$(git rev-parse HEAD)" + if [[ ${localHEAD} != "${remoteHEAD}" ]]; then + echo_exit "could not update git for ${build}" + fi + ) || return 1 - # create new build directory - test -d "${extractedDir}" || - cp -r "${download}" "${extractedDir}" || return 1 - fi + # create new build directory + test -d "${extractedDir}" || + cp -r "${download}" "${extractedDir}" || return 1 + fi } FB_FUNC_NAMES+=('do_build') @@ -498,165 +517,165 @@ FB_FUNC_DESCS['do_build']='build a specific project' # shellcheck disable=SC2034 FB_FUNC_COMPLETION['do_build']="$(get_build_conf supported)" do_build() { - local build="${1:-''}" - get_build_conf "${build}" || return 1 - download_release || return 1 + local build="${1:-''}" + get_build_conf "${build}" || return 1 + 
download_release || return 1 - set_compile_opts || return 1 - for dep in "${deps[@]}"; do - do_build "${dep}" || return 1 - done - get_build_conf "${build}" || return 1 - download_release || return 1 + set_compile_opts || return 1 + for dep in "${deps[@]}"; do + do_build "${dep}" || return 1 + done + get_build_conf "${build}" || return 1 + download_release || return 1 - # save the metadata for a build to skip re-building identical builds - local oldMetadataFile="${TMP_DIR}/${build}-old-metadata" - local newMetadataFile="${TMP_DIR}/${build}-new-metadata" - local ffmpegOldMetadataFile="${TMP_DIR}/ffmpeg-old-metadata" + # save the metadata for a build to skip re-building identical builds + local oldMetadataFile="${TMP_DIR}/${build}-old-metadata" + local newMetadataFile="${TMP_DIR}/${build}-new-metadata" + local ffmpegOldMetadataFile="${TMP_DIR}/ffmpeg-old-metadata" - # add build function, version, url, and top-level env to metadata - { - local buildFunction="$(type "build_${build}")" - # include meta builds - for token in ${buildFunction}; do - if [[ ${token} == "meta_"*"_build" ]]; then - type "${token}" - fi - done - echo "${buildFunction}" - echo "ver: ${ver}" - echo "url: ${url}" - echo "LOCAL_PREFIX: ${LOCAL_PREFIX}" - COLOR=OFF SHOW_SINGLE=true dump_arr "${BUILD_ENV_NAMES[@]}" - } >"${newMetadataFile}" + # add build function, version, url, and top-level env to metadata + { + local buildFunction="$(type "build_${build}")" + # include meta builds + for token in ${buildFunction}; do + if [[ ${token} == "meta_"*"_build" ]]; then + type "${token}" + fi + done + echo "${buildFunction}" + echo "ver: ${ver}" + echo "url: ${url}" + echo "LOCAL_PREFIX: ${LOCAL_PREFIX}" + COLOR=OFF SHOW_SINGLE=true dump_arr "${BUILD_ENV_NAMES[@]}" + } >"${newMetadataFile}" - # only ffmpeg cares about ENABLE and has special function - if [[ ${build} == 'ffmpeg' ]]; then - # shellcheck disable=SC2153 - echo "ENABLE=${ENABLE}" >>"${newMetadataFile}" - type add_project_versioning_to_ffmpeg >>"${newMetadataFile}" - fi + # only ffmpeg cares about ENABLE and has special function + if [[ ${build} == 'ffmpeg' ]]; then + # shellcheck disable=SC2153 + echo "ENABLE=${ENABLE}" >>"${newMetadataFile}" + type add_project_versioning_to_ffmpeg >>"${newMetadataFile}" + fi - # prepare build - pushd "${extractedDir}" >/dev/null || return 1 - # check for any patches - for patch in "${PATCHES_DIR}/${build}"/*.patch; do - test -f "${patch}" || continue - echo_if_fail patch -p1 -i "${patch}" || return 1 - echo "patch:${patch}" >>"${newMetadataFile}" - done + # prepare build + pushd "${extractedDir}" >/dev/null || return 1 + # check for any patches + for patch in "${PATCHES_DIR}/${build}"/*.patch; do + test -f "${patch}" || continue + echo_if_fail patch -p1 -i "${patch}" || return 1 + echo "patch:${patch}" >>"${newMetadataFile}" + done - # rebuild if new metadata is different - local newMetadata="$(<"${newMetadataFile}")" - local oldMetadata='' - test -f "${oldMetadataFile}" && oldMetadata="$(<"${oldMetadataFile}")" - if [[ ${oldMetadata} != "${newMetadata}" || -n ${REQUIRES_REBUILD} ]]; then - echo_info -n "building ${build} " - # build in background - local timeBefore=${EPOCHSECONDS} - spinner start - LOGNAME="${build}" echo_if_fail "build_${build}" - local retval=$? 
- spinner stop + # rebuild if new metadata is different + local newMetadata="$(<"${newMetadataFile}")" + local oldMetadata='' + test -f "${oldMetadataFile}" && oldMetadata="$(<"${oldMetadataFile}")" + if [[ ${oldMetadata} != "${newMetadata}" || -n ${REQUIRES_REBUILD} ]]; then + echo_info -n "building ${build} " + # build in background + local timeBefore=${EPOCHSECONDS} + spinner start + LOGNAME="${build}" echo_if_fail "build_${build}" + local retval=$? + spinner stop - popd >/dev/null || return 1 - test ${retval} -eq 0 || return ${retval} - echo_pass "built ${build} in $((EPOCHSECONDS - timeBefore)) seconds" + popd >/dev/null || return 1 + test ${retval} -eq 0 || return ${retval} + echo_pass "built ${build} in $((EPOCHSECONDS - timeBefore)) seconds" - # set new to old for later builds - cp "${newMetadataFile}" "${oldMetadataFile}" + # set new to old for later builds + cp "${newMetadataFile}" "${oldMetadataFile}" - # force ffmpeg to rebuild since one of the libraries has changed - if [[ ${build} != 'ffmpeg' && -f ${ffmpegOldMetadataFile} ]]; then - rm "${ffmpegOldMetadataFile}" - fi - # indicate that build chain will require rebuild - REQUIRES_REBUILD=1 - else - popd >/dev/null || return 1 - echo_info "re-using identical previous build for ${build}" - fi + # force ffmpeg to rebuild since one of the libraries has changed + if [[ ${build} != 'ffmpeg' && -f ${ffmpegOldMetadataFile} ]]; then + rm "${ffmpegOldMetadataFile}" + fi + # indicate that build chain will require rebuild + REQUIRES_REBUILD=1 + else + popd >/dev/null || return 1 + echo_info "re-using identical previous build for ${build}" + fi } FB_FUNC_NAMES+=('build') # shellcheck disable=SC2034 FB_FUNC_DESCS['build']='build ffmpeg with the desired configuration' build() { - # if PGO is enabled, build will call build - # only want to recursively build on the first run - if [[ ${PGO} == 'ON' && ${PGO_RUN} != 'generate' ]]; then - PGO_RUN='generate' build || return 1 - # will need to reset compile opts - unset FB_COMPILE_OPTS_SET - fi + # if PGO is enabled, build will call build + # only want to recursively build on the first run + if [[ ${PGO} == 'ON' && ${PGO_RUN} != 'generate' ]]; then + PGO_RUN='generate' build || return 1 + # will need to reset compile opts + unset FB_COMPILE_OPTS_SET + fi - set_compile_opts || return 1 + set_compile_opts || return 1 - for build in ${ENABLE}; do - do_build "${build}" || return 1 - # reset whether build chain requires a rebuild - unset REQUIRES_REBUILD - done - do_build ffmpeg || return 1 + for build in ${ENABLE}; do + do_build "${build}" || return 1 + # reset whether build chain requires a rebuild + unset REQUIRES_REBUILD + done + do_build ffmpeg || return 1 - # skip packaging on PGO generate run - if [[ ${PGO} == 'ON' && ${PGO_RUN} == 'generate' ]]; then - PATH="${PREFIX}/bin:${PATH}" gen_profdata - return $? - fi + # skip packaging on PGO generate run + if [[ ${PGO} == 'ON' && ${PGO_RUN} == 'generate' ]]; then + PATH="${PREFIX}/bin:${PATH}" gen_profdata + return $? 
+ fi - local ffmpegBin="${PREFIX}/bin/ffmpeg" - # run ffmpeg to show completion - "${ffmpegBin}" -version || return 1 + local ffmpegBin="${PREFIX}/bin/ffmpeg" + # run ffmpeg to show completion + "${ffmpegBin}" -version || return 1 - # suggestion for path - hash -r - local ffmpeg="$(command -v ffmpeg 2>/dev/null)" - if [[ ${ffmpeg} != "${ffmpegBin}" ]]; then - echo - echo_warn "ffmpeg in path (${ffmpeg}) is not the built one (${ffmpegBin})" - echo_info "consider adding ${PREFIX}/bin to \$PATH" - echo "echo 'export PATH=\"${PREFIX}/bin:\$PATH\"' >> ~/.bashrc" - fi + # suggestion for path + hash -r + local ffmpeg="$(command -v ffmpeg 2>/dev/null)" + if [[ ${ffmpeg} != "${ffmpegBin}" ]]; then + echo + echo_warn "ffmpeg in path (${ffmpeg}) is not the built one (${ffmpegBin})" + echo_info "consider adding ${PREFIX}/bin to \$PATH" + echo "echo 'export PATH=\"${PREFIX}/bin:\$PATH\"' >> ~/.bashrc" + fi - package || return 1 + package || return 1 - return 0 + return 0 } # make sure the sysroot has the appropriate library type # darwin will always link dynamically if a dylib is present # so they must be remove for static builds sanitize_sysroot_libs() { - # do nothing for windows - if is_windows; then return; fi + # do nothing for windows + if is_windows; then return; fi - local libs=("$@") + local libs=("$@") - for lib in "${libs[@]}"; do - local libPath="${LIBDIR}/${lib}" - local foundLib=false + for lib in "${libs[@]}"; do + local libPath="${LIBDIR}/${lib}" + local foundLib=false - for useLib in "${libPath}"*"${USE_LIB_SUFF}"; do - test -f "${useLib}" || continue - foundLib=true - # darwin sometimes fails to set rpath correctly - if is_darwin && [[ ${STATIC} == 'OFF' ]]; then - install_name_tool \ - -id "${useLib}" \ - "${useLib}" || return 1 - fi - done + for useLib in "${libPath}"*"${USE_LIB_SUFF}"; do + test -f "${useLib}" || continue + foundLib=true + # darwin sometimes fails to set rpath correctly + if is_darwin && [[ ${STATIC} == 'OFF' ]]; then + install_name_tool \ + -id "${useLib}" \ + "${useLib}" || return 1 + fi + done - if [[ ${foundLib} == false ]]; then - echo_fail "could not find ${libPath}*${USE_LIB_SUFF}, something is wrong" - return 1 - fi + if [[ ${foundLib} == false ]]; then + echo_fail "could not find ${libPath}*${USE_LIB_SUFF}, something is wrong" + return 1 + fi - ${SUDO_MODIFY} rm "${libPath}"*".${DEL_LIB_SUFF}"* - done + ${SUDO_MODIFY} rm "${libPath}"*".${DEL_LIB_SUFF}"* + done - return 0 + return 0 } # cargo cinstall destdir prepends with entire prefix @@ -665,465 +684,465 @@ sanitize_sysroot_libs() { # also windows via msys path resolution breaks sometimes # for PREFIX installs (C:/ instead of /c/) install_local_destdir() ( - local destdir="$1" - test "${destdir}" == '' && return 1 - cd "${destdir}" || return 1 - local sysrootDir="$(bash_basename "${PREFIX}")" - while ! test -d "${sysrootDir}"; do - cd ./* || return 1 - done - # final cd - cd "${sysrootDir}" || return 1 - ${SUDO_MODIFY} cp -r ./* "${PREFIX}/" + local destdir="$1" + test "${destdir}" == '' && return 1 + cd "${destdir}" || return 1 + local sysrootDir="$(bash_basename "${PREFIX}")" + while ! 
test -d "${sysrootDir}"; do + cd ./* || return 1 + done + # final cd + cd "${sysrootDir}" || return 1 + ${SUDO_MODIFY} cp -r ./* "${PREFIX}/" ) del_pkgconfig_gcc_s() { - # HACK PATCH - # remove '-lgcc_s' from pkgconfig for static builds - if [[ ${STATIC} == 'ON' ]]; then - local fname="$1" - local cfg="${PKG_CONFIG_PATH}/${fname}" - local newCfg="${TMP_DIR}/${fname}" - test -f "${cfg}" || return 1 - local del='-lgcc_s' + # HACK PATCH + # remove '-lgcc_s' from pkgconfig for static builds + if [[ ${STATIC} == 'ON' ]]; then + local fname="$1" + local cfg="${PKG_CONFIG_PATH}/${fname}" + local newCfg="${TMP_DIR}/${fname}" + test -f "${cfg}" || return 1 + local del='-lgcc_s' - test -f "${newCfg}" && rm "${newCfg}" - while read -r line; do - if line_contains "${line}" "${del}"; then - line="${line//${del} /}" - fi - echo "${line}" >>"${newCfg}" - done <"${cfg}" - # overwrite the pkgconfig - ${SUDO_MODIFY} cp "${newCfg}" "${cfg}" - fi + test -f "${newCfg}" && rm "${newCfg}" + while read -r line; do + if line_contains "${line}" "${del}"; then + line="${line//${del} /}" + fi + echo "${line}" >>"${newCfg}" + done <"${cfg}" + # overwrite the pkgconfig + ${SUDO_MODIFY} cp "${newCfg}" "${cfg}" + fi } ### RUST ### meta_cargoc_build() { - local destdir="${PWD}/fb-local-install" - # let rust handle its own lto/pgo - local newCflags="${CFLAGS//${LTO_FLAG}/}" - newCflags="${newCflags//${PGO_FLAG}/}" + local destdir="${PWD}/fb-local-install" + # let rust handle its own lto/pgo + local newCflags="${CFLAGS//${LTO_FLAG}/}" + newCflags="${newCflags//${PGO_FLAG}/}" - CFLAGS="${newCflags}" cargo cinstall \ - --destdir "${destdir}" \ - "${CARGO_CINSTALL_FLAGS[@]}" || return 1 - # cargo cinstall destdir prepends with entire prefix - # this breaks windows with msys path augmentation - # so recurse into directories until sysroot is there - install_local_destdir "${destdir}" || return 1 + CFLAGS="${newCflags}" cargo cinstall \ + --destdir "${destdir}" \ + "${CARGO_CINSTALL_FLAGS[@]}" || return 1 + # cargo cinstall destdir prepends with entire prefix + # this breaks windows with msys path augmentation + # so recurse into directories until sysroot is there + install_local_destdir "${destdir}" || return 1 } build_hdr10plus_tool() { - # build libhdr10plus - cd hdr10plus || return 1 - meta_cargoc_build || return 1 - sanitize_sysroot_libs libhdr10plus-rs || return 1 + # build libhdr10plus + cd hdr10plus || return 1 + meta_cargoc_build || return 1 + sanitize_sysroot_libs libhdr10plus-rs || return 1 } build_dovi_tool() { - # build libdovi - cd dolby_vision || return 1 - meta_cargoc_build || return 1 - sanitize_sysroot_libs libdovi || return 1 + # build libdovi + cd dolby_vision || return 1 + meta_cargoc_build || return 1 + sanitize_sysroot_libs libdovi || return 1 } build_librav1e() { - meta_cargoc_build || return 1 - sanitize_sysroot_libs librav1e || return 1 - del_pkgconfig_gcc_s rav1e.pc || return 1 + meta_cargoc_build || return 1 + sanitize_sysroot_libs librav1e || return 1 + del_pkgconfig_gcc_s rav1e.pc || return 1 } ### CMAKE ### meta_cmake_build() { - local addFlags=("$@") - # configure - cmake \ - -B fb-build \ - "${CMAKE_FLAGS[@]}" \ - "${addFlags[@]}" || return 1 - # build - cmake \ - --build fb-build \ - --config Release \ - -j "${JOBS}" || return 1 - # install - ${SUDO_MODIFY} cmake \ - --install fb-build || return 1 + local addFlags=("$@") + # configure + cmake \ + -B fb-build \ + "${CMAKE_FLAGS[@]}" \ + "${addFlags[@]}" || return 1 + # build + cmake \ + --build fb-build \ + --config Release \ + -j "${JOBS}" || 
return 1 + # install + ${SUDO_MODIFY} cmake \ + --install fb-build || return 1 } build_cpuinfo() { - meta_cmake_build \ - -DCPUINFO_LIBRARY_TYPE="${BUILD_TYPE}" \ - -DCPUINFO_RUNTIME_TYPE="${BUILD_TYPE}" \ - -DCPUINFO_BUILD_UNIT_TESTS=OFF \ - -DCPUINFO_BUILD_MOCK_TESTS=OFF \ - -DCPUINFO_BUILD_BENCHMARKS=OFF \ - -DCPUINFO_LOG_TO_STDIO=ON \ - -DUSE_SYSTEM_LIBS=ON || return 1 - sanitize_sysroot_libs libcpuinfo || return 1 + meta_cmake_build \ + -DCPUINFO_LIBRARY_TYPE="${BUILD_TYPE}" \ + -DCPUINFO_RUNTIME_TYPE="${BUILD_TYPE}" \ + -DCPUINFO_BUILD_UNIT_TESTS=OFF \ + -DCPUINFO_BUILD_MOCK_TESTS=OFF \ + -DCPUINFO_BUILD_BENCHMARKS=OFF \ + -DCPUINFO_LOG_TO_STDIO=ON \ + -DUSE_SYSTEM_LIBS=ON || return 1 + sanitize_sysroot_libs libcpuinfo || return 1 } build_libsvtav1() { - meta_cmake_build \ - -DENABLE_AVX512=ON \ - -DBUILD_TESTING=OFF \ - -DCOVERAGE=OFF || return 1 - sanitize_sysroot_libs libSvtAv1Enc || return 1 + meta_cmake_build \ + -DENABLE_AVX512=ON \ + -DBUILD_TESTING=OFF \ + -DCOVERAGE=OFF || return 1 + sanitize_sysroot_libs libSvtAv1Enc || return 1 } build_libsvtav1_psy() { - meta_cmake_build \ - -DBUILD_TESTING=OFF \ - -DENABLE_AVX512=ON \ - -DCOVERAGE=OFF \ - -DLIBDOVI_FOUND=1 \ - -DLIBHDR10PLUS_RS_FOUND=1 || return 1 - sanitize_sysroot_libs libSvtAv1Enc || return 1 + meta_cmake_build \ + -DBUILD_TESTING=OFF \ + -DENABLE_AVX512=ON \ + -DCOVERAGE=OFF \ + -DLIBDOVI_FOUND=1 \ + -DLIBHDR10PLUS_RS_FOUND=1 || return 1 + sanitize_sysroot_libs libSvtAv1Enc || return 1 } build_libaom() { - meta_cmake_build \ - -DENABLE_TESTS=OFF || return 1 - sanitize_sysroot_libs libaom || return 1 + meta_cmake_build \ + -DENABLE_TESTS=OFF || return 1 + sanitize_sysroot_libs libaom || return 1 } build_libopus() { - meta_cmake_build || return 1 - sanitize_sysroot_libs libopus || return 1 + meta_cmake_build || return 1 + sanitize_sysroot_libs libopus || return 1 } build_libvorbis() { - meta_cmake_build || return 1 - sanitize_sysroot_libs \ - libvorbis libvorbisenc libvorbisfile || return 1 + meta_cmake_build || return 1 + sanitize_sysroot_libs \ + libvorbis libvorbisenc libvorbisfile || return 1 } build_libogg() { - meta_cmake_build || return 1 - sanitize_sysroot_libs libogg || return 1 + meta_cmake_build || return 1 + sanitize_sysroot_libs libogg || return 1 } build_libwebp() { - if is_android; then - replace_line CMakeLists.txt \ - "if(ANDROID)" \ - "if(FALSE)\n" - fi + if is_android; then + replace_line CMakeLists.txt \ + "if(ANDROID)" \ + "if(FALSE)\n" + fi - meta_cmake_build || return 1 - sanitize_sysroot_libs libwebp libsharpyuv || return 1 + meta_cmake_build || return 1 + sanitize_sysroot_libs libwebp libsharpyuv || return 1 } build_libjpeg() { - meta_cmake_build || return 1 - sanitize_sysroot_libs libjpeg libturbojpeg || return 1 + meta_cmake_build || return 1 + sanitize_sysroot_libs libjpeg libturbojpeg || return 1 } build_libpng() { - meta_cmake_build \ - -DPNG_TESTS=OFF \ - -DPNG_TOOLS=OFF || return 1 - sanitize_sysroot_libs libpng || return 1 + meta_cmake_build \ + -DPNG_TESTS=OFF \ + -DPNG_TOOLS=OFF || return 1 + sanitize_sysroot_libs libpng || return 1 } build_zlib() { - meta_cmake_build \ - -DZLIB_BUILD_EXAMPLES=OFF || return 1 - sanitize_sysroot_libs libz || return 1 + meta_cmake_build \ + -DZLIB_BUILD_EXAMPLES=OFF || return 1 + sanitize_sysroot_libs libz || return 1 } build_glslang() { - meta_cmake_build \ - -DALLOW_EXTERNAL_SPIRV_TOOLS=ON || return 1 - sanitize_sysroot_libs libglslang || return 1 + meta_cmake_build \ + -DALLOW_EXTERNAL_SPIRV_TOOLS=ON || return 1 + sanitize_sysroot_libs libglslang || 
return 1 } build_spirv_tools() { - meta_cmake_build \ - -DSPIRV-Headers_SOURCE_DIR="${PREFIX}" \ - -DSPIRV_WERROR=OFF \ - -DSPIRV_SKIP_TESTS=ON \ - -G Ninja || return 1 + meta_cmake_build \ + -DSPIRV-Headers_SOURCE_DIR="${PREFIX}" \ + -DSPIRV_WERROR=OFF \ + -DSPIRV_SKIP_TESTS=ON \ + -G Ninja || return 1 } build_spirv_headers() { - meta_cmake_build \ - -G Ninja || return 1 + meta_cmake_build \ + -G Ninja || return 1 } build_libx265() { - # libx265 does not support cmake >= 4 - if using_cmake_4; then - remove_line "source/CMakeLists.txt" "cmake_policy(SET CMP0025 OLD)" || return 1 - remove_line "source/CMakeLists.txt" "cmake_policy(SET CMP0054 OLD)" || return 1 - fi + # libx265 does not support cmake >= 4 + if using_cmake_4; then + remove_line "source/CMakeLists.txt" "cmake_policy(SET CMP0025 OLD)" || return 1 + remove_line "source/CMakeLists.txt" "cmake_policy(SET CMP0054 OLD)" || return 1 + fi - meta_cmake_build \ - -DHIGH_BIT_DEPTH=ON \ - -DENABLE_HDR10_PLUS=OFF \ - -S source || return 1 - sanitize_sysroot_libs libx265 || return 1 - del_pkgconfig_gcc_s x265.pc || return 1 + meta_cmake_build \ + -DHIGH_BIT_DEPTH=ON \ + -DENABLE_HDR10_PLUS=OFF \ + -S source || return 1 + sanitize_sysroot_libs libx265 || return 1 + del_pkgconfig_gcc_s x265.pc || return 1 } ### MESON ### meta_meson_build() { - local addFlags=("$@") - meson setup \ - "${MESON_FLAGS[@]}" \ - "${addFlags[@]}" \ - . fb-build || return 1 - meson compile \ - -C fb-build \ - -j "${JOBS}" || return 1 - ${SUDO_MODIFY} meson install \ - -C fb-build || return 1 + local addFlags=("$@") + meson setup \ + "${MESON_FLAGS[@]}" \ + "${addFlags[@]}" \ + . fb-build || return 1 + meson compile \ + -C fb-build \ + -j "${JOBS}" || return 1 + ${SUDO_MODIFY} meson install \ + -C fb-build || return 1 } build_libdav1d() { - local enableAsm=true - # arm64 will fail the build at 0 optimization - if [[ "${HOSTTYPE}:${OPT}" == "aarch64:0" ]]; then - enableAsm=false - fi - meta_meson_build \ - -D enable_asm=${enableAsm} || return 1 - sanitize_sysroot_libs libdav1d || return 1 + local enableAsm=true + # arm64 will fail the build at 0 optimization + if [[ "${HOSTTYPE}:${OPT}" == "aarch64:0" ]]; then + enableAsm=false + fi + meta_meson_build \ + -D enable_asm=${enableAsm} || return 1 + sanitize_sysroot_libs libdav1d || return 1 } build_libplacebo() { - # copy downloaded glad release as "submodule" - ( - installDir="${PWD}/3rdparty/glad" - get_build_conf glad - CLEAN=OFF download_release - cd "${extractedDir}" || exit 1 - cp -r ./* "${installDir}" - ) || return 1 + # copy downloaded glad release as "submodule" + ( + installDir="${PWD}/3rdparty/glad" + get_build_conf glad + CLEAN=OFF download_release + cd "${extractedDir}" || exit 1 + cp -r ./* "${installDir}" + ) || return 1 - meta_meson_build \ - -D tests=false \ - -D demos=false || return 1 - sanitize_sysroot_libs libplacebo || return 1 + meta_meson_build \ + -D tests=false \ + -D demos=false || return 1 + sanitize_sysroot_libs libplacebo || return 1 } build_libvmaf() { - cd libvmaf || return 1 - virtualenv .venv - ( - source .venv/bin/activate - meta_meson_build \ - -D enable_float=true || exit 1 - ) || return 1 - sanitize_sysroot_libs libvmaf || return 1 + cd libvmaf || return 1 + virtualenv .venv + ( + source .venv/bin/activate + meta_meson_build \ + -D enable_float=true || exit 1 + ) || return 1 + sanitize_sysroot_libs libvmaf || return 1 - # HACK PATCH - # add '-lstdc++' to pkgconfig for static builds - if [[ ${STATIC} == 'ON' ]]; then - local fname='libvmaf.pc' - local cfg="${PKG_CONFIG_PATH}/${fname}" 
- local newCfg="${TMP_DIR}/${fname}" - test -f "${cfg}" || return 1 - local search='Libs: ' + # HACK PATCH + # add '-lstdc++' to pkgconfig for static builds + if [[ ${STATIC} == 'ON' ]]; then + local fname='libvmaf.pc' + local cfg="${PKG_CONFIG_PATH}/${fname}" + local newCfg="${TMP_DIR}/${fname}" + test -f "${cfg}" || return 1 + local search='Libs: ' - test -f "${newCfg}" && rm "${newCfg}" - while read -r line; do - if line_contains "${line}" "${search}"; then - line+=" -lstdc++" - fi - echo "${line}" >>"${newCfg}" - done <"${cfg}" - # overwrite the pkgconfig - ${SUDO_MODIFY} cp "${newCfg}" "${cfg}" - fi + test -f "${newCfg}" && rm "${newCfg}" + while read -r line; do + if line_contains "${line}" "${search}"; then + line+=" -lstdc++" + fi + echo "${line}" >>"${newCfg}" + done <"${cfg}" + # overwrite the pkgconfig + ${SUDO_MODIFY} cp "${newCfg}" "${cfg}" + fi } ### PYTHON ### build_glad() { - true + true } ### AUTOTOOLS ### meta_configure_build() { - local addFlags=("$@") - local configureFlags=() - # backup global variable for re-setting - # after build is complete - local cflagsBackup="${CFLAGS}" - local ldflagsBackup="${LDFLAGS}" + local addFlags=("$@") + local configureFlags=() + # backup global variable for re-setting + # after build is complete + local cflagsBackup="${CFLAGS}" + local ldflagsBackup="${LDFLAGS}" - # some builds break with LTO - if [[ ${LTO} == 'OFF' ]]; then - for flag in "${CONFIGURE_FLAGS[@]}"; do - test "${flag}" == '--enable-lto' && continue - configureFlags+=("${flag}") - done - CFLAGS="${CFLAGS//${LTO_FLAG}/}" - LDFLAGS="${LDFLAGS//${LTO_FLAG}/}" - else - configureFlags+=("${CONFIGURE_FLAGS[@]}") - fi + # some builds break with LTO + if [[ ${LTO} == 'OFF' ]]; then + for flag in "${CONFIGURE_FLAGS[@]}"; do + test "${flag}" == '--enable-lto' && continue + configureFlags+=("${flag}") + done + CFLAGS="${CFLAGS//${LTO_FLAG}/}" + LDFLAGS="${LDFLAGS//${LTO_FLAG}/}" + else + configureFlags+=("${CONFIGURE_FLAGS[@]}") + fi - # configure - ./configure \ - "${configureFlags[@]}" \ - "${addFlags[@]}" || return 1 - # build - # attempt to build twice since build can fail due to OOM - ccache make -j"${JOBS}" || - ccache make -j"${JOBS}" || return 1 - # install - local destdir="${PWD}/fb-local-install" - make -j"${JOBS}" DESTDIR="${destdir}" install || return 1 - install_local_destdir "${destdir}" || return 1 + # configure + ./configure \ + "${configureFlags[@]}" \ + "${addFlags[@]}" || return 1 + # build + # attempt to build twice since build can fail due to OOM + ccache make -j"${JOBS}" || + ccache make -j"${JOBS}" || return 1 + # install + local destdir="${PWD}/fb-local-install" + make -j"${JOBS}" DESTDIR="${destdir}" install || return 1 + install_local_destdir "${destdir}" || return 1 - # reset global variables - CFLAGS="${cflagsBackup}" - LDFLAGS="${ldflagsBackup}" + # reset global variables + CFLAGS="${cflagsBackup}" + LDFLAGS="${ldflagsBackup}" } build_libvpx() { - meta_configure_build \ - --disable-examples \ - --disable-tools \ - --disable-docs \ - --disable-unit-tests \ - --disable-decode-perf-tests \ - --disable-encode-perf-tests \ - --enable-vp8 \ - --enable-vp9 \ - --enable-vp9-highbitdepth \ - --enable-better-hw-compatibility \ - --enable-webm-io \ - --enable-libyuv || return 1 - sanitize_sysroot_libs libvpx || return 1 + meta_configure_build \ + --disable-examples \ + --disable-tools \ + --disable-docs \ + --disable-unit-tests \ + --disable-decode-perf-tests \ + --disable-encode-perf-tests \ + --enable-vp8 \ + --enable-vp9 \ + --enable-vp9-highbitdepth \ + 
--enable-better-hw-compatibility \ + --enable-webm-io \ + --enable-libyuv || return 1 + sanitize_sysroot_libs libvpx || return 1 } build_libx264() { - # libx264 breaks with LTO - LTO=OFF meta_configure_build \ - --disable-cli \ - --disable-avs \ - --disable-swscale \ - --disable-lavf \ - --disable-ffms \ - --disable-gpac || return 1 - sanitize_sysroot_libs libx264 || return 1 + # libx264 breaks with LTO + LTO=OFF meta_configure_build \ + --disable-cli \ + --disable-avs \ + --disable-swscale \ + --disable-lavf \ + --disable-ffms \ + --disable-gpac || return 1 + sanitize_sysroot_libs libx264 || return 1 } build_libmp3lame() { - meta_configure_build \ - --enable-nasm \ - --disable-frontend || return 1 - sanitize_sysroot_libs libmp3lame || return 1 + meta_configure_build \ + --enable-nasm \ + --disable-frontend || return 1 + sanitize_sysroot_libs libmp3lame || return 1 } build_libnuma() { - if ! is_linux; then return 0; fi + if ! is_linux; then return 0; fi - ./autogen.sh || return 1 - meta_configure_build || return 1 - sanitize_sysroot_libs libnuma || return 1 + ./autogen.sh || return 1 + meta_configure_build || return 1 + sanitize_sysroot_libs libnuma || return 1 } add_project_versioning_to_ffmpeg() { - # embed this project's enables/versions - # into ffmpeg with FFMPEG_BUILDER_INFO - local FFMPEG_BUILDER_INFO=( - '' # pad with empty line - "ffmpeg-builder=$(git -C "${REPO_DIR}" rev-parse HEAD)" - ) - for build in ${ENABLE}; do - get_build_conf "${build}" || return 1 - # add build configuration info - FFMPEG_BUILDER_INFO+=("${build}=${ver}") - done - # and finally for ffmpeg itself - get_build_conf ffmpeg || return 1 - FFMPEG_BUILDER_INFO+=("${build}=${ver}") + # embed this project's enables/versions + # into ffmpeg with FFMPEG_BUILDER_INFO + local FFMPEG_BUILDER_INFO=( + '' # pad with empty line + "ffmpeg-builder=$(git -C "${REPO_DIR}" rev-parse HEAD)" + ) + for build in ${ENABLE}; do + get_build_conf "${build}" || return 1 + # add build configuration info + FFMPEG_BUILDER_INFO+=("${build}=${ver}") + done + # and finally for ffmpeg itself + get_build_conf ffmpeg || return 1 + FFMPEG_BUILDER_INFO+=("${build}=${ver}") - local fname='opt_common.c' - local optFile="fftools/${fname}" - if [[ ! -f ${optFile} ]]; then - echo_fail "could not find ${fname} to add project versioning" - fi + local fname='opt_common.c' + local optFile="fftools/${fname}" + if [[ ! 
-f ${optFile} ]]; then + echo_fail "could not find ${fname} to add project versioning" + fi - local searchFor='static void print_all_libs_info' - local foundUsageStart=0 - local newOptFile="${TMP_DIR}/${fname}" - test -f "${newOptFile}" && rm "${newOptFile}" - while read -r line; do - # if we found the line previously, add the versioning - if [[ ${foundUsageStart} -eq 1 ]]; then - if line_starts_with "${line}" '}'; then - echo_info "found ${line} on ${lineNum}" - for info in "${FFMPEG_BUILDER_INFO[@]}"; do - local newline="av_log(NULL, AV_LOG_INFO, \"${info}\n\");" - echo "${newline}" >>"${newOptFile}" - lineNum=$((lineNum + 1)) - done - newline="av_log(NULL, AV_LOG_INFO, \"\n\");" - echo "${newline}" >>"${newOptFile}" - foundUsageStart=0 - fi - fi - # find the line we are searching for - if line_contains "${line}" "${searchFor}"; then - foundUsageStart=1 - fi - # start building the new file - echo "${line}" >>"${newOptFile}" - done <"${optFile}" + local searchFor='static void print_all_libs_info' + local foundUsageStart=0 + local newOptFile="${TMP_DIR}/${fname}" + test -f "${newOptFile}" && rm "${newOptFile}" + while read -r line; do + # if we found the line previously, add the versioning + if [[ ${foundUsageStart} -eq 1 ]]; then + if line_starts_with "${line}" '}'; then + echo_info "found ${line} on ${lineNum}" + for info in "${FFMPEG_BUILDER_INFO[@]}"; do + local newline="av_log(NULL, AV_LOG_INFO, \"${info}\n\");" + echo "${newline}" >>"${newOptFile}" + lineNum=$((lineNum + 1)) + done + newline="av_log(NULL, AV_LOG_INFO, \"\n\");" + echo "${newline}" >>"${newOptFile}" + foundUsageStart=0 + fi + fi + # find the line we are searching for + if line_contains "${line}" "${searchFor}"; then + foundUsageStart=1 + fi + # start building the new file + echo "${line}" >>"${newOptFile}" + done <"${optFile}" - cp "${newOptFile}" "${optFile}" || return 1 + cp "${newOptFile}" "${optFile}" || return 1 - return 0 + return 0 } build_ffmpeg() { - add_project_versioning_to_ffmpeg || return 1 + add_project_versioning_to_ffmpeg || return 1 - # libsvtav1_psy real name is libsvtav1 - for enable in ${ENABLE}; do - test "${enable}" == 'libsvtav1_psy' && enable='libsvtav1' - CONFIGURE_FLAGS+=("--enable-${enable}") - done + # libsvtav1_psy real name is libsvtav1 + for enable in ${ENABLE}; do + test "${enable}" == 'libsvtav1_psy' && enable='libsvtav1' + CONFIGURE_FLAGS+=("--enable-${enable}") + done - local ffmpegFlags=( - "--enable-gpl" - "--enable-version3" - "--disable-htmlpages" - "--disable-podpages" - "--disable-txtpages" - "--disable-ffplay" - "--disable-autodetect" - "--extra-version=${ver}" - "--enable-runtime-cpudetect" - ) + local ffmpegFlags=( + "--enable-gpl" + "--enable-version3" + "--disable-htmlpages" + "--disable-podpages" + "--disable-txtpages" + "--disable-ffplay" + "--disable-autodetect" + "--extra-version=${ver}" + "--enable-runtime-cpudetect" + ) - # lto is broken on darwin for ffmpeg only - # https://trac.ffmpeg.org/ticket/11479 - local ltoBackup="${LTO}" - if is_darwin; then - LTO=OFF - for flag in "${FFMPEG_EXTRA_FLAGS[@]}"; do - if line_contains "${flag}" "${LTO_FLAG}"; then - # get rid of potential space on either side - flag="${flag//${LTO_FLAG} /}" - flag="${flag// ${LTO_FLAG}/}" - fi - ffmpegFlags+=("${flag}") - done - else - ffmpegFlags+=("${FFMPEG_EXTRA_FLAGS[@]}") - fi + # lto is broken on darwin for ffmpeg only + # https://trac.ffmpeg.org/ticket/11479 + local ltoBackup="${LTO}" + if is_darwin; then + LTO=OFF + for flag in "${FFMPEG_EXTRA_FLAGS[@]}"; do + if line_contains "${flag}" 
"${LTO_FLAG}"; then + # get rid of potential space on either side + flag="${flag//${LTO_FLAG} /}" + flag="${flag// ${LTO_FLAG}/}" + fi + ffmpegFlags+=("${flag}") + done + else + ffmpegFlags+=("${FFMPEG_EXTRA_FLAGS[@]}") + fi - meta_configure_build \ - "${ffmpegFlags[@]}" || return 1 - LTO="${ltoBackup}" - ${SUDO_MODIFY} cp ff*_g "${PREFIX}/bin" - sanitize_sysroot_libs \ - libavcodec libavdevice libavfilter libswscale \ - libavformat libavutil libswresample || return 1 + meta_configure_build \ + "${ffmpegFlags[@]}" || return 1 + LTO="${ltoBackup}" + ${SUDO_MODIFY} cp ff*_g "${PREFIX}/bin" + sanitize_sysroot_libs \ + libavcodec libavdevice libavfilter libswscale \ + libavformat libavutil libswresample || return 1 } diff --git a/lib/compile_opts.sh b/lib/compile_opts.sh index e5f24a3..c8872d8 100644 --- a/lib/compile_opts.sh +++ b/lib/compile_opts.sh @@ -50,15 +50,15 @@ FB_COMP_OPTS=("${!FB_COMP_OPTS_DESC[@]}") # sets FB_COMP_OPTS to allow for user-overriding check_compile_opts_override() { - for opt in "${FB_COMP_OPTS[@]}"; do - declare -n defOptVal="DEFAULT_${opt}" - declare -n optVal="${opt}" - # use given value if not overridden - if [[ -v optVal && ${optVal} != "${defOptVal}" ]]; then - echo_info "setting given value for ${opt}=${optVal}" - declare -g "${opt}=${optVal}" - else - declare -g "${opt}=${defOptVal}" - fi - done + for opt in "${FB_COMP_OPTS[@]}"; do + declare -n defOptVal="DEFAULT_${opt}" + declare -n optVal="${opt}" + # use given value if not overridden + if [[ -v optVal && ${optVal} != "${defOptVal}" ]]; then + echo_info "setting given value for ${opt}=${optVal}" + declare -g "${opt}=${optVal}" + else + declare -g "${opt}=${defOptVal}" + fi + done } diff --git a/lib/docker.sh b/lib/docker.sh index b83f65b..cc82d8f 100644 --- a/lib/docker.sh +++ b/lib/docker.sh @@ -1,326 +1,326 @@ #!/usr/bin/env bash VALID_DOCKER_IMAGES=( - 'ubuntu' - 'fedora' - 'debian' - 'archlinux' + 'ubuntu' + 'fedora' + 'debian' + 'archlinux' ) DOCKER_WORKDIR='/workdir' set_docker_run_flags() { - local cargo_git="${IGN_DIR}/cargo/git" - local cargo_registry="${IGN_DIR}/cargo/registry" - ensure_dir "${cargo_git}" "${cargo_registry}" - DOCKER_RUN_FLAGS=( - --rm - -v "${cargo_git}:/root/.cargo/git" - -v "${cargo_registry}:/root/.cargo/registry" - -v "${REPO_DIR}:${REPO_DIR}" - -w "${REPO_DIR}" - -e "DEBUG=${DEBUG}" - -e "HEADLESS=${HEADLESS}" - ) - for opt in "${FB_COMP_OPTS[@]}"; do - declare -n defOptVal="DEFAULT_${opt}" - declare -n optVal="${opt}" - if [[ -v optVal && ${optVal} != "${defOptVal}" ]]; then - DOCKER_RUN_FLAGS+=("-e" "${opt}=${optVal}") - fi - done + local cargo_git="${IGN_DIR}/cargo/git" + local cargo_registry="${IGN_DIR}/cargo/registry" + ensure_dir "${cargo_git}" "${cargo_registry}" + DOCKER_RUN_FLAGS=( + --rm + -v "${cargo_git}:/root/.cargo/git" + -v "${cargo_registry}:/root/.cargo/registry" + -v "${REPO_DIR}:${REPO_DIR}" + -w "${REPO_DIR}" + -e "DEBUG=${DEBUG}" + -e "HEADLESS=${HEADLESS}" + ) + for opt in "${FB_COMP_OPTS[@]}"; do + declare -n defOptVal="DEFAULT_${opt}" + declare -n optVal="${opt}" + if [[ -v optVal && ${optVal} != "${defOptVal}" ]]; then + DOCKER_RUN_FLAGS+=("-e" "${opt}=${optVal}") + fi + done } check_docker() { - if missing_cmd docker; then - echo_info "install docker" - curl https://get.docker.com -sSf | bash - fi - set_docker_run_flags || return 1 + if missing_cmd docker; then + echo_info "install docker" + curl https://get.docker.com -sSf | bash + fi + set_docker_run_flags || return 1 } # get full image digest for a given image get_docker_image_tag() { - local 
image="$1" - local tag='' - case "${image}" in - ubuntu) tag='ubuntu:24.04@sha256:c35e29c9450151419d9448b0fd75374fec4fff364a27f176fb458d472dfc9e54' ;; - debian) tag='debian:13@sha256:0d01188e8dd0ac63bf155900fad49279131a876a1ea7fac917c62e87ccb2732d' ;; - fedora) tag='fedora:42@sha256:b3d16134560afa00d7cc2a9e4967eb5b954512805f3fe27d8e70bbed078e22ea' ;; - archlinux) tag='ogarcia/archlinux:latest@sha256:1d70273180e43b1f51b41514bdaa73c61f647891a53a9c301100d5c4807bf628' ;; - esac - echo "${tag}" + local image="$1" + local tag='' + case "${image}" in + ubuntu) tag='ubuntu:24.04@sha256:c35e29c9450151419d9448b0fd75374fec4fff364a27f176fb458d472dfc9e54' ;; + debian) tag='debian:13@sha256:0d01188e8dd0ac63bf155900fad49279131a876a1ea7fac917c62e87ccb2732d' ;; + fedora) tag='fedora:42@sha256:b3d16134560afa00d7cc2a9e4967eb5b954512805f3fe27d8e70bbed078e22ea' ;; + archlinux) tag='ogarcia/archlinux:latest@sha256:1d70273180e43b1f51b41514bdaa73c61f647891a53a9c301100d5c4807bf628' ;; + esac + echo "${tag}" } # change dash to colon for docker and add namespace set_distro_image_tag() { - local image_tag="${1}" - echo "ffmpeg_builder_${image_tag//-/:}" + local image_tag="${1}" + echo "ffmpeg_builder_${image_tag//-/:}" } # change colon to dash and add extension type docker_image_archive_name() { - local image_tag="${1}" - echo "${image_tag//:/-}.tar.zst" + local image_tag="${1}" + echo "${image_tag//:/-}.tar.zst" } echo_platform() { - local platKernel platCpu - platKernel="$(uname)" - platKernel="${platKernel,,}" - if [[ ${HOSTTYPE} == 'x86_64' ]]; then - platCpu='amd64' - else - platCpu='arm64' - fi + local platKernel platCpu + platKernel="$(uname)" + platKernel="${platKernel,,}" + if [[ ${HOSTTYPE} == 'x86_64' ]]; then + platCpu='amd64' + else + platCpu='arm64' + fi - echo "${platKernel}/${platCpu}" + echo "${platKernel}/${platCpu}" } validate_selected_image() { - local selectedImage="$1" - local valid=1 - for image in "${VALID_DOCKER_IMAGES[@]}"; do - if [[ ${selectedImage} == "${image}" ]]; then - valid=0 - break - fi - done - if [[ valid -eq 1 ]]; then - echo_fail "${selectedImage} is not valid" - echo_info "valid images:" "${VALID_DOCKER_IMAGES[@]}" - return 1 - fi + local selectedImage="$1" + local valid=1 + for image in "${VALID_DOCKER_IMAGES[@]}"; do + if [[ ${selectedImage} == "${image}" ]]; then + valid=0 + break + fi + done + if [[ valid -eq 1 ]]; then + echo_fail "${selectedImage} is not valid" + echo_info "valid images:" "${VALID_DOCKER_IMAGES[@]}" + return 1 + fi } docker_login() { - echo_if_fail docker login \ - -u "${DOCKER_REGISTRY_USER}" \ - -p "${DOCKER_REGISTRY_PASS}" \ - "${DOCKER_REGISTRY}" + echo_if_fail docker login \ + -u "${DOCKER_REGISTRY_USER}" \ + -p "${DOCKER_REGISTRY_PASS}" \ + "${DOCKER_REGISTRY}" } FB_FUNC_NAMES+=('docker_build_image') FB_FUNC_DESCS['docker_build_image']='build a docker image with the required dependencies pre-installed' FB_FUNC_COMPLETION['docker_build_image']="${VALID_DOCKER_IMAGES[*]}" docker_build_image() { - local image="$1" - validate_selected_image "${image}" || return 1 - check_docker || return 1 - PLATFORM="${PLATFORM:-$(echo_platform)}" + local image="$1" + validate_selected_image "${image}" || return 1 + check_docker || return 1 + PLATFORM="${PLATFORM:-$(echo_platform)}" - echo_info "sourcing package manager for ${image}" - local dockerDistro="$(get_docker_image_tag "${image}")" - # specific file for evaluated package manager info - local distroPkgMgr="${DOCKER_DIR}/$(bash_basename "${image}")-pkg_mgr" - # get package manager info - docker run \ - 
"${DOCKER_RUN_FLAGS[@]}" \ - "${dockerDistro}" \ - bash -c "./scripts/print_pkg_mgr.sh" | tr -d '\r' >"${distroPkgMgr}" - # shellcheck disable=SC1090 - cat "${distroPkgMgr}" - # shellcheck disable=SC1090 - source "${distroPkgMgr}" + echo_info "sourcing package manager for ${image}" + local dockerDistro="$(get_docker_image_tag "${image}")" + # specific file for evaluated package manager info + local distroPkgMgr="${DOCKER_DIR}/$(bash_basename "${image}")-pkg_mgr" + # get package manager info + docker run \ + "${DOCKER_RUN_FLAGS[@]}" \ + "${dockerDistro}" \ + bash -c "./scripts/print_pkg_mgr.sh" | tr -d '\r' >"${distroPkgMgr}" + # shellcheck disable=SC1090 + cat "${distroPkgMgr}" + # shellcheck disable=SC1090 + source "${distroPkgMgr}" - local dockerfile="${DOCKER_DIR}/Dockerfile_$(bash_basename "${image}")" - local embedPath='/Dockerfile' - { - echo "FROM ${dockerDistro}" - echo 'SHELL ["/bin/bash", "-c"]' - echo 'RUN ln -sf /bin/bash /bin/sh' - echo 'ENV DEBIAN_FRONTEND=noninteractive' - echo "RUN ${pkg_mgr_update} && ${pkg_mgr_upgrade} && ${pkg_install} ${req_pkgs[*]}" + local dockerfile="${DOCKER_DIR}/Dockerfile_$(bash_basename "${image}")" + local embedPath='/Dockerfile' + { + echo "FROM ${dockerDistro}" + echo 'SHELL ["/bin/bash", "-c"]' + echo 'RUN ln -sf /bin/bash /bin/sh' + echo 'ENV DEBIAN_FRONTEND=noninteractive' + echo "RUN ${pkg_mgr_update} && ${pkg_mgr_upgrade} && ${pkg_install} ${req_pkgs[*]}" - # ENV for pipx/rust - echo 'ENV PIPX_HOME=/root/.local' - echo 'ENV PIPX_BIN_DIR=/root/.local/bin' - echo 'ENV PATH="/root/.local/bin:$PATH"' - echo 'ENV CARGO_HOME="/root/.cargo"' - echo 'ENV RUSTUP_HOME="/root/.rustup"' - echo 'ENV PATH="/root/.cargo/bin:$PATH"' - # add to profile - echo 'RUN export PIPX_HOME=${PIPX_HOME} >> /etc/profile' - echo 'RUN export PIPX_BIN_DIR=${PIPX_BIN_DIR} >> /etc/profile' - echo 'RUN export CARGO_HOME=${CARGO_HOME} >> /etc/profile' - echo 'RUN export RUSTUP_HOME=${RUSTUP_HOME} >> /etc/profile' - echo 'RUN export PATH=${PATH} >> /etc/profile' + # ENV for pipx/rust + echo 'ENV PIPX_HOME=/root/.local' + echo 'ENV PIPX_BIN_DIR=/root/.local/bin' + echo 'ENV PATH="/root/.local/bin:$PATH"' + echo 'ENV CARGO_HOME="/root/.cargo"' + echo 'ENV RUSTUP_HOME="/root/.rustup"' + echo 'ENV PATH="/root/.cargo/bin:$PATH"' + # add to profile + echo 'RUN export PIPX_HOME=${PIPX_HOME} >> /etc/profile' + echo 'RUN export PIPX_BIN_DIR=${PIPX_BIN_DIR} >> /etc/profile' + echo 'RUN export CARGO_HOME=${CARGO_HOME} >> /etc/profile' + echo 'RUN export RUSTUP_HOME=${RUSTUP_HOME} >> /etc/profile' + echo 'RUN export PATH=${PATH} >> /etc/profile' - # make nobody:nogroup usable - echo 'RUN sed -i '/nobody/d' /etc/passwd || true' - echo 'RUN echo "nobody:x:65534:65534:nobody:/root:/bin/bash" >> /etc/passwd' - echo 'RUN sed -i '/nogroup/d' /etc/group || true' - echo 'RUN echo "nogroup:x:65534:" >> /etc/group' - # open up permissions before switching user - echo 'RUN chmod 777 -R /root/' - # run as nobody:nogroup for rest of install - echo 'USER 65534:65534' - # pipx - echo "RUN pipx install virtualenv" - # rust - local rustupVersion='1.28.2' - local rustcVersion='1.90.0' - local rustupTarball="rustup-${rustupVersion}.tar.gz" - local rustupTarballPath="${DOCKER_DIR}/${rustupTarball}" - if [[ ! 
-f ${rustupTarballPath} ]]; then - wget https://github.com/rust-lang/rustup/archive/refs/tags/${rustupVersion}.tar.gz -O "${rustupTarballPath}" - fi + # make nobody:nogroup usable + echo 'RUN sed -i '/nobody/d' /etc/passwd || true' + echo 'RUN echo "nobody:x:65534:65534:nobody:/root:/bin/bash" >> /etc/passwd' + echo 'RUN sed -i '/nogroup/d' /etc/group || true' + echo 'RUN echo "nogroup:x:65534:" >> /etc/group' + # open up permissions before switching user + echo 'RUN chmod 777 -R /root/' + # run as nobody:nogroup for rest of install + echo 'USER 65534:65534' + # pipx + echo "RUN pipx install virtualenv" + # rust + local rustupVersion='1.28.2' + local rustcVersion='1.90.0' + local rustupTarball="rustup-${rustupVersion}.tar.gz" + local rustupTarballPath="${DOCKER_DIR}/${rustupTarball}" + if [[ ! -f ${rustupTarballPath} ]]; then + wget https://github.com/rust-lang/rustup/archive/refs/tags/${rustupVersion}.tar.gz -O "${rustupTarballPath}" + fi - echo "ADD ${rustupTarball} /tmp/" - echo "RUN cd /tmp/rustup-${rustupVersion} && bash rustup-init.sh -y --default-toolchain=${rustcVersion}" - # install cargo-binstall - echo "RUN curl -L --proto '=https' --tlsv1.2 -sSf https://raw.githubusercontent.com/cargo-bins/cargo-binstall/main/install-from-binstall-release.sh | bash" - # install cargo-c - echo "RUN cargo-binstall -y cargo-c" + echo "ADD ${rustupTarball} /tmp/" + echo "RUN cd /tmp/rustup-${rustupVersion} && bash rustup-init.sh -y --default-toolchain=${rustcVersion}" + # install cargo-binstall + echo "RUN curl -L --proto '=https' --tlsv1.2 -sSf https://raw.githubusercontent.com/cargo-bins/cargo-binstall/main/install-from-binstall-release.sh | bash" + # install cargo-c + echo "RUN cargo-binstall -y cargo-c" - # final mods for PS1 - echo - echo 'USER root' - echo "RUN echo \"PS1='id=\\\$(id -u)@${image}:\w\\$ '\" >> /etc/bash.bashrc" - echo 'USER 65534:65534' - echo + # final mods for PS1 + echo + echo 'USER root' + echo "RUN echo \"PS1='id=\\\$(id -u)@${image}:\w\\$ '\" >> /etc/bash.bashrc" + echo 'USER 65534:65534' + echo - # embed dockerfile into docker image itself - # shellcheck disable=SC2094 - echo "COPY $(bash_basename "${dockerfile}") ${embedPath}" + # embed dockerfile into docker image itself + # shellcheck disable=SC2094 + echo "COPY $(bash_basename "${dockerfile}") ${embedPath}" - echo "WORKDIR ${DOCKER_WORKDIR}" + echo "WORKDIR ${DOCKER_WORKDIR}" - } >"${dockerfile}" + } >"${dockerfile}" - # docker buildx is too aggressive with invalidating - # build layer caches. Instead of relying on docker - # to check for when to rebuild, compare the to-build - # dockerfile with the embedded dockerfile - local oldDockerfile="${dockerfile}.old" - docker_run_image "${image}" cp "${embedPath}" "${oldDockerfile}" - if diff "${dockerfile}" "${oldDockerfile}"; then - echo_pass "no dockerfile changes detected, skipping rebuild" - return 0 - else - echo_warn "dockerfile changes detected, proceeding with build" - fi + # docker buildx is too aggressive with invalidating + # build layer caches. 
Instead of relying on docker + # to check for when to rebuild, compare the to-build + # dockerfile with the embedded dockerfile + local oldDockerfile="${dockerfile}.old" + docker_run_image "${image}" cp "${embedPath}" "${oldDockerfile}" + if diff "${dockerfile}" "${oldDockerfile}"; then + echo_pass "no dockerfile changes detected, skipping rebuild" + return 0 + else + echo_warn "dockerfile changes detected, proceeding with build" + fi - image_tag="$(set_distro_image_tag "${image}")" - docker buildx build \ - --platform "${PLATFORM}" \ - -t "${image_tag}" \ - -f "${dockerfile}" \ - "${DOCKER_DIR}" || return 1 + image_tag="$(set_distro_image_tag "${image}")" + docker buildx build \ + --platform "${PLATFORM}" \ + -t "${image_tag}" \ + -f "${dockerfile}" \ + "${DOCKER_DIR}" || return 1 - # if a docker registry is defined, push to it - if [[ ${DOCKER_REGISTRY} != '' ]]; then - docker_login || return 1 - docker buildx build \ - --push \ - --platform "${PLATFORM}" \ - -t "${DOCKER_REGISTRY}/${image_tag}" \ - -f "${dockerfile}" \ - "${DOCKER_DIR}" || return 1 - fi + # if a docker registry is defined, push to it + if [[ ${DOCKER_REGISTRY} != '' ]]; then + docker_login || return 1 + docker buildx build \ + --push \ + --platform "${PLATFORM}" \ + -t "${DOCKER_REGISTRY}/${image_tag}" \ + -f "${dockerfile}" \ + "${DOCKER_DIR}" || return 1 + fi } FB_FUNC_NAMES+=('docker_save_image') FB_FUNC_DESCS['docker_save_image']='save docker image into tar.zst' FB_FUNC_COMPLETION['docker_save_image']="${VALID_DOCKER_IMAGES[*]}" docker_save_image() { - local image="$1" - validate_selected_image "${image}" || return 1 - check_docker || return 1 - image_tag="$(set_distro_image_tag "${image}")" - echo_info "saving docker image for ${image_tag}" - docker save "${image_tag}" | - zstd -T0 >"${DOCKER_DIR}/$(docker_image_archive_name "${image_tag}")" || - return 1 + local image="$1" + validate_selected_image "${image}" || return 1 + check_docker || return 1 + image_tag="$(set_distro_image_tag "${image}")" + echo_info "saving docker image for ${image_tag}" + docker save "${image_tag}" | + zstd -T0 >"${DOCKER_DIR}/$(docker_image_archive_name "${image_tag}")" || + return 1 } FB_FUNC_NAMES+=('docker_load_image') FB_FUNC_DESCS['docker_load_image']='load docker image from tar.zst' FB_FUNC_COMPLETION['docker_load_image']="${VALID_DOCKER_IMAGES[*]}" docker_load_image() { - local image="$1" - validate_selected_image "${image}" || return 1 - check_docker || return 1 - image_tag="$(set_distro_image_tag "${image}")" - echo_info "loading docker image for ${image_tag}" - local archive="${DOCKER_DIR}/$(docker_image_archive_name "${image_tag}")" - test -f "$archive" || return 1 - zstdcat -T0 "$archive" | docker load || return 1 - docker system prune -f + local image="$1" + validate_selected_image "${image}" || return 1 + check_docker || return 1 + image_tag="$(set_distro_image_tag "${image}")" + echo_info "loading docker image for ${image_tag}" + local archive="${DOCKER_DIR}/$(docker_image_archive_name "${image_tag}")" + test -f "$archive" || return 1 + zstdcat -T0 "$archive" | docker load || return 1 + docker system prune -f } FB_FUNC_NAMES+=('docker_run_image') FB_FUNC_DESCS['docker_run_image']='run a docker image with the given arguments' FB_FUNC_COMPLETION['docker_run_image']="${VALID_DOCKER_IMAGES[*]}" docker_run_image() { - local image="$1" - shift - validate_selected_image "${image}" || return 1 - check_docker || return 1 + local image="$1" + shift + validate_selected_image "${image}" || return 1 + check_docker || return 1 - local 
cmd=("$@") - local runCmd=() - if [[ ${cmd[*]} == '' ]]; then - DOCKER_RUN_FLAGS+=("-it") - else - runCmd+=("${cmd[@]}") - fi + local cmd=("$@") + local runCmd=() + if [[ ${cmd[*]} == '' ]]; then + DOCKER_RUN_FLAGS+=("-it") + else + runCmd+=("${cmd[@]}") + fi - local image_tag="$(set_distro_image_tag "${image}")" + local image_tag="$(set_distro_image_tag "${image}")" - # if a docker registry is defined, pull from it - if [[ ${DOCKER_REGISTRY} != '' ]]; then - docker_login || return 1 - docker pull \ - "${DOCKER_REGISTRY}/${image_tag}" || return 1 - docker tag "${DOCKER_REGISTRY}/${image_tag}" "${image_tag}" - fi + # if a docker registry is defined, pull from it + if [[ ${DOCKER_REGISTRY} != '' ]]; then + docker_login || return 1 + docker pull \ + "${DOCKER_REGISTRY}/${image_tag}" || return 1 + docker tag "${DOCKER_REGISTRY}/${image_tag}" "${image_tag}" + fi - echo_info "running docker image ${image_tag}" - docker run \ - "${DOCKER_RUN_FLAGS[@]}" \ - -u "$(id -u):$(id -g)" \ - "${image_tag}" \ - "${runCmd[@]}" + echo_info "running docker image ${image_tag}" + docker run \ + "${DOCKER_RUN_FLAGS[@]}" \ + -u "$(id -u):$(id -g)" \ + "${image_tag}" \ + "${runCmd[@]}" - local rv=$? - docker image prune -f - return ${rv} + local rv=$? + docker image prune -f + return ${rv} } FB_FUNC_NAMES+=('build_with_docker') FB_FUNC_DESCS['build_with_docker']='run docker image with given flags' FB_FUNC_COMPLETION['build_with_docker']="${VALID_DOCKER_IMAGES[*]}" build_with_docker() { - local image="$1" - docker_run_image "${image}" ./scripts/build.sh || return 1 + local image="$1" + docker_run_image "${image}" ./scripts/build.sh || return 1 } FB_FUNC_NAMES+=('docker_build_multiarch_image') FB_FUNC_DESCS['docker_build_multiarch_image']='build multiarch docker image' FB_FUNC_COMPLETION['docker_build_multiarch_image']="${VALID_DOCKER_IMAGES[*]}" docker_build_multiarch_image() { - local image="$1" - validate_selected_image "${image}" || return 1 - check_docker || return 1 - PLATFORM='linux/amd64,linux/arm64' + local image="$1" + validate_selected_image "${image}" || return 1 + check_docker || return 1 + PLATFORM='linux/amd64,linux/arm64' - # check if we need to create multiplatform builder - local buildxPlats="$(docker buildx inspect | grep Platforms)" - IFS=',' - local createBuilder=0 - for plat in $PLATFORM; do - grep -q "${plat}" <<<"${buildxPlats}" || createBuilder=1 - done - unset IFS + # check if we need to create multiplatform builder + local buildxPlats="$(docker buildx inspect | grep Platforms)" + IFS=',' + local createBuilder=0 + for plat in $PLATFORM; do + grep -q "${plat}" <<<"${buildxPlats}" || createBuilder=1 + done + unset IFS - if [[ ${createBuilder} == 1 ]]; then - echo_info "creating multiplatform (${PLATFORM}) docker builder" - docker buildx create \ - --use \ - --platform="${PLATFORM}" \ - --name my-multiplatform-builder \ - --driver=docker-container - fi + if [[ ${createBuilder} == 1 ]]; then + echo_info "creating multiplatform (${PLATFORM}) docker builder" + docker buildx create \ + --use \ + --platform="${PLATFORM}" \ + --name my-multiplatform-builder \ + --driver=docker-container + fi - docker_build_image "$@" + docker_build_image "$@" } diff --git a/lib/efg.sh b/lib/efg.sh index 3379880..80493ca 100644 --- a/lib/efg.sh +++ b/lib/efg.sh @@ -1,231 +1,231 @@ #!/usr/bin/env bash efg_usage() { - echo "efg -i input [options]" - echo -e "\t[-l NUM] low value (default: ${LOW})" - echo -e "\t[-s NUM] step value (default: ${STEP})" - echo -e "\t[-h NUM] high value (default: ${HIGH})" - echo -e "\t[-p] 
plot bitrates using gnuplot" - echo -e "\n\t[-I] system install at ${EFG_INSTALL_PATH}" - echo -e "\t[-U] uninstall from ${EFG_INSTALL_PATH}" - return 0 + echo "efg -i input [options]" + echo -e "\t[-l NUM] low value (default: ${LOW})" + echo -e "\t[-s NUM] step value (default: ${STEP})" + echo -e "\t[-h NUM] high value (default: ${HIGH})" + echo -e "\t[-p] plot bitrates using gnuplot" + echo -e "\n\t[-I] system install at ${EFG_INSTALL_PATH}" + echo -e "\t[-U] uninstall from ${EFG_INSTALL_PATH}" + return 0 } set_efg_opts() { - local opts='pl:s:h:i:IU' - local numOpts=${#opts} - # default values - unset INPUT - LOW=0 - STEP=1 - HIGH=30 - PLOT=false - EFG_INSTALL_PATH='/usr/local/bin/efg' - # only using -I or -U - local minOpt=1 - # using all - local maxOpt=${numOpts} - test $# -lt ${minOpt} && efg_usage && return 1 - test $# -gt ${maxOpt} && efg_usage && return 1 - local OPTARG OPTIND - while getopts "${opts}" flag; do - case "${flag}" in - I) - echo_warn "attempting install" - sudo ln -sf "${SCRIPT_DIR}/efg.sh" \ - "${EFG_INSTALL_PATH}" || return 1 - echo_pass "succesfull install" - return ${FUNC_EXIT_SUCCESS} - ;; - U) - echo_warn "attempting uninstall" - sudo rm "${EFG_INSTALL_PATH}" || return 1 - echo_pass "succesfull uninstall" - return ${FUNC_EXIT_SUCCESS} - ;; - i) - if [[ $# -lt 2 ]]; then - echo_fail "wrong arguments given" - efg_usage - return 1 - fi - INPUT="${OPTARG}" - ;; - p) - missing_cmd gnuplot && return 1 - PLOT=true - ;; - l) - if ! is_positive_integer "${OPTARG}"; then - efg_usage - return 1 - fi - LOW="${OPTARG}" - ;; - s) - if ! is_positive_integer "${OPTARG}"; then - efg_usage - return 1 - fi - STEP="${OPTARG}" - ;; - h) - if ! is_positive_integer "${OPTARG}"; then - efg_usage - return 1 - fi - HIGH="${OPTARG}" - ;; - *) - echo "wrong flags given" - efg_usage - return 1 - ;; - esac - done + local opts='pl:s:h:i:IU' + local numOpts=${#opts} + # default values + unset INPUT + LOW=0 + STEP=1 + HIGH=30 + PLOT=false + EFG_INSTALL_PATH='/usr/local/bin/efg' + # only using -I or -U + local minOpt=1 + # using all + local maxOpt=${numOpts} + test $# -lt ${minOpt} && efg_usage && return 1 + test $# -gt ${maxOpt} && efg_usage && return 1 + local OPTARG OPTIND + while getopts "${opts}" flag; do + case "${flag}" in + I) + echo_warn "attempting install" + sudo ln -sf "${SCRIPT_DIR}/efg.sh" \ + "${EFG_INSTALL_PATH}" || return 1 + echo_pass "succesfull install" + return ${FUNC_EXIT_SUCCESS} + ;; + U) + echo_warn "attempting uninstall" + sudo rm "${EFG_INSTALL_PATH}" || return 1 + echo_pass "succesfull uninstall" + return ${FUNC_EXIT_SUCCESS} + ;; + i) + if [[ $# -lt 2 ]]; then + echo_fail "wrong arguments given" + efg_usage + return 1 + fi + INPUT="${OPTARG}" + ;; + p) + missing_cmd gnuplot && return 1 + PLOT=true + ;; + l) + if ! is_positive_integer "${OPTARG}"; then + efg_usage + return 1 + fi + LOW="${OPTARG}" + ;; + s) + if ! is_positive_integer "${OPTARG}"; then + efg_usage + return 1 + fi + STEP="${OPTARG}" + ;; + h) + if ! is_positive_integer "${OPTARG}"; then + efg_usage + return 1 + fi + HIGH="${OPTARG}" + ;; + *) + echo "wrong flags given" + efg_usage + return 1 + ;; + esac + done - if [[ ! -f ${INPUT} ]]; then - echo "${INPUT} does not exist" - efg_usage - return 1 - fi + if [[ ! 
-f ${INPUT} ]]; then + echo "${INPUT} does not exist" + efg_usage + return 1 + fi - # set custom EFG_DIR based off of sanitized inputfile - local sanitizedInput="$(bash_basename "${INPUT}")" - local sanitizeChars=(' ' '@' ':') - for char in "${sanitizeChars[@]}"; do - sanitizedInput="${sanitizedInput//${char}/}" - done - EFG_DIR+="-${sanitizedInput}" + # set custom EFG_DIR based off of sanitized inputfile + local sanitizedInput="$(bash_basename "${INPUT}")" + local sanitizeChars=(' ' '@' ':') + for char in "${sanitizeChars[@]}"; do + sanitizedInput="${sanitizedInput//${char}/}" + done + EFG_DIR+="-${sanitizedInput}" - echo_info "estimating film grain for ${INPUT}" - echo_info "range: $LOW-$HIGH with $STEP step increments" + echo_info "estimating film grain for ${INPUT}" + echo_info "range: $LOW-$HIGH with $STEP step increments" } efg_segment() { - # number of segments to split video - local segments=30 - # duration of each segment - local segmentTime=3 + # number of segments to split video + local segments=30 + # duration of each segment + local segmentTime=3 - # get times to split the input based - # off of number of segments - local duration - duration="$(get_duration "${INPUT}")" || return 1 - # trim decimal points if any - IFS=. read -r duration _ <<<"${duration}" - # number of seconds that equal 1 percent of the video - local percentTime=$((duration / 100)) - # percent that each segment takes - local percentSegment=$((100 / segments)) - # number of seconds to increment between segments - local timeBetweenSegments=$((percentTime * percentSegment)) - if [[ ${timeBetweenSegments} -lt ${segmentTime} ]]; then - timeBetweenSegments=${segmentTime} - fi - local segmentBitrates=() + # get times to split the input based + # off of number of segments + local duration + duration="$(get_duration "${INPUT}")" || return 1 + # trim decimal points if any + IFS=. 
read -r duration _ <<<"${duration}" + # number of seconds that equal 1 percent of the video + local percentTime=$((duration / 100)) + # percent that each segment takes + local percentSegment=$((100 / segments)) + # number of seconds to increment between segments + local timeBetweenSegments=$((percentTime * percentSegment)) + if [[ ${timeBetweenSegments} -lt ${segmentTime} ]]; then + timeBetweenSegments=${segmentTime} + fi + local segmentBitrates=() - # clean workspace - recreate_dir "${EFG_DIR}" || return 1 + # clean workspace + recreate_dir "${EFG_DIR}" || return 1 - # split up video into segments based on start times - for ((time = 0; time < duration; time += timeBetweenSegments)); do - local outSegment="${EFG_DIR}/segment-${#segmentBitrates[@]}.mkv" - split_video "${INPUT}" "${time}" "${segmentTime}" "${outSegment}" || return 1 - local segmentBitrate - segmentBitrate="$(get_avg_bitrate "${outSegment}")" || return 1 - segmentBitrates+=("${segmentBitrate}:${outSegment}") - done - local numSegments="${#segmentBitrates[@]}" + # split up video into segments based on start times + for ((time = 0; time < duration; time += timeBetweenSegments)); do + local outSegment="${EFG_DIR}/segment-${#segmentBitrates[@]}.mkv" + split_video "${INPUT}" "${time}" "${segmentTime}" "${outSegment}" || return 1 + local segmentBitrate + segmentBitrate="$(get_avg_bitrate "${outSegment}")" || return 1 + segmentBitrates+=("${segmentBitrate}:${outSegment}") + done + local numSegments="${#segmentBitrates[@]}" - local removeSegments - if [[ ${numSegments} -lt ${ENCODE_SEGMENTS} ]]; then - removeSegments=0 - else - removeSegments=$((numSegments - ENCODE_SEGMENTS)) - fi + local removeSegments + if [[ ${numSegments} -lt ${ENCODE_SEGMENTS} ]]; then + removeSegments=0 + else + removeSegments=$((numSegments - ENCODE_SEGMENTS)) + fi - # sort the segments - mapfile -t sortedSegments < <(IFS=: bash_sort "${segmentBitrates[@]}") - # make sure bitrate for each file is actually increasing - local prevBitrate=0 - # remove all but the highest bitrate segments - for segment in "${sortedSegments[@]}"; do - test ${removeSegments} -eq 0 && break - local file currBitrate - IFS=: read -r _ file <<<"${segment}" - currBitrate="$(get_avg_bitrate "${file}")" || return 1 + # sort the segments + mapfile -t sortedSegments < <(IFS=: bash_sort "${segmentBitrates[@]}") + # make sure bitrate for each file is actually increasing + local prevBitrate=0 + # remove all but the highest bitrate segments + for segment in "${sortedSegments[@]}"; do + test ${removeSegments} -eq 0 && break + local file currBitrate + IFS=: read -r _ file <<<"${segment}" + currBitrate="$(get_avg_bitrate "${file}")" || return 1 - if [[ ${currBitrate} -lt ${prevBitrate} ]]; then - echo_fail "${file} is not a higher bitrate than previous" - return 1 - fi - prevBitrate=${currBitrate} + if [[ ${currBitrate} -lt ${prevBitrate} ]]; then + echo_fail "${file} is not a higher bitrate than previous" + return 1 + fi + prevBitrate=${currBitrate} - rm "${file}" || return 1 - removeSegments=$((removeSegments - 1)) - done + rm "${file}" || return 1 + removeSegments=$((removeSegments - 1)) + done } efg_encode() { - echo -n >"${GRAIN_LOG}" - for vid in "${EFG_DIR}/"*.mkv; do - echo "file: ${vid}" >>"${GRAIN_LOG}" - for ((grain = LOW; grain <= HIGH; grain += STEP)); do - local file="$(bash_basename "${vid}")" - local out="${EFG_DIR}/grain-${grain}-${file}" - echo_info "encoding ${file} with grain ${grain}" - echo_if_fail encode -P 10 -g ${grain} -i "${vid}" "${out}" - echo -e "\tgrain: ${grain}, 
bitrate: $(get_avg_bitrate "${out}")" >>"${GRAIN_LOG}" - rm "${out}" - done - done + echo -n >"${GRAIN_LOG}" + for vid in "${EFG_DIR}/"*.mkv; do + echo "file: ${vid}" >>"${GRAIN_LOG}" + for ((grain = LOW; grain <= HIGH; grain += STEP)); do + local file="$(bash_basename "${vid}")" + local out="${EFG_DIR}/grain-${grain}-${file}" + echo_info "encoding ${file} with grain ${grain}" + echo_if_fail encode -P 10 -g ${grain} -i "${vid}" "${out}" + echo -e "\tgrain: ${grain}, bitrate: $(get_avg_bitrate "${out}")" >>"${GRAIN_LOG}" + rm "${out}" + done + done - less "${GRAIN_LOG}" + echo "$(<"${GRAIN_LOG}")" } efg_plot() { - declare -A normalizedBitrateSums=() - local referenceBitrate='' - local setNewReference='' + declare -A normalizedBitrateSums=() + local referenceBitrate='' + local setNewReference='' - while read -r line; do - local noWhite="${line// /}" - # new file, reset logic - if line_starts_with "${noWhite}" 'file:'; then - setNewReference=true - continue - fi + while read -r line; do + local noWhite="${line// /}" + # new file, reset logic + if line_starts_with "${noWhite}" 'file:'; then + setNewReference=true + continue + fi - IFS=',' read -r grainText bitrateText <<<"${noWhite}" - IFS=':' read -r _ grain <<<"${grainText}" - IFS=':' read -r _ bitrate <<<"${bitrateText}" - if [[ ${setNewReference} == true ]]; then - referenceBitrate="${bitrate}" - setNewReference=false - fi - # bash doesn't support floats, so scale up by 10000 - local normBitrate=$((bitrate * 10000 / referenceBitrate)) - local currSumBitrate=${normalizedBitrateSums[${grain}]} - normalizedBitrateSums[${grain}]=$((normBitrate + currSumBitrate)) - setNewReference=false - done <"${GRAIN_LOG}" + IFS=',' read -r grainText bitrateText <<<"${noWhite}" + IFS=':' read -r _ grain <<<"${grainText}" + IFS=':' read -r _ bitrate <<<"${bitrateText}" + if [[ ${setNewReference} == true ]]; then + referenceBitrate="${bitrate}" + setNewReference=false + fi + # bash doesn't support floats, so scale up by 10000 + local normBitrate=$((bitrate * 10000 / referenceBitrate)) + local currSumBitrate=${normalizedBitrateSums[${grain}]} + normalizedBitrateSums[${grain}]=$((normBitrate + currSumBitrate)) + setNewReference=false + done <"${GRAIN_LOG}" - # create grain:average plot file - local plotFile="${EFG_DIR}/plot.dat" - echo -n >"${plotFile}" - for ((grain = LOW; grain <= HIGH; grain += STEP)); do - local sum=${normalizedBitrateSums[${grain}]} - local avg=$((sum / ENCODE_SEGMENTS)) - echo -e "${grain}\t${avg}" >>"${plotFile}" - done + # create grain:average plot file + local plotFile="${EFG_DIR}/plot.dat" + echo -n >"${plotFile}" + for ((grain = LOW; grain <= HIGH; grain += STEP)); do + local sum=${normalizedBitrateSums[${grain}]} + local avg=$((sum / ENCODE_SEGMENTS)) + echo -e "${grain}\t${avg}" >>"${plotFile}" + done - # plot data - # run subprocess for bash COLUMNS/LINES - shopt -s checkwinsize - (true) - gnuplot -p -e "\ + # plot data + # run subprocess for bash COLUMNS/LINES + shopt -s checkwinsize + (true) + gnuplot -p -e "\ set terminal dumb size ${COLUMNS}, ${LINES}; \ set autoscale; \ set style line 1 \ @@ -233,37 +233,37 @@ linecolor rgb '#0060ad' \ linetype 1 linewidth 2 \ pointtype 7 pointsize 1.5; \ plot \"${plotFile}\" with linespoints linestyle 1" | less - echo_info "grain log: ${GRAIN_LOG}" + echo_info "grain log: ${GRAIN_LOG}" } FB_FUNC_NAMES+=('efg') # shellcheck disable=SC2034 FB_FUNC_DESCS['efg']='estimate the film grain of a given file' efg() { - EFG_DIR="${TMP_DIR}/efg" - # encode N highest-bitrate segments - ENCODE_SEGMENTS=5 + 
EFG_DIR="${TMP_DIR}/efg" + # encode N highest-bitrate segments + ENCODE_SEGMENTS=5 - set_efg_opts "$@" - local ret=$? - if [[ ${ret} -eq ${FUNC_EXIT_SUCCESS} ]]; then - return 0 - elif [[ ${ret} -ne 0 ]]; then - return ${ret} - fi - ensure_dir "${EFG_DIR}" + set_efg_opts "$@" + local ret=$? + if [[ ${ret} -eq ${FUNC_EXIT_SUCCESS} ]]; then + return 0 + elif [[ ${ret} -ne 0 ]]; then + return ${ret} + fi + ensure_dir "${EFG_DIR}" - GRAIN_LOG="${EFG_DIR}/${LOW}-${STEP}-${HIGH}-grains.txt" + GRAIN_LOG="${EFG_DIR}/${LOW}-${STEP}-${HIGH}-grains.txt" - if [[ ${PLOT} == true && -f ${GRAIN_LOG} ]]; then - efg_plot - return $? - fi + if [[ ${PLOT} == true && -f ${GRAIN_LOG} ]]; then + efg_plot + return $? + fi - efg_segment || return 1 - efg_encode || return 1 + efg_segment || return 1 + efg_encode || return 1 - if [[ ${PLOT} == true && -f ${GRAIN_LOG} ]]; then - efg_plot || return 1 - fi + if [[ ${PLOT} == true && -f ${GRAIN_LOG} ]]; then + efg_plot || return 1 + fi } diff --git a/lib/encode.sh b/lib/encode.sh index cea603b..ff8983b 100644 --- a/lib/encode.sh +++ b/lib/encode.sh @@ -2,481 +2,481 @@ # sets unmapStreams set_unmap_streams() { - local file="$1" - local unmapFilter='bin_data|jpeg|png' - local streamsStr - unmapStreams=() - streamsStr="$(get_num_streams "${file}")" || return 1 - mapfile -t streams <<<"${streamsStr}" || return 1 - for stream in "${streams[@]}"; do - if [[ "$(get_stream_codec "${file}" "${stream}")" =~ ${unmapFilter} ]]; then - unmapStreams+=("-map" "-0:${stream}") - fi - done + local file="$1" + local unmapFilter='bin_data|jpeg|png' + local streamsStr + unmapStreams=() + streamsStr="$(get_num_streams "${file}")" || return 1 + mapfile -t streams <<<"${streamsStr}" || return 1 + for stream in "${streams[@]}"; do + if [[ "$(get_stream_codec "${file}" "${stream}")" =~ ${unmapFilter} ]]; then + unmapStreams+=("-map" "-0:${stream}") + fi + done } # sets audioParams set_audio_params() { - local file="$1" - local videoLang - audioParams=() - videoLang="$(get_stream_lang "${file}" 'v:0')" || return 1 - for stream in $(get_num_streams "${file}" 'a'); do - local numChannels codec lang - numChannels="$(get_num_audio_channels "${file}" "${stream}")" || return 1 - if [[ ${numChannels} == '' ]]; then - echo_fail "could not obtain channel count for stream ${stream}" - return 1 - fi - local channelBitrate=$((numChannels * 64)) - codec="$(get_stream_codec "${file}" "${stream}")" || return 1 - lang="$(get_stream_lang "${file}" "${stream}")" || return 1 - if [[ ${videoLang} != '' && ${videoLang} != "${lang}" ]]; then - audioParams+=( - '-map' - "-0:${stream}" - ) - elif [[ ${codec} == 'opus' ]]; then - audioParams+=( - "-c:${OUTPUT_INDEX}" - "copy" - ) - OUTPUT_INDEX=$((OUTPUT_INDEX + 1)) - else - audioParams+=( - "-filter:${OUTPUT_INDEX}" - "aformat=channel_layouts=7.1|5.1|stereo|mono" - "-c:${OUTPUT_INDEX}" - "libopus" - "-b:${OUTPUT_INDEX}" - "${channelBitrate}k" - ) - OUTPUT_INDEX=$((OUTPUT_INDEX + 1)) - fi - done + local file="$1" + local videoLang + audioParams=() + videoLang="$(get_stream_lang "${file}" 'v:0')" || return 1 + for stream in $(get_num_streams "${file}" 'a'); do + local numChannels codec lang + numChannels="$(get_num_audio_channels "${file}" "${stream}")" || return 1 + if [[ ${numChannels} == '' ]]; then + echo_fail "could not obtain channel count for stream ${stream}" + return 1 + fi + local channelBitrate=$((numChannels * 64)) + codec="$(get_stream_codec "${file}" "${stream}")" || return 1 + lang="$(get_stream_lang "${file}" "${stream}")" || return 1 + if [[ ${videoLang} 
!= '' && ${videoLang} != "${lang}" ]]; then + audioParams+=( + '-map' + "-0:${stream}" + ) + elif [[ ${codec} == 'opus' ]]; then + audioParams+=( + "-c:${OUTPUT_INDEX}" + "copy" + ) + OUTPUT_INDEX=$((OUTPUT_INDEX + 1)) + else + audioParams+=( + "-filter:${OUTPUT_INDEX}" + "aformat=channel_layouts=7.1|5.1|stereo|mono" + "-c:${OUTPUT_INDEX}" + "libopus" + "-b:${OUTPUT_INDEX}" + "${channelBitrate}k" + ) + OUTPUT_INDEX=$((OUTPUT_INDEX + 1)) + fi + done } # sets subtitleParams set_subtitle_params() { - local file="$1" - local convertCodec='eia_608' - local keepLang='eng' + local file="$1" + local convertCodec='eia_608' + local keepLang='eng' - local defaultTextCodec - if [[ ${SAME_CONTAINER} == false && ${FILE_EXT} == 'mkv' ]]; then - defaultTextCodec='srt' - convertCodec+='|mov_text' - else - defaultTextCodec='mov_text' - convertCodec+='|srt' - fi + local defaultTextCodec + if [[ ${SAME_CONTAINER} == false && ${FILE_EXT} == 'mkv' ]]; then + defaultTextCodec='srt' + convertCodec+='|mov_text' + else + defaultTextCodec='mov_text' + convertCodec+='|srt' + fi - subtitleParams=() - for stream in $(get_num_streams "${file}" 's'); do - local codec lang - codec="$(get_stream_codec "${file}" "${stream}")" || return 1 - lang="$(get_stream_lang "${file}" "${stream}")" || return 1 - if [[ ${lang} != '' && ${keepLang} != "${lang}" ]]; then - subtitleParams+=( - '-map' - "-0:${stream}" - ) - elif [[ ${codec} =~ ${convertCodec} ]]; then - subtitleParams+=("-c:${OUTPUT_INDEX}" "${defaultTextCodec}") - OUTPUT_INDEX=$((OUTPUT_INDEX + 1)) - fi - done + subtitleParams=() + for stream in $(get_num_streams "${file}" 's'); do + local codec lang + codec="$(get_stream_codec "${file}" "${stream}")" || return 1 + lang="$(get_stream_lang "${file}" "${stream}")" || return 1 + if [[ ${lang} != '' && ${keepLang} != "${lang}" ]]; then + subtitleParams+=( + '-map' + "-0:${stream}" + ) + elif [[ ${codec} =~ ${convertCodec} ]]; then + subtitleParams+=("-c:${OUTPUT_INDEX}" "${defaultTextCodec}") + OUTPUT_INDEX=$((OUTPUT_INDEX + 1)) + fi + done } get_encode_versions() { - action="${1:-}" + action="${1:-}" - encodeVersion="encode=$(git -C "${REPO_DIR}" rev-parse --short HEAD)" - ffmpegVersion='' - videoEncVersion='' - audioEncVersion='' + encodeVersion="encode=$(git -C "${REPO_DIR}" rev-parse --short HEAD)" + ffmpegVersion='' + videoEncVersion='' + audioEncVersion='' - # shellcheck disable=SC2155 - local output="$(ffmpeg -version 2>&1)" - while read -r line; do - if line_starts_with "${line}" 'ffmpeg='; then - ffmpegVersion="${line}" - elif line_starts_with "${line}" 'libsvtav1'; then - videoEncVersion="${line}" - elif line_starts_with "${line}" 'libopus='; then - audioEncVersion="${line}" - fi - done <<<"${output}" + # shellcheck disable=SC2155 + local output="$(ffmpeg -version 2>&1)" + while read -r line; do + if line_starts_with "${line}" 'ffmpeg='; then + ffmpegVersion="${line}" + elif line_starts_with "${line}" 'libsvtav1'; then + videoEncVersion="${line}" + elif line_starts_with "${line}" 'libopus='; then + audioEncVersion="${line}" + fi + done <<<"${output}" - local version - if [[ ${ffmpegVersion} == '' ]]; then - while read -r line; do - if line_starts_with "${line}" 'ffmpeg version '; then - read -r _ _ version _ <<<"${line}" - ffmpegVersion="ffmpeg=${version}" - break - fi - done <<<"${output}" - fi + local version + if [[ ${ffmpegVersion} == '' ]]; then + while read -r line; do + if line_starts_with "${line}" 'ffmpeg version '; then + read -r _ _ version _ <<<"${line}" + ffmpegVersion="ffmpeg=${version}" + break + fi + 
done <<<"${output}" + fi - if [[ ${videoEncVersion} == '' ]]; then - version="$(get_pkgconfig_version SvtAv1Enc)" - test "${version}" == '' && return 1 - videoEncVersion="libsvtav1=${version}" - fi + if [[ ${videoEncVersion} == '' ]]; then + version="$(get_pkgconfig_version SvtAv1Enc)" + test "${version}" == '' && return 1 + videoEncVersion="libsvtav1=${version}" + fi - if [[ ${audioEncVersion} == '' ]]; then - version="$(get_pkgconfig_version opus)" - test "${version}" == '' && return 1 - audioEncVersion="libopus=${version}" - fi + if [[ ${audioEncVersion} == '' ]]; then + version="$(get_pkgconfig_version opus)" + test "${version}" == '' && return 1 + audioEncVersion="libopus=${version}" + fi - test "${ffmpegVersion}" == '' && return 1 - test "${videoEncVersion}" == '' && return 1 - test "${audioEncVersion}" == '' && return 1 + test "${ffmpegVersion}" == '' && return 1 + test "${videoEncVersion}" == '' && return 1 + test "${audioEncVersion}" == '' && return 1 - if [[ ${action} == 'print' ]]; then - echo "${encodeVersion}" - echo "${ffmpegVersion}" - echo "${videoEncVersion}" - echo "${audioEncVersion}" - fi - return 0 + if [[ ${action} == 'print' ]]; then + echo "${encodeVersion}" + echo "${ffmpegVersion}" + echo "${videoEncVersion}" + echo "${audioEncVersion}" + fi + return 0 } encode_usage() { - echo "encode -i input [options] output" - echo -e "\t[-P NUM] set preset (default: ${PRESET})" - echo -e "\t[-C NUM] set CRF (default: ${CRF})" - echo -e "\t[-g NUM] set film grain for encode" - echo -e "\t[-p] print the command instead of executing it (default: ${PRINT_OUT})" - echo -e "\t[-c] use cropdetect (default: ${CROP})" - echo -e "\t[-d] enable dolby vision (default: ${DV_TOGGLE})" - echo -e "\t[-v] print relevant version info" - echo -e "\t[-s] use same container as input, default is convert to mkv" - echo -e "\n\t[output] if unset, defaults to \${HOME}/av1-input-file-name.mkv" - echo -e "\n\t[-u] update script (git pull ffmpeg-builder)" - echo -e "\t[-I] system install at ${ENCODE_INSTALL_PATH}" - echo -e "\t[-U] uninstall from ${ENCODE_INSTALL_PATH}" - return 0 + echo "encode -i input [options] output" + echo -e "\t[-P NUM] set preset (default: ${PRESET})" + echo -e "\t[-C NUM] set CRF (default: ${CRF})" + echo -e "\t[-g NUM] set film grain for encode" + echo -e "\t[-p] print the command instead of executing it (default: ${PRINT_OUT})" + echo -e "\t[-c] use cropdetect (default: ${CROP})" + echo -e "\t[-d] enable dolby vision (default: ${DV_TOGGLE})" + echo -e "\t[-v] print relevant version info" + echo -e "\t[-s] use same container as input, default is convert to mkv" + echo -e "\n\t[output] if unset, defaults to \${HOME}/av1-input-file-name.mkv" + echo -e "\n\t[-u] update script (git pull ffmpeg-builder)" + echo -e "\t[-I] system install at ${ENCODE_INSTALL_PATH}" + echo -e "\t[-U] uninstall from ${ENCODE_INSTALL_PATH}" + return 0 } encode_update() { - git -C "${REPO_DIR}" pull + git -C "${REPO_DIR}" pull } set_encode_opts() { - local opts='vi:pcsdg:P:C:uIU' - local numOpts=${#opts} - # default values - PRESET=3 - CRF=25 - GRAIN="" - CROP=false - PRINT_OUT=false - DV_TOGGLE=false - ENCODE_INSTALL_PATH='/usr/local/bin/encode' - SAME_CONTAINER="false" - # only using -I/U - local minOpt=1 - # using all + output name - local maxOpt=$((numOpts + 1)) - test $# -lt ${minOpt} && encode_usage && return 1 - test $# -gt ${maxOpt} && encode_usage && return 1 - local optsUsed=0 - local OPTARG OPTIND - while getopts "${opts}" flag; do - case "${flag}" in - u) - encode_update || return 1 - return 
${FUNC_EXIT_SUCCESS} - ;; - I) - echo_warn "attempting install" - sudo ln -sf "${SCRIPT_DIR}/encode.sh" \ - "${ENCODE_INSTALL_PATH}" || return 1 - echo_pass "succesfull install" - return ${FUNC_EXIT_SUCCESS} - ;; - U) - echo_warn "attempting uninstall" - sudo rm "${ENCODE_INSTALL_PATH}" || return 1 - echo_pass "succesfull uninstall" - return ${FUNC_EXIT_SUCCESS} - ;; - v) - get_encode_versions print || return 1 - return ${FUNC_EXIT_SUCCESS} - ;; - i) - if [[ $# -lt 2 ]]; then - echo_fail "wrong arguments given" - encode_usage - return 1 - fi - INPUT="${OPTARG}" - optsUsed=$((optsUsed + 2)) - ;; - p) - PRINT_OUT=true - optsUsed=$((optsUsed + 1)) - ;; - c) - CROP=true - optsUsed=$((optsUsed + 1)) - ;; - d) - DV_TOGGLE=true - optsUsed=$((optsUsed + 1)) - ;; - s) - SAME_CONTAINER=true - optsUsed=$((optsUsed + 1)) - ;; - g) - if ! is_positive_integer "${OPTARG}"; then - encode_usage - return 1 - fi - GRAIN="film-grain=${OPTARG}:film-grain-denoise=1:" - optsUsed=$((optsUsed + 2)) - ;; - P) - if ! is_positive_integer "${OPTARG}"; then - encode_usage - return 1 - fi - PRESET="${OPTARG}" - optsUsed=$((optsUsed + 2)) - ;; - C) - if ! is_positive_integer "${OPTARG}" || test ${OPTARG} -gt 63; then - echo_fail "${OPTARG} is not a valid CRF value (0-63)" - encode_usage - return 1 - fi - CRF="${OPTARG}" - OPTS_USED=$((OPTS_USED + 2)) - ;; - *) - echo_fail "wrong flags given" - encode_usage - return 1 - ;; - esac - done + local opts='vi:pcsdg:P:C:uIU' + local numOpts=${#opts} + # default values + PRESET=3 + CRF=25 + GRAIN="" + CROP=false + PRINT_OUT=false + DV_TOGGLE=false + ENCODE_INSTALL_PATH='/usr/local/bin/encode' + SAME_CONTAINER="false" + # only using -I/U + local minOpt=1 + # using all + output name + local maxOpt=$((numOpts + 1)) + test $# -lt ${minOpt} && encode_usage && return 1 + test $# -gt ${maxOpt} && encode_usage && return 1 + local optsUsed=0 + local OPTARG OPTIND + while getopts "${opts}" flag; do + case "${flag}" in + u) + encode_update || return 1 + return ${FUNC_EXIT_SUCCESS} + ;; + I) + echo_warn "attempting install" + sudo ln -sf "${SCRIPT_DIR}/encode.sh" \ + "${ENCODE_INSTALL_PATH}" || return 1 + echo_pass "succesfull install" + return ${FUNC_EXIT_SUCCESS} + ;; + U) + echo_warn "attempting uninstall" + sudo rm "${ENCODE_INSTALL_PATH}" || return 1 + echo_pass "succesfull uninstall" + return ${FUNC_EXIT_SUCCESS} + ;; + v) + get_encode_versions print || return 1 + return ${FUNC_EXIT_SUCCESS} + ;; + i) + if [[ $# -lt 2 ]]; then + echo_fail "wrong arguments given" + encode_usage + return 1 + fi + INPUT="${OPTARG}" + optsUsed=$((optsUsed + 2)) + ;; + p) + PRINT_OUT=true + optsUsed=$((optsUsed + 1)) + ;; + c) + CROP=true + optsUsed=$((optsUsed + 1)) + ;; + d) + DV_TOGGLE=true + optsUsed=$((optsUsed + 1)) + ;; + s) + SAME_CONTAINER=true + optsUsed=$((optsUsed + 1)) + ;; + g) + if ! is_positive_integer "${OPTARG}"; then + encode_usage + return 1 + fi + GRAIN="film-grain=${OPTARG}:film-grain-denoise=1:" + optsUsed=$((optsUsed + 2)) + ;; + P) + if ! is_positive_integer "${OPTARG}"; then + encode_usage + return 1 + fi + PRESET="${OPTARG}" + optsUsed=$((optsUsed + 2)) + ;; + C) + if ! 
is_positive_integer "${OPTARG}" || test ${OPTARG} -gt 63; then + echo_fail "${OPTARG} is not a valid CRF value (0-63)" + encode_usage + return 1 + fi + CRF="${OPTARG}" + OPTS_USED=$((OPTS_USED + 2)) + ;; + *) + echo_fail "wrong flags given" + encode_usage + return 1 + ;; + esac + done - # allow optional output filename - if [[ $(($# - optsUsed)) == 1 ]]; then - OUTPUT="${*: -1}" - else - local basename="$(bash_basename "${INPUT}")" - OUTPUT="${HOME}/av1-${basename}" - fi + # allow optional output filename + if [[ $(($# - optsUsed)) == 1 ]]; then + OUTPUT="${*: -1}" + else + local basename="$(bash_basename "${INPUT}")" + OUTPUT="${HOME}/av1-${basename}" + fi - # use same container for output - if [[ $SAME_CONTAINER == "true" ]]; then - local fileFormat - fileFormat="$(get_file_format "${INPUT}")" || return 1 - FILE_EXT='' - if [[ ${fileFormat} == 'MPEG-4' ]]; then - FILE_EXT='mp4' - elif [[ ${fileFormat} == 'Matroska' ]]; then - FILE_EXT='mkv' - else - echo "unrecognized input format" - return 1 - fi - else - FILE_EXT="mkv" - fi - OUTPUT="${OUTPUT%.*}" - OUTPUT+=".${FILE_EXT}" + # use same container for output + if [[ $SAME_CONTAINER == "true" ]]; then + local fileFormat + fileFormat="$(get_file_format "${INPUT}")" || return 1 + FILE_EXT='' + if [[ ${fileFormat} == 'MPEG-4' ]]; then + FILE_EXT='mp4' + elif [[ ${fileFormat} == 'Matroska' ]]; then + FILE_EXT='mkv' + else + echo "unrecognized input format" + return 1 + fi + else + FILE_EXT="mkv" + fi + OUTPUT="${OUTPUT%.*}" + OUTPUT+=".${FILE_EXT}" - if [[ ! -f ${INPUT} ]]; then - echo "${INPUT} does not exist" - encode_usage - return 1 - fi + if [[ ! -f ${INPUT} ]]; then + echo "${INPUT} does not exist" + encode_usage + return 1 + fi - echo - echo_info "INPUT: ${INPUT}" - echo_info "GRAIN: ${GRAIN}" - echo_info "OUTPUT: ${OUTPUT}" - echo + echo + echo_info "INPUT: ${INPUT}" + echo_info "GRAIN: ${GRAIN}" + echo_info "OUTPUT: ${OUTPUT}" + echo } # shellcheck disable=SC2034 # shellcheck disable=SC2155 # shellcheck disable=SC2016 gen_encode_script() { - local genScript="${TMP_DIR}/$(bash_basename "${OUTPUT}").sh" + local genScript="${TMP_DIR}/$(bash_basename "${OUTPUT}").sh" - # global output index number to increment - OUTPUT_INDEX=0 + # global output index number to increment + OUTPUT_INDEX=0 - # single string params - local params=( - INPUT - OUTPUT - PRESET - CRF - crop - encodeVersion - ffmpegVersion - videoEncVersion - audioEncVersion - svtAv1Params - ) - local crop='' - if [[ $CROP == "true" ]]; then - crop="$(get_crop "${INPUT}")" || return 1 - fi + # single string params + local params=( + INPUT + OUTPUT + PRESET + CRF + crop + encodeVersion + ffmpegVersion + videoEncVersion + audioEncVersion + svtAv1Params + ) + local crop='' + if [[ $CROP == "true" ]]; then + crop="$(get_crop "${INPUT}")" || return 1 + fi - svtAv1ParamsArr=( - "tune=0" - "complex-hvs=1" - "spy-rd=1" - "psy-rd=1" - "sharpness=3" - "enable-overlays=1" - "scd=1" - "fast-decode=1" - "enable-variance-boost=1" - "enable-qm=1" - "qm-min=4" - "qm-max=15" - ) - IFS=':' - local svtAv1Params="${GRAIN}${svtAv1ParamsArr[*]}" - unset IFS + svtAv1ParamsArr=( + "tune=0" + "complex-hvs=1" + "spy-rd=1" + "psy-rd=1" + "sharpness=3" + "enable-overlays=1" + "scd=1" + "fast-decode=1" + "enable-variance-boost=1" + "enable-qm=1" + "qm-min=4" + "qm-max=15" + ) + IFS=':' + local svtAv1Params="${GRAIN}${svtAv1ParamsArr[*]}" + unset IFS - # arrays - local arrays=( - unmapStreams - audioParams - videoParams - metadata - subtitleParams - ffmpegParams - ) - local videoParams=( - "-crf" '${CRF}' 
"-preset" '${PRESET}' "-g" "240" - ) - local ffmpegParams=( - '-hide_banner' - '-i' '${INPUT}' - '-y' - '-map' '0' - '-c:s' 'copy' - ) + # arrays + local arrays=( + unmapStreams + audioParams + videoParams + metadata + subtitleParams + ffmpegParams + ) + local videoParams=( + "-crf" '${CRF}' "-preset" '${PRESET}' "-g" "240" + ) + local ffmpegParams=( + '-hide_banner' + '-i' '${INPUT}' + '-y' + '-map' '0' + '-c:s' 'copy' + ) - # set video params - local inputVideoCodec="$(get_stream_codec "${INPUT}" 'v:0')" - if [[ ${inputVideoCodec} == 'av1' ]]; then - ffmpegParams+=( - "-c:v:${OUTPUT_INDEX}" 'copy' - ) - else - ffmpegParams+=( - '-pix_fmt' 'yuv420p10le' - "-c:v:${OUTPUT_INDEX}" 'libsvtav1' '${videoParams[@]}' - '-svtav1-params' '${svtAv1Params}' - ) - fi - OUTPUT_INDEX=$((OUTPUT_INDEX + 1)) + # set video params + local inputVideoCodec="$(get_stream_codec "${INPUT}" 'v:0')" + if [[ ${inputVideoCodec} == 'av1' ]]; then + ffmpegParams+=( + "-c:v:${OUTPUT_INDEX}" 'copy' + ) + else + ffmpegParams+=( + '-pix_fmt' 'yuv420p10le' + "-c:v:${OUTPUT_INDEX}" 'libsvtav1' '${videoParams[@]}' + '-svtav1-params' '${svtAv1Params}' + ) + fi + OUTPUT_INDEX=$((OUTPUT_INDEX + 1)) - # these values may be empty - local unmapStr audioParamsStr subtitleParamsStr - set_unmap_streams "${INPUT}" || return 1 - set_audio_params "${INPUT}" || return 1 - set_subtitle_params "${INPUT}" || return 1 + # these values may be empty + local unmapStr audioParamsStr subtitleParamsStr + set_unmap_streams "${INPUT}" || return 1 + set_audio_params "${INPUT}" || return 1 + set_subtitle_params "${INPUT}" || return 1 - if [[ ${unmapStreams[*]} != '' ]]; then - ffmpegParams+=('${unmapStreams[@]}') - fi + if [[ ${unmapStreams[*]} != '' ]]; then + ffmpegParams+=('${unmapStreams[@]}') + fi - if [[ ${audioParams[*]} != '' ]]; then - ffmpegParams+=('${audioParams[@]}') - fi + if [[ ${audioParams[*]} != '' ]]; then + ffmpegParams+=('${audioParams[@]}') + fi - if [[ ${subtitleParams[*]} != '' ]]; then - ffmpegParams+=('${subtitleParams[@]}') - fi + if [[ ${subtitleParams[*]} != '' ]]; then + ffmpegParams+=('${subtitleParams[@]}') + fi - if [[ ${crop} != '' ]]; then - ffmpegParams+=('-vf' '${crop}') - fi + if [[ ${crop} != '' ]]; then + ffmpegParams+=('-vf' '${crop}') + fi - get_encode_versions || return 1 - local metadata=( - '-metadata' '${encodeVersion}' - '-metadata' '${ffmpegVersion}' - '-metadata' '${videoEncVersion}' - '-metadata' '${audioEncVersion}' - '-metadata' 'svtav1_params=${svtAv1Params}' - '-metadata' 'video_params=${videoParams[*]}' - ) - ffmpegParams+=('${metadata[@]}') + get_encode_versions || return 1 + local metadata=( + '-metadata' '${encodeVersion}' + '-metadata' '${ffmpegVersion}' + '-metadata' '${videoEncVersion}' + '-metadata' '${audioEncVersion}' + '-metadata' 'svtav1_params=${svtAv1Params}' + '-metadata' 'video_params=${videoParams[*]}' + ) + ffmpegParams+=('${metadata[@]}') - { - echo '#!/usr/bin/env bash' - echo + { + echo '#!/usr/bin/env bash' + echo - # add normal params - for param in "${params[@]}"; do - declare -n value="${param}" - if [[ ${value} != '' ]]; then - echo "${param}=\"${value[*]}\"" - fi - done - for arrName in "${arrays[@]}"; do - declare -n arr="${arrName}" - if [[ -v arr ]]; then - echo "${arrName}=(" - printf '\t"%s"\n' "${arr[@]}" - echo ')' - fi - done + # add normal params + for param in "${params[@]}"; do + declare -n value="${param}" + if [[ ${value} != '' ]]; then + echo "${param}=\"${value[*]}\"" + fi + done + for arrName in "${arrays[@]}"; do + declare -n arr="${arrName}" + if [[ -v 
arr ]]; then + echo "${arrName}=(" + printf '\t"%s"\n' "${arr[@]}" + echo ')' + fi + done - # actually do ffmpeg commmand - echo - if [[ ${DV_TOGGLE} == true ]]; then - echo 'ffmpeg "${ffmpegParams[@]}" -dolbyvision 1 "${OUTPUT}" || \' - fi - echo 'ffmpeg "${ffmpegParams[@]}" -dolbyvision 0 "${OUTPUT}" || exit 1' + # actually do ffmpeg commmand + echo + if [[ ${DV_TOGGLE} == true ]]; then + echo 'ffmpeg "${ffmpegParams[@]}" -dolbyvision 1 "${OUTPUT}" || \' + fi + echo 'ffmpeg "${ffmpegParams[@]}" -dolbyvision 0 "${OUTPUT}" || exit 1' - # track-stats and clear title - if [[ ${FILE_EXT} == 'mkv' ]] && has_cmd mkvpropedit; then - { - echo - echo 'mkvpropedit "${OUTPUT}" --add-track-statistics-tags' - echo 'mkvpropedit "${OUTPUT}" --edit info --set "title="' - } - fi + # track-stats and clear title + if [[ ${FILE_EXT} == 'mkv' ]] && has_cmd mkvpropedit; then + { + echo + echo 'mkvpropedit "${OUTPUT}" --add-track-statistics-tags' + echo 'mkvpropedit "${OUTPUT}" --edit info --set "title="' + } + fi - echo - } >"${genScript}" + echo + } >"${genScript}" - if [[ ${PRINT_OUT} == true ]]; then - echo_info "${genScript} contents:" - echo "$(<"${genScript}")" - else - bash -x "${genScript}" || return 1 - rm "${genScript}" - fi + if [[ ${PRINT_OUT} == true ]]; then + echo_info "${genScript} contents:" + echo "$(<"${genScript}")" + else + bash -x "${genScript}" || return 1 + rm "${genScript}" + fi } FB_FUNC_NAMES+=('encode') # shellcheck disable=SC2034 FB_FUNC_DESCS['encode']='encode a file using libsvtav1_psy and libopus' encode() { - set_encode_opts "$@" - local ret=$? - if [[ ${ret} -eq ${FUNC_EXIT_SUCCESS} ]]; then - return 0 - elif [[ ${ret} -ne 0 ]]; then - return ${ret} - fi - gen_encode_script || return 1 + set_encode_opts "$@" + local ret=$? + if [[ ${ret} -eq ${FUNC_EXIT_SUCCESS} ]]; then + return 0 + elif [[ ${ret} -ne 0 ]]; then + return ${ret} + fi + gen_encode_script || return 1 } diff --git a/lib/ffmpeg.sh b/lib/ffmpeg.sh index de0ebba..1c71b9c 100644 --- a/lib/ffmpeg.sh +++ b/lib/ffmpeg.sh @@ -1,169 +1,169 @@ #!/usr/bin/env bash get_duration() { - local file="$1" - ffprobe \ - -v error \ - -show_entries format=duration \ - -of default=noprint_wrappers=1:nokey=1 \ - "${file}" + local file="$1" + ffprobe \ + -v error \ + -show_entries format=duration \ + -of default=noprint_wrappers=1:nokey=1 \ + "${file}" } get_avg_bitrate() { - local file="$1" - ffprobe \ - -v error \ - -select_streams v:0 \ - -show_entries format=bit_rate \ - -of default=noprint_wrappers=1:nokey=1 \ - "${file}" + local file="$1" + ffprobe \ + -v error \ + -select_streams v:0 \ + -show_entries format=bit_rate \ + -of default=noprint_wrappers=1:nokey=1 \ + "${file}" } split_video() { - local file="$1" - local start="$2" - local time="$3" - local out="$4" - ffmpeg \ - -ss "${start}" \ - -i "${file}" \ - -hide_banner \ - -loglevel error \ - -t "${time}" \ - -map 0:v \ - -reset_timestamps 1 \ - -c copy \ - "${out}" + local file="$1" + local start="$2" + local time="$3" + local out="$4" + ffmpeg \ + -ss "${start}" \ + -i "${file}" \ + -hide_banner \ + -loglevel error \ + -t "${time}" \ + -map 0:v \ + -reset_timestamps 1 \ + -c copy \ + "${out}" } get_crop() { - local file="$1" - local duration - duration="$(get_duration "${file}")" || return 1 - # don't care about decimal points - IFS='.' 
read -r duration _ <<<"${duration}" - # get crop value for first half of input - local timeEnc=$((duration / 2)) - ffmpeg \ - -y \ - -hide_banner \ - -ss 0 \ - -discard 'nokey' \ - -i "${file}" \ - -t "${timeEnc}" \ - -map '0:v:0' \ - -filter:v:0 'cropdetect=limit=100:round=16:skip=2:reset_count=0' \ - -codec:v 'wrapped_avframe' \ - -f 'null' '/dev/null' 2>&1 | - grep -o crop=.* | - sort -bh | - uniq -c | - sort -bh | - tail -n1 | - grep -o "crop=.*" + local file="$1" + local duration + duration="$(get_duration "${file}")" || return 1 + # don't care about decimal points + IFS='.' read -r duration _ <<<"${duration}" + # get crop value for first half of input + local timeEnc=$((duration / 2)) + ffmpeg \ + -y \ + -hide_banner \ + -ss 0 \ + -discard 'nokey' \ + -i "${file}" \ + -t "${timeEnc}" \ + -map '0:v:0' \ + -filter:v:0 'cropdetect=limit=100:round=16:skip=2:reset_count=0' \ + -codec:v 'wrapped_avframe' \ + -f 'null' '/dev/null' 2>&1 | + grep -o crop=.* | + sort -bh | + uniq -c | + sort -bh | + tail -n1 | + grep -o "crop=.*" } get_stream_codec() { - local file="$1" - local stream="$2" - ffprobe \ - -v error \ - -select_streams "${stream}" \ - -show_entries stream=codec_name \ - -of default=noprint_wrappers=1:nokey=1 \ - "${file}" + local file="$1" + local stream="$2" + ffprobe \ + -v error \ + -select_streams "${stream}" \ + -show_entries stream=codec_name \ + -of default=noprint_wrappers=1:nokey=1 \ + "${file}" } get_file_format() { - local file="$1" - local probe - probe="$(ffprobe \ - -v error \ - -show_entries format=format_name \ - -of default=noprint_wrappers=1:nokey=1 \ - "${file}")" || return 1 - if line_contains "${probe}" 'matroska'; then - echo mkv - else - echo mp4 - fi + local file="$1" + local probe + probe="$(ffprobe \ + -v error \ + -show_entries format=format_name \ + -of default=noprint_wrappers=1:nokey=1 \ + "${file}")" || return 1 + if line_contains "${probe}" 'matroska'; then + echo mkv + else + echo mp4 + fi } get_num_streams() { - local file="$1" - local type="${2:-}" - local select=() + local file="$1" + local type="${2:-}" + local select=() - if [[ ${type} != '' ]]; then - select=("-select_streams" "${type}") - fi + if [[ ${type} != '' ]]; then + select=("-select_streams" "${type}") + fi - ffprobe \ - -v error "${select[@]}" \ - -show_entries stream=index \ - -of default=noprint_wrappers=1:nokey=1 \ - "${file}" + ffprobe \ + -v error "${select[@]}" \ + -show_entries stream=index \ + -of default=noprint_wrappers=1:nokey=1 \ + "${file}" } get_num_audio_channels() { - local file="$1" - local stream="$2" - ffprobe \ - -v error \ - -select_streams "${stream}" \ - -show_entries stream=channels \ - -of default=noprint_wrappers=1:nokey=1 \ - "${file}" + local file="$1" + local stream="$2" + ffprobe \ + -v error \ + -select_streams "${stream}" \ + -show_entries stream=channels \ + -of default=noprint_wrappers=1:nokey=1 \ + "${file}" } get_stream_lang() { - local file="$1" - local stream="$2" - ffprobe \ - -v error \ - -select_streams "${stream}" \ - -show_entries stream_tags=language \ - -of default=noprint_wrappers=1:nokey=1 \ - "${file}" + local file="$1" + local stream="$2" + ffprobe \ + -v error \ + -select_streams "${stream}" \ + -show_entries stream_tags=language \ + -of default=noprint_wrappers=1:nokey=1 \ + "${file}" } gen_video() { - local outFile="$1" - local addFlags=() - shift + local outFile="$1" + local addFlags=() + shift - local vf="format=yuv420p10le" - for arg in "$@"; do - case "${arg}" in - '1080p') resolution='1920x1080' ;; - '2160p') 
resolution='3840x2160' ;; - 'grain=yes') vf+=",noise=alls=15:allf=t+u" ;; - 'hdr=yes') - local colorPrimaries='bt2020' - local colorTrc='smpte2084' - local colorspace='bt2020nc' - vf+=",setparams=color_primaries=${colorPrimaries}:color_trc=${colorTrc}:colorspace=${colorspace}" - addFlags+=( - -color_primaries "${colorPrimaries}" - -color_trc "${colorTrc}" - -colorspace "${colorspace}" - -metadata:s:v:0 "mastering_display_metadata=G(13250,34500)B(7500,3000)R(34000,16000)WP(15635,16450)L(10000000,1)" - -metadata:s:v:0 "content_light_level=1000,400" - ) - ;; - *) echo_fail "bad arg ${arg}" && return 1 ;; - esac - done + local vf="format=yuv420p10le" + for arg in "$@"; do + case "${arg}" in + '1080p') resolution='1920x1080' ;; + '2160p') resolution='3840x2160' ;; + 'grain=yes') vf+=",noise=alls=15:allf=t+u" ;; + 'hdr=yes') + local colorPrimaries='bt2020' + local colorTrc='smpte2084' + local colorspace='bt2020nc' + vf+=",setparams=color_primaries=${colorPrimaries}:color_trc=${colorTrc}:colorspace=${colorspace}" + addFlags+=( + -color_primaries "${colorPrimaries}" + -color_trc "${colorTrc}" + -colorspace "${colorspace}" + -metadata:s:v:0 "mastering_display_metadata=G(13250,34500)B(7500,3000)R(34000,16000)WP(15635,16450)L(10000000,1)" + -metadata:s:v:0 "content_light_level=1000,400" + ) + ;; + *) echo_fail "bad arg ${arg}" && return 1 ;; + esac + done - echo_if_fail ffmpeg -y \ - -hide_banner \ - -f lavfi \ - -i "testsrc2=size=${resolution}:rate=24:duration=5" \ - -vf "${vf}" \ - -c:v ffv1 \ - -level 3 \ - -g 1 \ - -color_range tv \ - "${addFlags[@]}" \ - "${outFile}" + echo_if_fail ffmpeg -y \ + -hide_banner \ + -f lavfi \ + -i "testsrc2=size=${resolution}:rate=24:duration=5" \ + -vf "${vf}" \ + -c:v ffv1 \ + -level 3 \ + -g 1 \ + -color_range tv \ + "${addFlags[@]}" \ + "${outFile}" } diff --git a/lib/install_deps.sh b/lib/install_deps.sh index 196fdba..3e49e90 100644 --- a/lib/install_deps.sh +++ b/lib/install_deps.sh @@ -2,131 +2,131 @@ # shellcheck disable=SC2120 determine_pkg_mgr() { - # sudo used externally - # shellcheck disable=SC2034 - if is_windows || test "$(id -u)" -eq 0; then - SUDO='' - else - SUDO='sudo ' - fi + # sudo used externally + # shellcheck disable=SC2034 + if is_windows || test "$(id -u)" -eq 0; then + SUDO='' + else + SUDO='sudo ' + fi - # pkg-mgr update-cmd upgrade-cmd install-cmd check-cmd - # shellcheck disable=SC2016 - local PKG_MGR_MAP=' + # pkg-mgr update-cmd upgrade-cmd install-cmd check-cmd + # shellcheck disable=SC2016 + local PKG_MGR_MAP=' pkg:pkg update:pkg upgrade:pkg install -y:dpkg -l ${pkg} brew:brew update:brew upgrade:brew install:brew list --formula ${pkg} apt-get:${SUDO}apt-get update:${SUDO}apt-get upgrade -y:${SUDO}apt-get install -y:dpkg -l ${pkg} pacman:${SUDO}pacman -Syy:${SUDO}pacman -Syu --noconfirm:${SUDO}pacman -S --noconfirm --needed:pacman -Qi ${pkg} dnf:${SUDO}dnf check-update || true:${SUDO}dnf upgrade --refresh -y:${SUDO}dnf install -y:dnf list -q --installed ${pkg} ' - local supported_pkg_mgr=() - unset pkg_mgr pkg_mgr_update pkg_mgr_upgrade pkg_install pkg_check - while read -r line; do - test "${line}" == '' && continue - IFS=':' read -r pkg_mgr pkg_mgr_update pkg_mgr_upgrade pkg_install pkg_check <<<"${line}" - supported_pkg_mgr+=("${pkg_mgr}") - if ! 
has_cmd "${pkg_mgr}"; then - pkg_mgr='' - continue - fi - # update/install may use SUDO - eval "pkg_mgr_update=\"${pkg_mgr_update}\"" - eval "pkg_mgr_upgrade=\"${pkg_mgr_upgrade}\"" - eval "pkg_install=\"${pkg_install}\"" - break - done <<<"${PKG_MGR_MAP}" + local supported_pkg_mgr=() + unset pkg_mgr pkg_mgr_update pkg_mgr_upgrade pkg_install pkg_check + while read -r line; do + test "${line}" == '' && continue + IFS=':' read -r pkg_mgr pkg_mgr_update pkg_mgr_upgrade pkg_install pkg_check <<<"${line}" + supported_pkg_mgr+=("${pkg_mgr}") + if ! has_cmd "${pkg_mgr}"; then + pkg_mgr='' + continue + fi + # update/install may use SUDO + eval "pkg_mgr_update=\"${pkg_mgr_update}\"" + eval "pkg_mgr_upgrade=\"${pkg_mgr_upgrade}\"" + eval "pkg_install=\"${pkg_install}\"" + break + done <<<"${PKG_MGR_MAP}" - if [[ ${pkg_mgr} == '' ]]; then - echo_fail "system does not use a supported package manager" "${supported_pkg_mgr[@]}" - return 1 - fi + if [[ ${pkg_mgr} == '' ]]; then + echo_fail "system does not use a supported package manager" "${supported_pkg_mgr[@]}" + return 1 + fi - return 0 + return 0 } print_req_pkgs() { - local common_pkgs=( - autoconf automake cmake libtool - texinfo nasm yasm python3 wget - meson doxygen jq ccache gawk - git gnuplot bison rsync ragel - zip unzip gperf itstool - ) - # shellcheck disable=SC2034 - local brew_pkgs=( - "${common_pkgs[@]}" pkgconf - mkvtoolnix pipx uutils-coreutils - llvm lld - ) - local common_linux_pkgs=( - "${common_pkgs[@]}" clang valgrind - curl bc lshw xxd pkgconf sudo llvm - ) - # shellcheck disable=SC2034 - local apt_get_pkgs=( - "${common_linux_pkgs[@]}" pipx - build-essential libssl-dev gobjc++ - mawk libc6-dev mediainfo ninja-build - mkvtoolnix libgtest-dev lld - ) - # shellcheck disable=SC2034 - local pacman_pkgs=( - "${common_linux_pkgs[@]}" base-devel - python-pipx ninja lld mkvtoolnix-cli - ) - # shellcheck disable=SC2034 - local dnf_pkgs=( - "${common_linux_pkgs[@]}" openssl-devel - pipx ninja-build fontconfig-devel wget2 - glibc-static glibc-devel patch - libstdc++-static libstdc++-devel - llvm-cmake-utils llvm-devel - llvm-static compiler-rt lld - mkvtoolnix - ) - # shellcheck disable=SC2034 - local pkg_pkgs=( - autoconf automake cmake libtool - texinfo nasm yasm python3 wget - doxygen jq ccache gawk rust - git gnuplot bison rsync ragel - zip unzip gperf build-essential - binutils ninja ndk-multilib-native-static - libandroid-posix-semaphore - libandroid-posix-semaphore-static - libandroid-shmem - libandroid-shmem-static - ) - # shellcheck disable=SC2034 - local msys_ucrt_pkgs=( - mingw-w64-ucrt-x86_64-toolchain - mingw-w64-ucrt-x86_64-autotools - mingw-w64-ucrt-x86_64-clang - mingw-w64-ucrt-x86_64-clang-libs - mingw-w64-ucrt-x86_64-cmake - mingw-w64-ucrt-x86_64-compiler-rt - mingw-w64-ucrt-x86_64-doxygen - mingw-w64-ucrt-x86_64-gcc-libs - mingw-w64-ucrt-x86_64-gperf - mingw-w64-ucrt-x86_64-itstool - mingw-w64-ucrt-x86_64-meson - mingw-w64-ucrt-x86_64-bc - mingw-w64-ucrt-x86_64-nasm - mingw-w64-ucrt-x86_64-yasm - mingw-w64-ucrt-x86_64-ccache - mingw-w64-ucrt-x86_64-rustup - mingw-w64-ucrt-x86_64-cargo-c - mingw-w64-ucrt-x86_64-perl - mingw-w64-ucrt-x86_64-perl-modules - ) + local common_pkgs=( + autoconf automake cmake libtool + texinfo nasm yasm python3 wget + meson doxygen jq ccache gawk + git gnuplot bison rsync ragel + zip unzip gperf itstool + ) + # shellcheck disable=SC2034 + local brew_pkgs=( + "${common_pkgs[@]}" pkgconf + mkvtoolnix pipx uutils-coreutils + llvm lld + ) + local common_linux_pkgs=( + "${common_pkgs[@]}" clang 
valgrind + curl bc lshw xxd pkgconf sudo llvm + ) + # shellcheck disable=SC2034 + local apt_get_pkgs=( + "${common_linux_pkgs[@]}" pipx + build-essential libssl-dev gobjc++ + mawk libc6-dev mediainfo ninja-build + mkvtoolnix libgtest-dev lld + ) + # shellcheck disable=SC2034 + local pacman_pkgs=( + "${common_linux_pkgs[@]}" base-devel + python-pipx ninja lld mkvtoolnix-cli + ) + # shellcheck disable=SC2034 + local dnf_pkgs=( + "${common_linux_pkgs[@]}" openssl-devel + pipx ninja-build fontconfig-devel wget2 + glibc-static glibc-devel patch + libstdc++-static libstdc++-devel + llvm-cmake-utils llvm-devel + llvm-static compiler-rt lld + mkvtoolnix + ) + # shellcheck disable=SC2034 + local pkg_pkgs=( + autoconf automake cmake libtool + texinfo nasm yasm python3 wget + doxygen jq ccache gawk rust + git gnuplot bison rsync ragel + zip unzip gperf build-essential + binutils ninja ndk-multilib-native-static + libandroid-posix-semaphore + libandroid-posix-semaphore-static + libandroid-shmem + libandroid-shmem-static + ) + # shellcheck disable=SC2034 + local msys_ucrt_pkgs=( + mingw-w64-ucrt-x86_64-toolchain + mingw-w64-ucrt-x86_64-autotools + mingw-w64-ucrt-x86_64-clang + mingw-w64-ucrt-x86_64-clang-libs + mingw-w64-ucrt-x86_64-cmake + mingw-w64-ucrt-x86_64-compiler-rt + mingw-w64-ucrt-x86_64-doxygen + mingw-w64-ucrt-x86_64-gcc-libs + mingw-w64-ucrt-x86_64-gperf + mingw-w64-ucrt-x86_64-itstool + mingw-w64-ucrt-x86_64-meson + mingw-w64-ucrt-x86_64-bc + mingw-w64-ucrt-x86_64-nasm + mingw-w64-ucrt-x86_64-yasm + mingw-w64-ucrt-x86_64-ccache + mingw-w64-ucrt-x86_64-rustup + mingw-w64-ucrt-x86_64-cargo-c + mingw-w64-ucrt-x86_64-perl + mingw-w64-ucrt-x86_64-perl-modules + ) - if is_windows; then - local pkg_mgr='msys_ucrt' - fi - local req_pkgs_env_name="${pkg_mgr/-/_}_pkgs" - declare -n req_pkgs="${req_pkgs_env_name}" - local sorted_req_pkgs=($(printf '%s\n' "${req_pkgs[@]}" | sort -u)) - echo "${sorted_req_pkgs[@]}" + if is_windows; then + local pkg_mgr='msys_ucrt' + fi + local req_pkgs_env_name="${pkg_mgr/-/_}_pkgs" + declare -n req_pkgs="${req_pkgs_env_name}" + local sorted_req_pkgs=($(printf '%s\n' "${req_pkgs[@]}" | sort -u)) + echo "${sorted_req_pkgs[@]}" } FB_FUNC_NAMES+=('print_pkg_mgr') @@ -134,60 +134,60 @@ FB_FUNC_NAMES+=('print_pkg_mgr') # shellcheck disable=SC2034 FB_FUNC_DESCS['print_pkg_mgr']='print out evaluated package manager commands and required packages' print_pkg_mgr() { - determine_pkg_mgr || return 1 - echo "export pkg_mgr=\"${pkg_mgr}\"" - echo "export pkg_mgr_update=\"${pkg_mgr_update}\"" - echo "export pkg_mgr_upgrade=\"${pkg_mgr_upgrade}\"" - echo "export pkg_install=\"${pkg_install}\"" - echo "export pkg_check=\"${pkg_check}\"" - echo "export req_pkgs=($(print_req_pkgs))" + determine_pkg_mgr || return 1 + echo "export pkg_mgr=\"${pkg_mgr}\"" + echo "export pkg_mgr_update=\"${pkg_mgr_update}\"" + echo "export pkg_mgr_upgrade=\"${pkg_mgr_upgrade}\"" + echo "export pkg_install=\"${pkg_install}\"" + echo "export pkg_check=\"${pkg_check}\"" + echo "export req_pkgs=($(print_req_pkgs))" } check_for_req_pkgs() { - echo_info "checking for required packages" - local missing_pkgs=() - for pkg in $(print_req_pkgs); do - # pkg_check has ${pkg} unexpanded - eval "pkg_check=\"${pkg_check}\"" - ${pkg_check} "${pkg}" >/dev/null 2>&1 || missing_pkgs+=("${pkg}") - done + echo_info "checking for required packages" + local missing_pkgs=() + for pkg in $(print_req_pkgs); do + # pkg_check has ${pkg} unexpanded + eval "pkg_check=\"${pkg_check}\"" + ${pkg_check} "${pkg}" >/dev/null 2>&1 || 
missing_pkgs+=("${pkg}") + done - if [[ ${#missing_pkgs[@]} -gt 0 ]]; then - echo_warn "missing packages:" "${missing_pkgs[@]}" - # shellcheck disable=SC2086 - ${pkg_mgr_update} - # shellcheck disable=SC2086 - ${pkg_install} "${missing_pkgs[@]}" || return 1 - fi + if [[ ${#missing_pkgs[@]} -gt 0 ]]; then + echo_warn "missing packages:" "${missing_pkgs[@]}" + # shellcheck disable=SC2086 + ${pkg_mgr_update} + # shellcheck disable=SC2086 + ${pkg_install} "${missing_pkgs[@]}" || return 1 + fi - echo_pass "packages from ${pkg_mgr} installed" - has_cmd pipx || echo_if_fail python3 -m pip install --user pipx || return 1 - has_cmd pipx || echo_if_fail python3 -m pipx ensurepath && source ~/.bashrc || return 1 - echo_if_fail pipx install virtualenv || return 1 - echo_if_fail pipx ensurepath || return 1 - has_cmd meson || echo_if_fail pipx install meson || return 1 - echo_pass "pipx is installed" + echo_pass "packages from ${pkg_mgr} installed" + has_cmd pipx || echo_if_fail python3 -m pip install --user pipx || return 1 + has_cmd pipx || echo_if_fail python3 -m pipx ensurepath && source ~/.bashrc || return 1 + echo_if_fail pipx install virtualenv || return 1 + echo_if_fail pipx ensurepath || return 1 + has_cmd meson || echo_if_fail pipx install meson || return 1 + echo_pass "pipx is installed" - # shellcheck disable=SC1091 - test -f "${HOME}/.cargo/env" && source "${HOME}/.cargo/env" + # shellcheck disable=SC1091 + test -f "${HOME}/.cargo/env" && source "${HOME}/.cargo/env" - if missing_cmd cargo; then - if missing_cmd rustup; then - echo_warn "installing rustup" - curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y - # shellcheck disable=SC2016 - grep -q 'source "${HOME}/.cargo/env"' "${HOME}/.bashrc" || - echo 'source "${HOME}/.cargo/env"' >>"${HOME}/.bashrc" - # shellcheck disable=SC1091 - source "${HOME}/.bashrc" - fi - fi + if missing_cmd cargo; then + if missing_cmd rustup; then + echo_warn "installing rustup" + curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y + # shellcheck disable=SC2016 + grep -q 'source "${HOME}/.cargo/env"' "${HOME}/.bashrc" || + echo 'source "${HOME}/.cargo/env"' >>"${HOME}/.bashrc" + # shellcheck disable=SC1091 + source "${HOME}/.bashrc" + fi + fi - has_cmd cargo-cbuild || echo_if_fail cargo install cargo-c || return 1 - echo_pass "cargo-c is installed" - echo_pass "all required packages installed" + has_cmd cargo-cbuild || echo_if_fail cargo install cargo-c || return 1 + echo_pass "cargo-c is installed" + echo_pass "all required packages installed" - return 0 + return 0 } FB_FUNC_NAMES+=('install_deps') @@ -195,6 +195,6 @@ FB_FUNC_NAMES+=('install_deps') # shellcheck disable=SC2034 FB_FUNC_DESCS['install_deps']='install required dependencies' install_deps() { - determine_pkg_mgr || return 1 - check_for_req_pkgs || return 1 + determine_pkg_mgr || return 1 + check_for_req_pkgs || return 1 } diff --git a/lib/package.sh b/lib/package.sh index cf34a50..cb65d44 100644 --- a/lib/package.sh +++ b/lib/package.sh @@ -1,29 +1,29 @@ #!/usr/bin/env bash check_for_package_cfg() { - local requiredCfg='ON:ON:ON:3' - local currentCfg="${STATIC}:${LTO}:${PGO}:${OPT}" - if [[ ${currentCfg} == "${requiredCfg}" ]]; then - return 0 - else - return 1 - fi + local requiredCfg='ON:ON:ON:3' + local currentCfg="${STATIC}:${LTO}:${PGO}:${OPT}" + if [[ ${currentCfg} == "${requiredCfg}" ]]; then + return 0 + else + return 1 + fi } FB_FUNC_NAMES+=('package') FB_FUNC_DESCS['package']='package ffmpeg build' package() { - local pkgDir="${IGN_DIR}/package" - 
recreate_dir "${pkgDir}" || return 1 - check_for_package_cfg || return 0 + local pkgDir="${IGN_DIR}/package" + recreate_dir "${pkgDir}" || return 1 + check_for_package_cfg || return 0 - echo_info "packaging" - set_compile_opts || return 1 - cp "${PREFIX}/bin/ff"* "${pkgDir}/" + echo_info "packaging" + set_compile_opts || return 1 + cp "${PREFIX}/bin/ff"* "${pkgDir}/" - cd "${pkgDir}" || return 1 - local tarball="ffmpeg-build-${HOSTTYPE}-$(print_os).tar" - tar -cf "${tarball}" ff* || return 1 - xz -e -9 "${tarball}" || return 1 - echo_pass "finished packaging ${tarball}.xz" + cd "${pkgDir}" || return 1 + local tarball="ffmpeg-build-${HOSTTYPE}-$(print_os).tar" + tar -cf "${tarball}" ff* || return 1 + xz -e -9 "${tarball}" || return 1 + echo_pass "finished packaging ${tarball}.xz" } diff --git a/lib/pgo.sh b/lib/pgo.sh index 45c8bae..107b1c9 100644 --- a/lib/pgo.sh +++ b/lib/pgo.sh @@ -3,58 +3,58 @@ PGO_DIR="${IGN_DIR}/pgo" PGO_PROFDATA="${PGO_DIR}/prof.profdata" gen_profdata() { - recreate_dir "${PGO_DIR}" || return 1 - cd "${PGO_DIR}" || return 1 - setup_pgo_clips || return 1 - for vid in *.mkv; do - local args=() - # add precalculated grain amount based off of filename - line_contains "${vid}" 'grain' && args+=(-g 16) - # make fhd preset 2 - line_contains "${vid}" 'fhd' && args+=(-P 2) + recreate_dir "${PGO_DIR}" || return 1 + cd "${PGO_DIR}" || return 1 + setup_pgo_clips || return 1 + for vid in *.mkv; do + local args=() + # add precalculated grain amount based off of filename + line_contains "${vid}" 'grain' && args+=(-g 16) + # make fhd preset 2 + line_contains "${vid}" 'fhd' && args+=(-P 2) - echo_info "encoding pgo vid: ${vid}" - LLVM_PROFILE_FILE="${PGO_DIR}/default_%p.profraw" \ - echo_if_fail encode -i "${vid}" "${args[@]}" "encoded-${vid}" || return 1 - done + echo_info "encoding pgo vid: ${vid}" + LLVM_PROFILE_FILE="${PGO_DIR}/default_%p.profraw" \ + echo_if_fail encode -i "${vid}" "${args[@]}" "encoded-${vid}" || return 1 + done - # merge profraw into profdata - local mergeCmd=() - # darwin needs special invoke - if is_darwin; then - mergeCmd+=(xcrun) - fi + # merge profraw into profdata + local mergeCmd=() + # darwin needs special invoke + if is_darwin; then + mergeCmd+=(xcrun) + fi - mergeCmd+=( - llvm-profdata - merge - "--output=${PGO_PROFDATA}" - ) - "${mergeCmd[@]}" default*.profraw || return 1 + mergeCmd+=( + llvm-profdata + merge + "--output=${PGO_PROFDATA}" + ) + "${mergeCmd[@]}" default*.profraw || return 1 - return 0 + return 0 } setup_pgo_clips() { - local clips=( - "fhd-grainy.mkv 1080p,grain=yes" - "uhd.mkv 2160p" - "uhd-hdr.mkv 2160p,hdr=yes" - ) - for clip in "${clips[@]}"; do - local genVid genVidArgs pgoFile genVidArgsArr - IFS=' ' read -r genVid genVidArgs <<<"${clip}" - # pgo path is separate - pgoFile="${PGO_DIR}/${genVid}" - genVid="${TMP_DIR}/${genVid}" - # create array of args split with , - genVidArgsArr=(${genVidArgs//,/ }) - # create generated vid without any profiling if needed - test -f "${genVid}" || - LLVM_PROFILE_FILE='/dev/null' gen_video "${genVid}" "${genVidArgsArr[@]}" || return 1 - # and move to the pgo directory - test -f "${pgoFile}" || - cp "${genVid}" "${pgoFile}" || return 1 + local clips=( + "fhd-grainy.mkv 1080p,grain=yes" + "uhd.mkv 2160p" + "uhd-hdr.mkv 2160p,hdr=yes" + ) + for clip in "${clips[@]}"; do + local genVid genVidArgs pgoFile genVidArgsArr + IFS=' ' read -r genVid genVidArgs <<<"${clip}" + # pgo path is separate + pgoFile="${PGO_DIR}/${genVid}" + genVid="${TMP_DIR}/${genVid}" + # create array of args split with , + 
genVidArgsArr=(${genVidArgs//,/ }) + # create generated vid without any profiling if needed + test -f "${genVid}" || + LLVM_PROFILE_FILE='/dev/null' gen_video "${genVid}" "${genVidArgsArr[@]}" || return 1 + # and move to the pgo directory + test -f "${pgoFile}" || + cp "${genVid}" "${pgoFile}" || return 1 - done + done } diff --git a/lib/readme.sh b/lib/readme.sh index 70006ef..453b461 100644 --- a/lib/readme.sh +++ b/lib/readme.sh @@ -1,29 +1,29 @@ #!/usr/bin/env bash make_bullet_points() { - for arg in "$@"; do - echo "- ${arg}" - done + for arg in "$@"; do + echo "- ${arg}" + done } gen_function_info() { - local funcName="$1" - echo "${FB_FUNC_DESCS[${funcName}]} using \`./scripts/${funcName}.sh\`" + local funcName="$1" + echo "${FB_FUNC_DESCS[${funcName}]} using \`./scripts/${funcName}.sh\`" } gen_compile_opts_info() { - for opt in "${FB_COMP_OPTS[@]}"; do - declare -n defOptVal="DEFAULT_${opt}" - echo "- \`${opt}\`: ${FB_COMP_OPTS_DESC[${opt}]} (default: ${defOptVal})" - done + for opt in "${FB_COMP_OPTS[@]}"; do + declare -n defOptVal="DEFAULT_${opt}" + echo "- \`${opt}\`: ${FB_COMP_OPTS_DESC[${opt}]} (default: ${defOptVal})" + done } FB_FUNC_NAMES+=('gen_readme') FB_FUNC_DESCS['gen_readme']='generate project README.md' gen_readme() { - local readme="${REPO_DIR}/README.md" + local readme="${REPO_DIR}/README.md" - echo " + echo " # ffmpeg-builder A collection of scripts for building \`ffmpeg\` and encoding content with the built \`ffmpeg\`. diff --git a/lib/utils.sh b/lib/utils.sh deleted file mode 100644 index ec1e156..0000000 --- a/lib/utils.sh +++ /dev/null @@ -1,389 +0,0 @@ -#!/usr/bin/env bash - -# shellcheck disable=SC2034 - -# ANSI colors -RED='\e[0;31m' -CYAN='\e[0;36m' -GREEN='\e[0;32m' -YELLOW='\e[0;33m' -NC='\e[0m' - -# echo wrappers -echo_wrapper() { - local args - if [[ $1 == '-n' ]]; then - args=("$1") - shift - fi - # COLOR is override for using ${color} - # shellcheck disable=SC2153 - if [[ ${COLOR} == 'OFF' ]]; then - color='' - endColor='' - else - endColor="${NC}" - fi - - echo -e "${args[@]}" "${color}${word:-''}${endColor}" "$@" -} -echo_fail() { color="${RED}" word="FAIL" echo_wrapper "$@"; } -echo_info() { color="${CYAN}" word="INFO" echo_wrapper "$@"; } -echo_pass() { color="${GREEN}" word="PASS" echo_wrapper "$@"; } -echo_warn() { color="${YELLOW}" word="WARN" echo_wrapper "$@"; } -echo_exit() { - echo_fail "$@" - exit 1 -} -void() { echo "$@" >/dev/null; } - -echo_if_fail() { - local cmd=("$@") - local logName="${LOGNAME:-${RANDOM}}-" - local out="${TMP_DIR}/${logName}stdout" - local err="${TMP_DIR}/${logName}stderr" - - # set trace to the cmdEvalTrace and open file descriptor - local cmdEvalTrace="${TMP_DIR}/${logName}cmdEvalTrace" - exec 5>"${cmdEvalTrace}" - export BASH_XTRACEFD=5 - - set -x - "${cmd[@]}" >"${out}" 2>"${err}" - local retval=$? - - # unset and close file descriptor - set +x - exec 5>&- - - # parse out relevant part of the trace - local cmdEvalLines=() - while IFS= read -r line; do - line="${line/${PS4}/}" - test "${line}" == 'set +x' && continue - test "${line}" == '' && continue - cmdEvalLines+=("${line}") - done <"${cmdEvalTrace}" - - if ! 
test ${retval} -eq 0; then - echo - echo_fail "command failed with ${retval}:" - printf "%s\n" "${cmdEvalLines[@]}" - echo_warn "command stdout:" - tail -n 32 "${out}" - echo_warn "command stderr:" - tail -n 32 "${err}" - echo - fi - if [[ -z ${LOGNAME} ]]; then - rm "${out}" "${err}" "${cmdEvalTrace}" - fi - return ${retval} -} - -is_root_owned() { - local path=$1 - local uid - - if stat --version >/dev/null 2>&1; then - # GNU coreutils (Linux) - uid=$(stat -c '%u' "$path") - else - # BSD/macOS - uid=$(stat -f '%u' "$path") - fi - - test "$uid" -eq 0 -} - -dump_arr() { - local arrayNames=("$@") - for arrayName in "${arrayNames[@]}"; do - declare -n array="${arrayName}" - arrayExpanded=("${array[@]}") - - # skip showing single element arrays by default - if [[ ! ${#arrayExpanded[@]} -gt 1 ]]; then - if [[ ${SHOW_SINGLE} == true ]]; then - echo_info "${arrayName}='${arrayExpanded[*]}'" - else - continue - fi - fi - - echo - # don't care that the variable has "ARR" - echo_info "${arrayName//"_ARR"/}" - printf "\t%s\n" "${arrayExpanded[@]}" - done -} - -has_cmd() { - local cmds=("$@") - local rv=0 - for cmd in "${cmds[@]}"; do - command -v "${cmd}" >/dev/null 2>&1 || rv=1 - done - - return ${rv} -} - -missing_cmd() { - local cmds=("$@") - local rv=1 - for cmd in "${cmds[@]}"; do - if ! has_cmd "${cmd}"; then - echo_warn "missing ${cmd}" - rv=0 - fi - done - - return ${rv} -} - -bash_dirname() { - local tmp=${1:-.} - - [[ $tmp != *[!/]* ]] && { - printf '/\n' - return - } - - tmp=${tmp%%"${tmp##*[!/]}"} - - [[ $tmp != */* ]] && { - printf '.\n' - return - } - - tmp=${tmp%/*} - tmp=${tmp%%"${tmp##*[!/]}"} - - printf '%s\n' "${tmp:-/}" -} - -bash_basename() { - local tmp - path="$1" - suffix="${2:-''}" - - tmp=${path%"${path##*[!/]}"} - tmp=${tmp##*/} - tmp=${tmp%"${suffix/"$tmp"/}"} - - printf '%s\n' "${tmp:-/}" -} - -bash_realpath() { - local file=$1 - local dir - - # If the file is already absolute - [[ $file == /* ]] && { - printf '%s\n' "$file" - return - } - - # Otherwise: split into directory + basename - dir="$(bash_dirname "${file}")" - file="$(bash_basename "${file}")" - - # If no directory component, use current directory - if [[ $dir == "$file" ]]; then - dir="$PWD" - else - # Save current dir, move into target dir, capture $PWD, then return - local oldpwd="$PWD" - cd "$dir" || return 1 - dir="$PWD" - cd "$oldpwd" || return 1 - fi - - printf '%s/%s\n' "$dir" "$file" -} - -line_contains() { - local line="$1" - local substr="$2" - if [[ $line == *"${substr}"* ]]; then - return 0 - else - return 1 - fi -} - -line_starts_with() { - local line="$1" - local substr="$2" - if [[ $line == "${substr}"* ]]; then - return 0 - else - return 1 - fi -} - -is_linux() { - line_contains "${OSTYPE}" 'linux' -} - -is_darwin() { - line_contains "$(print_os)" darwin -} - -is_windows() { - line_contains "$(print_os)" windows -} - -is_android() { - line_contains "$(print_os)" android -} - -print_os() { - # cached response - if [[ -n ${FB_OS} ]]; then - echo "${FB_OS}" - return 0 - fi - - unset FB_OS - if [[ -f /etc/os-release ]]; then - source /etc/os-release - FB_OS="${ID}" - if [[ ${VERSION_ID} != '' ]]; then - FB_OS+="-${VERSION_ID}" - fi - if line_starts_with "${FB_OS}" 'arch'; then - FB_OS='archlinux' - fi - else - FB_OS="$(uname -o)" - fi - - # lowercase - FB_OS="${FB_OS,,}" - - # special treatment for windows - if line_contains "${FB_OS}" 'windows' || line_contains "${FB_OS}" 'msys'; then - FB_OS='windows' - fi - - echo "${FB_OS}" -} - -is_positive_integer() { - local input="$1" - if [[ ${input} != 
?(-)+([[:digit:]]) || ${input} -lt 0 ]]; then - echo_fail "${input} is not a positive integer" - return 1 - fi - return 0 -} - -replace_line() { - local file="$1" - local search="$2" - local newLine="$3" - local newFile="${TMP_DIR}/$(bash_basename "${file}")" - - test -f "${newFile}" && rm "${newFile}" - while read -r line; do - if line_contains "${line}" "${search}"; then - echo -en "${newLine}" >>"${newFile}" - continue - fi - echo "${line}" >>"${newFile}" - done <"${file}" - - cp "${newFile}" "${file}" -} - -remove_line() { - local file="$1" - local search="$2" - replace_line "${file}" "${search}" '' -} - -bash_sort() { - local arr=("$@") - local n=${#arr[@]} - local i j val1 val2 - - # Bubble sort, numeric comparison - for ((i = 0; i < n; i++)); do - for ((j = 0; j < n - i - 1; j++)); do - read -r val1 _ <<<"${arr[j]}" - read -r val2 _ <<<"${arr[j + 1]}" - if (("${val1}" > "${val2}")); then - local tmp=${arr[j]} - arr[j]=${arr[j + 1]} - arr[j + 1]=$tmp - fi - done - done - - printf '%s\n' "${arr[@]}" -} - -_start_spinner() { - local spinChars=( - "-" - '\' - "|" - "/" - ) - - sleep 1 - - while true; do - for ((ind = 0; ind < "${#spinChars[@]}"; ind++)); do - echo -ne "${spinChars[${ind}]}" '\b\b' - sleep .25 - done - done -} - -spinner() { - local action="$1" - local spinPidFile="${TMP_DIR}/.spinner-pid" - case "${action}" in - start) - test -f "${spinPidFile}" && rm "${spinPidFile}" - - # don't want to clutter logs if running headless - test "${HEADLESS}" == '1' && return - - _start_spinner & - echo $! >"${spinPidFile}" - ;; - stop) - test -f "${spinPidFile}" && kill "$(<"${spinPidFile}")" - echo -ne ' \n' - ;; - esac -} - -get_pkgconfig_version() { - local pkg="$1" - pkg-config --modversion "${pkg}" -} - -using_cmake_4() { - local cmakeVersion - IFS=$' \t' read -r _ _ cmakeVersion <<<"$(command cmake --version)" - line_starts_with "${cmakeVersion}" 4 -} - -recreate_dir() { - local dirs=("$@") - for dir in "${dirs[@]}"; do - test -d "${dir}" && rm -rf "${dir}" - mkdir -p "${dir}" || return 1 - done -} - -ensure_dir() { - local dirs=("$@") - for dir in "${dirs[@]}"; do - test -d "${dir}" || mkdir -p "${dir}" || return 1 - done -} diff --git a/main.sh b/main.sh index 272de43..b632954 100755 --- a/main.sh +++ b/main.sh @@ -2,8 +2,8 @@ # set top dir if [[ -z ${REPO_DIR} ]]; then - thisFile="$(readlink -f "${BASH_SOURCE[0]}")" - REPO_DIR="$(dirname "${thisFile}")" + thisFile="$(readlink -f "${BASH_SOURCE[0]}")" + REPO_DIR="$(dirname "${thisFile}")" fi IGN_DIR="${REPO_DIR}/gitignore" @@ -22,14 +22,14 @@ test -v FUNC_EXIT_SUCCESS || readonly FUNC_EXIT_SUCCESS=9 # make paths if needed IGN_DIRS=( - "${TMP_DIR}" - "${DL_DIR}" - "${BUILD_DIR}" - "${CCACHE_DIR}" - "${DOCKER_DIR}" + "${TMP_DIR}" + "${DL_DIR}" + "${BUILD_DIR}" + "${CCACHE_DIR}" + "${DOCKER_DIR}" ) for dir in "${IGN_DIRS[@]}"; do - test -d "${dir}" || mkdir -p "${dir}" + test -d "${dir}" || mkdir -p "${dir}" done unset IGN_DIRS @@ -47,12 +47,12 @@ FB_COMPILE_OPTS_SET=0 SCRIPT_DIR="${REPO_DIR}/scripts" ENTRY_SCRIPT="${SCRIPT_DIR}/entry.sh" src_scripts() { - local SCRIPT_DIR="${REPO_DIR}/scripts" + local SCRIPT_DIR="${REPO_DIR}/scripts" - if [[ $FB_RUNNING_AS_SCRIPT -eq 0 ]]; then - rm "${SCRIPT_DIR}"/*.sh - # shellcheck disable=SC2016 - echo '#!/usr/bin/env bash + if [[ $FB_RUNNING_AS_SCRIPT -eq 0 ]]; then + rm "${SCRIPT_DIR}"/*.sh + # shellcheck disable=SC2016 + echo '#!/usr/bin/env bash export FB_RUNNING_AS_SCRIPT=1 thisFile="$(readlink -f "$0")" export REPO_DIR="$(cd "$(dirname "${thisFile}")/.." 
&& echo "$PWD")" @@ -61,32 +61,32 @@ scr_name="$(bash_basename $0)" cmd="${scr_name//.sh/}" if [[ $DEBUG == 1 ]]; then set -x; fi $cmd "$@"' >"${ENTRY_SCRIPT}" - chmod +x "${ENTRY_SCRIPT}" - fi + chmod +x "${ENTRY_SCRIPT}" + fi - for script in "${REPO_DIR}/lib/"*.sh; do - # shellcheck disable=SC1090 - source "${script}" - done + for script in "${REPO_DIR}/lib/"*.sh; do + # shellcheck disable=SC1090 + source "${script}" + done } FB_FUNC_NAMES+=('print_cmds') FB_FUNC_DESCS['print_cmds']='print usable commands' print_cmds() { - echo -e "~~~ Usable Commands ~~~\n" - for funcName in "${FB_FUNC_NAMES[@]}"; do - color="${CYAN}" word="${funcName}:" echo_wrapper "\n\t${FB_FUNC_DESCS[${funcName}]}" - if [[ $FB_RUNNING_AS_SCRIPT -eq 0 ]]; then - (cd "$SCRIPT_DIR" && ln -sf entry.sh "${funcName}.sh") - fi - done - echo -e "\n" + echo -e "~~~ Usable Commands ~~~\n" + for funcName in "${FB_FUNC_NAMES[@]}"; do + color="${CYAN}" word="${funcName}:" echo_wrapper "\n\t${FB_FUNC_DESCS[${funcName}]}" + if [[ $FB_RUNNING_AS_SCRIPT -eq 0 ]]; then + (cd "$SCRIPT_DIR" && ln -sf entry.sh "${funcName}.sh") + fi + done + echo -e "\n" } set_completions() { - for funcName in "${FB_FUNC_NAMES[@]}"; do - complete -W "${FB_FUNC_COMPLETION[${funcName}]}" "${funcName}" - done + for funcName in "${FB_FUNC_NAMES[@]}"; do + complete -W "${FB_FUNC_COMPLETION[${funcName}]}" "${funcName}" + done } src_scripts || return 1 @@ -98,6 +98,6 @@ check_compile_opts_override || return 1 LOCAL_PREFIX="${IGN_DIR}/$(print_os)_sysroot" if [[ ${FB_RUNNING_AS_SCRIPT} -eq 0 ]]; then - print_cmds || return 1 + print_cmds || return 1 fi set_completions || return 1 diff --git a/shfmt-watcher.sh b/shfmt-watcher.sh index 637eb31..2fc1005 100755 --- a/shfmt-watcher.sh +++ b/shfmt-watcher.sh @@ -3,13 +3,13 @@ base="$(dirname "$(readlink -f "$0")")" inotifywait -m -r \ - -e close_write \ - -e moved_to \ - --format '%w%f' \ - "$base/lib" \ - "$base/scripts" \ - "$base/main.sh" | while read -r file; do - if [[ -f $file && $file =~ .sh ]]; then - shfmt --write --simplify "$file" - fi + -e close_write \ + -e moved_to \ + --format '%w%f' \ + "${base}/lib" \ + "${base}/scripts" \ + "${base}/main.sh" | while read -r file; do + if [[ -f $file && $file =~ .sh ]]; then + shfmt --indent 4 --write --simplify "${file}" + fi done
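
Note: the reworked set_encode_opts counts how many arguments each flag consumes (optsUsed) so that a single optional output filename can follow the flags, and gen_encode_script joins svtAv1ParamsArr into the colon-separated string handed to -svtav1-params by temporarily setting IFS. Below is a minimal, self-contained sketch of those two patterns only; demo_opts, its flag names, and the parameter values are illustrative stand-ins and not the real encode interface.

    #!/usr/bin/env bash

    # Sketch only: shows the option-counting and IFS-join patterns used by
    # set_encode_opts and gen_encode_script. demo_opts and its flags are
    # hypothetical; they are not part of ffmpeg-builder.
    demo_opts() {
        local OPTARG
        local OPTIND=1
        local optsUsed=0
        local input='' preset=3

        while getopts 'i:P:' flag; do
            case "${flag}" in
                i)
                    input="${OPTARG}"
                    # a flag plus its value consumes two positional arguments
                    optsUsed=$((optsUsed + 2))
                    ;;
                P)
                    preset="${OPTARG}"
                    optsUsed=$((optsUsed + 2))
                    ;;
                *) return 1 ;;
            esac
        done

        # exactly one leftover argument is treated as an optional output name
        local output='out.mkv'
        if [[ $(($# - optsUsed)) -eq 1 ]]; then
            output="${*: -1}"
        fi

        # join an array into a colon-separated parameter string via IFS
        local paramsArr=("tune=0" "enable-qm=1" "qm-min=4")
        IFS=':'
        local params="${paramsArr[*]}"
        unset IFS

        echo "input=${input} preset=${preset} output=${output}"
        echo "params=${params}"
    }

    demo_opts -i in.mkv -P 4 custom-name.mkv

Running the sketch prints the parsed values followed by the joined string (params=tune=0:enable-qm=1:qm-min=4), which mirrors how GRAIN plus svtAv1ParamsArr are collapsed into a single -svtav1-params value in the generated encode script.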