From 2b7faac58c5f214addf3403ee17ce0eec20568c6 Mon Sep 17 00:00:00 2001 From: Levon Gevorgyan Date: Sun, 27 Jul 2025 11:50:38 -0500 Subject: [PATCH] major rework --- cfgs/builds.json | 55 --- cfgs/compile_opts.json | 13 - ...od_implementation_of_sad_loop_kernel.patch | 156 +++++++ lib/build-docker-images.sh | 26 ++ lib/build.sh | 411 ++++++++++++++++++ lib/common.sh | 129 ++++++ lib/compile_opts.sh | 26 ++ lib/install_deps.sh | 132 ++++++ main.sh | 71 +-- scripts/build-docker-images.sh | 24 - scripts/build.sh | 410 +---------------- scripts/build_docker_images.sh | 1 + scripts/common.sh | 73 ---- scripts/do_build.sh | 1 + scripts/docker_build_images.sh | 1 + scripts/entry.sh | 7 + scripts/install_deps.sh | 194 +-------- scripts/print_cmds.sh | 1 + shfmt-watcher.sh | 12 + 19 files changed, 947 insertions(+), 796 deletions(-) delete mode 100644 cfgs/builds.json delete mode 100644 cfgs/compile_opts.json create mode 100644 fix_neon_dotprod_implementation_of_sad_loop_kernel.patch create mode 100755 lib/build-docker-images.sh create mode 100644 lib/build.sh create mode 100644 lib/common.sh create mode 100644 lib/compile_opts.sh create mode 100644 lib/install_deps.sh delete mode 100755 scripts/build-docker-images.sh mode change 100644 => 120000 scripts/build.sh create mode 120000 scripts/build_docker_images.sh delete mode 100644 scripts/common.sh create mode 120000 scripts/do_build.sh create mode 120000 scripts/docker_build_images.sh create mode 100755 scripts/entry.sh mode change 100644 => 120000 scripts/install_deps.sh create mode 120000 scripts/print_cmds.sh create mode 100755 shfmt-watcher.sh diff --git a/cfgs/builds.json b/cfgs/builds.json deleted file mode 100644 index c00c3d5..0000000 --- a/cfgs/builds.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "ffmpeg": { - "ver": "7cd1edeaa410d977a9f1ff8436f480cb45b80178", - "ext": "git", - "url": "https://github.com/FFmpeg/FFmpeg/" - }, - "hdr10plus_tool": { - "ver": "1.7.0", - "ext": "tar.gz", - "url": "https://github.com/quietvoid/hdr10plus_tool/archive/refs/tags/${ver}.${ext}" - }, - "dovi_tool": { - "ver": "2.2.0", - "ext": "tar.gz", - "url": "https://github.com/quietvoid/dovi_tool/archive/refs/tags/${ver}.${ext}" - }, - "libsvtav1": { - "ver": "3.0.2", - "ext": "tar.gz", - "url": "https://gitlab.com/AOMediaCodec/SVT-AV1/-/archive/v${ver}/SVT-AV1-v${ver}.${ext}" - }, - "libsvtav1_psy": { - "ver": "3.0.2", - "ext": "tar.gz", - "url": "https://github.com/psy-ex/svt-av1-psy/archive/refs/tags/v${ver}.${ext}", - "deps": [ - "dovi_tool", "hdr10plus_tool" - ] - }, - "librav1e": { - "ver": "0.7.1", - "ext": "tar.gz", - "url": "https://github.com/xiph/rav1e/archive/refs/tags/v${ver}.${ext}" - }, - "libaom": { - "ver": "3.12.1", - "ext": "tar.gz", - "url": "https://storage.googleapis.com/aom-releases/libaom-${ver}.${ext}" - }, - "libvmaf": { - "ver": "3.0.0", - "ext": "tar.gz", - "url": "https://github.com/Netflix/vmaf/archive/refs/tags/v${ver}.${ext}" - }, - "libopus": { - "ver": "1.5.2", - "ext": "tar.gz", - "url": "https://github.com/xiph/opus/releases/download/v${ver}/opus-${ver}.${ext}" - }, - "libdav1d": { - "ver": "1.5.0", - "ext": "tar.xz", - "url": "http://downloads.videolan.org/videolan/dav1d/${ver}/dav1d-${ver}.${ext}" - } -} diff --git a/cfgs/compile_opts.json b/cfgs/compile_opts.json deleted file mode 100644 index 15b579d..0000000 --- a/cfgs/compile_opts.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "clean": true, - "lto": false, - "optimization": 0, - "static": true, - "shared": false, - "cpu": "native", - "arch": "native", - "target_windows": false, - 
"ffmpeg_enable": [ - "libopus", "libdav1d", "libsvtav1_psy" - ] -} \ No newline at end of file diff --git a/fix_neon_dotprod_implementation_of_sad_loop_kernel.patch b/fix_neon_dotprod_implementation_of_sad_loop_kernel.patch new file mode 100644 index 0000000..005a969 --- /dev/null +++ b/fix_neon_dotprod_implementation_of_sad_loop_kernel.patch @@ -0,0 +1,156 @@ +commit 5def505f7f193d890be61e869831378f212a07bd +Author: Salome Thirot +Date: Fri May 2 11:20:54 2025 +0100 + + Fix Neon Dotprod implementation of sad_loop_kernel + + search_area_width and search_area_height can sometimes be 0, so replace + all the do while loops with for loops. + +diff --git a/Source/Lib/ASM_NEON_DOTPROD/compute_sad_neon_dotprod.c b/Source/Lib/ASM_NEON_DOTPROD/compute_sad_neon_dotprod.c +index c116037..15d3da6 100644 +--- a/Source/Lib/ASM_NEON_DOTPROD/compute_sad_neon_dotprod.c ++++ b/Source/Lib/ASM_NEON_DOTPROD/compute_sad_neon_dotprod.c +@@ -237,10 +237,8 @@ static inline void svt_sad_loop_kernel16xh_neon_dotprod(uint8_t *src, uint32_t s + y_search_step = 2; + } + +- int y_search_index = y_search_start; +- do { +- int x_search_index = 0; +- do { ++ for (int y_search_index = y_search_start; y_search_index < search_area_height; y_search_index += y_search_step) { ++ for (int x_search_index = 0; x_search_index < search_area_width; x_search_index += 8) { + /* Get the SAD of 8 search spaces aligned along the width and store it in 'sad4'. */ + uint32x4_t sad4_0 = sad16xhx4d_neon_dotprod( + src, src_stride, ref + x_search_index, ref_stride, block_height); +@@ -248,13 +246,10 @@ static inline void svt_sad_loop_kernel16xh_neon_dotprod(uint8_t *src, uint32_t s + src, src_stride, ref + x_search_index + 4, ref_stride, block_height); + update_best_sad_u32(sad4_0, best_sad, x_search_center, y_search_center, x_search_index, y_search_index); + update_best_sad_u32(sad4_1, best_sad, x_search_center, y_search_center, x_search_index + 4, y_search_index); +- +- x_search_index += 8; +- } while (x_search_index != search_area_width); ++ } + + ref += src_stride_raw; +- y_search_index += y_search_step; +- } while (y_search_index < search_area_height); ++ } + } + + static inline void svt_sad_loop_kernel16xh_small_neon_dotprod(uint8_t *src, uint32_t src_stride, uint8_t *ref, +@@ -273,8 +268,7 @@ static inline void svt_sad_loop_kernel16xh_small_neon_dotprod(uint8_t *src, uint + y_search_step = 2; + } + +- int y_search_index = y_search_start; +- do { ++ for (int y_search_index = y_search_start; y_search_index < search_area_height; y_search_index += y_search_step) { + int x_search_index; + for (x_search_index = 0; x_search_index <= search_area_width - 4; x_search_index += 4) { + /* Get the SAD of 4 search spaces aligned along the width and store it in 'sad4'. 
*/ +@@ -289,8 +283,7 @@ static inline void svt_sad_loop_kernel16xh_small_neon_dotprod(uint8_t *src, uint + } + + ref += src_stride_raw; +- y_search_index += y_search_step; +- } while (y_search_index < search_area_height); ++ } + } + + static inline void svt_sad_loop_kernel32xh_neon_dotprod(uint8_t *src, uint32_t src_stride, uint8_t *ref, +@@ -298,10 +291,8 @@ static inline void svt_sad_loop_kernel32xh_neon_dotprod(uint8_t *src, uint32_t s + int16_t *x_search_center, int16_t *y_search_center, + uint32_t src_stride_raw, int16_t search_area_width, + int16_t search_area_height) { +- int y_search_index = 0; +- do { +- int x_search_index = 0; +- do { ++ for (int y_search_index = 0; y_search_index < search_area_height; y_search_index++) { ++ for (int x_search_index = 0; x_search_index < search_area_width; x_search_index += 8) { + /* Get the SAD of 4 search spaces aligned along the width and store it in 'sad4'. */ + uint32x4_t sad4_0 = sad32xhx4d_neon_dotprod( + src, src_stride, ref + x_search_index, ref_stride, block_height); +@@ -309,11 +300,10 @@ static inline void svt_sad_loop_kernel32xh_neon_dotprod(uint8_t *src, uint32_t s + src, src_stride, ref + x_search_index + 4, ref_stride, block_height); + update_best_sad_u32(sad4_0, best_sad, x_search_center, y_search_center, x_search_index, y_search_index); + update_best_sad_u32(sad4_1, best_sad, x_search_center, y_search_center, x_search_index + 4, y_search_index); +- x_search_index += 8; +- } while (x_search_index != search_area_width); ++ } + + ref += src_stride_raw; +- } while (++y_search_index != search_area_height); ++ } + } + + static inline void svt_sad_loop_kernel32xh_small_neon_dotprod(uint8_t *src, uint32_t src_stride, uint8_t *ref, +@@ -321,8 +311,7 @@ static inline void svt_sad_loop_kernel32xh_small_neon_dotprod(uint8_t *src, uint + uint64_t *best_sad, int16_t *x_search_center, + int16_t *y_search_center, uint32_t src_stride_raw, + int16_t search_area_width, int16_t search_area_height) { +- int y_search_index = 0; +- do { ++ for (int y_search_index = 0; y_search_index < search_area_height; y_search_index++) { + int x_search_index; + for (x_search_index = 0; x_search_index <= search_area_width - 4; x_search_index += 4) { + /* Get the SAD of 4 search spaces aligned along the width and store it in 'sad4'. */ +@@ -337,7 +326,7 @@ static inline void svt_sad_loop_kernel32xh_small_neon_dotprod(uint8_t *src, uint + } + + ref += src_stride_raw; +- } while (++y_search_index != search_area_height); ++ } + } + + static inline void svt_sad_loop_kernel64xh_neon_dotprod(uint8_t *src, uint32_t src_stride, uint8_t *ref, +@@ -345,10 +334,8 @@ static inline void svt_sad_loop_kernel64xh_neon_dotprod(uint8_t *src, uint32_t s + int16_t *x_search_center, int16_t *y_search_center, + uint32_t src_stride_raw, int16_t search_area_width, + int16_t search_area_height) { +- int y_search_index = 0; +- do { +- int x_search_index = 0; +- do { ++ for (int y_search_index = 0; y_search_index < search_area_height; y_search_index++) { ++ for (int x_search_index = 0; x_search_index < search_area_width; x_search_index += 8) { + /* Get the SAD of 4 search spaces aligned along the width and store it in 'sad4'. 
*/ + uint32x4_t sad4_0 = sad64xhx4d_neon_dotprod( + src, src_stride, ref + x_search_index, ref_stride, block_height); +@@ -356,11 +343,10 @@ static inline void svt_sad_loop_kernel64xh_neon_dotprod(uint8_t *src, uint32_t s + src, src_stride, ref + x_search_index + 4, ref_stride, block_height); + update_best_sad_u32(sad4_0, best_sad, x_search_center, y_search_center, x_search_index, y_search_index); + update_best_sad_u32(sad4_1, best_sad, x_search_center, y_search_center, x_search_index + 4, y_search_index); ++ } + +- x_search_index += 8; +- } while (x_search_index != search_area_width); + ref += src_stride_raw; +- } while (++y_search_index != search_area_height); ++ } + } + + static inline void svt_sad_loop_kernel64xh_small_neon_dotprod(uint8_t *src, uint32_t src_stride, uint8_t *ref, +@@ -368,8 +354,7 @@ static inline void svt_sad_loop_kernel64xh_small_neon_dotprod(uint8_t *src, uint + uint64_t *best_sad, int16_t *x_search_center, + int16_t *y_search_center, uint32_t src_stride_raw, + int16_t search_area_width, int16_t search_area_height) { +- int y_search_index = 0; +- do { ++ for (int y_search_index = 0; y_search_index < search_area_height; y_search_index++) { + int x_search_index; + for (x_search_index = 0; x_search_index <= search_area_width - 4; x_search_index += 4) { + /* Get the SAD of 4 search spaces aligned along the width and store it in 'sad4'. */ +@@ -382,8 +367,9 @@ static inline void svt_sad_loop_kernel64xh_small_neon_dotprod(uint8_t *src, uint + uint64_t temp_sad = sad64xh_neon_dotprod(src, src_stride, ref + x_search_index, ref_stride, block_height); + update_best_sad(temp_sad, best_sad, x_search_center, y_search_center, x_search_index, y_search_index); + } ++ + ref += src_stride_raw; +- } while (++y_search_index != search_area_height); ++ } + } + + void svt_sad_loop_kernel_neon_dotprod(uint8_t *src, uint32_t src_stride, uint8_t *ref, uint32_t ref_stride, diff --git a/lib/build-docker-images.sh b/lib/build-docker-images.sh new file mode 100755 index 0000000..e2ebc4a --- /dev/null +++ b/lib/build-docker-images.sh @@ -0,0 +1,26 @@ +#!/usr/bin/env bash + +FB_FUNC_NAMES+=('docker_build_images') +# FB_FUNC_DESCS used externally +# shellcheck disable=SC2034 +FB_FUNC_DESCS['docker_build_images']='build docker images with required dependencies pre-installed' +docker_build_images() { + DISTROS=(debian ubuntu archlinux fedora) + DOCKERFILE_DIR="${IGN_DIR}/Dockerfiles" + test -d "${DOCKERFILE_DIR}" || mkdir -p "${DOCKERFILE_DIR}" + for distro in "${DISTROS[@]}"; do + echo "\ +FROM ${distro} +COPY scripts/ /ffmpeg-builder/scripts/ +COPY main.sh /ffmpeg-builder/ +RUN bash -c 'source /ffmpeg-builder/main.sh ; install_deps' || exit 1" \ + >"${DOCKERFILE_DIR}/Dockerfile_${distro}" + image_tag="ffmpeg_builder_${distro}" + dockerfile="Dockerfile_${distro}" + echo_info "building ${image_tag}" + docker build \ + -t "${image_tag}" \ + -f "${DOCKERFILE_DIR}/${dockerfile}" \ + "${REPO_DIR}/" + done +} diff --git a/lib/build.sh b/lib/build.sh new file mode 100644 index 0000000..1a4c238 --- /dev/null +++ b/lib/build.sh @@ -0,0 +1,411 @@ +#!/usr/bin/env bash + +set_compile_opts() { + unset LDFLAGS C_FLAGS CXX_FLAGS CPP_FLAGS \ + CONFIGURE_FLAGS MESON_FLAGS \ + RUSTFLAGS CMAKE_FLAGS \ + FFMPEG_EXTRA_FLAGS + export LDFLAGS C_FLAGS CXX_FLAGS CPP_FLAGS \ + CONFIGURE_FLAGS MESON_FLAGS \ + RUSTFLAGS CMAKE_FLAGS \ + FFMPEG_EXTRA_FLAGS + + # set job count for all builds + JOBS="$(nproc)" + + MACHINE="$(cc -dumpmachine)" + test "${MACHINE}" == '' && return 1 + MACHINE_LIB="${PREFIX}/lib/${MACHINE}" + + # set 
prefix flags + CONFIGURE_FLAGS+=("--prefix=${PREFIX}") + MESON_FLAGS+=("--prefix" "${PREFIX}") + CMAKE_FLAGS+=("-DCMAKE_PREFIX_PATH=${PREFIX}") + CMAKE_FLAGS+=("-DCMAKE_INSTALL_PREFIX=${PREFIX}") + PKG_CONFIG_PATH="${PREFIX}/lib/pkgconfig" + PKG_CONFIG_PATH+=":${MACHINE_LIB}/pkgconfig" + export PKG_CONFIG_PATH + export PKG_CONFIG_DEBUG_SPEW=1 + + # add prefix include + C_FLAGS+=("-I${PREFIX}/include") + + # enabling a clean build + if [[ ${CLEAN} == 'true' ]]; then + CLEAN="${SUDO} rm -rf" + echo_info "performing clean build" + else + CLEAN='void' + fi + + # enabling link-time optimization + # shellcheck disable=SC2034 + unset LTO_SWITCH LTO_FLAG LTO_BOOL + export LTO_SWITCH LTO_FLAG LTO_BOOL + if [[ ${LTO} == 'true' ]]; then + echo_info "building with LTO" + LTO_SWITCH='ON' + LTO_FLAG='-flto' + C_FLAGS+=("${LTO_FLAG}") + CONFIGURE_FLAGS+=('--enable-lto') + MESON_FLAGS+=("-Db_lto=true") + RUSTFLAGS+=("-C lto=yes" "-C inline-threshold=1000" "-C codegen-units=1") + else + echo_info "building without LTO" + LTO_SWITCH='OFF' + LTO_FLAG='' + MESON_FLAGS+=("-Db_lto=false") + RUSTFLAGS+=("-C lto=no") + fi + + # setting optimization level + if [[ ${OPT_LVL} == '' ]]; then + OPT_LVL='0' + fi + C_FLAGS+=("-O${OPT_LVL}") + RUSTFLAGS+=("-C opt-level=${OPT_LVL}") + MESON_FLAGS+=("--optimization=${OPT_LVL}") + echo_info "building with optimization: ${OPT_LVL}" + + # static/shared linking + unset PKG_CFG_FLAGS LIB_SUFF + export PKG_CFG_FLAGS LIB_SUFF + if [[ ${STATIC} == true ]]; then + LDFLAGS+=('-static') + CONFIGURE_FLAGS+=('--enable-static') + CMAKE_FLAGS+=("-DBUILD_SHARED_LIBS=OFF") + MESON_FLAGS+=('--default-library=static') + CMAKE_FLAGS+=("-DCMAKE_EXE_LINKER_FLAGS='-static'") + PKG_CFG_FLAGS='--static' + LIB_SUFF='a' + else + LDFLAGS+=("-Wl,-rpath,${MACHINE_LIB}") + CONFIGURE_FLAGS+=('--enable-shared') + CMAKE_FLAGS+=("-DBUILD_SHARED_LIBS=ON") + CMAKE_FLAGS+=("-DCMAKE_INSTALL_RPATH=${PREFIX}/lib;${MACHINE_LIB}") + FFMPEG_EXTRA_FLAGS+=('--enable-rpath') + LIB_SUFF='so' + fi + + # architecture/cpu compile flags + # arm prefers -mcpu over -march + # https://community.arm.com/arm-community-blogs/b/tools-software-ides-blog/posts/compiler-flags-across-architectures-march-mtune-and-mcpu + arch_flags="" + test_arch="$(uname -m)" + if [[ ${test_arch} == "x86_64" ]]; then + arch_flags="-march=${CPU}" + elif [[ ${test_arch} == "aarch64" || + ${test_arch} == "arm64" ]]; then + arch_flags="-mcpu=${CPU}" + fi + + C_FLAGS+=("${arch_flags}") + CXX_FLAGS=("${C_FLAGS[@]}") + CPP_FLAGS=("${C_FLAGS[@]}") + RUSTFLAGS+=("-C target-cpu=${CPU}") + CMAKE_FLAGS+=("-DCMAKE_C_FLAGS='${C_FLAGS[*]}'") + CMAKE_FLAGS+=("-DCMAKE_CXX_FLAGS='${CXX_FLAGS[*]}'") + MESON_FLAGS+=("-Dc_args=${C_FLAGS[*]}" "-Dcpp_args=${CPP_FLAGS[*]}") + dump_arr CONFIGURE_FLAGS + dump_arr C_FLAGS + dump_arr RUSTFLAGS + dump_arr CMAKE_FLAGS + dump_arr MESON_FLAGS + dump_arr PKG_CFG_FLAGS + + # extra ffmpeg flags + FFMPEG_EXTRA_FLAGS+=( + --extra-cflags="${C_FLAGS[*]}" + --extra-cxxflags="${CXX_FLAGS[*]}" + --extra-ldflags="${LDFLAGS[*]}" + ) + # dump_arr FFMPEG_EXTRA_FLAGS + + # shellcheck disable=SC2178 + RUSTFLAGS="${RUSTFLAGS[*]}" + + # make sure RUSTUP_HOME and CARGO_HOME are defined + RUSTUP_HOME="${RUSTUP_HOME:-"${HOME}/.rustup"}" + CARGO_HOME="${CARGO_HOME:-"${HOME}/.rustup"}" + test -d "${RUSTUP_HOME}" || echo_exit "RUSTUP_HOME does not exist" + test -d "${CARGO_HOME}" || echo_exit "CARGO_HOME does not exist" + export RUSTUP_HOME CARGO_HOME + + # cargo does not have an easy way to install into system directories + unset SUDO_CARGO + if [[ 
${SUDO} != '' ]]; then + export SUDO_CARGO="${SUDO} --preserve-env=PATH,RUSTUP_HOME,CARGO_HOME" + fi + echo +} + +get_build_conf() { + local getBuild="${1}" + + # name version file-extension url dep1,dep2 + # shellcheck disable=SC2016 + local BUILDS_CONF=' +ffmpeg 7cd1edeaa410d977a9f1ff8436f480cb45b80178 git https://github.com/FFmpeg/FFmpeg/ +hdr10plus_tool 1.7.0 tar.gz https://github.com/quietvoid/hdr10plus_tool/archive/refs/tags/${ver}.${ext} +dovi_tool 2.2.0 tar.gz https://github.com/quietvoid/dovi_tool/archive/refs/tags/${ver}.${ext} +libsvtav1 3.0.2 tar.gz https://gitlab.com/AOMediaCodec/SVT-AV1/-/archive/v${ver}/SVT-AV1-v${ver}.${ext} +libsvtav1_psy 3.0.2 tar.gz https://github.com/psy-ex/svt-av1-psy/archive/refs/tags/v${ver}.${ext} dovi_tool,hdr10plus_tool +librav1e 0.7.1 tar.gz https://github.com/xiph/rav1e/archive/refs/tags/v${ver}.${ext} +libaom 3.12.1 tar.gz https://storage.googleapis.com/aom-releases/libaom-${ver}.${ext} +libvmaf 3.0.0 tar.gz https://github.com/Netflix/vmaf/archive/refs/tags/v${ver}.${ext} +libopus 1.5.2 tar.gz https://github.com/xiph/opus/releases/download/v${ver}/opus-${ver}.${ext} +libdav1d 1.5.0 tar.xz http://downloads.videolan.org/videolan/dav1d/${ver}/dav1d-${ver}.${ext} +' + local supported_builds=() + unset ver ext url deps extracted_dir + while read -r line; do + test "${line}" == '' && continue + IFS=' ' read -r build ver ext url deps <<<"${line}" + supported_builds+=("${build}") + if [[ ${getBuild} != "${build}" ]]; then + build='' + continue + fi + break + done <<<"${BUILDS_CONF}" + + if [[ ${getBuild} == 'supported' ]]; then + echo "${supported_builds[@]}" + return 0 + fi + + if [[ ${build} == '' ]]; then + echo_fail "build ${getBuild} is not supported" + return 1 + fi + + # url uses ver and extension + eval "url=\"$url\"" + # set dependencies array + # shellcheck disable=SC2206 + deps=(${deps//,/ }) + # set extracted directory + extracted_dir="${BUILD_DIR}/${build}-v${ver}" + + return 0 +} + +download_release() { + local build="${1}" + # set env for wget download + get_build_conf "${build}" || return 1 + local base_path="${build}-v${ver}" + local base_dl_path="${DL_DIR}/${base_path}" + + # remove other versions of a download + for wrong_ver_dl in "${DL_DIR}/${build}-v"*; do + if [[ ${wrong_ver_dl} =~ ${base_path} ]]; then + continue + fi + test -f "${wrong_ver_dl}" || continue + echo_warn "removing wrong version: ${wrong_ver_dl}" + rm -rf "${wrong_ver_dl}" + done + # remove other versions of a build + for wrong_ver_build in "${BUILD_DIR}/${build}-v"*; do + if [[ ${wrong_ver_build} =~ ${base_path} ]]; then + continue + fi + test -d "${wrong_ver_build}" || continue + echo_warn "removing wrong version: ${extracted_dir}" + rm -rf "${wrong_ver_build}" + done + + # create new build dir for clean builds + test -d "${extracted_dir}" && + ${CLEAN} "${extracted_dir}" + + if test "${ext}" != "git"; then + wget_out="${base_dl_path}.${ext}" + + # download archive if not present + if ! 
test -f "${wget_out}"; then + echo_info "downloading ${build}" + echo_if_fail wget "${url}" -O "${wget_out}" + fi + + # create new build directory + test -d "${extracted_dir}" || + { + mkdir "${extracted_dir}" + tar -xf "${wget_out}" --strip-components=1 -C "${extracted_dir}" + } + else + # for git downloads + test -d "${base_dl_path}" || + git clone "${url}" "${base_dl_path}" || return 1 + + # create new build directory + test -d "${extracted_dir}" || + cp -r "${base_dl_path}" "${extracted_dir}" || return 1 + fi +} + +FB_FUNC_NAMES+=('do_build') +# shellcheck disable=SC2034 +FB_FUNC_DESCS['do_build']='build a specific project' +# shellcheck disable=SC2034 +FB_FUNC_COMPLETION['do_build']="$(get_build_conf supported)" +do_build() { + local build="${1:-''}" + download_release "${build}" || return 1 + get_build_conf "${build}" || return 1 + for dep in "${deps[@]}"; do + do_build "${dep}" || return 1 + done + get_build_conf "${build}" || return 1 + echo_info "building ${build}" + pushd "$extracted_dir" >/dev/null || return 1 + echo_if_fail build_"${build}" + retval=$? + popd >/dev/null || return 1 + test ${retval} -eq 0 || return ${retval} + echo_pass "built ${build}" +} + +FB_FUNC_NAMES+=('build') +# shellcheck disable=SC2034 +FB_FUNC_DESCS['build']='build ffmpeg with desired configuration' +build() { + test -d "${DL_DIR}" || mkdir -p "${DL_DIR}" + test -d "${CCACHE_DIR}" || mkdir -p "${CCACHE_DIR}" + test -d "${BUILD_DIR}" || mkdir -p "${BUILD_DIR}" + test -d "${PREFIX}/bin/" || mkdir -p "${PREFIX}/bin/" + + testfile="${PREFIX}/ffmpeg-build-testfile" + if ! touch "${testfile}" 2>/dev/null; then + # we cannot modify the install prefix + # so we need to use sudo + ${SUDO} mkdir -p "${PREFIX}/bin/" + fi + test -f "${testfile}" && ${SUDO} rm "${testfile}" + + for build in "${FFMPEG_ENABLES[@]}"; do + do_build "${build}" || return 1 + done + do_build "ffmpeg" || return 1 + + return 0 +} + +build_hdr10plus_tool() { + ccache cargo build --release || return 1 + ${SUDO} cp target/release/hdr10plus_tool "${PREFIX}/bin/" || return 1 + + # build libhdr10plus + cd hdr10plus || return 1 + ccache cargo cbuild --release || return 1 + ${SUDO_CARGO} bash -lc "cargo cinstall --prefix=${PREFIX} --release" || return 1 +} + +build_dovi_tool() { + ccache cargo build --release || return 1 + ${SUDO} cp target/release/dovi_tool "${PREFIX}/bin/" || return 1 + + # build libdovi + cd dolby_vision || return 1 + ccache cargo cbuild --release || return 1 + ${SUDO_CARGO} bash -lc "cargo cinstall --prefix=${PREFIX} --release" || return 1 +} + +build_libsvtav1() { + cmake \ + "${CMAKE_FLAGS[@]}" \ + -DSVT_AV1_LTO="${LTO_SWITCH}" \ + -DENABLE_AVX512=ON \ + -DBUILD_TESTING=OFF \ + -DCOVERAGE=OFF || return 1 + ccache make -j"${JOBS}" || return 1 + ${SUDO} make -j"${JOBS}" install || return 1 +} + +build_libsvtav1_psy() { + cmake \ + "${CMAKE_FLAGS[@]}" \ + -DSVT_AV1_LTO="${LTO_SWITCH}" \ + -DBUILD_TESTING=OFF \ + -DENABLE_AVX512=ON \ + -DCOVERAGE=OFF \ + -DLIBDOVI_FOUND=1 \ + -DLIBHDR10PLUS_RS_FOUND=1 \ + -DLIBHDR10PLUS_RS_LIBRARY="${MACHINE_LIB}/libhdr10plus-rs.${LIB_SUFF}" \ + -DLIBDOVI_LIBRARY="${MACHINE_LIB}/libdovi.${LIB_SUFF}" || return 1 + ccache make -j"${JOBS}" || return 1 + ${SUDO} make -j"${JOBS}" install || return 1 +} + +build_librav1e() { + ccache cargo build --release || return 1 + ${SUDO} cp target/release/rav1e "${PREFIX}/bin/" || return 1 + + ccache cargo cbuild --release || return 1 + ${SUDO_CARGO} bash -lc "cargo cinstall --prefix=${PREFIX} --release" || return 1 +} + +build_libaom() { + cmake \ + 
"${CMAKE_FLAGS[@]}" \ + -B build.user \ + -DENABLE_TESTS=OFF || return 1 + cd build.user || return 1 + ccache make -j"${JOBS}" || return 1 + ${SUDO} make -j"${JOBS}" install || return 1 +} + +build_libopus() { + ./configure \ + "${CONFIGURE_FLAGS[@]}" \ + --disable-doc || return 1 + ccache make -j"${JOBS}" || return 1 + ${SUDO} make -j"${JOBS}" install || return 1 + return 0 +} + +build_libdav1d() { + meson \ + setup . build.user \ + "${MESON_FLAGS[@]}" || return 1 + ccache ninja -vC build.user || return 1 + ${SUDO} ninja -vC build.user install || return 1 +} + +build_libvmaf() { + cd libvmaf || return 1 + python3 -m virtualenv .venv + ( + source .venv/bin/activate + meson \ + setup . build.user \ + "${MESON_FLAGS[@]}" \ + -Denable_float=true || exit 1 + ccache ninja -vC build.user || exit 1 + ${SUDO} ninja -vC build.user install || exit 1 + ) || return 1 +} + +build_ffmpeg() { + for enable in "${FFMPEG_ENABLES[@]}"; do + test "${enable}" == 'libsvtav1_psy' && enable='libsvtav1' + CONFIGURE_FLAGS+=("--enable-${enable}") + done + ./configure \ + "${CONFIGURE_FLAGS[@]}" \ + "${FFMPEG_EXTRA_FLAGS[@]}" \ + --pkg-config='pkg-config' \ + --pkg-config-flags="${PKG_CFG_FLAGS}" \ + --cpu="${CPU}" --arch="${ARCH}" \ + --enable-gpl --enable-version3 \ + --enable-nonfree \ + --disable-htmlpages \ + --disable-podpages \ + --disable-txtpages \ + --disable-autodetect || return 1 + ccache make -j"${JOBS}" || return 1 + ${SUDO} make -j"${JOBS}" install || return 1 + return 0 +} diff --git a/lib/common.sh b/lib/common.sh new file mode 100644 index 0000000..eef3c1b --- /dev/null +++ b/lib/common.sh @@ -0,0 +1,129 @@ +#!/usr/bin/env bash + +# shellcheck disable=SC2034 + +# ANSI colors +RED='\e[0;31m' +CYAN='\e[0;36m' +GREEN='\e[0;32m' +YELLOW='\e[0;33m' +NC='\e[0m' + +# echo wrappers +echo_fail() { echo -e "${RED}FAIL${NC}:" "$@"; } +echo_info() { echo -e "${CYAN}INFO${NC}:" "$@"; } +echo_pass() { echo -e "${GREEN}PASS${NC}:" "$@"; } +echo_warn() { echo -e "${YELLOW}WARN${NC}:" "$@"; } +echo_exit() { + echo_fail "$@" + exit 1 +} +void() { echo "$@" >/dev/null; } + +echo_if_fail() { + local cmd=("$@") + local out="${TMP_DIR}/.stdout-${RANDOM}" + local err="${TMP_DIR}/.stderr-${RANDOM}" + test -d "${TMP_DIR}" || mkdir -p "${TMP_DIR}" + + # set trace to the cmdEvalTrace and open file descriptor + local cmdEvalTrace="${TMP_DIR}/.cmdEvalTrace-${RANDOM}" + exec 5>"${cmdEvalTrace}" + export BASH_XTRACEFD=5 + + set -x + "${cmd[@]}" >"${out}" 2>"${err}" + local retval=$? + + # unset and close file descriptor + set +x + exec 5>&- + + # parse out relevant part of the trace + local cmdEvalLines=() + cmd=() + while IFS= read -r line; do + cmdEvalLines+=("${line}") + done <"${cmdEvalTrace}" + local cmdEvalLineNum=${#cmdEvalLines[@]} + for ((i = 1; i < cmdEvalLineNum - 2; i++)); do + local trimmedCmd="${cmdEvalLines[${i}]}" + trimmedCmd="${trimmedCmd/+ /}" + cmd+=("${trimmedCmd}") + done + + if ! test ${retval} -eq 0; then + echo + echo_fail "command failed:" + printf "%s\n" "${cmd[@]}" + echo_warn "command output:" + tail -n 10 "${out}" + tail -n 10 "${err}" + echo + fi + rm "${out}" "${err}" "${cmdEvalTrace}" + return ${retval} +} + +dump_arr() { + arr_name="$1" + declare -n arr + arr="${arr_name}" + arr_exp=("${arr[@]}") + test "${#arr_exp}" -gt 0 || return 0 + echo + echo_info "${arr_name}" + printf "\t%s\n" "${arr_exp[@]}" +} + +has_cmd() { + local cmd="$1" + command -v "${cmd}" >/dev/null 2>&1 +} + +missing_cmd() { + local cmd="$1" + rv=1 + if ! 
has_cmd "${cmd}"; then + echo_warn "missing ${cmd}" + rv=0 + fi + return $rv +} + +bash_dirname() { + local tmp=${1:-.} + + [[ $tmp != *[!/]* ]] && { + printf '/\n' + return + } + + tmp=${tmp%%"${tmp##*[!/]}"} + + [[ $tmp != */* ]] && { + printf '.\n' + return + } + + tmp=${tmp%/*} + tmp=${tmp%%"${tmp##*[!/]}"} + + printf '%s\n' "${tmp:-/}" +} + +bash_basename() { + local tmp + path="$1" + suffix="${2:-''}" + + tmp=${path%"${path##*[!/]}"} + tmp=${tmp##*/} + tmp=${tmp%"${suffix/"$tmp"/}"} + + printf '%s\n' "${tmp:-/}" +} + +fb_running_as_script() { + return "$FB_RUNNING_AS_SCRIPT" +} diff --git a/lib/compile_opts.sh b/lib/compile_opts.sh new file mode 100644 index 0000000..89d8b61 --- /dev/null +++ b/lib/compile_opts.sh @@ -0,0 +1,26 @@ +#!/usr/bin/env bash + +# variables used externally +# shellcheck disable=SC2034 + +# complete clean before building +CLEAN=true +# enable link time optimization +LTO=false +# optimization level (0-3) +OPT_LVL=0 +# static or shared build +STATIC=true +# CPU type (x86_v{1,2,3}...) +CPU=native +# architecture type +ARCH=native +# prefix to install, leave empty for non-system install (local) +PREFIX='' +# configure what ffmpeg enables +FFMPEG_ENABLES=( + libopus + libdav1d + libsvtav1_psy + # libaom +) diff --git a/lib/install_deps.sh b/lib/install_deps.sh new file mode 100644 index 0000000..2c9fa71 --- /dev/null +++ b/lib/install_deps.sh @@ -0,0 +1,132 @@ +#!/usr/bin/env bash + +determine_pkg_mgr() { + # sudo used externally + # shellcheck disable=SC2034 + test "$(id -u)" -eq 0 && SUDO='' || SUDO=sudo + + # pkg-mgr update-cmd install-cmd check-cmd + # shellcheck disable=SC2016 + local PKG_MGR_MAP=' +brew:brew update:brew install:brew list --formula ${pkg} +apt-get:${SUDO} apt-get update: ${SUDO} apt-get install -y:dpkg -l ${pkg} +pacman:${SUDO} pacman -Syy:${SUDO} pacman -S --noconfirm --needed:pacman -Qi ${pkg} +dnf:${SUDO} dnf check-update:${SUDO} dnf install -y:dnf list -q --installed ${pkg} +' + local supported_pkg_mgr=() + unset pkg_mgr pkg_mgr_update pkg_install pkg_check + while read -r line; do + test "${line}" == '' && continue + IFS=':' read -r pkg_mgr pkg_mgr_update pkg_install pkg_check <<<"${line}" + supported_pkg_mgr+=("${pkg_mgr}") + if ! 
has_cmd "${pkg_mgr}"; then + pkg_mgr='' + continue + fi + # update/install may use SUDO + eval "pkg_mgr_update=\"${pkg_mgr_update}\"" + eval "pkg_install=\"${pkg_install}\"" + break + done <<<"${PKG_MGR_MAP}" + + if [[ ${pkg_mgr} == '' ]]; then + echo_fail "system does not use a supported package manager" "${supported_pkg_mgr[@]}" + return 1 + fi + + return 0 +} + +check_for_req_pkgs() { + echo_info "checking for required packages" + local common_pkgs=( + autoconf automake cmake libtool + texinfo nasm yasm python3 + meson doxygen jq ccache gawk + ) + # shellcheck disable=SC2034 + local brew_pkgs=( + "${common_pkgs[@]}" pkgconf + mkvtoolnix pipx wget + ) + local common_linux_pkgs=( + "${common_pkgs[@]}" clang valgrind + curl bc lshw xxd pkgconf + ) + # shellcheck disable=SC2034 + local apt_get_pkgs=( + "${common_linux_pkgs[@]}" build-essential + git-core libass-dev libfreetype6-dev + libsdl2-dev libva-dev libvdpau-dev + libvorbis-dev libxcb1-dev pipx + libxcb-shm0-dev libxcb-xfixes0-dev + zlib1g-dev libssl-dev ninja-build + gobjc++ mawk libnuma-dev wget + mediainfo mkvtoolnix libgtest-dev + ) + # shellcheck disable=SC2034 + local pacman_pkgs=( + "${common_linux_pkgs[@]}" base-devel + python-pipx ninja wget + ) + # shellcheck disable=SC2034 + local dnf_pkgs=( + "${common_linux_pkgs[@]}" openssl-devel + pipx ninja-build wget2 + ) + + local req_pkgs_env_name="${pkg_mgr/-/_}_pkgs" + declare -n req_pkgs="${req_pkgs_env_name}" + local missing_pkgs=() + for pkg in "${req_pkgs[@]}"; do + # pkg_check has ${pkg} unexpanded + eval "pkg_check=\"${pkg_check}\"" + ${pkg_check} "${pkg}" >/dev/null 2>&1 || missing_pkgs+=("${pkg}") + done + + if [[ ${#missing_pkgs[@]} -gt 0 ]]; then + echo_warn "missing packages:" "${missing_pkgs[@]}" + # shellcheck disable=SC2086 + ${pkg_mgr_update} + # shellcheck disable=SC2086 + ${pkg_install} "${missing_pkgs[@]}" || return 1 + fi + + echo_pass "packages from ${pkg_mgr} installed" + echo_if_fail pipx install virtualenv || return 1 + echo_if_fail pipx ensurepath || return 1 + echo_pass "pipx is installed" + + # shellcheck disable=SC1091 + test -f "${HOME}/.cargo/env" && source "${HOME}/.cargo/env" + + if missing_cmd cargo; then + if missing_cmd rustup; then + echo_warn "installing rustup" + curl https://sh.rustup.rs -sSf | sh -s -- -y + # shellcheck disable=SC2016 + grep -q 'source "${HOME}/.cargo/env"' "${HOME}/.bashrc" || + echo 'source "${HOME}/.cargo/env"' >>"${HOME}/.bashrc" + # shellcheck disable=SC1091 + source "${HOME}/.bashrc" + fi + fi + + echo_if_fail rustup default stable || return 1 + echo_if_fail rustup update stable || return 1 + echo_pass "rustup is installed" + echo_if_fail cargo install cargo-c || return 1 + echo_pass "cargo-c is installed" + echo_pass "all required packages installed" + + return 0 +} + +FB_FUNC_NAMES+=('install_deps') +# FB_FUNC_DESCS used externally +# shellcheck disable=SC2034 +FB_FUNC_DESCS['install_deps']='install required dependencies' +install_deps() { + determine_pkg_mgr || return 1 + check_for_req_pkgs || return 1 +} diff --git a/main.sh b/main.sh index 448e8cf..375508d 100755 --- a/main.sh +++ b/main.sh @@ -1,6 +1,6 @@ #!/usr/bin/env bash -REPO_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")" +REPO_DIR="$(cd "${BASH_SOURCE[0]//'main.sh'/}" && echo "$PWD")" IGN_DIR="${REPO_DIR}/gitignore" TMP_DIR="${IGN_DIR}/tmp" DL_DIR="${IGN_DIR}/downloads" @@ -13,51 +13,64 @@ FB_FUNC_NAMES=() declare -A FB_FUNC_DESCS declare -A FB_FUNC_COMPLETION -BUILD_CFG="${REPO_DIR}/"cfgs/builds.json -COMPILE_CFG="${REPO_DIR}/"cfgs/compile_opts.json 
-mapfile -t BUILDS < <(jq -r 'keys[]' "$BUILD_CFG") -export BUILD_CFG COMPILE_CFG BUILDS +# can't have recursive generation +FB_RUNNING_AS_SCRIPT=${FB_RUNNING_AS_SCRIPT:-0} -# enable what ffmpeg builds -unset FFMPEG_ENABLES -export FFMPEG_ENABLES -for enable in $(jq -r '.ffmpeg_enable[]' "$COMPILE_CFG"); do - FFMPEG_ENABLES+=("${enable}") -done +# no undefined variables +set -u +SCRIPT_DIR="${REPO_DIR}/scripts" +ENTRY_SCRIPT="${SCRIPT_DIR}/entry.sh" src_scripts() { - for script in "${REPO_DIR}/scripts/"*.sh; do - # shellcheck disable=SC1090 - source "${script}" - done + local SCRIPT_DIR="${REPO_DIR}/scripts" + rm "${SCRIPT_DIR}"*.sh + + if [[ $FB_RUNNING_AS_SCRIPT -eq 0 ]]; then + # shellcheck disable=SC2016 + echo '#!/usr/bin/env bash +cd "$(dirname "$(readlink -f $0)")/.." +. main.sh +FB_RUNNING_AS_SCRIPT=1 +scr_name="$(bash_basename $0)" +cmd="${scr_name//.sh}" +$cmd $@' >"${ENTRY_SCRIPT}" + chmod +x "${ENTRY_SCRIPT}" + fi + + for script in "${REPO_DIR}/lib/"*.sh; do + # shellcheck disable=SC1090 + source "${script}" + done } FB_FUNC_NAMES+=('print_cmds') FB_FUNC_DESCS['print_cmds']='print usable commands' print_cmds() { - echo -e "\n~~~ Usable Commands ~~~" - for funcname in "${FB_FUNC_NAMES[@]}"; do - echo -e "${CYAN}${funcname}${NC}:\n\t" "${FB_FUNC_DESCS[${funcname}]}" - done - echo -e "~~~~~~~~~~~~~~~~~~~~~~~\n" + echo -e "\n~~~ Usable Commands ~~~" + for funcname in "${FB_FUNC_NAMES[@]}"; do + echo -e "${CYAN}${funcname}${NC}:\n\t" "${FB_FUNC_DESCS[${funcname}]}" + fb_running_as_script || (cd "$SCRIPT_DIR" && ln -sf entry.sh "${funcname}.sh") + done + echo -e "~~~~~~~~~~~~~~~~~~~~~~~\n" } set_completions() { - for funcname in "${FB_FUNC_NAMES[@]}"; do - complete -W "${FB_FUNC_COMPLETION[${funcname}]}" "${funcname}" - done + set +u + for funcname in "${FB_FUNC_NAMES[@]}"; do + complete -W "${FB_FUNC_COMPLETION[${funcname}]}" "${funcname}" + done + set -u } # shellcheck disable=SC1091 source "${HOME}/.bashrc" src_scripts || return 1 -determine_os || return 1 +determine_pkg_mgr || return 1 -unset PREFIX -PREFIX="$(jq -r '.prefix' "${COMPILE_CFG}")" -test "${PREFIX}" == 'null' && PREFIX="${IGN_DIR}/${OS}_sysroot" -export PREFIX +# pkg_mgr initialized in determine_pkg_mgr +# shellcheck disable=SC2154 +test "${PREFIX}" == '' && PREFIX="${IGN_DIR}/${pkg_mgr}_sysroot" set_compile_opts || return 1 -print_cmds || return 1 +fb_running_as_script || print_cmds || return 1 set_completions || return 1 diff --git a/scripts/build-docker-images.sh b/scripts/build-docker-images.sh deleted file mode 100755 index dcdc35a..0000000 --- a/scripts/build-docker-images.sh +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/env bash - -FB_FUNC_NAMES+=('build_docker_images') -FB_FUNC_DESCS['build_docker_images']='build docker images with required dependencies pre-installed' -build_docker_images() { - DISTROS=( debian ubuntu archlinux fedora ) - DOCKERFILE_DIR="${IGN_DIR}/Dockerfiles" - test -d "${DOCKERFILE_DIR}" || mkdir -p "${DOCKERFILE_DIR}" - for distro in "${DISTROS[@]}"; do - echo "\ -FROM ${distro} -COPY scripts/ /ffmpeg-builder/scripts/ -COPY main.sh /ffmpeg-builder/ -RUN bash -c 'source /ffmpeg-builder/main.sh ; install_deps' || exit 1" \ - > "${DOCKERFILE_DIR}/Dockerfile_${distro}" - image_tag="ffmpeg_builder_${distro}" - dockerfile="Dockerfile_${distro}" - echo_info "building ${image_tag}" - docker build \ - -t "${image_tag}" \ - -f "${DOCKERFILE_DIR}/${dockerfile}" \ - "${REPO_DIR}/" - done -} \ No newline at end of file diff --git a/scripts/build.sh b/scripts/build.sh deleted file mode 100644 index 
081f11a..0000000 --- a/scripts/build.sh +++ /dev/null @@ -1,409 +0,0 @@ -#!/usr/bin/env bash - -set_compile_opts() { - unset CLEAN OPT_LVL LDFLAGS \ - C_FLAGS CXX_FLAGS CPP_FLAGS \ - CONFIGURE_FLAGS MESON_FLAGS \ - RUSTFLAGS CMAKE_FLAGS \ - FFMPEG_EXTRA_FLAGS \ - PKG_CONFIG_PATH - export CLEAN OPT_LVL LDFLAGS \ - C_FLAGS CXX_FLAGS CPP_FLAGS \ - CONFIGURE_FLAGS MESON_FLAGS \ - RUSTFLAGS CMAKE_FLAGS \ - FFMPEG_EXTRA_FLAGS \ - PKG_CONFIG_PATH - - # set job count for all builds - JOBS="$(nproc)" - export JOBS - - MACHINE="$(cc -dumpmachine)" - test "${MACHINE}" != '' || return 1 - export MACHINE - MACHINE_LIB="${PREFIX}/lib/${MACHINE}" - - # set prefix flags - CONFIGURE_FLAGS+=("--prefix=${PREFIX}") - MESON_FLAGS+=("--prefix" "${PREFIX}") - CMAKE_FLAGS+=("-DCMAKE_PREFIX_PATH=${PREFIX}") - CMAKE_FLAGS+=("-DCMAKE_INSTALL_PREFIX=${PREFIX}") - PKG_CONFIG_PATH="${PREFIX}/lib/pkgconfig:${PKG_CONFIG_PATH}" - PKG_CONFIG_PATH="${MACHINE_LIB}/pkgconfig:${PKG_CONFIG_PATH}" - echo_info "PKG_CONFIG_PATH = ${PKG_CONFIG_PATH}" - - # add prefix include - C_FLAGS+=("-I${PREFIX}/include") - - # enabling a clean build - if test "$(jq .clean "${COMPILE_CFG}")" == 'true'; then - CLEAN="${SUDO} rm -rf" - echo_info "performing clean build" - else - CLEAN='void' - fi - - # enabling link-time optimization - # shellcheck disable=SC2034 - unset LTO_SWITCH LTO_FLAG LTO_BOOL - export LTO_SWITCH LTO_FLAG LTO_BOOL - if test "$(jq .lto "${COMPILE_CFG}")" == 'true'; then - echo_info "building with LTO" - LTO_SWITCH='ON' - LTO_FLAG='-flto' - C_FLAGS+=("${LTO_FLAG}") - CONFIGURE_FLAGS+=('--enable-lto') - MESON_FLAGS+=("-Db_lto=true") - RUSTFLAGS+=("-C lto=yes" "-C inline-threshold=1000" "-C codegen-units=1") - else - echo_info "building without LTO" - LTO_SWITCH='OFF' - LTO_FLAG='' - MESON_FLAGS+=("-Db_lto=false") - RUSTFLAGS+=("-C lto=no") - fi - - # setting optimization level - OPT_LVL="$(jq .optimization "${COMPILE_CFG}")" - if test "${OPT_LVL}" == ''; then - OPT_LVL='0' - fi - C_FLAGS+=("-O${OPT_LVL}") - RUSTFLAGS+=("-C opt-level=${OPT_LVL}") - MESON_FLAGS+=("--optimization=${OPT_LVL}") - echo_info "building with optimization: ${OPT_LVL}" - - # static/shared linking - unset PKG_CFG_FLAGS LIB_SUFF - export PKG_CFG_FLAGS LIB_SUFF - isStatic="$(test "$(jq .static "${COMPILE_CFG}")" == 'true' ; echo $?)" - isShared="$(test "$(jq .shared "${COMPILE_CFG}")" == 'true' ; echo $?)" - if test $((isStatic + isShared)) -eq 2; then - echo_exit "Cannot have static and shared compile options" - fi - if test "${isStatic}" -eq 0; then - LDFLAGS+=('-static') - CONFIGURE_FLAGS+=('--enable-static') - CMAKE_FLAGS+=("-DBUILD_SHARED_LIBS=OFF") - MESON_FLAGS+=('--default-library=static') - CMAKE_FLAGS+=("-DCMAKE_EXE_LINKER_FLAGS='-static'") - PKG_CFG_FLAGS='--static' - LIB_SUFF='a' - fi - if test "${isShared}" -eq 0; then - LDFLAGS+=("-Wl,-rpath,${MACHINE_LIB}") - CONFIGURE_FLAGS+=('--enable-shared') - CMAKE_FLAGS+=("-DBUILD_SHARED_LIBS=ON") - CMAKE_FLAGS+=("-DCMAKE_INSTALL_RPATH=${PREFIX}/lib;${MACHINE_LIB}") - FFMPEG_EXTRA_FLAGS+=('--enable-rpath') - LIB_SUFF='so' - fi - - # architecture/cpu compile flags - export CPU ARCH - CPU="$(jq -r .cpu "${COMPILE_CFG}")" - ARCH="$(jq -r .arch "${COMPILE_CFG}")" - # arm prefers -mcpu over -march - # https://community.arm.com/arm-community-blogs/b/tools-software-ides-blog/posts/compiler-flags-across-architectures-march-mtune-and-mcpu - arch_flags="" - test_arch="$(uname -m)" - if [[ "${test_arch}" == "x86_64" ]]; then - arch_flags="-march=${CPU}" - elif [[ "${test_arch}" == "aarch64" || \ - "${test_arch}" 
== "arm64" ]] - then - arch_flags="-mcpu=${CPU}" - fi - - C_FLAGS+=("${arch_flags}") - CXX_FLAGS=("${C_FLAGS[@]}") - CPP_FLAGS=("${C_FLAGS[@]}") - RUSTFLAGS+=("-C target-cpu=${CPU}") - CMAKE_FLAGS+=("-DCMAKE_C_FLAGS='${C_FLAGS[*]}'") - CMAKE_FLAGS+=("-DCMAKE_CXX_FLAGS='${CXX_FLAGS[*]}'") - MESON_FLAGS+=("-Dc_args=${C_FLAGS[*]}" "-Dcpp_args=${CPP_FLAGS[*]}") - dump_arr CONFIGURE_FLAGS - dump_arr C_FLAGS - dump_arr RUSTFLAGS - dump_arr CMAKE_FLAGS - dump_arr MESON_FLAGS - dump_arr PKG_CFG_FLAGS - - # extra ffmpeg flags - if [[ "${TARGET_WINDOWS}" == '1' ]]; then - FFMPEG_EXTRA_FLAGS+=( - '--cross-prefix=x86_64-w64-mingw32-' - '--target-os=mingw32' - '--cc=x86_64-w64-mingw32-gcc' - '--cxx=x86_64-w64-mingw32-g++' - '--ar=x86_64-w64-mingw32-gcc-ar' - '--ranlib=x86_64-w64-mingw32-gcc-ranlib' - '--nm=x86_64-w64-mingw32-gcc-nm' - ) - fi - FFMPEG_EXTRA_FLAGS+=( - "--extra-cflags=\"${C_FLAGS[*]}\"" - "--extra-cxxflags=\"${CXX_FLAGS[*]}\"" - "--extra-ldflags=\"${LDFLAGS[*]}\"" - ) - dump_arr FFMPEG_EXTRA_FLAGS - - # shellcheck disable=SC2178 - RUSTFLAGS="${RUSTFLAGS[*]}" - - # make sure RUSTUP_HOME and CARGO_HOME are defined - if [[ "${RUSTUP_HOME}" == '' ]]; then - RUSTUP_HOME="${HOME}/.rustup" - test -d "${RUSTUP_HOME}" || echo_exit "RUSTUP_HOME does not exist" - fi - if [[ "${CARGO_HOME}" == '' ]]; then - CARGO_HOME="${HOME}/.rustup" - test -d "${CARGO_HOME}" || echo_exit "CARGO_HOME does not exist" - fi - export RUSTUP_HOME CARGO_HOME - unset SUDO_CARGO - if [[ "${SUDO}" != '' ]]; then - export SUDO_CARGO="${SUDO} --preserve-env=PATH,RUSTUP_HOME,CARGO_HOME" - fi - echo -} - -get_json_conf() { - local build="${1}" - # make sure there is a build config for the enabled build - test "$(jq -r ".${build}" "$BUILD_CFG")" == 'null' && return 1 - - unset ver ext url deps extracted_dir - export ver ext url deps extracted_dir - ver="$(jq -r ".${build}.ver" "$BUILD_CFG")" - ext="$(jq -r ".${build}.ext" "$BUILD_CFG")" - eval "url=\"$(jq -r ".${build}.url" "$BUILD_CFG")\"" - jq -r ".${build}.deps[]" "$BUILD_CFG" >/dev/null 2>/dev/null && \ - mapfile -t deps < <(jq -r ".${build}.deps[]" "$BUILD_CFG") - jq -r ".${build}.deps[]" "$BUILD_CFG" >/dev/null 2>/dev/null && \ - mapfile -t deps < <(jq -r ".${build}.deps[]" "$BUILD_CFG") - extracted_dir="${BUILD_DIR}/${build}-v${ver}" -} - -download_release() { - local build="${1}" - # set env for wget download - get_json_conf "${build}" || return 1 - local base_path="${build}-v${ver}" - local base_dl_path="${DL_DIR}/${base_path}" - - # remove other versions of a download - for wrong_ver_dl in "${DL_DIR}/${build}"*; do - if [[ "${wrong_ver_dl}" =~ ${base_path} ]]; then - continue - fi - test -f "${wrong_ver_dl}" || continue - echo_warn "removing wrong version: ${wrong_ver_dl}" - rm -rf "${wrong_ver_dl}" - done - # remove other versions of a build - for wrong_ver_build in "${BUILD_DIR}/${build}"*; do - if [[ "${wrong_ver_build}" =~ ${base_path} ]]; then - continue - fi - test -d "${wrong_ver_build}" || continue - echo_warn "removing wrong version: ${extracted_dir}" - rm -rf "${wrong_ver_build}" - done - - # create new build dir for clean builds - test -d "${extracted_dir}" && \ - ${CLEAN} "${extracted_dir}" - - if test "${ext}" != "git"; then - wget_out="${base_dl_path}.${ext}" - - # download archive if not present - if ! 
test -f "${wget_out}"; then - echo_info "downloading ${build}" - echo_if_fail wget "${url}" -O "${wget_out}" - fi - - # create new build directory - test -d "${extracted_dir}" || \ - { - mkdir "${extracted_dir}" - tar -xf "${wget_out}" --strip-components=1 -C "${extracted_dir}" - } - else - # for git downloads - test -d "${base_dl_path}" || \ - git clone "${url}" "${base_dl_path}" || return 1 - - # create new build directory - test -d "${extracted_dir}" || \ - cp -r "${base_dl_path}" "${extracted_dir}" || return 1 - fi -} - -FB_FUNC_NAMES+=('do_build') -# shellcheck disable=SC2034 -FB_FUNC_DESCS['do_build']='build a specific project' -# shellcheck disable=SC2034 -FB_FUNC_COMPLETION['do_build']="${BUILDS[*]}" -do_build() { - local build="${1}" - download_release "${build}" || return 1 - get_json_conf "${build}" || return 1 - for dep in "${deps[@]}"; do - do_build "${dep}" || return 1 - done - get_json_conf "${build}" || return 1 - echo_info "building ${build}" - pushd "$extracted_dir" >/dev/null || return 1 - echo_if_fail build_"${build}" - retval=$? - popd >/dev/null || return 1 - test ${retval} -eq 0 || return ${retval} - echo_pass "built ${build}" -} - -FB_FUNC_NAMES+=('build') -# shellcheck disable=SC2034 -FB_FUNC_DESCS['build']='build ffmpeg with desired configuration' -build() { - test -d "${DL_DIR}" || mkdir -p "${DL_DIR}" - test -d "${CCACHE_DIR}" || mkdir -p "${CCACHE_DIR}" - test -d "${BUILD_DIR}" || mkdir -p "${BUILD_DIR}" - test -d "${PREFIX}/bin/" || mkdir -p "${PREFIX}/bin/" - - unset SUDO - testfile="${PREFIX}/ffmpeg-build-testfile" - if ! touch "${testfile}" 2> /dev/null; then - # we cannot modify the install prefix - # so we need to use sudo - test "$(id -u)" -eq 0 || SUDO=sudo - export SUDO - ${SUDO} mkdir -p "${PREFIX}/bin/" - fi - test -f "${testfile}" && ${SUDO} rm "${testfile}" - - for build in "${FFMPEG_ENABLES[@]}"; do - do_build "${build}" || return 1 - done - do_build "ffmpeg" || return 1 - - return 0 -} - -build_hdr10plus_tool() { - ccache cargo build --release || return 1 - ${SUDO} cp target/release/hdr10plus_tool "${PREFIX}/bin/" || return 1 - - # build libhdr10plus - cd hdr10plus || return 1 - ccache cargo cbuild --release || return 1 - ${SUDO_CARGO} bash -lc "cargo cinstall --prefix=${PREFIX} --release" || return 1 -} - -build_dovi_tool() { - ccache cargo build --release || return 1 - ${SUDO} cp target/release/dovi_tool "${PREFIX}/bin/" || return 1 - - # build libdovi - cd dolby_vision || return 1 - ccache cargo cbuild --release || return 1 - ${SUDO_CARGO} bash -lc "cargo cinstall --prefix=${PREFIX} --release" || return 1 -} - -build_libsvtav1() { - cmake \ - "${CMAKE_FLAGS[@]}" \ - -DSVT_AV1_LTO="${LTO_SWITCH}" \ - -DENABLE_AVX512=ON \ - -DBUILD_TESTING=OFF \ - -DCOVERAGE=OFF || return 1 - ccache make -j"${JOBS}" || return 1 - ${SUDO} make -j"${JOBS}" install || return 1 -} - -build_libsvtav1_psy() { - cmake \ - "${CMAKE_FLAGS[@]}" \ - -DSVT_AV1_LTO="${LTO_SWITCH}" \ - -DBUILD_TESTING=OFF \ - -DENABLE_AVX512=ON \ - -DCOVERAGE=OFF \ - -DLIBDOVI_FOUND=1 \ - -DLIBHDR10PLUS_RS_FOUND=1 \ - -DLIBHDR10PLUS_RS_LIBRARY="${MACHINE_LIB}/libhdr10plus-rs.${LIB_SUFF}" \ - -DLIBDOVI_LIBRARY="${MACHINE_LIB}/libdovi.${LIB_SUFF}" || return 1 - ccache make -j"${JOBS}" || return 1 - ${SUDO} make -j"${JOBS}" install || return 1 -} - -build_librav1e() { - ccache cargo build --release || return 1 - ${SUDO} cp target/release/rav1e "${PREFIX}/bin/" || return 1 - - ccache cargo cbuild --release || return 1 - ${SUDO_CARGO} bash -lc "cargo cinstall --prefix=${PREFIX} --release" || 
return 1 -} - -build_libaom() { - cmake \ - "${CMAKE_FLAGS[@]}" \ - -B build.user \ - -DENABLE_TESTS=OFF || return 1 - cd build.user || return 1 - ccache make -j"${JOBS}" || return 1 - ${SUDO} make -j"${JOBS}" install || return 1 -} - -build_libopus() { - ./configure \ - "${CONFIGURE_FLAGS[@]}" \ - --disable-doc || return 1 - ccache make -j"${JOBS}" || return 1 - ${SUDO} make -j"${JOBS}" install || return 1 - return 0 -} - -build_libdav1d() { - meson \ - setup . build.user \ - "${MESON_FLAGS[@]}" || return 1 - ccache ninja -vC build.user || return 1 - ${SUDO} ninja -vC build.user install || return 1 -} - -build_libvmaf() { - cd libvmaf || return 1 - python3 -m virtualenv .venv - ( - source .venv/bin/activate - meson \ - setup . build.user \ - "${MESON_FLAGS[@]}" \ - -Denable_float=true || exit 1 - ccache ninja -vC build.user || exit 1 - ${SUDO} ninja -vC build.user install || exit 1 - ) || return 1 -} - -build_ffmpeg() { - for enable in "${FFMPEG_ENABLES[@]}"; do - test "${enable}" == 'libsvtav1_psy' && enable='libsvtav1' - CONFIGURE_FLAGS+=("--enable-${enable}") - done - ./configure \ - "${CONFIGURE_FLAGS[@]}" \ - "${FFMPEG_EXTRA_FLAGS[@]}" \ - --pkg-config='pkg-config' \ - --pkg-config-flags="${PKG_CFG_FLAGS}" \ - --cpu="${CPU}" --arch="${ARCH}" \ - --enable-gpl --enable-version3 \ - --enable-nonfree \ - --disable-htmlpages \ - --disable-podpages \ - --disable-txtpages \ - --disable-autodetect || return 1 - ccache make -j"${JOBS}" || return 1 - ${SUDO} make -j"${JOBS}" install || return 1 - return 0 -} \ No newline at end of file diff --git a/scripts/build.sh b/scripts/build.sh new file mode 120000 index 0000000..44b7711 --- /dev/null +++ b/scripts/build.sh @@ -0,0 +1 @@ +entry.sh \ No newline at end of file diff --git a/scripts/build_docker_images.sh b/scripts/build_docker_images.sh new file mode 120000 index 0000000..44b7711 --- /dev/null +++ b/scripts/build_docker_images.sh @@ -0,0 +1 @@ +entry.sh \ No newline at end of file diff --git a/scripts/common.sh b/scripts/common.sh deleted file mode 100644 index 7ad1817..0000000 --- a/scripts/common.sh +++ /dev/null @@ -1,73 +0,0 @@ -#!/usr/bin/env bash - -# shellcheck disable=SC2034 - -# ANSI colors -RED='\e[0;31m' -CYAN='\e[0;36m' -GREEN='\e[0;32m' -YELLOW='\e[0;33m' -NC='\e[0m' - -# echo wrappers -echo_fail() { echo -e "${RED}FAIL${NC}:" "$@" ; } -echo_info() { echo -e "${CYAN}INFO${NC}:" "$@" ; } -echo_pass() { echo -e "${GREEN}PASS${NC}:" "$@" ; } -echo_warn() { echo -e "${YELLOW}WARN${NC}:" "$@" ; } -echo_exit() { echo_fail "$@" ; exit 1 ; } -void() { echo "$@" >/dev/null ; } - -echo_if_fail() { - local cmd=("$@") - local out="${TMP_DIR}/.stdout-${RANDOM}" - local err="${TMP_DIR}/.stderr-${RANDOM}" - test -d "${TMP_DIR}" || mkdir -p "${TMP_DIR}" - - # set trace to the cmdEvalTrace and open file descriptor - local cmdEvalTrace="${TMP_DIR}/.cmdEvalTrace-${RANDOM}" - exec 5> "${cmdEvalTrace}" - export BASH_XTRACEFD=5 - - set -x - "${cmd[@]}" >"${out}" 2>"${err}" - local retval=$? - - # unset and close file descriptor - set +x - exec 5>&- - - # parse out relevant part of the trace - local cmdEvalLines=() - cmd=() - while IFS= read -r line; do - cmdEvalLines+=("${line}") - done < "${cmdEvalTrace}" - local cmdEvalLineNum=${#cmdEvalLines[@]} - for ((i=1; i < cmdEvalLineNum-2; i++)); do - local trimmedCmd="${cmdEvalLines[${i}]}" - trimmedCmd="${trimmedCmd/+ /}" - cmd+=("${trimmedCmd}") - done - - if ! 
test ${retval} -eq 0; then - echo - echo_fail "command failed:" - printf "%s\n" "${cmd[@]}" - echo_warn "command output:" - tail -n 10 "${out}" - tail -n 10 "${err}" - echo - fi - rm "${out}" "${err}" "${cmdEvalTrace}" - return ${retval} -} - -dump_arr() { - arr_name="$1" - declare -n arr - arr="${arr_name}" - arr_exp=("${arr[@]}") - test "${#arr_exp}" -gt 0 || return 0 - echo_info "${arr_name}" - printf "\t%s\n" "${arr_exp[@]}" -} \ No newline at end of file diff --git a/scripts/do_build.sh b/scripts/do_build.sh new file mode 120000 index 0000000..44b7711 --- /dev/null +++ b/scripts/do_build.sh @@ -0,0 +1 @@ +entry.sh \ No newline at end of file diff --git a/scripts/docker_build_images.sh b/scripts/docker_build_images.sh new file mode 120000 index 0000000..44b7711 --- /dev/null +++ b/scripts/docker_build_images.sh @@ -0,0 +1 @@ +entry.sh \ No newline at end of file diff --git a/scripts/entry.sh b/scripts/entry.sh new file mode 100755 index 0000000..21a12bd --- /dev/null +++ b/scripts/entry.sh @@ -0,0 +1,7 @@ +#!/usr/bin/env bash +cd "$(dirname "$(readlink -f $0)")/.." +. main.sh +FB_RUNNING_AS_SCRIPT=1 +scr_name="$(bash_basename $0)" +cmd="${scr_name//.sh/}" +$cmd $@ diff --git a/scripts/install_deps.sh b/scripts/install_deps.sh deleted file mode 100644 index af2f987..0000000 --- a/scripts/install_deps.sh +++ /dev/null @@ -1,193 +0,0 @@ -#!/usr/bin/env bash - -# shellcheck disable=SC2317 -# shellcheck disable=SC2034 - -determine_darwin_pkg_mgr() { - if ! command -v brew >/dev/null; then - echo_warning "brew not found" - echo_info "install brew:" - # shellcheck disable=SC2016 - echo '/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"' - return 1 - fi - PKG_MGR=brew - check_pkg_exists() { - "${PKG_MGR}" list --formula "${1}" >/dev/null - } - PKG_MGR_UPD="${PKG_MGR} update" - PKG_MGR_INST="${PKG_MGR} install" -} - -determine_linux_debian_pkg_mgr() { - if command -v apt-get >/dev/null; then - PKG_MGR=apt-get - else - echo_fail "no package manager found" - return 1 - fi - check_pkg_exists() { - dpkg -l "${1}" >/dev/null 2>/dev/null - } - export DEBIAN_FRONTEND=noninteractive - PKG_MGR_UPD="${SUDO} ${PKG_MGR} update" - PKG_MGR_INST="${SUDO} ${PKG_MGR} install -y" -} - -determine_linux_arch_pkg_mgr() { - if command -v pacman >/dev/null; then - PKG_MGR=pacman - else - echo_fail "no package manager found" - return 1 - fi - check_pkg_exists() { - local pkg_check="${1}" - "${PKG_MGR}" -Qi "${pkg_check}" >/dev/null 2>/dev/null - } - PKG_MGR_UPD="${SUDO} ${PKG_MGR} -Syy" - PKG_MGR_INST="${SUDO} ${PKG_MGR} -S --noconfirm --needed" -} - -determine_linux_fedora_pkg_mgr() { - if command -v dnf >/dev/null; then - PKG_MGR=dnf - else - echo_fail "no package manager found" - return 1 - fi - check_pkg_exists() { - local pkg_check="${1}" - test "${pkg_check}" == 'wget' && pkg_check=wget2 - "${PKG_MGR}" list -q --installed "${pkg_check}" >/dev/null 2>/dev/null - } - PKG_MGR_UPD="${SUDO} ${PKG_MGR} check-update" - PKG_MGR_INST="${SUDO} ${PKG_MGR} install -y" -} - -determine_os() { - unset OS - local UNAME="$(uname)" - if test "${UNAME}" == 'Linux'; then - # shellcheck disable=SC1091 - source /etc/os-release - OS="${UNAME}-${ID_LIKE}" - elif test "${UNAME}" == 'Darwin'; then - OS="${UNAME}" - else - echo_exit "Unable to determine OS for ${UNAME}" - fi - export OS - OS="${OS,,}" - OS="${OS//-/_}" - - if test "$(jq -r ".target_windows" "$COMPILE_CFG")" == 'true'; then - echo_info "targeting windows" - export TARGET_WINDOWS=1 - OS+="_windows" - fi - - return 0 -} - 
-determine_pkg_mgr() { - unset PKG_MGR PKG_MGR_UPD PKG_MGR_INST - determine_"${OS}"_pkg_mgr - return 0 -} - -check_for_req_pkgs() { - echo_info "OS=${OS}" - echo_info "checking for required packages" - local common_pkgs=( - autoconf automake cmake libtool - texinfo wget nasm yasm python3 - meson doxygen jq ccache gawk - ) - # shellcheck disable=SC2034 - local brew_pkgs=( - "${common_pkgs[@]}" pkgconf - mkvtoolnix pipx - ) - local common_linux_pkgs=( - "${common_pkgs[@]}" clang valgrind - curl bc lshw xxd pkgconf - ) - # shellcheck disable=SC2034 - local apt_get_pkgs=( - "${common_linux_pkgs[@]}" build-essential - git-core libass-dev libfreetype6-dev - libsdl2-dev libva-dev libvdpau-dev - libvorbis-dev libxcb1-dev pipx - libxcb-shm0-dev libxcb-xfixes0-dev - zlib1g-dev libssl-dev ninja-build - gobjc++ mawk libnuma-dev - mediainfo mkvtoolnix libgtest-dev - ) - # shellcheck disable=SC2034 - local pacman_pkgs=( - "${common_linux_pkgs[@]}" base-devel - python-pipx ninja - ) - # shellcheck disable=SC2034 - local dnf_pkgs=( - "${common_linux_pkgs[@]}" openssl-devel - pipx ninja-build - ) - - if [[ "${TARGET_WINDOWS}" -eq 1 ]]; then - apt_get_pkgs+=(gcc-mingw-w64 g++-mingw-w64) - fi - - local req_pkgs_env_name="${PKG_MGR/-/_}_pkgs" - declare -n req_pkgs="${req_pkgs_env_name}" - local missing_pkgs=() - for pkg in "${req_pkgs[@]}"; do - check_pkg_exists "${pkg}" || missing_pkgs+=("${pkg}") - done - if ! test "${#missing_pkgs}" -eq 0; then - echo_warn "missing packages:" "${missing_pkgs[@]}" - # shellcheck disable=SC2086 - ${PKG_MGR_UPD} - # shellcheck disable=SC2086 - ${PKG_MGR_INST} "${missing_pkgs[@]}" || return 1 - fi - echo_pass "packages from ${PKG_MGR} installed" - echo_if_fail pipx install virtualenv || return 1 - echo_if_fail pipx ensurepath || return 1 - echo_pass "pipx is installed" - # shellcheck disable=SC1091 - test -f "${HOME}/.cargo/env" && source "${HOME}/.cargo/env" - - if ! command -v cargo >/dev/null; then - echo_warn "missing cargo" - - if ! 
command -v rustup >/dev/null; then - echo_warn "missing rustup" - echo_warn "installing rustup" - curl https://sh.rustup.rs -sSf | sh -s -- -y - # shellcheck disable=SC2016 - grep -q 'source "${HOME}/.cargo/env"' "${HOME}/.bashrc" || \ - echo 'source "${HOME}/.cargo/env"' >> "${HOME}/.bashrc" - # shellcheck disable=SC1091 - source "${HOME}/.bashrc" - fi - fi - - echo_if_fail rustup default stable - echo_if_fail rustup update stable - echo_pass "rustup is installed" - echo_if_fail cargo install cargo-c || return 1 - echo_pass "cargo-c is installed" - echo_pass "all required packages installed" - return 0 -} - -FB_FUNC_NAMES+=('install_deps') -FB_FUNC_DESCS['install_deps']='install required dependencies' -install_deps() { - unset SUDO - test "$(id -u)" -eq 0 || SUDO=sudo - determine_pkg_mgr || return 1 - check_for_req_pkgs || return 1 -} \ No newline at end of file diff --git a/scripts/install_deps.sh b/scripts/install_deps.sh new file mode 120000 index 0000000..44b7711 --- /dev/null +++ b/scripts/install_deps.sh @@ -0,0 +1 @@ +entry.sh \ No newline at end of file diff --git a/scripts/print_cmds.sh b/scripts/print_cmds.sh new file mode 120000 index 0000000..44b7711 --- /dev/null +++ b/scripts/print_cmds.sh @@ -0,0 +1 @@ +entry.sh \ No newline at end of file diff --git a/shfmt-watcher.sh b/shfmt-watcher.sh new file mode 100755 index 0000000..606409b --- /dev/null +++ b/shfmt-watcher.sh @@ -0,0 +1,12 @@ +#!/usr/bin/env bash + +base="$(dirname "$(readlink -f "$0")")" + +inotifywait -m -r \ + -e close_write \ + -e moved_to \ + --format '%w%f' "$base" | while read -r file; do + if [[ -f $file && $file =~ .sh ]]; then + shfmt --write --simplify "$file" + fi +done
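
Illustration of the rationale stated in the vendored fix_neon_dotprod_implementation_of_sad_loop_kernel.patch above ("search_area_width and search_area_height can sometimes be 0, so replace all the do while loops with for loops"): a do/while body always executes at least once, whereas a for loop with the same bound executes zero times. A minimal standalone C sketch of that difference, using hypothetical names (rows_visited_do_while / rows_visited_for) that are not taken from the SVT-AV1 sources:

#include <stdio.h>

/* Hypothetical stand-in for the outer search loop: a do/while body runs
 * at least once, so with search_area_height == 0 it still processes one
 * row; the equivalent for loop correctly runs zero times. */
static int rows_visited_do_while(int search_area_height) {
    int rows = 0;
    int y = 0;
    do {
        rows++;                       /* executes even when height == 0 */
        y++;
    } while (y < search_area_height);
    return rows;
}

static int rows_visited_for(int search_area_height) {
    int rows = 0;
    for (int y = 0; y < search_area_height; y++)
        rows++;                       /* skipped entirely when height == 0 */
    return rows;
}

int main(void) {
    printf("do/while, height 0: %d row(s)\n", rows_visited_do_while(0)); /* prints 1 */
    printf("for,      height 0: %d row(s)\n", rows_visited_for(0));      /* prints 0 */
    return 0;
}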