#!/bin/bash
|
|
# FUNCTIONS
|
|
|
|
# Colourised prompt: regular users get a green username, root a red one,
# so a root shell is visually unmistakable.
if [ "$(id -u)" != "0" ]; then
    PS1='${debian_chroot:+($debian_chroot)}\[\033[01;32m\]\u\[\033[01;33m\]@\[\033[01;36m\]\h \[\033[01;33m\]\w \[\033[01;35m\]\$ \[\033[00m\]'
else
    PS1='${debian_chroot:+($debian_chroot)}\[\033[01;31m\]\u\[\033[01;33m\]@\[\033[01;36m\]\h \[\033[01;33m\]\w \[\033[01;35m\]\$ \[\033[00m\]'
fi
|
|
|
|
# Detect whether a non-root user can escalate privileges via sudo.
# Sets NEED_SUDO to the sudo binary path (used as a prefix on privileged
# commands below) or to the empty string when sudo is unusable.
if [ "$(id -u)" != "0" ]; then
    # Bug fix: 'grep -q' never writes to stdout, so capturing its output
    # always produced an empty IS_SUDOERS and NEED_SUDO was never set for
    # sudo-group members. Capture the matching text instead.
    IS_SUDOERS=$(groups | grep sudo)
    if [ -n "$(command -v sudo)" ] && [ -n "$IS_SUDOERS" ]; then
        NEED_SUDO="$(command -v sudo)"
    else
        NEED_SUDO=""
    fi
fi
|
|
|
|
# Return success when every named command resolves in the current PATH.
command_exists() {
    if command -v "$@" >/dev/null 2>&1; then
        return 0
    fi
    return 1
}
|
|
|
|
# Run a command/function ($1), optionally printing a label ($2) followed
# by a coloured [OK]/[KO] tag (TPUT_* colour variables are expected to be
# set by the environment; empty values are harmless).
# Bug fix: the original always returned the status of the last echo,
# swallowing the wrapped command's exit code; it is now propagated.
_run() {
    local _rc
    if [ -n "$2" ]; then
        echo -ne "${TPUT_ECHO}${2}${TPUT_RESET}\t"
    fi
    "$1"
    _rc=$?
    if [ -n "$2" ]; then
        if [ "$_rc" -ne 0 ]; then
            echo -e "${TPUT_FAIL}[KO]${TPUT_RESET}"
        else
            echo -e "[${TPUT_OK}OK${TPUT_RESET}]"
        fi
    fi
    return "$_rc"
}
|
|
|
|
# Silent, non-interactive apt-get install that keeps any existing
# configuration files untouched.
apt_install() {
    DEBIAN_FRONTEND=noninteractive apt-get install --assume-yes \
        --option=Dpkg::options::=--force-confmiss \
        --option=Dpkg::options::=--force-confold \
        "$@" >/dev/null 2>&1
}
|
|
|
|
# Upload a file to transfer.vtbox.net and append the returned URL to
# ~/.transfer.log.
transfer_vtbox_net() {
    # Quoting fix: "$1" inside basename and "$HOME/.transfer.log" were
    # unquoted and broke on paths containing spaces.
    { curl --progress-bar --upload-file "$1" "https://transfer.vtbox.net/$(basename "$1")" && echo ""; } | tee -a "$HOME/.transfer.log" && echo ""
}
|
|
|
|
# Create "<path>.tar.gz" from <path>, compressing in parallel with pigz.
compress_pigz() {
    local target=$1
    tar -I pigz -cvf "${target}.tar.gz" "$target"
}
|
|
|
|
# Create "<path>.tar.zst" from <path> using zstd compression.
compress_zstd() {
    local target=$1
    tar -I zstd -cvf "${target}.tar.zst" "$target"
}
|
|
|
|
# Extract a gzip/pigz-compressed tarball in the current directory.
decompress_pigz() {
    local archive=$1
    tar -I pigz -xvf "$archive"
}
|
|
|
|
# Pull the latest ~/.mybashrc checkout and silently re-run its setup script.
update_git_mybashrc() {
    if [ -d "$HOME/.mybashrc/.git" ]; then
        git -C "$HOME/.mybashrc" pull -q
        # Quoting fix: an unquoted $HOME breaks when the path has spaces.
        "$HOME/.mybashrc/setup.sh" >/dev/null 2>&1
    fi
}
|
|
|
|
# Optimise and auto-repair all MySQL databases, authenticating with the
# user's ~/.my.cnf when present, else Plesk's admin password file.
EE_MYSQL_OPTIMIZATION() {
    if [ -f "$HOME/.my.cnf" ]; then
        /usr/bin/mysqlcheck -Aos --auto-repair
        return
    fi
    if [ -f /etc/psa/.psa.shadow ]; then
        MYSQL_PWD="$(cat /etc/psa/.psa.shadow)" /usr/bin/mysqlcheck -Aos -uadmin --auto-repair
        return
    fi
    echo "$HOME/.my.cnf or /etc/psa/.psa.shadow doesn't exist"
}
|
|
|
|
# Symmetrically encrypt a file with gpg (interactive passphrase prompt).
encrypt_gpg() {
    local plain=$1
    gpg -c "$plain"
}
|
|
|
|
# Decrypt "<file>.gpg" into "<file>" (the .gpg suffix is stripped).
decrypt_gpg() {
    local crypted=$1
    gpg --output "${crypted%.gpg}" -d "$crypted"
}
|
|
|
|
# List the current directory's entries sorted by human-readable size.
LIST_BY_SIZE() {
    du -sh -- ./* | sort -h
}
|
|
|
|
# Download and execute Docker's official convenience installer.
EE_DOCKER_SETUP() {
    # mkdir -p replaces the racy [ -d ] test; all $HOME paths quoted.
    mkdir -p "$HOME/.ee"
    curl -fsSL get.docker.com -o "$HOME/.ee/get-docker.sh"
    chmod +x "$HOME/.ee/get-docker.sh"
    "$HOME/.ee/get-docker.sh"
}
|
|
|
|
# Start the tinc VPN daemon for the named network.
SET_TINC_UP() {
    local network=$1
    sudo tincd -n "$network"
}
|
|
|
|
# Stop (-k: kill) the tinc VPN daemon for the named network.
SET_TINC_DOWN() {
    local network=$1
    sudo tincd -n "$network" -k
}
|
|
|
|
# Crude sequential disk benchmark: write a ~1 GB file with dd, read it
# back, then remove it.
DD_BENCHMARK_DISK() {
    local testfile=file_1GB
    dd if=/dev/zero bs=1024 count=1000000 of="$testfile"
    dd if="$testfile" of=/dev/null bs=1024
    rm "$testfile"
}
|
|
|
|
RANDOM_GIT_COMMIT() {
|
|
|
|
[ -z "$(command -v w3m)" ] && {
|
|
apt-get install -y w3m
|
|
}
|
|
|
|
git add .
|
|
git commit -m "$(w3m whatthecommit.com | head -1)"
|
|
|
|
}
|
|
|
|
# Stage all changes and commit them with the supplied message.
GIT_COMMIT() {
    local message=$1
    git add .
    git commit -am "$message"
}
|
|
|
|
# Find (and with --force, delete) duplicate files below a path via rdfind.
_find_duplicates() {
    if [ "$#" = "0" ]; then
        # Bug fix: usage named a non-existent 'duplicate-find' command; the
        # alias defined at the bottom of this file is 'duplicate-finder'.
        echo "duplicate-finder <path> [--force]"
        echo "use --force to delete files"
    else
        if ! command_exists rdfind; then
            apt-get install rdfind -y
        fi
        if [ "$2" = "--force" ]; then
            rdfind -ignoreempty false -deleteduplicates true "$1"
        else
            # Default is a dry run so nothing is deleted by accident.
            rdfind -dryrun true -ignoreempty false "$1"
        fi
    fi
}
|
|
|
|
# Full system maintenance: apt update / dist-upgrade / autoremove /
# autoclean, purge of half-removed packages, optional Docker image and
# volume pruning (--docker), deletion of rotated logs older than 30 days,
# and an ee-bashrc refresh. Requires root or sudo-group membership.
MAINTENANCE_APT() {

    # Colors
    CSI='\033['
    CEND="${CSI}0m"
    CGREEN="${CSI}1;32m"

    if [ "$(id -u)" = "0" ] || [ -n "$IS_SUDOERS" ]; then
        export DEBIAN_FRONTEND=noninteractive
        echo -e "${CGREEN}#############################################${CEND}"
        echo -e ' APT UPDATE '
        echo -e "${CGREEN}#############################################${CEND}"
        # Older apt releases do not know --allow-releaseinfo-change; retry
        # without the flag when the first attempt fails.
        if ! {
            $NEED_SUDO apt-get update --allow-releaseinfo-change
        }; then
            $NEED_SUDO apt-get update
        fi
        echo -e "${CGREEN}#############################################${CEND}"
        echo -e ' APT FULL-UPGRADE '
        echo -e "${CGREEN}#############################################${CEND}"
        $NEED_SUDO apt-get --option=Dpkg::options::=--force-confmiss --option=Dpkg::options::=--force-confold --option=Dpkg::options::=--force-unsafe-io -y dist-upgrade
        echo -e "${CGREEN}#############################################${CEND}"
        echo -e ' APT-GET AUTOREMOVE '
        echo -e "${CGREEN}#############################################${CEND}"
        $NEED_SUDO apt-get -o Dpkg::Options::="--force-confmiss" -o Dpkg::Options::="--force-confold" -y --purge autoremove
        echo -e "${CGREEN}#############################################${CEND}"
        echo -e ' APT AUTOCLEAN '
        echo -e "${CGREEN}#############################################${CEND}"
        $NEED_SUDO apt-get -o Dpkg::Options::="--force-confmiss" -o Dpkg::Options::="--force-confold" -y autoclean
        $NEED_SUDO apt-get -y clean
        ## clean packages in deinstall state
        DEINSTALLED=$($NEED_SUDO dpkg --get-selections | grep deinstall | cut -f1)
        if [ -n "$DEINSTALLED" ]; then
            echo -e "${CGREEN}#############################################${CEND}"
            echo -e ' CLEAN DEINSTALLED PACKAGES '
            echo -e "${CGREEN}#############################################${CEND}"
            # Bug fix: reuse the captured list instead of re-running dpkg,
            # and elevate the purge itself (it previously ran unprivileged).
            echo "$DEINSTALLED" | xargs $NEED_SUDO dpkg --purge
        fi
        if [ "$1" = "--docker" ]; then
            if command_exists docker; then
                list_images=$(docker images --filter "dangling=true" -q --no-trunc)
                list_volumes=$(docker volume ls -qf dangling=true)
                if [ -n "$list_images" ]; then
                    echo -e "${CGREEN}#############################################${CEND}"
                    echo -e ' DOCKER IMAGES CLEANUP '
                    echo -e "${CGREEN}#############################################${CEND}"
                    # Bug fix: the quoted "$list_images" passed every ID as a
                    # single newline-joined argument, failing whenever more
                    # than one image was dangling; xargs splits them.
                    echo "$list_images" | xargs docker rmi
                fi
                if [ -n "$list_volumes" ]; then
                    echo -e "${CGREEN}#############################################${CEND}"
                    echo -e ' DOCKER VOLUMES CLEANUP '
                    echo -e "${CGREEN}#############################################${CEND}"
                    # Same multi-argument fix as for images above.
                    echo "$list_volumes" | xargs docker volume rm
                fi
            fi
        fi
        OLD_LOGS=$($NEED_SUDO find /var/log/ -type f -mtime +30 -iname "*.gz")
        if [ -n "$OLD_LOGS" ]; then
            echo -e "${CGREEN}#############################################${CEND}"
            echo -e ' CLEANUP OLD LOGS '
            echo -e "${CGREEN}#############################################${CEND}"
            $NEED_SUDO find /var/log/ -type f -mtime +30 -iname "*.gz" -exec rm '{}' \;
        fi
        echo -e "${CGREEN}#############################################${CEND}"
        echo -e ' EE-BASHRC UPDATE '
        echo -e "${CGREEN}#############################################${CEND}"
        update_git_mybashrc
    else
        echo "you need to be root or sudoers to launch the maintenance"
    fi

}
|
|
|
|
# Download the nginx-ee build script and run it, forwarding all arguments.
EE_NGINX_COMPILE() {
    # mkdir -p replaces the racy [ -d ] test; $HOME paths quoted.
    mkdir -p "$HOME/.scripts"
    wget -qO "$HOME/.scripts/nginx-build.sh" https://raw.githubusercontent.com/VirtuBox/nginx-ee/master/nginx-build.sh
    chmod +x "$HOME/.scripts/nginx-build.sh"
    "$HOME/.scripts/nginx-build.sh" "$@"
}
|
|
|
|
# Pretty-print a log file with ccze, transparently handling rotated
# .gz files via zcat; plain files show their last 500 lines.
EE_SHOW_LOG() {
    if ! command_exists ccze; then
        apt install ccze -y
    fi
    # Bug fix: 'grep -q ".gz"' matched ".gz" anywhere in the name (e.g.
    # access.gz.1) and the unescaped dot even matched "agz"; test for a
    # real .gz suffix instead.
    case "$1" in
    *.gz)
        zcat "$1" | ccze -A -p syslog -C
        ;;
    *)
        tail -n 500 "$1" | ccze -A -p syslog -C
        ;;
    esac
}
|
|
|
|
# Create a python3 virtualenv at the given path and activate it in the
# current shell.
_PYTHON_VIRTUALENV() {
    local venv_path=$1
    python3 -m venv "$venv_path"
    # shellcheck disable=SC1091
    source "$venv_path/bin/activate"
}
|
|
|
|
# Forward a query to cht.sh, installing the client on first use.
CHEAT_CHECK() {
    {
        if [ -z "$(command -v cht.sh)" ]; then
            wget -O /usr/local/bin/cht.sh https://cht.sh/:cht.sh
            chmod +x /usr/local/bin/cht.sh
        fi
        "$(command -v cht.sh)" "$@"
    }
}
|
|
|
|
# Count running php-fpm processes from a single batch-mode top snapshot.
EE_SHOW_FPM() {
    local snapshot
    snapshot=$(top -bn1)
    printf '%s\n' "$snapshot" | grep -c "php-fpm"
}
|
|
|
|
# Run a full Magento 2 upgrade cycle (maintenance mode, setup:upgrade,
# cache/static-content regeneration) from the shop root directory.
MAGENTO_UPGRADE() {
    if [ -x bin/magento ]; then
        bin/magento maintenance:enable
        bin/magento cache:flush
        bin/magento setup:upgrade
        # Bug fix: the '*' was inside double quotes, so the glob never
        # expanded and the generated/cache directories were never cleared.
        rm -Rf ./generated/metadata/* ./generated/code/* ./var/cache/* ./var/page_cache/*
        bin/magento setup:di:compile
        /usr/bin/composer dump-autoload -o --apcu
        # Same quoted-glob fix for the static assets.
        rm -Rf ./pub/static/frontend/* ./pub/static/adminhtml/* ./var/view_preprocessed/* ./pub/static/_cache/* ./pub/static/_requirejs/*
        bin/magento setup:static-content:deploy fr_FR
        bin/magento setup:static-content:deploy en_US --theme="Magento/backend"
        bin/magento cache:enable
        bin/magento maintenance:disable
    else
        echo "you are not in a magento root folder"
    fi
}
|
|
|
|
# Reset WordPress permissions (directories 750, files 640) when invoked
# from a WordPress root or one level below it.
_WP_PERMISSIONS() {
    if [ ! -f ./wp-config.php ] && [ ! -f ../wp-config.php ]; then
        echo "not a wordpress directory"
        return
    fi
    find . -type d -exec chmod 750 {} \;
    find . -type f -exec chmod 640 {} \;
}
|
|
|
|
# Recursively chmod every regular file under the cwd to the given mode.
_PERM_FILES() {
    local mode=$1
    find . -type f -exec chmod "$mode" {} \;
}
|
|
|
|
# Recursively chmod every directory under the cwd to the given mode.
_PERM_FOLDER() {
    local mode=$1
    find . -type d -exec chmod "$mode" {} \;
}
|
|
|
|
# Fetch the nginx-ee installer into /tmp and run it with all arguments.
_NGINX_EE() {
    local installer=/tmp/nginx-ee
    wget -qO "$installer" vtb.cx/nginx-ee
    chmod +x "$installer"
    "$installer" "$@"
}
|
|
|
|
# Install Node.js from NodeSource for the requested major version.
_INSTALL_NODEJS() {
    if [ "$#" -eq 0 ]; then
        echo "Usage : setup-nodejs <version>"
        echo "Example : setup-nodejs 12"
    else
        wget -O nodejs.sh "https://deb.nodesource.com/setup_$1.x"
        chmod +x nodejs.sh
        # Consistency fix: the NodeSource script edits /etc/apt and needs
        # the same elevation already used for apt-get below.
        $NEED_SUDO ./nodejs.sh
        rm -f nodejs.sh
        $NEED_SUDO apt-get install -y nodejs
    fi
}
|
|
|
|
# Update npm itself to the latest release, system-wide (elevated for
# non-root users via $NEED_SUDO).
_UPDATE_NPM() {

    $NEED_SUDO npm install -g npm

}
|
|
|
|
# Add one or more curated Launchpad PPAs selected by --flag; with no
# arguments, print the list of supported flags.
_PPA_INSTALL() {
    if [ "$#" -eq 0 ]; then
        echo "Usage : IPPA <ppa-name>"
        echo " PPA : "
        echo " - ubuntu backports : --jonathonf"
        echo " - virtubox backports : --backports"
        echo " - ondrej php : --php"
        echo " - ondrej nginx : --nginx"
        echo " - WordOps nginx : --nginx-wo"
        echo " - ondrej apache : --apache"
        echo " - redis-server : --redis"
        echo " - ffmpeg4 : --ffmpeg"
        echo " - gcc8 : --gcc"
        echo " - tinc : --tinc"
        echo " - goaccess : --goaccess"
        echo " - handbrake : --handbrake"
        return
    fi
    PPA=""
    # Accumulate the selected PPA identifiers, one per recognised flag.
    while [ "$#" -gt 0 ]; do
        case "$1" in
        --jonathonf) PPA="$PPA ppa:jonathonf/backports" ;;
        --backports) PPA="$PPA ppa:virtubox/backports" ;;
        --php) PPA="$PPA ppa:ondrej/php" ;;
        --nginx) PPA="$PPA ppa:ondrej/nginx-mainline" ;;
        --nginx-wo) PPA="$PPA ppa:virtubox/nginx-wo" ;;
        --apache) PPA="$PPA ppa:ondrej/apache2" ;;
        --redis) PPA="$PPA ppa:chris-lea/redis-server" ;;
        --ffmpeg) PPA="$PPA ppa:jonathonf/ffmpeg-4" ;;
        --gcc) PPA="$PPA ppa:jonathonf/gcc" ;;
        --tinc) PPA="$PPA ppa:virtubox/tinc" ;;
        --goaccess) PPA="$PPA ppa:alex-p/goaccess" ;;
        --handbrake) PPA="$PPA ppa:stebbins/handbrake-git-snapshots" ;;
        *) ;;
        esac
        shift
    done
    if [ -n "$PPA" ]; then
        for UPPA in $PPA; do
            $NEED_SUDO add-apt-repository "$UPPA" -y
        done
    fi
}
|
|
|
|
# Install the scopatz nano syntax-highlighting bundle and enable it
# system-wide through /etc/nanorc (both steps are idempotent).
_INSTALL_NANORC() {
    [ -d /usr/share/nano-syntax-highlighting ] ||
        git clone --depth 1 https://github.com/scopatz/nanorc.git /usr/share/nano-syntax-highlighting -q
    grep -q "/usr/share/nano-syntax-highlighting" /etc/nanorc ||
        echo "include /usr/share/nano-syntax-highlighting/*.nanorc" >>/etc/nanorc
}
|
|
|
|
# Print a short hardware/OS summary (CPU, RAM, swap, uptime, OS, kernel).
_SYSINFO() {
    # CPU model, core count and frequency from /proc/cpuinfo.
    cname=$(awk -F: '/model name/ {name=$2} END {print name}' /proc/cpuinfo | sed 's/^[ \t]*//;s/[ \t]*$//')
    cores=$(awk -F: '/model name/ {core++} END {print core}' /proc/cpuinfo)
    freq=$(awk -F: ' /cpu MHz/ {freq=$2} END {print freq}' /proc/cpuinfo | sed 's/^[ \t]*//;s/[ \t]*$//')
    # Total memory and swap in MB.
    tram=$(free -m | awk 'NR==2 {print $2}')
    vram=$(free -m | awk 'NR==4 {print $2}')
    # Uptime with the load-average columns stripped off.
    up=$(uptime | awk '{ $1=$2=$(NF-6)=$(NF-5)=$(NF-4)=$(NF-3)=$(NF-2)=$(NF-1)=$NF=""; print }' | sed 's/^[ \t]*//;s/[ \t]*$//')
    # Idiom fix: read the file with awk directly instead of 'cat | awk'.
    opsy=$(awk 'NR==1 {print}' /etc/issue.net) # Operating System & Version
    arch=$(uname -m)                           # Architecture
    lbit=$(getconf LONG_BIT)                   # Architecture in Bit
    hn=$(hostname -f)                          # Hostname
    kern=$(uname -r)
    echo ""
    # Output of results
    echo "System Info"
    echo "Server : $hn"
    echo "-----------"
    echo "Processor : $cname"
    echo "CPU Cores : $cores"
    echo "Frequency : $freq MHz"
    echo "Memory : $tram MB"
    echo "Swap : $vram MB"
    echo "Uptime : $up"
    echo "-----------"
    echo "OS : $opsy"
    echo "Arch : $arch ($lbit Bit)"
    echo "Kernel : $kern"
    echo ""
}
|
|
|
|
# Print curl's timing breakdown (DNS, connect, TLS, TTFB, total) for a URL.
_SITESPEED() {
    if [ "$#" = "0" ]; then
        echo "Usage : sitespeed <url>"
        return
    fi
    curl -s -w \
        '\nLookup time:\t\t%{time_namelookup}\nConnect time:\t\t%{time_connect}\nSSL handshake time:\t%{time_appconnect}\nPre-Transfer time:\t%{time_pretransfer}\nRedirect time:\t\t%{time_redirect}\nStart transfer time:\t%{time_starttransfer}\n\nTotal time:\t\t%{time_total}\n' -o /dev/null "$@"
}
|
|
|
|
# Install the baseline server toolbox packages non-interactively.
_SETUP_TOOLS() {
    DEBIAN_FRONTEND=noninteractive $NEED_SUDO apt-get \
        -o Dpkg::Options::="--force-confmiss" \
        -o Dpkg::Options::="--force-confold" \
        -y install haveged curl git unzip zip htop nload nmon ntp gnupg gnupg2 wget pigz tree ccze
}
|
|
|
|
# Uncomment the '& stop' directive so UFW log lines stop duplicating into
# syslog, then restart rsyslog to apply it.
_RSYSLOG_UFW() {
    if [ -f /etc/rsyslog.d/20-ufw.conf ]; then
        sed -i 's/\#\& stop/\& stop/' /etc/rsyslog.d/20-ufw.conf
        service rsyslog restart
    fi
}
|
|
|
|
# (Re)start a root ssh-agent on a fixed socket path and load keys if the
# agent has none yet.
_START_SSH_AGENT() {
    local sock=/root/.ssh/ssh_auth_sock
    rm -f "$sock"
    ssh-agent -a "$sock"
    export SSH_AUTH_SOCK="$sock"
    ssh-add -l >/dev/null || ssh-add
}
|
|
|
|
# Replace /etc/apt/sources.list with the curated list for this release
# codename.
_APT_REPO_UBUNTU() {
    wget -O /etc/apt/sources.list "https://vtb.cx/$(lsb_release -sc)-list"
}
|
|
|
|
# On Ubuntu 18.04 (bionic) only, install the HWE kernel.
_APT_BIONIC_KERNEL() {
    if [ "$(lsb_release -sc)" != "bionic" ]; then
        return
    fi
    apt update && apt install --install-recommends linux-generic-hwe-18.04 --assume-yes
}
|
|
|
|
# Create a restricted FTP account mapped to a site's htdocs directory and
# append the generated credentials to ~/.ftpaccounts.
_FTP_ADD() {
    if [ "$#" = "0" ] || [ "$#" = "1" ]; then
        echo "Usage : ftpadd <user> <domain>"
    else
        if ! command_exists pwgen; then
            $NEED_SUDO apt-get install pwgen -y >/dev/null 2>&1
        fi
        if [ -d /var/www/"$2"/htdocs ]; then
            ftpaccountpass=$(_PWGEN)
            $NEED_SUDO useradd -d "/var/www/$2/htdocs" -M -s /bin/false -G www-data "$1"
            echo "$1:$ftpaccountpass" | $NEED_SUDO chpasswd -m
            # Bug fix: chmod on /var/www needs the same elevation as useradd;
            # it previously ran unprivileged and failed for sudo users.
            $NEED_SUDO chmod -R g+rw "/var/www/$2/htdocs"
            echo -e "\n\n[/var/www/$2/htdocs]" | tee -a "$HOME/.ftpaccounts"
            echo -e "user : $1" | tee -a "$HOME/.ftpaccounts"
            echo "password : $ftpaccountpass" | tee -a "$HOME/.ftpaccounts"
        else
            echo "site directory doesn't exist"
        fi
    fi
}
|
|
|
|
# Baseline UFW policy: allow outgoing / deny incoming, rate-limit the SSH
# ports, open the usual service ports, and whitelist the backup and
# monitoring hosts.
_UFW_MINIMAL() {
    local port
    ufw logging low
    ufw default allow outgoing
    ufw default deny incoming
    for port in 22 10022; do
        ufw limit "$port"
    done
    for port in 53 http https 68 655 873 123 22222; do
        ufw allow "$port"
    done
    # UrBackup server ports, restricted to the backup host.
    for port in 35621 35622 35623; do
        ufw allow from 144.76.159.118 to any port "$port"
    done
    # Zabbix agent port, restricted to the monitoring host.
    ufw allow from 159.69.0.216 to any port 10050
}
|
|
|
|
# Harden sshd_config from the reference template while preserving the
# current SSH port, or switching to the port given as $1.
_SSH_SECURE() {
    # Bug fix: a bare 'grep "Port"' also matched '#Port' and 'GatewayPorts',
    # producing a bogus (or multi-line) port value; anchor on a real
    # 'Port <n>' directive.
    CURRENT_SSH_PORT=$(grep -E "^Port " /etc/ssh/sshd_config | awk -F " " '{print $2}')
    wget https://raw.githubusercontent.com/VirtuBox/ubuntu-nginx-web-server/master/etc/ssh/sshd_config -O /etc/ssh/sshd_config
    if [ -n "$1" ]; then
        sed -i "s/Port 22/Port $1/" /etc/ssh/sshd_config
    elif [ -n "$CURRENT_SSH_PORT" ]; then
        # Robustness: only rewrite when a port was actually detected,
        # otherwise an empty value produced a broken 'Port ' line.
        sed -i "s/Port 22/Port $CURRENT_SSH_PORT/" /etc/ssh/sshd_config
    fi
}
|
|
|
|
# Print one secure random 24-character password, installing pwgen on
# first use.
_PWGEN() {
    command_exists pwgen || apt_install pwgen
    pwgen -s 24 1
}
|
|
|
|
# Run fd, installing the static musl .deb on first use.
_FD() {
    if [ -z "$(command -v fd)" ]; then
        echo "downloading fd ..."
        # Bug fix: the old 'wget -qO <url> -O /tmp/fd.deb' consumed the URL
        # as an output-file argument and left wget with no URL operand, so
        # the download always failed.
        wget -q -O /tmp/fd.deb https://github.com/sharkdp/fd/releases/download/v7.3.0/fd-musl_7.3.0_amd64.deb
        echo "installing fd ..."
        # Bug fix: the temp file is fd.deb; 'rm /tmp/db.deb' was a typo and
        # left the package behind.
        dpkg -i /tmp/fd.deb && rm -f /tmp/fd.deb
    fi
    fd "$@"
}
|
|
|
|
# Open an interactive bash shell inside the named LXD container
# (extra arguments are forwarded to 'lxc exec').
_LXC_LOGIN() {

    lxc exec "$@" /usr/bin/env bash

}
|
|
|
|
# Launch an Ubuntu 18.04 daily container, auto-naming it "ctn-<suffix>"
# when no name is supplied.
_LXC_LAUNCH() {
    # Bug fix: assigning to RANDOM merely re-seeds bash's RNG — the
    # computed hash suffix was discarded and a random 16-bit integer was
    # used instead. Use an ordinary local variable.
    local suffix
    suffix=$(date +%s | sha256sum | base64 | head -c 4)
    if [ "$#" = "0" ]; then
        lxc launch ubuntu-daily:18.04 "ctn-$suffix"
    else
        lxc launch ubuntu-daily:18.04 "$1"
    fi
}
|
|
|
|
# Backport a source package (.dsc URL) to bionic and upload it to a PPA
# (default ppa:virtubox/backports, or the PPA passed as $1).
_BACKPORT_PACKAGE() {
    if [ "$#" = "0" ]; then
        echo "Usage :"
        echo "bppackage <ppa (optional)> <.dsc url> "
        echo "exemple : bppackage ppa:virtubox/backports http://url.dsc"
        # Bug fix: stop after printing usage instead of falling through
        # and creating a work directory with no arguments.
        return 1
    fi
    # Bug fix: assigning to RANDOM only re-seeds bash's RNG; the hash
    # suffix was never used. Use a plain local variable.
    local suffix
    suffix=$(date +%s | sha256sum | base64 | head -c 4)
    if [ "$PWD" = "/root" ]; then
        mkdir -p "backport-$suffix"
        cd "backport-$suffix" || exit 1
    fi
    if [ "$#" = "1" ]; then
        backportpackage -r -w . -d bionic -u ppa:virtubox/backports -kE3CC41E7F354756B94A7DF4322EB296C97BAD476 "$1"
    elif [ "$#" = "2" ]; then
        backportpackage -r -w . -d bionic -u "$1" -kE3CC41E7F354756B94A7DF4322EB296C97BAD476 "$2"
    fi
}
|
|
|
|
# Upgrade WordOps from its updating-configuration branch, seeding a
# minimal gitconfig first when none exists (WordOps needs git identity).
_WO_FULL_UPGRADE() {
    if [ ! -f "$HOME/.gitconfig" ]; then
        sudo bash -c 'echo -e "[user]\n\tname = $USER\n\temail = root@wops.cc" > $HOME/.gitconfig'
    fi
    rm -f wo
    wget -qO wo https://raw.githubusercontent.com/WordOps/WordOps/updating-configuration/install && sudo bash wo -b updating-configuration
}
|
|
|
|
# Run the Ookla speedtest CLI, installing its apt repository and package
# on first use.
_SPEEDTEST() {
    if ! command_exists speedtest; then
        local deb_distro
        deb_distro=$(lsb_release -sc)
        apt-get install gnupg1 apt-transport-https dirmngr -y
        apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 379CE192D401AB61
        echo "deb https://ookla.bintray.com/debian $deb_distro main" | sudo tee /etc/apt/sources.list.d/speedtest.list
        apt-get update
        apt-get install speedtest -y
    fi
    speedtest
}
|
|
|
|
# Install/refresh wp-cli into /usr/local/bin plus its bash completion.
_INSTALL_WPCLI() {
    curl -O https://raw.githubusercontent.com/wp-cli/builds/gh-pages/phar/wp-cli.phar
    chmod +x wp-cli.phar
    $NEED_SUDO mv wp-cli.phar /usr/local/bin/wp
    # Bug fix: writing into /etc/bash_completion.d needs the same
    # elevation as the mv above; it previously ran unprivileged.
    $NEED_SUDO wget -O /etc/bash_completion.d/wp-completion.bash https://raw.githubusercontent.com/wp-cli/wp-cli/master/utils/wp-completion.bash
}
|
|
|
|
# Dispatcher: map --flags to installer helper functions and run each one
# (elevated via $NEED_SUDO). With no arguments, print the supported flags.
_SETUP_CONTROLLER() {
    if [ "$#" -eq 0 ]; then
        echo "Usage : SSETUP <ppa-name>"
        echo " PPA : "
        echo " - nanorc : --nanorc"
        echo " - wpcli : --wpcli"
        return
    fi
    INSTALL=""
    while [ "$#" -gt 0 ]; do
        case "$1" in
        --nanorc) INSTALL="$INSTALL _INSTALL_NANORC" ;;
        --wpcli) INSTALL="$INSTALL _INSTALL_WPCLI" ;;
        *) ;;
        esac
        shift
    done
    if [ -n "$INSTALL" ]; then
        for APP in $INSTALL; do
            $NEED_SUDO $APP
        done
    fi
}
|
|
|
|
# Install the Zabbix 4.4 release package matching this Debian/Ubuntu
# distribution and codename.
_ZABBIX_UPDATE() {
    local distro
    command_exists lsb_release || apt install lsb-release -y >/dev/null
    distro="$(lsb_release -si)"
    # ${distro,,} lower-cases the distributor ID for the repo path.
    wget -O /tmp/zabbix.deb "https://repo.zabbix.com/zabbix/4.4/${distro,,}/pool/main/z/zabbix-release/zabbix-release_4.4-1+$(lsb_release -sc)_all.deb"
    dpkg -i /tmp/zabbix.deb
    rm -f /tmp/zabbix.deb
}
|
|
|
|
# Back up a file to <file>.bak, or to <file>_1.bak when a .bak already
# exists.
_BACK_FILE() {
    # Robustness: fail with a message instead of letting cp error out when
    # no (existing) file is given.
    if [ -z "$1" ] || [ ! -f "$1" ]; then
        echo "Usage : bak <file>"
        return 1
    fi
    if [ ! -f "$1.bak" ]; then
        cp -f "$1" "$1.bak"
    else
        cp -f "$1" "$1_1.bak"
    fi
}
|
|
|
|
# Install the UrBackup client, enable server-confirmed restores, fetch the
# mysqldump helper and open the backup-server ports in UFW.
_URBACKUP_CLIENT() {
    local installer
    installer=$(mktemp) && wget "https://hndl.urbackup.org/Client/2.4.9/UrBackup%20Client%20Linux%202.4.9.sh" -O "$installer" && sudo sh "$installer"
    rm -f "$installer"
    sed -i 's/RESTORE=disabled/RESTORE="server-confirms"/' /etc/default/urbackupclient
    wget -O "$HOME/mysqldump.sh" vtb.cx/mysqldump
    chmod +x "$HOME/mysqldump.sh"
    if command_exists ufw; then
        local port
        for port in 35621 35622 35623; do
            ufw allow from 144.76.159.118 to any port "$port"
        done
        ufw reload
    fi
}
|
|
|
|
# Delete every WordOps site, printing per-site progress.
_DELETE_WO_SITES() {
    sites=$(wo site list 2>&1)
    for site in $sites; do
        echo -ne " deleting $site [..]\r"
        # Quoting fix: "$site" was unquoted when passed to wo; also dropped
        # the superfluous command group around the call.
        if wo site delete "$site" --force; then
            echo -e '\t[OK]'
        fi
    done
}
|
|
|
|
##################################
|
|
# ffmpeg
|
|
##################################
|
|
|
|
# Cut a video starting at a timestamp ($1), optionally limited to a
# duration in seconds ($3), stream-copying into "<input>-output.mp4".
ffmpeg_start_time() {
    if [ "$#" = "0" ]; then
        echo "ffmpeg-cut-start <start-time> <input> <duration(optional)>"
        echo ""
        echo "Example : ffmpeg-cut-start 00:34 file.mp4 300"
        echo ""
        echo "----------------------------------------------------------"
    else
        # Bug fix: the old bash -c "...'$1'..." string interpolation broke
        # on file names containing quotes/spaces and allowed shell
        # injection; invoke ffmpeg directly with quoted arguments.
        if [ -n "$3" ]; then
            ffmpeg -ss "$1" -t "$3" -y -i "$2" -c copy -movflags +faststart "${2%.mp4}-output.mp4"
        else
            ffmpeg -ss "$1" -y -i "$2" -c copy -movflags +faststart "${2%.mp4}-output.mp4"
        fi
    fi
}
|
|
|
|
# Stabilise every video in the cwd with ffmpeg's vid.stab two-pass flow,
# replacing each original with "<name>-stable.mp4" on success.
_STABILISE_VIDEO() {
    for i in "$PWD"/*; do
        echo "Processing vid $i ...${i%.*}"
        ffmpeg -i "$i" -vf vidstabdetect=stepsize=6:shakiness=5:accuracy=15:result=/tmp/transform_vectors.trf -f null -
        # Bug fix: only delete the source when the transform pass
        # succeeded; the old code removed the original unconditionally,
        # destroying it on a failed encode.
        if ffmpeg -i "$i" -vf vidstabtransform=input=/tmp/transform_vectors.trf:zoom=2:smoothing=10,unsharp=5:5:0.8:3:3:0.4 -c:v libx264 -preset slow -crf 18 -c:a copy -pix_fmt yuv420p -movflags +faststart "${i%.*}-stable.mp4"; then
            rm -f "$i"
        fi
        rm -f /tmp/transform_vectors.trf
    done
}
|
|
|
|
# Re-encode every file in the cwd to H.264/AAC mp4, replacing each
# original with "<name>-converted.mp4" on success.
_MP4_CONVERT() {
    for i in "$PWD"/*; do
        echo "Processing vid $i ...${i%.*}"
        # Bug fix: delete the source only when the conversion succeeded;
        # it was previously removed even after a failed encode.
        if ffmpeg -i "$i" -c:v libx264 -preset slow -crf 18 -c:a aac -b:a 192k -pix_fmt yuv420p -movflags +faststart "${i%.*}-converted.mp4"; then
            rm -f "$i"
        fi
    done
}
|
|
|
|
# Encode every file in the cwd with YouTube-friendly settings into
# "<name>-yt.mp4" (originals are kept).
_YOUTUBE_ENCODE() {
    local clip
    for clip in "$PWD"/*; do
        ffmpeg -i "$clip" -c:v libx264 -preset slow -crf 18 -c:a aac -b:a 192k -pix_fmt yuv420p -movflags +faststart "${clip%.*}-yt.mp4"
    done
}
|
|
|
|
# Concatenate videos (all files in the cwd, or those passed as arguments)
# into output-merged.mp4 via ffmpeg's concat demuxer.
_MERGE_VIDEO() {
    # Bug fix: the list file is 'mylist.txt', not 'mylist'; removing the
    # wrong name let entries from a previous aborted run leak into the
    # merge.
    rm -f mylist.txt
    if [ "$#" -eq 0 ]; then
        for i in "$PWD"/*; do
            echo "file '$i'" >>mylist.txt
        done
    else
        videos="$*"
        for video in $videos; do
            echo "file '$video'" >>mylist.txt
        done
    fi
    ffmpeg -f concat -safe 0 -i mylist.txt -c copy "output-merged.mp4"
    rm -f mylist.txt
}
|
|
|
|
# Copy the audio track of $1 onto the video of $2, writing
# "<2>-sound.mp4" (the intermediate .aac is kept beside $1).
_TRANSFER_AUDIO() {
    if [ "$#" -eq 0 ]; then
        echo "Usage : ffmpeg-audio <file-with-audio> <file-without-audio>"
        return
    fi
    local audio="${1%.*}.aac"
    rm -f "$audio"
    ffmpeg -i "$1" -vn -acodec copy "$audio"
    if [ -f "$audio" ]; then
        rm -f "${2%.*}-sound.mp4"
        ffmpeg -i "$2" -i "$audio" -map 0:v -map 1:a -c copy -y "${2%.*}-sound.mp4"
    fi
}
|
|
|
|
# Encode one file — or with -d every file in the cwd — to x265 mp4
# ("<name>-x265.mp4").
_x265_encode() {
    case "$#" in
    0)
        echo "Usage : ffmpeg-x265 <file> <options>"
        echo "Options :"
        echo " -d : process all files in the current directory"
        ;;
    1)
        if [ "$1" = "-d" ]; then
            local clip
            for clip in "$PWD"/*; do
                ffmpeg -i "$clip" -c:v libx265 -crf 28 -c:a aac -b:a 160k "${clip%.*}-x265.mp4"
            done
        elif [ -f "$1" ]; then
            ffmpeg -i "$1" -c:v libx265 -crf 28 -c:a aac -b:a 160k "${1%.*}-x265.mp4"
        fi
        ;;
    esac
}
|
|
|
|
# Rename every .mp4 in the cwd to append its pixel width
# ("<name>-<width>.mp4"), probed with ffprobe.
_identify_resolution() {
    for i in "$PWD"/*.mp4; do
        # Idiom fix: one jq filter instead of piping jq into jq.
        width=$(ffprobe -v error -print_format json -show_format -show_streams -show_chapters "$i" | jq -r '.streams[0].width')
        mv "$i" "${i%.*}-$width.mp4"
    done
}
|
|
|
|
# enable color support of ls and also add handy aliases
# some more ls aliases
#alias wp='/usr/bin/wp --allow-root'

# --- navigation shortcuts ---
alias .....="cd ../../../.."
alias ....="cd ../../.."
alias ...="cd ../.."
alias ..="cd .."
# --- system / apt helpers ---
alias allservices='service --status-all'
alias apt-bionic-kernel=_APT_BIONIC_KERNEL
alias apt-repo-ubuntu=_APT_REPO_UBUNTU
alias aptremove='apt-get autoremove -y --purge'
alias arsync_hard='rsync -rLptgoD --human-readable --progress'
alias arsync='rsync -avz -h --progress'
alias bppackage=_BACKPORT_PACKAGE
alias cheat=CHEAT_CHECK
alias commit=GIT_COMMIT
alias dd-benchmark=DD_BENCHMARK_DISK
alias dir='dir --color=auto'
alias docker-setup=EE_DOCKER_SETUP
alias duplicate-finder=_find_duplicates
alias ee-bashrc-update=update_git_mybashrc
alias ee-ls=LIST_BY_SIZE
alias ee-mysql-optimize=EE_MYSQL_OPTIMIZATION
alias ee-syslog='tail -n 250 /var/log/syslog | ccze -A'
alias egrep='egrep --color=auto'
# --- ffmpeg helpers ---
alias ffmpeg-cut-start=ffmpeg_start_time
alias ffmpeg-merge=_MERGE_VIDEO
alias ffmpeg-audio=_TRANSFER_AUDIO
alias ffmpeg-x265=_x265_encode
alias ffmpeg-resolution=_identify_resolution
alias ftpadd=_FTP_ADD
alias fgrep='fgrep --color=auto'
alias gg="ping google.fr"
alias gpg-crypt=encrypt_gpg
alias gpg-decrypt=decrypt_gpg
alias gpigz=compress_pigz
alias grep='grep --color=auto'
alias gunpigz=decompress_pigz
alias install-nanorc=_INSTALL_NANORC
alias IPPA=_PPA_INSTALL
# --- ls / listing ---
alias l='ls -CF'
alias la='ls -A'
alias ld='du -sh ./* | sort -h'
alias lh="stat -c '%A %a %n' ./*"
alias ll='ls -alhF'
alias ls='ls --color=auto'
alias lxclogin=_LXC_LOGIN
alias lxclaunch=_LXC_LAUNCH
alias magento-upgrade=MAGENTO_UPGRADE
alias maintenance=MAINTENANCE_APT
alias nano='nano -E'
# NOTE(review): _FIX_NETDATA is not defined in this file — presumably
# provided by another sourced script; verify before relying on it.
alias netdata-fix=_FIX_NETDATA
alias nginx-ee=_NGINX_EE
alias npm-update=_UPDATE_NPM
alias passwdgen=_PWGEN
alias pip='python3 -m pip'
alias random-commit=RANDOM_GIT_COMMIT
alias rsyslog-ufw=_RSYSLOG_UFW
alias setdirperm=_PERM_FOLDER
alias setfileperm=_PERM_FILES
alias setup-nodejs=_INSTALL_NODEJS
alias setup-tools=_SETUP_TOOLS
alias show-fpm-process=EE_SHOW_FPM
alias showlog=EE_SHOW_LOG
alias sitespeed=_SITESPEED
alias ssh-secure=_SSH_SECURE
alias start-ssh=_START_SSH_AGENT
alias sysinfo=_SYSINFO
alias tinc-down=SET_TINC_DOWN
alias tinc-up=SET_TINC_UP
alias transfer=transfer_vtbox_net
alias ufw-minimal=_UFW_MINIMAL
alias vb-virtualenv=_PYTHON_VIRTUALENV
alias vdir='vdir --color=auto'
alias wo-virtualenv=_PYTHON_VIRTUALENV
alias wp-fix-perms=_WP_PERMISSIONS
# --- WordOps / misc ---
alias upgrade-wo-full=_WO_FULL_UPGRADE
alias speedtestnet="_run _SPEEDTEST 'Installing speedtest'"
alias SSETUP=_SETUP_CONTROLLER
alias journalctfail='journalctl -b -p err -S "yesterday"'
alias zabbix-update=_ZABBIX_UPDATE
alias bak=_BACK_FILE
alias update-wo-fast='python3 -m pip install -I "git+git://github.com/WordOps/WordOps.git@updating-configuration#egg=wordops"'
alias clone-wordops='git clone https://github.com/WordOps/WordOps.git -b updating-configuration'
alias urbackup-client-setup=_URBACKUP_CLIENT
alias delete-all-site=_DELETE_WO_SITES
alias stabilise-video=_STABILISE_VIDEO
alias yt-encode=_YOUTUBE_ENCODE
alias mp4convert=_MP4_CONVERT
alias suroot='sudo su - root'
alias apt-playbook='ansible-playbook /etc/ansible/playbooks/include/update-apt-servers.yml'
alias full-upgrade-playbook='ansible-playbook /etc/ansible/playbooks/update-all-servers.yml'
alias ssh-key-convert='ssh-keygen -i -f'
alias gzstd=compress_zstd
alias plesk-upgrade='plesk installer --select-release-latest --upgrade-installed-components'