Clean up all scripts to match bash linting rules

pg_db_dump_file: the log parameter never set the log folder; it was always overwritten

pg_drop_restore: a stray double ;; stopped the script from working

All: remove the -a flag for Amazon Linux. It only applied to Amazon Linux 1, which we no longer support or use; Amazon Linux 2 follows a Red Hat style layout, so the -r flag works there.
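
A minimal sketch of the base-path selection that remains after this change, using the same constants the script defines below (Debian stays the default):

    # -r switches to the Red Hat style prefix; everything else uses the Debian layout
    if [ "${REDHAT}" -eq 1 ]; then
        PG_BASE_PATH='/usr/pgsql-';
    else
        PG_BASE_PATH='/usr/lib/postgresql/';
    fi;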

Various other formatting updates

Change all fragile ls parsing to find calls
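
For example, picking the newest pg_global dump no longer parses ls output; a short sketch of the find-based pattern (with an extra cut added here to keep only the path):

    # old: file=$(ls -1t "$DUMP_FOLDER/pg_global"* | head -1);
    # print "<mtime><TAB><path>" per file, sort newest first, keep only the path column
    file=$(find "$DUMP_FOLDER" -name "pg_global*" -type f -printf "%Ts\t%p\n" | sort -nr | head -1 | cut -f 2-);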

Change all executable calls to pass their parameters as arrays instead of inline strings
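
Concretely, connection options are collected in a bash array and expanded with "${array[@]}", so each option stays a separate, properly quoted argument; a minimal sketch (host and port values are placeholders):

    # collect the shared connection options once
    PG_PARAMS=("-U" "postgres");
    PG_PARAMS+=("-h" "db.example.com");  # hypothetical host
    PG_PARAMS+=("-p" "5432");            # hypothetical port
    # quoted array expansion passes every element as its own argument
    psql "${PG_PARAMS[@]}" -Atq -c "SELECT 1;" template1;
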
2025-04-15 08:40:21 +09:00
parent 7d2f75366d
commit 806247996d
3 changed files with 341 additions and 259 deletions


@@ -1,11 +1,15 @@
#!/bin/bash
# Author: Clemens Schwaighofer
# Description:
# Drop and restore databases from a folder for dump files created by pg_db_dump_file.sh
function usage ()
{
cat <<- EOT
Restores a list of database dumps from a folder to a database server
Usage: ${0##/*/} -f <dump folder> [-j <JOBS>] [-e <ENCODING>] [-h <HOST>] [-r|-a] [-g] [-n]
Usage: ${0##/*/} -f <dump folder> [-j <JOBS>] [-e <ENCODING>] [-h <HOST>] [-r] [-g] [-n]
-e <ENCODING>: override global encoding, will be overruled by per file encoding
-p <PORT>: override default port from file.
@@ -13,7 +17,6 @@ function usage ()
-f: dump folder source. Where the database dump files are located. This option must be set
-j <JOBS>: How many jobs to run in parallel. If not set, 2 jobs are run in parallel
-r: use redhat base paths instead of debian
-a: use amazon base paths instead of debian
-g: do not import globals file
-n: dry run, do not import or change anything
EOT
@@ -26,7 +29,6 @@ HOST='';
_encoding='UTF8';
set_encoding='';
REDHAT=0;
AMAZON=0;
IMPORT_GLOBALS=1;
TEMPLATEDB='template0'; # truly empty for restore
DUMP_FOLDER='';
@@ -35,8 +37,10 @@ BC='/usr/bin/bc';
PORT_REGEX="^[0-9]{4,5}$";
OPTARG_REGEX="^-";
DRY_RUN=0;
PG_PARAM_HOST=();
PG_PARAM_PORT=();
# options check
while getopts ":f:j:h:p:e:granm" opt; do
while getopts ":f:j:h:p:e:grnm" opt; do
# pre test for unfilled
if [ "${opt}" = ":" ] || [[ "${OPTARG-}" =~ ${OPTARG_REGEX} ]]; then
if [ "${opt}" = ":" ]; then
@@ -68,47 +72,53 @@ while getopts ":f:j:h:p:e:granm" opt; do
esac
fi;
case $opt in
f|file)
# f|file)
f)
DUMP_FOLDER=$OPTARG;
;;
j|jobs)
# j|jobs)
j)
MAX_JOBS=${OPTARG};
;;
e|encoding)
# e|encoding)
e)
if [ -z "$encoding" ]; then
encoding=$OPTARG;
fi;
;;
h|hostname)
# h|hostname)
h)
if [ -z "$host" ]; then
# do not set if local name (uses socket)
if [ "$OPTARG" != "local" ]; then
host='-h '$OPTARG;
PG_PARAM_HOST=("-h" "${OPTARG}");
fi;
_host=$OPTARG;
HOST=$OPTARG;
fi;
;;
p|port)
# p|port)
p)
if [ -z "$port" ]; then
port='-p '$OPTARG;
PG_PARAM_PORT=("-p" "${OPTARG}");
_port=$OPTARG;
PORT=$OPTARG;
fi;
;;
g|globals)
# g|globals)
g)
IMPORT_GLOBALS=0;
;;
r|redhat)
# r|redhat)
r)
REDHAT=1;
;;
a|amazon)
AMAZON=1;
;;
n|dry-run)
# n|dry-run)
n)
DRY_RUN=1;
;;
m|help)
# m|help)
m)
usage;
exit 0;
;;
@@ -120,19 +130,16 @@ while getopts ":f:j:h:p:e:granm" opt; do
esac;
done;
if [ "$REDHAT" -eq 1 ] && [ "$AMAZON" -eq 1 ]; then
echo "You cannot set the -a and -r flag at the same time";
if [ "${ERROR}" -eq 1 ]; then
exit 0;
fi;
if [ "$REDHAT" -eq 1 ]; then
# Redhat base path (for non official packages this would be '/usr/pgsql-')
DBPATH_BASE='/usr/pgsql-'
elif [ "$AMAZON" -eq 1 ]; then
# Amazon paths (lib64 default amazon package)
DBPATH_BASE='/usr/lib64/pgsql';
PG_BASE_PATH='/usr/pgsql-'
else
# Debian base path
DBPATH_BASE='/usr/lib/postgresql/';
PG_BASE_PATH='/usr/lib/postgresql/';
fi;
# check that the port is a valid number
@@ -145,16 +152,15 @@ NUMBER_REGEX="^[0-9]{1,}$";
# find the max allowed jobs based on the cpu count
# because setting more than this is not recommended
# so this fails on vmware hosts where we have random cpus assigned
cpu=$(cat /proc/cpuinfo | grep "processor" | wc -l);
_max_jobs=${cpu##*: };
_max_jobs=$(nproc --all);
# if MAX_JOBS is not a number, or is smaller than 1 or greater than _max_jobs
if [ ! -z "${MAX_JOBS}" ]; then
if [ -n "${MAX_JOBS}" ]; then
# check that it is a valid number
if [[ ! ${MAX_JOBS} =~ ${NUMBER_REGEX} ]]; then
echo "Please enter a number for the -j option";
exit 1;
fi;
if [ ${MAX_JOBS} -lt 1 ] || [ ${MAX_JOBS} -gt ${_max_jobs} ]; then
if [ "${MAX_JOBS}" -lt 1 ] || [ "${MAX_JOBS}" -gt "${_max_jobs}" ]; then
echo "The value for the jobs option -j cannot be smaller than 1 or bigger than ${_max_jobs}";
exit 1;
fi;
@@ -202,15 +208,15 @@ function convert_time
{
timestamp=${1};
# round to four digits for ms
timestamp=$(printf "%1.4f" $timestamp);
timestamp=$(printf "%1.4f" "$timestamp");
# get the ms part and remove any leading 0
ms=$(echo ${timestamp} | cut -d "." -f 2 | sed -e 's/^0*//');
timestamp=$(echo ${timestamp} | cut -d "." -f 1);
ms=$(echo "${timestamp}" | cut -d "." -f 2 | sed -e 's/^0*//');
timestamp=$(echo "${timestamp}" | cut -d "." -f 1);
timegroups=(86400 3600 60 1); # day, hour, min, sec
timenames=("d" "h" "m" "s"); # day, hour, min, sec
output=( );
time_string='';
for timeslice in ${timegroups[@]}; do
for timeslice in "${timegroups[@]}"; do
# floor for the division, push to output
if [ ${BC_OK} -eq 1 ]; then
output[${#output[*]}]=$(echo "${timestamp}/${timeslice}" | bc);
@@ -222,16 +228,18 @@ function convert_time
done;
for ((i=0; i<${#output[@]}; i++)); do
if [ ${output[$i]} -gt 0 ] || [ ! -z "$time_string" ]; then
if [ ! -z "${time_string}" ]; then
if [ "${output[$i]}" -gt 0 ] || [ -n "$time_string" ]; then
if [ -n "${time_string}" ]; then
time_string=${time_string}" ";
fi;
time_string=${time_string}${output[$i]}${timenames[$i]};
fi;
done;
if [ ! -z ${ms} ]; then
if [ ${ms} -gt 0 ]; then
time_string=${time_string}" "${ms}"ms";
# milliseconds must be filled, and we also check that they are not the "nan" string
# that can appear in the original value
if [ -n "${ms}" ] && [ "${ms}" != "nan" ]; then
if [ "${ms}" -gt 0 ]; then
time_string="${time_string} ${ms}ms";
fi;
fi;
# just in case the time is 0
@@ -242,25 +250,22 @@ function convert_time
}
# default version (for folder)
DBPATH_VERSION='9.6/';
# if amazon remove "." from version
if [ "${AMAZON}" -eq 1 ]; then
DBPATH_VERSION=$(echo "${DBPATH_VERSION}" | sed -e 's/\.//');
fi;
DBPATH_BIN='bin/';
PG_PATH_VERSION='15/';
PG_PATH_BIN='bin/';
# postgresql binaries
DROPDB="dropdb";
CREATEDB="createdb";
CREATELANG="createlang";
PGRESTORE="pg_restore";
CREATEUSER="createuser";
PSQL="psql";
PG_DROPDB="dropdb";
PG_CREATEDB="createdb";
PG_CREATELANG="createlang";
PG_RESTORE="pg_restore";
PG_CREATEUSER="createuser";
PG_PSQL="psql";
# default port and host
EXCLUDE_LIST="pg_globals"; # space separated
LOGFILE="tee -a $LOGS/PG_RESTORE_DB_FILE.`date +"%Y%m%d_%H%M%S"`.log";
LOGFILE="tee -a "$LOGS/PG_RESTORE_DB_FILE.$(date +"%Y%m%d_%H%M%S").log"";
# get the count for DBs to import
db_count=`find $DUMP_FOLDER -name "*.sql" -print | wc -l`;
db_count=$(find "${DUMP_FOLDER}" -name "*.sql" -print | wc -l);
# start info
if [ "${DUMP_FOLDER}" = "." ]; then
_DUMP_FOLDER="[current folder]";
@@ -284,88 +289,94 @@ echo "= import logs: $LOGS" | $LOGFILE;
echo "" | $LOGFILE;
pos=1;
# go through all the files and import them into the database
MASTERSTART=`date +'%s'`;
master_start_time=`date +"%F %T"`;
MASTERSTART=$(date +"%s");
master_start_time=$(date +"%F %T");
# first import the pg_globals file if this is requested, default is yes
if [ "$IMPORT_GLOBALS" -eq 1 ]; then
start_time=`date +"%F %T"`;
START=`date +'%s'`;
start_time=$(date +"%F %T");
START=$(date +"%s");
# get the pg_globals file
echo "=[Globals Restore]=START=[$start_time]==================================================>" | $LOGFILE;
# get newest and only the first one
file=`ls -1t $DUMP_FOLDER/pg_global* | head -1`;
filename=`basename $file`;
# file=$(ls -1t "$DUMP_FOLDER/pg_global"* | head -1);
file=$(find "$DUMP_FOLDER" -name "pg_global*" -type f -printf "%Ts\t%p\n" | sort -nr | head -1);
filename=$(basename "$file");
# the last _ is for version 10 or higher
version=`echo $filename | cut -d "." -f 4 | cut -d "-" -f 2 | cut -d "_" -f 1`; # db version, without prefix of DB type
# db version, without prefix of DB type
version=$(echo "$filename" | cut -d "." -f 4 | cut -d "-" -f 2 | cut -d "_" -f 1);
# if this is < 10 then we need the second part too
if [ ${version} -lt 10 ]; then
version=$version'.'`echo $filename | cut -d "." -f 5 | cut -d "_" -f 1`; # db version, second part (after .)
if [ "${version}" -lt 10 ]; then
# db version, second part (after .)
version=$version'.'$(echo "$filename" | cut -d "." -f 5 | cut -d "_" -f 1);
fi;
# if amazon remove "." from version
if [ "${AMAZON}" -eq 1 ]; then
version=$(echo "${version}" | sed -e 's/\.//');
fi;
__host=`echo $filename | cut -d "." -f 5 | cut -d "_" -f 2`; # hostname of original DB, can be used as target host too
__port=`echo $filename | cut -d "." -f 5 | cut -d "_" -f 3`; # port of original DB, can be used as target port too
# hostname of original DB, can be used as target host too
__host=$(echo "$filename" | cut -d "." -f 5 | cut -d "_" -f 2);
# port of original DB, can be used as target port too
__port=$(echo "$filename" | cut -d "." -f 5 | cut -d "_" -f 3);
# override file port over given port if it differs and is valid
if [ -z $_port ] && [ "$__port" != $_port ] && [[ $__port =~ $PORT_REGEX ]] ; then
if [ -z "$_port" ] && [ "$__port" != "$_port" ] && [[ "$__port" =~ $PORT_REGEX ]] ; then
_port=$__port;
port='-p '$_port;
PG_PARAM_PORT=("-p" "$_port");
fi;
if [ -z "$_host" ] && [ "$__host" != "local" ]; then
_host=$__host;
host='-h '$_host;
PG_PARAM_HOST=("-h" "${_host}");
fi;
# create the path to the DB from the DB version in the backup file
if [ ! -z "$version" ]; then
DBPATH_VERSION_LOCAL=$version'/';
if [ -n "$version" ]; then
PG_PATH_VERSION_LOCAL="${version}/";
else
DBPATH_VERSION_LOCAL=$DBPATH_VERSION;
PG_PATH_VERSION_LOCAL="${PG_PATH_VERSION}";
fi;
DBPATH=$DBPATH_BASE$DBPATH_VERSION_LOCAL$DBPATH_BIN;
echo "+ Restore globals file: $filename to [$_host:$_port] @ `date +"%F %T"`" | $LOGFILE;
PG_PATH="${PG_BASE_PATH}${PG_PATH_VERSION_LOCAL}${PG_PATH_BIN}";
echo "+ Restore globals file: $filename to [$_host:$_port] @ $(date +"%F %T")" | $LOGFILE;
_PG_PARAMS=("-U" "postgres");
_PG_PARAMS+=("${PG_PARAM_HOST[@]}");
_PG_PARAMS+=("${PG_PARAM_PORT[@]}");
_PG_PARAMS+=("-f" "$file" "-e" "-q" "-X" "template1");
if [ ${DRY_RUN} -eq 0 ]; then
$DBPATH$PSQL -U postgres $host $port -f $file -e -q -X template1 | $LOGFILE;
"${PG_PATH}${PG_PSQL}" "${_PG_PARAMS[@]}" | $LOGFILE;
else
echo "$DBPATH$PSQL -U postgres $host $port -f $file -e -q -X template1" | $LOGFILE;
echo "${PG_PATH}${PG_PSQL} ${_PG_PARAMS[*]}" | $LOGFILE;
fi;
DURATION=$[ `date +'%s'`-$START ];
DURATION=$(($(date +"%s")-START));
printf "=[Globals Restore]=END===[%s]========================================================>\n" "$(convert_time ${DURATION})" | $LOGFILE;
fi;
for file in $DUMP_FOLDER/*.sql; do
start_time=`date +"%F %T"`;
START=`date +'%s'`;
for file in "$DUMP_FOLDER/"*.sql; do
start_time=$(date +"%F %T");
START=$(date +"%s");
echo "=[$pos/$db_count]=START=[$start_time]==================================================>" | $LOGFILE;
# the encoding
set_encoding='';
# get the filename
filename=`basename $file`;
filename=$(basename "$file");
# get the database, user
# default file name is <database>.<owner>.<encoding>.<type>-<version>_<host>_<port>_<date>_<time>_<sequence>
database=`echo $filename | cut -d "." -f 1`;
owner=`echo $filename | cut -d "." -f 2`;
__encoding=`echo $filename | cut -d "." -f 3`;
database=$(echo "$filename" | cut -d "." -f 1);
owner=$(echo "$filename" | cut -d "." -f 2);
__encoding=$(echo "$filename" | cut -d "." -f 3);
# the last _ part is for version 10
version=`echo $filename | cut -d "." -f 4 | cut -d "-" -f 2 | cut -d "_" -f 1`; # db version, without prefix of DB type
# db version, without prefix of DB type
version=$(echo "$filename" | cut -d "." -f 4 | cut -d "-" -f 2 | cut -d "_" -f 1);
# if this is < 10 then we need the second part too
if [ ${version} -lt 10 ]; then
version=$version'.'`echo $filename | cut -d "." -f 5 | cut -d "_" -f 1`; # db version, second part (after .)
if [ "${version}" -lt 10 ]; then
# db version, second part (after .)
version=$version'.'$(echo "$filename" | cut -d "." -f 5 | cut -d "_" -f 1);
fi;
# if amazon remove "." from version
if [ "${AMAZON}" -eq 1 ]; then
version=$(echo "${version}" | sed -e 's/\.//');
fi;
__host=`echo $filename | cut -d "." -f 5 | cut -d "_" -f 2`; # hostname of original DB, can be used as target host too
__port=`echo $filename | cut -d "." -f 5 | cut -d "_" -f 3`; # port of original DB, can be used as target port too
other=`echo $filename | cut -d "." -f 5 | cut -d "_" -f 2-`; # backup date and time, plus sequence
# hostname of original DB, can be used as target host too
__host=$(echo "$filename" | cut -d "." -f 5 | cut -d "_" -f 2);
# port of original DB, can be used as target port too
__port=$(echo "$filename" | cut -d "." -f 5 | cut -d "_" -f 3);
# backup date and time, plus sequence
# other=$(echo "$filename" | cut -d "." -f 5 | cut -d "_" -f 2-);
# override file port over given port if it differs and is valid
if [ -z $_port ] && [ "$__port" != $_port ] && [[ $__port =~ $PORT_REGEX ]] ; then
if [ -z "$_port" ] && [ "$__port" != "$_port" ] && [[ "$__port" =~ $PORT_REGEX ]] ; then
_port=$__port;
port='-p '$_port;
PG_PARAM_PORT=("-p" "$_port");
fi;
if [ -z "$_host" ] && [ "$__host" != "local" ]; then
_host=$__host;
host='-h '$_host;
PG_PARAM_HOST=("-h" "${_host}");
fi;
# override encoding (dangerous)
# check if we have a master override
@@ -374,19 +385,19 @@ for file in $DUMP_FOLDER/*.sql; do
fi;
# if no override encoding set first from file, then from global
if [ ! "$set_encoding" ]; then
if [ ! -z "$__encoding" ]; then
if [ -n "$__encoding" ]; then
set_encoding=$__encoding;
else
set_encoding=$_encoding;
fi;
fi;
# create the path to the DB from the DB version in the backup file
if [ ! -z "$version" ]; then
DBPATH_VERSION_LOCAL=$version'/';
if [ -n "$version" ]; then
PG_PATH_VERSION_LOCAL="${version}/";
else
DBPATH_VERSION_LOCAL=$DBPATH_VERSION;
PG_PATH_VERSION_LOCAL="${PG_PATH_VERSION}";
fi;
DBPATH=$DBPATH_BASE$DBPATH_VERSION_LOCAL$DBPATH_BIN;
PG_PATH="${PG_BASE_PATH}${PG_PATH_VERSION_LOCAL}${PG_PATH_BIN}";
# check this is skip or not
exclude=0;
for exclude_db in $EXCLUDE_LIST; do
@@ -397,61 +408,90 @@ for file in $DUMP_FOLDER/*.sql; do
if [ $exclude -eq 0 ]; then
# create user if not exist yet
# check query for user
user_oid=`echo "SELECT oid FROM pg_roles WHERE rolname = '$owner';" | $PSQL -U postgres $host $port -A -F "," -t -q -X template1`;
if [ -z $user_oid ]; then
echo "+ Create USER '$owner' for DB '$database' [$_host:$_port] @ `date +"%F %T"`" | $LOGFILE;
# for all calls
_PG_PARAMS_ALL=("-U" "postgres");
_PG_PARAMS_ALL+=("${PG_PARAM_HOST[@]}");
_PG_PARAMS_ALL+=("${PG_PARAM_PORT[@]}");
# for the call
_PG_PARAMS=("${_PG_PARAMS_ALL[@]}");
_PG_PARAMS+=("-A" "-F" "," "-t" "-q" "-X" "-c" "SELECT oid FROM pg_roles WHERE rolname = '$owner';" "template1");
# user_oid=$(echo "SELECT oid FROM pg_roles WHERE rolname = '$owner';" | $PSQL -U postgres $host $port -A -F "," -t -q -X template1);
user_oid=$("$PG_PSQL" "${_PG_PARAMS[@]}");
if [ -z "$user_oid" ]; then
echo "+ Create USER '$owner' for DB '$database' [$_host:$_port] @ $(date +"%F %T")" | $LOGFILE;
_PG_PARAMS=("${_PG_PARAMS_ALL[@]}");
_PG_PARAMS+=("-D" "-R" "-S" "$owner");
if [ ${DRY_RUN} -eq 0 ]; then
$CREATEUSER -U postgres -D -R -S $host $port $owner;
# "${PG_PATH}${PG_CREATEUSER}" -U postgres -D -R -S $host $port $owner;
"${PG_PATH}${PG_CREATEUSER}" "${_PG_PARAMS[@]}";
else
echo "$CREATEUSER -U postgres -D -R -S $host $port $owner";
echo "${PG_PATH}${PG_CREATEUSER} ${_PG_PARAMS[*]}";
fi;
fi;
# before importing the data, drop this database
echo "- Drop DB '$database' [$_host:$_port] @ `date +"%F %T"`" | $LOGFILE;
echo "- Drop DB '$database' [$_host:$_port] @ $(date +"%F %T")" | $LOGFILE;
_PG_PARAMS=("${_PG_PARAMS_ALL[@]}");
_PG_PARAMS+=("$database");
if [ ${DRY_RUN} -eq 0 ]; then
$DBPATH$DROPDB -U postgres $host $port $database;
"${PG_PATH}${PG_DROPDB}" "${_PG_PARAMS[@]}";
else
echo "$DBPATH$DROPDB -U postgres $host $port $database";
echo "${PG_PATH}${PG_DROPDB} ${_PG_PARAMS[*]}";
fi;
echo "+ Create DB '$database' with '$owner' [$_host:$_port] @ `date +"%F %T"`" | $LOGFILE;
echo "+ Create DB '$database' with '$owner' [$_host:$_port] @ $(date +"%F %T")" | $LOGFILE;
_PG_PARAMS=("${_PG_PARAMS_ALL[@]}");
_PG_PARAMS+=("-O" "$owner" "-E" "$set_encoding" "-T" "$TEMPLATEDB" "$database");
if [ ${DRY_RUN} -eq 0 ]; then
$DBPATH$CREATEDB -U postgres -O $owner -E $set_encoding -T $TEMPLATEDB $host $port $database;
# "${PG_PATH}${PG_CREATEDB}" -U postgres -O $owner -E $set_encoding -T $TEMPLATEDB $host $port $database;
"${PG_PATH}${PG_CREATEDB}" "${_PG_PARAMS[@]}";
else
echo "$DBPATH$CREATEDB -U postgres -O $owner -E $set_encoding -T $TEMPLATEDB $host $port $database";
echo "${PG_PATH}${PG_CREATEDB} ${_PG_PARAMS[*]}";
fi;
if [ -f $DBPATH$CREATELANG ]; then
echo "+ Create plpgsql lang in DB '$database' [$_host:$_port] @ `date +"%F %T"`" | $LOGFILE;
if [ -f "${PG_PATH}${PG_CREATELANG}" ]; then
echo "+ Create plpgsql lang in DB '$database' [$_host:$_port] @ $(date +"%F %T")" | $LOGFILE;
_PG_PARAMS=("${_PG_PARAMS_ALL[@]}");
_PG_PARAMS+=("plpgsql" "$database");
if [ ${DRY_RUN} -eq 0 ]; then
$DBPATH$CREATELANG -U postgres plpgsql $host $port $database;
# "${PG_PATH}${PG_CREATELANG}" -U postgres plpgsql $host $port $database;
"${PG_PATH}${PG_CREATELANG}" "${_PG_PARAMS[@]}";
else
echo "$DBPATH$CREATELANG -U postgres plpgsql $host $port $database";
echo "${PG_PATH}${PG_CREATELANG} ${_PG_PARAMS[*]}";
fi;
fi;
echo "% Restore data from '$filename' to DB '$database' using $MAX_JOBS jobs [$_host:$_port] @ `date +"%F %T"`" | $LOGFILE;
echo "% Restore data from '$filename' to DB '$database' using $MAX_JOBS jobs [$_host:$_port] @ $(date +"%F %T")" | $LOGFILE;
_PG_PARAMS=("${_PG_PARAMS_ALL[@]}");
_PG_PARAMS+=("-d" "$database" "-F" "c" "-v" "-c" "-j" "$MAX_JOBS" "$file");
if [ ${DRY_RUN} -eq 0 ]; then
$DBPATH$PGRESTORE -U postgres -d $database -F c -v -c -j $MAX_JOBS $host $port $file 2>$LOGS'/errors.'$database'.'$(date +"%Y%m%d_%H%M%S".log);
# "${PG_PATH}${PG_RESTORE}" -U postgres -d $database -F c -v -c -j $MAX_JOBS $host $port $file 2>"$LOGS/errors.${database}.$(date +"%Y%m%d_%H%M%S").log";
"${PG_PATH}${PG_RESTORE}" "${_PG_PARAMS[@]}" 2>"$LOGS/errors.${database}.$(date +"%Y%m%d_%H%M%S").log";
else
echo "$DBPATH$PGRESTORE -U postgres -d $database -F c -v -c -j $MAX_JOBS $host $port $file 2>$LOGS'/errors.'$database'.'$(date +"%Y%m%d_%H%M%S".log)";
echo "${PG_PATH}${PG_RESTORE} ${_PG_PARAMS[*]} 2>${LOGS}/errors.${database}.$(date +"%Y%m%d_%H%M%S").log";
fi;
# BUG FIX FOR POSTGRESQL 9.6.2 db_dump
# it does not dump the default public ACL so the owner of the DB cannot access the data, check if the ACL dump is missing and do a basic restore
if [ -z "$($DBPATH$PGRESTORE -l $file | grep -- "ACL - public postgres")" ]; then
echo "? Fixing missing basic public schema ACLs from DB $database [$_host:$_port] @ `date +"%F %T"`";
# it does not dump the default public ACL so the owner of the DB cannot access the data,
# check if the ACL dump is missing and do a basic restore
if ! "$("${PG_PATH}${PG_RESTORE}" -l "$file" | grep -q -- "ACL - public postgres")"; then
echo "? Fixing missing basic public schema ACLs from DB $database [$_host:$_port] @ $(date +"%F %T")";
# echo "GRANT USAGE ON SCHEMA public TO public;" | "${PG_PATH}${PG_PSQL}" -U postgres -Atq $host $port $database;
# echo "GRANT CREATE ON SCHEMA public TO public;" | "${PG_PATH}${PG_PSQL}" -U postgres -Atq $host $port $database;
# grant usage on schema public to public;
_PG_PARAMS=("${PG_PARAMS[@]}");
_PG_PARAMS+=("-Atq" "-c" "GRANT USAGE ON SCHEMA public TO public;" "${database}");
"${PG_PSQL}" "${_PG_PARAMS[@]}";
# grant create on schema public to public;
echo "GRANT USAGE ON SCHEMA public TO public;" | $DBPATH$PSQL -U postgres -Atq $host $port $database;
echo "GRANT CREATE ON SCHEMA public TO public;" | $DBPATH$PSQL -U postgres -Atq $host $port $database;
_PG_PARAMS=("${PG_PARAMS[@]}");
_PG_PARAMS+=("-Atq" "-c" "GRANT CREATE ON SCHEMA public TO public;" "${database}");
"${PG_PSQL}" "${_PG_PARAMS[@]}";
fi;
echo "$ Restore of data '$filename' for DB '$database' [$_host:$_port] finished" | $LOGFILE;
DURATION=$[ `date +'%s'`-$START ];
echo "* Start at $start_time and end at `date +"%F %T"` and ran for $(convert_time ${DURATION}) seconds" | $LOGFILE;
DURATION=$(($(date "+%s")-START));
echo "* Start at $start_time and end at $(date +"%F %T") and ran for $(convert_time ${DURATION}) seconds" | $LOGFILE;
else
DURATION=0;
echo "# Skipped DB '$database'" | $LOGFILE;
fi;
printf "=[$pos/$db_count]=END===[%s]========================================================>\n" "$(convert_time ${DURATION})" | $LOGFILE;
pos=$[ $pos+1 ];
pos=$((pos+1));
done;
DURATION=$[ `date +'%s'`-$MASTERSTART ];
DURATION=$(($(date "+%s")-MASTERSTART));
echo "" | $LOGFILE;
echo "= Start at $master_start_time and end at `date +"%F %T"` and ran for $(convert_time ${DURATION}) seconds. Imported $db_count databases." | $LOGFILE;
echo "= Start at $master_start_time and end at $(date +"%F %T") and ran for $(convert_time ${DURATION}) seconds. Imported $db_count databases." | $LOGFILE;