Compare commits
merge into: 0k:master
pull from: 0k:rsync-stats
3 Commits (master...rsync-stats)
| Author | SHA1 | Message | Date |
|---|---|---|---|
| Valentin Lab | 2fbfd0a8fa | fix: dev: [logrotate] clean code !minor | 3 weeks ago |
| Valentin Lab | 08beea697d | fix: [logrotate] rotate files with ``create`` command and numeric uid given | 3 weeks ago |
| Valentin Lab | c5b32875cc | new: [rsync-backup-target] manage sqlite database of log chunks for stats | 3 weeks ago |
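For context on commit 08beea697d: logrotate's ``create`` directive takes an optional mode, owner and group, and recent logrotate releases also accept numeric ids, which matters in minimal containers where the rotating uid has no passwd entry. A hedged sketch of such a stanza, driven from bash so it can be tried directly (the path, mode and ids are illustrative, not taken from this diff; run as root so the chown to a numeric uid succeeds):

```bash
#!/bin/bash
# Illustrative only: exercise logrotate's ``create`` directive with a
# numeric uid/gid (1000:1000 and the /tmp/demo path are made up).
mkdir -p /tmp/demo && echo "some log line" > /tmp/demo/app.log
cat > /tmp/demo/rotate.conf <<'EOF'
/tmp/demo/*.log {
    rotate 4
    create 0640 1000 1000
}
EOF
/usr/sbin/logrotate -f -v -s /tmp/demo/status /tmp/demo/rotate.conf
ls -ln /tmp/demo   # app.log recreated as 0640, owned by 1000:1000
```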
12 changed files with 256 additions and 31 deletions
- bind/hooks/log_rotate-relation-joined (6 changes)
- logrotate/build/Dockerfile (17 changes)
- logrotate/build/src/entrypoint.sh (7 changes)
- odoo-tecnativa/hooks/init (5 changes)
- odoo-tecnativa/hooks/log_rotate-relation-joined (6 changes)
- odoo-tecnativa/hooks/postgres_database-relation-joined (4 changes)
- odoo-tecnativa/lib/common (10 changes)
- rsync-backup-target/build/Dockerfile (17 changes)
- rsync-backup-target/build/src/usr/local/sbin/import-log-chunks (108 changes)
- rsync-backup-target/hooks/init (75 changes)
- rsync-backup-target/hooks/log_rotate-relation-joined (24 changes)
- rsync-backup-target/metadata.yml (8 changes)
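The heart of the new `import-log-chunks` script (shown in full below) is working on a frozen copy: it takes a read-only Btrfs snapshot of the live rsync log subvolume, parses that while the original can keep receiving writes, and removes the snapshot again through an EXIT trap. A minimal standalone sketch of that pattern, assuming an illustrative Btrfs subvolume path:

```bash
#!/bin/bash
# Process a stable, read-only view of a directory that is being written to.
# /srv/logs is illustrative and must be a btrfs subvolume.
src=/srv/logs
work="${src}.work"

btrfs subvolume snapshot -r "$src" "$work" || exit 1
trap 'btrfs subvolume delete "$work"' EXIT   # cleanup runs even on error

# ... work on the frozen copy while "$src" keeps changing ...
ls -l "$work"
```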
logrotate/build/src/entrypoint.sh:

```diff
@@ -1,13 +1,12 @@
 #!/bin/sh
 
 # Clean non existent log file entries from status file
-cd /var/lib/logrotate
+cd /var/lib/logrotate || return 1
 test -e status || touch status
 head -1 status > status.clean
-sed 's/"//g' status | while read logfile date
-do
+sed 's/"//g' status | while read -r logfile date; do
     [ -e "$logfile" ] && echo "\"$logfile\" $date"
 done >> status.clean
 mv status.clean status
 
-/usr/sbin/logrotate -v -s /var/lib/logrotate/status /etc/logrotate.conf
+exec /usr/sbin/logrotate -v -s /var/lib/logrotate/status /etc/logrotate.conf
```
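Two of the changes above are worth a note: `read -r` stops the shell from interpreting backslashes in the logfile paths read back from the status file, and `exec` lets logrotate replace the entrypoint shell, so it becomes the container's main process and receives signals directly. A quick demonstration of the `read -r` difference (the path is made up):

```bash
#!/bin/bash
line='"/var/log/foo\bar.log" 2024-1-1'
printf '%s\n' "$line" | while read    logfile date; do echo "read    -> $logfile"; done
printf '%s\n' "$line" | while read -r logfile date; do echo "read -r -> $logfile"; done
# read    -> "/var/log/foobar.log"   (backslash eaten)
# read -r -> "/var/log/foo\bar.log"  (preserved)
```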
rsync-backup-target/build/src/usr/local/sbin/import-log-chunks (new file):

```diff
@@ -0,0 +1,108 @@
+#!/bin/bash
+
+RSYNC_LOG_PATH="${RSYNC_LOG_PATH:-/var/log/rsync}"
+RSYNC_DB_FILE="${RSYNC_DB_FILE:-$RSYNC_LOG_PATH/logchunks.db}"
+RSYNC_FAILED_CHUNKS_PATH="${RSYNC_FAILED_CHUNKS_PATH:-$RSYNC_LOG_PATH/failed_chunks}"
+
+is_btrfs_subvolume() {
+    local dir=$1
+    [ "$(stat -f --format="%T" "$dir")" == "btrfs" ] || return 1
+    inode="$(stat --format="%i" "$dir")"
+    case "$inode" in
+        2|256)
+            return 0;;
+        *)
+            return 1;;
+    esac
+}
+
+time_now() { date +%s.%3N; }
+time_elapsed() { echo "scale=3; $2 - $1" | bc; }
+
+
+
+if ! [ -d "$RSYNC_LOG_PATH" ]; then
+    echo "Error: RSYNC_LOG_PATH is not a directory: $RSYNC_LOG_PATH" >&2
+    exit 1
+fi
+
+if ! is_btrfs_subvolume "$RSYNC_LOG_PATH"; then
+    echo "Error: RSYNC_LOG_PATH is not a Btrfs subvolume: $RSYNC_LOG_PATH" >&2
+    exit 1
+fi
+
+for cmd in btrfs logchunk; do
+    if ! type -p "$cmd" >/dev/null; then
+        echo "Error: $cmd command not found" >&2
+        exit 1
+    fi
+done
+
+if ! [ -d "$RSYNC_FAILED_CHUNKS_PATH" ]; then
+    mkdir -p "$RSYNC_FAILED_CHUNKS_PATH" || {
+        echo "Error: Failed to create RSYNC_FAILED_CHUNKS_PATH directory: $RSYNC_FAILED_CHUNKS_PATH" >&2
+        exit 1
+    }
+fi
+
+rsync_log_work_dir="${RSYNC_LOG_PATH}.logchunk"
+if [ -e "$rsync_log_work_dir" ]; then
+    echo "Error: RSYNC_LOG_PATH work directory already exists: $rsync_log_work_dir" >&2
+    exit 1
+fi
+
+btrfs subvolume snapshot -r "$RSYNC_LOG_PATH" "$rsync_log_work_dir" || {
+    echo "Error: Failed to create snapshot of RSYNC_LOG_PATH" >&2
+    exit 1
+}
+trap "btrfs subvolume delete '$rsync_log_work_dir'" EXIT
+
+start=$(time_now)
+
+for log_file in "$rsync_log_work_dir"/target_*_rsync.log; do
+    ident="${log_file##*/}"
+    ident="${ident#target_}"
+    ident="${ident%_rsync.log}"
+    errors=0
+    chunks=0
+    start_ident=$(time_now)
+    start_log_line="${start_ident%.*}"
+    echo "$ident:"
+    last_chunk_count=0
+    last_error_count=0
+    while true; do
+        logchunk next -c logchunk "$log_file" |
+            logchunk import "${RSYNC_DB_FILE}" "$ident" "$RSYNC_FAILED_CHUNKS_PATH" 2>&1 |
+            sed -r "s/^/ | /"
+        pipe_status=("${PIPESTATUS[@]}")
+        if [ "${pipe_status[0]}" == 1 ]; then
+            ## no new chunks
+            break
+        fi
+        if [ "${pipe_status[0]}" == 127 ]; then
+            echo "Error: fatal !" >&2
+            exit 1
+        fi
+        errlvl="${pipe_status[1]}"
+        if [ "$errlvl" != 0 ]; then
+            errors=$((errors + 1))
+        fi
+        chunks=$((chunks + 1))
+        now=$(time_now)
+        now="${now%.*}"
+        if [ $((now - start_log_line)) -gt 15 ]; then
+            rate=$(echo "scale=2; ($chunks - $last_chunk_count) / ($now - $start_log_line)" | bc)
+            echo " |~ processed $((chunks - last_chunk_count)) chunks with $((errors - last_error_count)) errors ($rate chunks/s)"
+            start_log_line="$now"
+            last_chunk_count=$chunks
+            last_error_count=$errors
+        fi
+    done
+    if [ "$chunks" != 0 ]; then
+        elapsed_ident="$(time_elapsed "$start_ident" "$(time_now)")" || exit 1
+        echo " .. processed $chunks chunks with $errors errors in ${elapsed_ident}s"
+    fi
+done
+
+elapsed="$(time_elapsed "$start" "$(time_now)")" || exit 1
+echo "Processed all logs in ${elapsed}s"
```