#!/bin/bash
# Name: Statique Littérateur
# Type: Manage sync from wip to www
# file: manage__publish
# Folder: /var/lib/stl/scripts/
# By echolib (XMPP: im@echolib.re)
# License: GNU AFFERO GENERAL PUBLIC LICENSE Version 3, 19 November 2007
#------------
# funny stats
#------------
# lines: 239
# functions: 5
#-----------------------------------------------------------------------
#**********************************************************************
#======================================================================
# Manage the sync argument:
# if an article is given, sync only that article,
# else get the wip status of all articles from the DB and sync them all
#======================================================================
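# Usage sketch (the exact CLI is an assumption; only "stl rss" is named in
# the comments below, "stl publish" is inferred from this file's purpose):
#   stl publish all          # full wip/ -> www/ rsync with --delete
#   stl publish article.stl  # sync a single checked and converted article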
publish__OPTIONS() {
domain__get
if [[ "$stl_install" == "local" ]];then
echo "! $stl_install Installation. Useless sync"
exit
fi
# Sync the base images, files and template folders from wip to www
sync__files "$domain_dir_wip_images/" \
"$domain_dir_www_images/" \
"WWW"
sync__files "$domain_dir_wip_files/" \
"$domain_dir_www_files/" \
"WWW"
sync__files "$domain_dir_wip_tpl/" \
"$domain_dir_www_tpl/" \
"WWW"
case "$2" in
all)
echo "! You are about to rsync --delete from wip/ to www/"
read -rp "- Are you sure (Y|*) ? " ask_publish
case "$ask_publish" in
Y|y)
echo
rsync -a --delete "$domain_dir_wip/" "$domain_dir_www/"
this_article="$domain_dir_wip/"
log__add -w -W -rs "To: $domain_dir_www/"
db__get_wip_articles "set_www_hash" # Create ALL www hash in DB
return
;;
*)
echo "# Maybe later..."
exit
;;
esac
;;
*".stl")
article__hash "$2"
if__article_db "$article_db"
if ! [[ $db_exists ]];then
echo "! $this_article must be checked first"
exit
elif ! [[ $article_wip_hash ]];then
echo "! $this_article must be converted first"
exit
elif [[ $old_wip ]] || [[ $www_is_wip ]];then
echo "$sync_mess"
read -rp "- Sync it online (Y|*) ? " sync_ask
case "$sync_ask" in
Y|y) true ;;
*) exit ;;
esac
fi
;;
*)
noarg "$2" "(ARTICLE), all"
;;
esac
# Start sync wip to www
get__article_files_sync WWW
# Write wip hash to DB
db__srv_status_hash WWW set
# RSS is created by the publish command
# On a local install, one has to run: stl rss
create__RSS_feed && \
sync__files "$domain_dir_wip/rss.xml" "$domain_dir_www/rss.xml" "WWW"
# Statistics
if [[ $domain_stats == "yes" ]];then
stats__init
db__get_wip_articles "stats"
stats__statoolinfos
sync__files "$domain_dir_wip/stl-stats.properties" \
"$domain_dir_www/stl-stats.properties" \
"WWW"
fi
}
#======================================================================
# Called from
# - publish__OPTIONS()
# - create__HTML_page()
# Sync the article files (.html, plus the files included in the article)
# $1: WIP, WWW
#======================================================================
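# Example call (as used in publish__OPTIONS above):
#   get__article_files_sync WWW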
get__article_files_sync() {
case "$1" in
WIP)
dir_source="$domain_dir_articles"
dir_target="$domain_dir_wip"
;;
WWW)
dir_source="$domain_dir_wip"
dir_target="$domain_dir_www"
# Copy html file from wip to www
if ! [[ "$article_dir_srv" ]];then
sync__files "$dir_source$article_uri_srv" \
"$dir_target$article_uri_srv" \
"$1"
else
sync__files "$dir_source$article_uri_srv" \
"$dir_target$article_uri_srv" \
"$1" \
"$dir_target$article_dir_srv"
fi
;;
esac
srv="$1"
# Copy the article's .stl source file to the target dir
sync__files "$dir_source$article_uri_src" \
"$dir_target$article_uri_src" \
"$1" \
"$dir_target$article_dir_srv/"
# Sync included files used by article
get__article_files_inluded
}
#======================================================================
# Search and get the URI files used by the article
# Sync or remove them
# Also called by remove__OPTIONS()
#======================================================================
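# Assumption, inferred from the parsing in if__article_file below: each
# Image_N / File_N entry in $db_file carries its path between single quotes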
get__article_files_inluded() {
# Get the highest entry index; grep -o + cut keeps indexes past 9 intact
images_nbr=$(grep -o "Image_[0-9]*" "$db_file" | tail -n 1 | cut -c7-)
if__article_file "Image_" "$images_nbr" "$srv"
files_nbr=$(grep -o "File_[0-9]*" "$db_file" | tail -n 1 | cut -c6-)
if__article_file "File_" "$files_nbr" "$srv"
}
#======================================================================
# Called from
# - get__article_files_inluded()
# - remove__OPTIONS()
# For each file found in the article (image, file, code...),
# except those in the base folders: sync or remove it
# $1: type of file (var name)
# $2: number from type of file
# $3: WIP,WWW ($log_type)
#======================================================================
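# Example calls (as used in get__article_files_inluded above):
#   if__article_file "Image_" "$images_nbr" "$srv"
#   if__article_file "File_" "$files_nbr" "$srv"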
if__article_file() {
[[ "$2" ]] || return
for i in $(seq 1 "$2")
do
# Extract the needed URI, removing the root dir
# -w keeps e.g. Image_1 from also matching Image_10 when indexes go past 9
file_uri=$(grep -w "$1$i" "$db_file" | awk -F"'" '{print $2}')
file_uri="${file_uri/$domain_dir_articles}"
uri_bsn=$(basename "$file_uri")
uri_dir=${file_uri/$uri_bsn}
# Base folders (/images/, /files/) are handled separately: skip them
if [[ "$file_uri" =~ ^"/images/" ]] || \
[[ "$file_uri" =~ ^"/files/" ]];then
continue
else
if [[ $wip_remove || $www_remove ]];then
if [[ -f "$dir_target$file_uri" ]];then
rm -f "$dir_target$file_uri"
this_article="$dir_target$file_uri"
log__add -w "$3" -rm \
"file from server"
fi
else
sync__files "$dir_source$file_uri" \
"$dir_target$file_uri" \
"$3" \
"$dir_target$uri_dir"
fi
fi
done
}
#======================================================================
# Generic sync
# rsync and log only when a sync is needed (checked with diff)
# $1: source
# $2: target
# $3: WIP,WWW ($log_type)
# $4: dirs (to create/check with mkdir -p)
#======================================================================
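# Example call (as used in publish__OPTIONS above):
#   sync__files "$domain_dir_wip/rss.xml" "$domain_dir_www/rss.xml" "WWW"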
sync__files() {
[[ "$4" && ! -d "$4" ]] && mkdir -p "$4" # Create/check needed folders
diff -r -q "$1" "$2" &>/dev/null
(( $? == 0 )) && return # Source and target already match: nothing to sync
rsync -a --exclude ".*" --delete "$1" "$2"
this_article="$1"
log__add -i "$3" -rs "To: $2"
}