#!/bin/bash
# Name: Statique Littérateur
# Type: Database manager
# file: manage__db
# Folder: /var/lib/stl/scripts/
# By echolib (XMPP: im@echolib.re)
# License: GNU AFFERO GENERAL PUBLIC LICENSE Version 3, 19 November 2007
#------------
# funny stats
#------------
# lines: 339
# functions: 7
#-----------------------------------------------------------------------
#**********************************************************************
#======================================================================
# argument: db
#======================================================================
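# Usage sketch (assumption: the stl front-end dispatches here with the
# sub-command in $1 and its argument in $2, e.g. for "stl db ..."):
#   db__OPTIONS db my-article.stl   # show the database of one article
#   db__OPTIONS db                  # same as "list": table of all databases
#   db__OPTIONS db sidebar          # only articles with a sidebar position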
db__OPTIONS() {
case "$2" in
*".stl")
unset article_log
article__hash "$2"
if ! [[ -f "$domain_db_articles/$article_db" ]];then
echo "! No database yet for this article"
else
cat "$domain_db_articles/$article_db"
fi
exit
;;
''|list)
db__list
;;
sidebar)
db_sidebar=true
db__list
;;
*)
noarg "$2"
;;
esac
}
#======================================================================
# - Check if db exists for this article
# - Source database
# - Compare the current article hash with the checked one stored in the db
# $1: article_db (ID from uri_article)
#======================================================================
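# Usage sketch (assumption: article__hash has already set article_db and
# article_hash for the article being processed):
#   if__article_db "$article_db"
#   [[ $db_exists ]]  || echo "first check for this article"
#   [[ $need_check ]] && echo "source changed since last check"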
if__article_db() {
db_file="$domain_db_articles/$1"
if [[ -f "$db_file" ]];then
source "$db_file"
compare__db
db_exists=true
fi
}
#======================================================================
# Compare article hashes from the DB to decide whether a check is needed
#======================================================================
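# Flags set here (assumption: they are read by the check/publish steps
# elsewhere in stl):
#   need_check=true   article_hash differs from the stored article_chk_hash
#   old_wip=true      the WIP hash is older than the checked hash
#   www_is_wip=true   the online (WWW) copy already matches the WIP hash
#   check_force=true  the previous check ended with article_error='yes'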
compare__db() {
[[ $article_hash != $article_chk_hash ]] \
&& need_check=true
[[ $article_wip_hash ]] && \
[[ $article_chk_hash != $article_wip_hash ]] \
&& old_wip=true \
&& sync_mess="! $this_article is newer (WIP is older)"
if [[ $stl_install == "server" && $article_wip_hash ]] && \
[[ $article_wip_hash == $article_www_hash ]] && \
[[ -f $domain_dir_www$article_uri_srv ]];then
www_is_wip=true
sync_mess="! $this_article is already up to date online"
# Need to also check each "external" article's files
fi
[[ $article_error == 'yes' ]] \
&& check_force=true
}
#======================================================================
# Print article data to its database file
# db_file is URI/$article_db (from article_id)
# $1: $db_file
#======================================================================
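# Usage sketch (assumption: the check step has exported the article_* and
# stat_* variables referenced below before calling this function):
#   db_file="$domain_db_articles/$article_id.db"
#   db__print
#   source "$db_file"   # the .db file is plain key=value bash, safe to source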
db__print() {
! [[ -f "$db_file" ]] \
&& log_db_written="New data written" \
|| log_db_written="Data updated"
# Set some variables
article_page_srv=${article_name/.stl/.html}
article_uri_srv="$article_dir_srv/$article_page_srv"
article_uri_src="$article_dir_srv/$article_name"
[[ $stl_error ]] \
&& article_error='yes' \
|| article_error='no'
cat <<EODBPRINT > "$db_file"
# Database
database_id=$article_id
database_file='$article_id.db'
database_uri='$db_file'
# File metas
article_name='$article_name'
article_size=$article_size
# URI file
article_uri='$uri_article'
article_uri_src='$article_uri_src'
# URI Server
article_dir_srv='$article_dir_srv'
article_page_srv='$article_page_srv'
article_uri_srv='$article_uri_srv'
sub_genuri_srv='$sub_genuri_srv'
# Statuses
article_error='$article_error'
article_chk_hash=$article_hash
article_wip_hash=$article_wip_hash
$(
[[ "$stl_install" == "server" ]] \
&& echo "article_www_hash=$article_www_hash"
)
# Sidebar
sidebar_position=$sidebar_position
# Article Metas
article_date='$article_Date'
article_timestamp=$date_epoch
article_author="$article_Author"
article_title="$article_Title"
article_about="$article_About"
article_tags="$article_Tags"
# Include files
$(
cat "$article_tmp_db"
)
# Statistics
article_words=$article_words
article_titles=$stat_titles
article_paragraphs=$stat_paragraphs
article_links=$stat_links
article_quotes=$stat_quotes
article_lists=$stat_lists
article_bolds=$article_bolds
article_strongs=$article_strongs
article_emphasis=$article_emphasis
article_icodes=$article_icodes
article_cross=$article_cross
article_dels=$article_dels
files_images=$stat_images
files_links=$stat_link_files
files_codes=$stat_codes
files_bruts=$stat_bruts
EODBPRINT
this_article="$db_file"
log__add -i -db -W \
"'$article_Title'. $log_db_written"
}
#======================================================================
# Print all databases in a table with article data
#======================================================================
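# Output sketch (illustrative layout, values come from each sourced .db file):
#   N  ID  TITLE  ERROR  DATE  AUTHOR  CHECK  WIP  SIDEBAR  [WWW]
# N is a simple counter, or the sidebar_position when called via "sidebar";
# the WWW column is only printed when stl_install is "server".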
db__list() {
domain__get
db_tmp_list=`mktemp`
[[ "$stl_install" == "server" ]] \
2022-08-24 12:38:38 +02:00
&& echo "0|ID|TITLE|ERROR|DATE|AUTHOR|CHECK|WIP|SIDEBAR|WWW" \
2022-07-20 19:37:21 +02:00
>> "$db_tmp_list" \
2022-08-24 12:38:38 +02:00
|| echo "0|ID|TITLE|ERROR|DATE|AUTHOR|CHECK|WIP|SIDEBAR" \
2022-07-20 19:37:21 +02:00
>> "$db_tmp_list"
while read -r "db_file"
do
source "$db_file"
if [[ $db_sidebar ]];then
if ! [[ $sidebar_position ]];then
continue
else
db_nbr=$sidebar_position
fi
else
((db_nbr++))
fi
[[ $article_chk_hash != $article_wip_hash ]] \
&& wip_status="${CY}$article_wip_hash${NC}" \
|| wip_status="$article_wip_hash"
[[ $article_error == "yes" ]] \
&& err_status="${CR}$article_error${NC}" \
|| err_status="$article_error"
printf '%s%s%s%b%s%s%s%b%s' \
"$db_nbr|" \
"$database_id|" \
"$article_title|" \
"$err_status|" \
"$article_date|" \
"$article_author|" \
"$article_chk_hash|" \
"$wip_status|" \
"$sidebar_position|" \
>> "$db_tmp_list"
if [[ "$stl_install" == "server" ]];then
printf '%s%s\n' \
"$article_www_hash" \
>> "$db_tmp_list"
fi
done < <(ls -1 "$domain_db_articles/"*.db 2>/dev/null)
if ! (( `cat "$db_tmp_list" | wc -l` > 1 ));then
rm -f "$db_tmp_list"
exit
else
if [[ $db_sidebar ]];then
cat "$db_tmp_list" | column -t -s'|' -o' ' | sort -n -k1
else
cat "$db_tmp_list" | column -t -s'|' -o' '
fi
rm -f "$db_tmp_list"
fi
exit
}
#======================================================================
# Get articles from the databases that have a WIP status
# Generic function used to:
# - "set_www_hash": set the WWW hash (for "stl publish all")
# - "stats":        build statistics
# $1: set_www_hash | stats
#======================================================================
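# Usage sketch (set_www_hash is called from sync_wip_to_www() in manage__HTML;
# the "stats" caller is assumed to be the statistics step):
#   db__get_wip_articles set_www_hash   # copy each WIP hash to the WWW hash
#   db__get_wip_articles stats          # accumulate the *_www / wip counters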
db__get_wip_articles() {
while IFS=: read -r 'db_file' 'article_wip'
do
[[ `awk -F= '{print $2}' <<<"$article_wip"` -gt 0 ]] || continue
source "$db_file"
this_article="$article_uri_src"
case "$1" in
# From sync_wip_to_www() ; manage__HTML
set_www_hash)
db__srv_status_hash WWW Set
;;
# Generate statistics
stats)
case "$stl_install" in
server)
[[ $article_www_hash -gt 0 ]] || continue
((stat_articles_www++))
article_words_www=$((article_words + article_words_www))
article_titles_www=$((article_titles + article_titles_www))
article_paragraphs_www=$((article_paragraphs + article_paragraphs_www))
article_links_www=$(( article_links + article_links_www))
article_quotes_www=$((article_quotes + article_quotes_www))
article_lists_www=$((article_lists + article_lists_www))
article_bolds_www=$((article_bolds + article_bolds_www))
article_strongs_www=$((article_strongs + article_strongs_www))
article_emphasis_www=$((article_emphasis + article_emphasis_www))
article_icodes_www=$((article_icodes + article_icodes_www))
article_cross_www=$((article_cross + article_cross_www))
article_dels_www=$((article_dels + article_dels_www))
files_images_www=$((files_images + files_images_www))
files_links_www=$((files_links + files_links_www))
files_codes_www=$((files_codes + files_codes_www))
files_bruts_www=$((files_bruts + files_bruts_www))
;;
local)
((stat_articles_wip++))
;;
esac
;;
esac
done < <(grep -H "article_wip_hash" "$domain_db_articles/"*.db)
}
#======================================================================
# Set the WIP or WWW status hash in the article database
# $1: WIP, WWW
# $2: Set, Unset
#======================================================================
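# Usage sketch (assumption: db_file, article_title and the hashes are already
# set for the current article):
#   db__srv_status_hash WIP Set     # article_wip_hash := current article_hash
#   db__srv_status_hash WWW Set     # article_www_hash := current article_wip_hash
#   db__srv_status_hash WIP Unset   # clear the stored WIP hash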
db__srv_status_hash() {
[[ "$2" == "Unset" ]] \
&& unset article_hash article_wip_hash
case "$1" in
WIP)
log_hash=" $article_hash"
sed -i "s^article_wip_hash=.*^article_wip_hash=$article_hash^" \
"$db_file"
;;
WWW)
log_hash=" $article_wip_hash"
sed -i "s^article_www_hash=.*^article_www_hash=$article_wip_hash^" \
"$db_file"
;;
esac
# Log
log__add -i -db -A "$2 $1 status: $article_title.$log_hash"
}