#!/bin/bash
# Name: Statique Littérateur
# Type: Database manager
# File: manage__db
# Folder: /var/lib/stl/scripts/
# By echolib (XMPP: im@echolib.re)
# License: GNU AFFERO GENERAL PUBLIC LICENSE Version 3, 19 November 2007
#======================================================================
# argument: db
#======================================================================
db__OPTIONS() {
case "$2" in
*".stl")
unset article_log
article__hash "$2"
cat "$domain_db_articles/$article_db"
exit
;;
''|list)
db__list
;;
*)
noarg "$2"
;;
esac
}
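# Usage sketch (illustrative only; assumes the main stl entry point forwards
# its arguments to db__OPTIONS, as the "argument: db" header suggests):
#   stl db my-article.stl   # dump that article's database file
#   stl db list             # print the table built by db__list
#   stl db                  # same as "list"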
#======================================================================
# - Check if db exists for this article
# - Source database
# - Compare article hash and chk one in db
# $1: article_db (ID from uri_article)
#======================================================================
if__article_db() {
db_file="$domain_db_articles/$1"
if [[ -f "$db_file" ]];then
source "$db_file"
compare__db
fi
}
# Compare current article hash with the checked hash stored in DB
compare__db() {
if [[ "$article_hash" == "$article_chk_hash" ]];then
nocheck=true
else
need_check=true
fi
}
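# Illustrative sketch, not called anywhere in this script: shows how a caller
# could consume the nocheck / need_check flags set by if__article_db and
# compare__db. The function name and messages are hypothetical; it assumes
# article_hash was already set (e.g. by article__hash) before the call.
demo__need_rebuild() {
unset nocheck need_check
if__article_db "$1"
if [[ $need_check ]];then
echo "hashes differ: article must be checked again"
elif [[ $nocheck ]];then
echo "hashes match: article is up to date"
else
echo "no database yet for this article"
fi
}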
#======================================================================
# Write article data to its database file
# db_file is URI/$article_db (from article_id)
# $1: $db_file
#======================================================================
db__print() {
if ! [[ -f "$db_file" ]];then
log__add -i -db "$article_id" \
"New, created"
else
log__add -i -db "$article_id" \
"Data written"
fi
cat <<EODBPRINT > "$db_file"
# Database
database_id=$article_id
database_file='$article_id.db'
database_uri='$db_file'
# File metas
article_uri='$uri_article'
article_size=$article_size
# URI Server
article_dir_srv='$article_dir_srv'
article_page_srv='${article_name/.stl/.html}'
article_uri_srv='$article_dir_srv/$article_page_srv'
sub_genuri_srv='$suri'
# Statuses
$(
[[ $stl_error ]] \
&& echo "article_error='yes'" \
|| echo "article_error='no'"
)
article_chk_hash=$article_hash
article_wip_hash=$article_wip_hash
$(
[[ "$stl_install" == "server" ]] \
&& echo "article_www_hash=$article_www_hash"
)
sidebar_position=$sidebar_position
# Article Metas
article_date='$article_Date'
article_timestamp=$date_epoch
article_author="$article_Author"
article_title="$article_Title"
article_about="$article_About"
article_tags="$article_Tags"
# Include files
$(
cat "$article_tmp_db"
)
# Statistics
article_words=$article_words
article_titles=$stat_titles
article_paragraphs=$stat_paragraphs
article_links=$stat_links
article_quotes=$stat_quotes
article_lists=$stat_lists
article_bolds=$article_bolds
article_strongs=$article_strongs
article_emphasis=$article_emphasis
article_icodes=$article_icodes
article_cross=$article_cross
article_dels=$article_dels
files_images=$stat_images
files_links=$stat_link_files
files_codes=$stat_codes
files_bruts=$stat_bruts
EODBPRINT
}
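# Note (illustrative): each .db file written above is plain shell and is read
# back with `source` by if__article_db and db__list, so any value that may
# contain spaces has to stay quoted in the heredoc. Rough shape of an entry,
# with placeholder values only:
#   database_id=exampleid
#   article_chk_hash=123456789
#   article_title="Example title"
#   article_words=42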
#======================================================================
# Print all databases in a table with article data
#======================================================================
db__list() {
domain__get
db_tmp_list=$(mktemp)
[[ "$stl_install" == "server" ]] \
&& echo "NBR|ID|TITLE|ERROR|DATE|AUTHOR|CHECK|WIP|SIDEBAR|WWW" \
>> "$db_tmp_list" \
|| echo "NBR|ID|TITLE|ERROR|DATE|AUTHOR|CHECK|WIP|SIDEBAR" \
>> "$db_tmp_list"
while read -r db_file
do
((db_nbr++))
source "$db_file"
[[ "$article_chk_hash" != "$article_wip_hash" ]] \
&& wip_status="${CY}$article_wip_hash${NC}" \
|| wip_status="$article_wip_hash"
[[ "$article_error" == "yes" ]] \
&& err_status="${CR}$article_error${NC}" \
|| err_status="$article_error"
printf '%s%s%s%b%s%s%s%b%s' \
"$db_nbr|" \
"$database_id|" \
"$article_title|" \
"$err_status|" \
"$article_date|" \
"$article_author|" \
"$article_chk_hash|" \
"$wip_status|" \
"$sidebar_position|" \
>> "$db_tmp_list"
if [[ "$stl_install" == "server" ]];then
printf '%s%s\n' \
"$article_www_hash" \
>> "$db_tmp_list"
fi
done < <(ls -1 "$domain_db_articles/"*.db 2>/dev/null)
(( $(wc -l < "$db_tmp_list") > 1 )) \
&& column -t -s'|' -o ' ' "$db_tmp_list"
rm -f "$db_tmp_list"
exit
}
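# Illustrative only: db__list builds a pipe-separated temporary file (header
# line plus one line per .db file) and lets `column -t -s'|'` align it, e.g.
# with placeholder values:
#   NBR  ID         TITLE          ERROR  DATE        AUTHOR   CHECK      WIP        SIDEBAR
#   1    exampleid  Example title  no     2022-07-20  someone  123456789  123456789  1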
#======================================================================
# Get articles from database that have a wip hash
# Generic function to:
# - "set www_hash" for stl sync
# - get stats
# $1: set_www_hash | stats
#======================================================================
db__get_wip_articles() {
while IFS=: read -r file article_wip
do
[[ $(awk -F= '{print $2}' <<<"$article_wip") -gt 0 ]] || continue
source "$file"
case "$1" in
# From sync_wip_to_www() ; manage__HTML
set_www_hash)
if [[ "$article_www_hash" != "$article_wip_hash" ]];then
sed -i "s^article_www_hash=.*^article_www_hash=$article_wip_hash^" \
"$file"
# Log
this_article="$domain_dir_www$article_uri_srv"
log__add -i -W -A "Added online: $article_title"
fi
;;
# Generate statistics
stats)
case "$stl_install" in
server)
[[ $article_www_hash -gt 0 ]] || continue
((stat_articles_www++))
article_words_www=$((article_words + article_words_www))
article_titles_www=$((article_titles + article_titles_www))
article_paragraphs_www=$((article_paragraphs + article_paragraphs_www))
article_links_www=$(( article_links + article_links_www))
article_quotes_www=$((article_quotes + article_quotes_www))
article_lists_www=$((article_lists + article_lists_www))
article_bolds_www=$((article_bolds + article_bolds_www))
article_strongs_www=$((article_strongs + article_strongs_www))
article_emphasis_www=$((article_emphasis + article_emphasis_www))
article_icodes_www=$((article_icodes + article_icodes_www))
article_cross_www=$((article_cross + article_cross_www))
article_dels_www=$((article_dels + article_dels_www))
files_images_www=$((files_images + files_images_www))
files_links_www=$((files_links + files_links_www))
files_codes_www=$((files_codes + files_codes_www))
files_bruts_www=$((files_bruts + files_bruts_www))
;;
local)
((stat_articles_wip++))
;;
esac
;;
esac
done < <(grep -H "article_wip_hash" "$domain_db_articles/"*.db 2>/dev/null)
}
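# Usage sketch (illustrative; the real callers live in other manage__* scripts,
# e.g. sync_wip_to_www() in manage__HTML as noted in the comments above):
#   db__get_wip_articles set_www_hash   # align article_www_hash with the wip hash
#   db__get_wip_articles stats          # accumulate the *_www / wip counters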