#!/bin/bash
# Name: Statique Littérateur
# Type: Database manager
# file: manage__db
# Folder: /var/lib/stl/scripts/
# By echolib (XMPP: im@echolib.re)
# License: GNU AFFERO GENERAL PUBLIC LICENSE Version 3, 19 November 2007

#------------
# funny stats
#------------
# lines: 339
# functions: 7
#-----------------------------------------------------------------------

#**********************************************************************

#======================================================================
# argument: db
#======================================================================
db__OPTIONS() {
  # Dispatch the "db" sub-argument:
  #   <name>.stl  : dump that article's database file
  #   '' | list   : table of all article databases
  #   sidebar     : table restricted to sidebar articles
  #   anything else is rejected via noarg.
  local arg="$2"
  local db_uri

  if [[ "$arg" == *".stl" ]]; then
    unset article_log
    article__hash "$arg"

    db_uri="$domain_db_articles/$article_db"
    if [[ -f "$db_uri" ]]; then
      cat "$db_uri"
    else
      echo "! No database yet for this article"
    fi
    exit

  elif [[ -z "$arg" || "$arg" == "list" ]]; then
    db__list

  elif [[ "$arg" == "sidebar" ]]; then
    db_sidebar=true
    db__list

  else
    noarg "$arg"
  fi
}

#======================================================================
# - Check if db exists for this article
# - Source the database
# - Compare the article hash with the checked one stored in the db
# $1: article_db (ID from uri_article)
#======================================================================
if__article_db() {
  # Load an article's database (when present), then compare hashes.
  # $1: article db file name (ID derived from uri_article)
  # Sets: db_file (path), db_exists=true once the DB has been sourced.
  db_file="$domain_db_articles/$1"

  # No database written yet: nothing to load or compare
  [[ -f "$db_file" ]] || return 0

  source "$db_file"
  compare__db
  db_exists=true
}

#======================================================================
# Compare the article against the DB to know if a check is needed
#======================================================================
compare__db() {
  # Decide, from the freshly sourced DB variables, whether the article
  # needs re-checking and whether its WIP/WWW copies are stale/current.
  # Reads:  article_hash, article_chk_hash, article_wip_hash,
  #         article_www_hash, article_error, stl_install,
  #         domain_dir_www, article_uri_srv, this_article
  # Sets:   need_check, old_wip, www_is_wip, check_force, sync_mess

  # Source file changed since the last recorded check
  if [[ $article_hash != $article_chk_hash ]]; then
    need_check=true
  fi

  # A WIP build exists but is older than the checked source
  if [[ -n $article_wip_hash && $article_chk_hash != $article_wip_hash ]]; then
    old_wip=true
    sync_mess="! $this_article is newer (WIP is older)"
  fi

  # Server install: the published page already matches the WIP build
  if [[ $stl_install == "server" \
        && -n $article_wip_hash \
        && $article_wip_hash == $article_www_hash \
        && -f $domain_dir_www$article_uri_srv ]]; then
    www_is_wip=true
    sync_mess="! $this_article is already up to date online"

    # Need to also check each "external" article's files
  fi

  # A previous run recorded errors: force a full re-check
  if [[ $article_error == 'yes' ]]; then
    check_force=true
  fi
}

#======================================================================
# Print article data into the database
# db_file is URI/$article_db (from article_id)
# $1: $db_file
#======================================================================
db__print() {
  # Write (or rewrite) one article's database file.
  # Uses the global db_file (URI/$article_db) as the destination and
  # the article_* / stat_* globals filled in by the article checker.
  # Side effects: overwrites $db_file, sets this_article, adds one log line.

  # Pick the log wording before the heredoc clobbers the file
  ! [[ -f "$db_file" ]] \
    && log_db_written="New Datas written" \
    || log_db_written="Datas updated"

  # Set some variables
  article_page_srv=${article_name/.stl/.html}
  article_uri_srv="$article_dir_srv/$article_page_srv"
  article_uri_src="$article_dir_srv/$article_name"

  # Persist the error status so the next run can force a re-check
  [[ $stl_error ]] \
    && article_error='yes' \
    || article_error='no'

  # NOTE: the heredoc body below is the DB file content (later sourced
  # by if__article_db/db__list) — do not reformat or comment inside it.
  # Single-quoted values protect names/paths that may contain spaces.
cat <<EODBPRINT > "$db_file"
# Database
database_id=$article_id
database_file='$article_id.db'
database_uri='$db_file'

# File metas
article_name='$article_name'
article_size=$article_size

# URI file
article_uri='$uri_article'
article_uri_src='$article_uri_src'

# URI Server
article_dir_srv='$article_dir_srv'
article_page_srv='$article_page_srv'
article_uri_srv='$article_uri_srv'
sub_genuri_srv='$sub_genuri_srv'

# Statuses
article_error='$article_error'
article_chk_hash=$article_hash
article_wip_hash=$article_wip_hash
$(
[[ "$stl_install" == "server" ]] \
&& echo "article_www_hash=$article_www_hash"
)

# Sidebar
sidebar_position=$sidebar_position

# Article Metas
article_date='$article_Date'
article_timestamp=$date_epoch
article_author="$article_Author"
article_title="$article_Title"
article_about="$article_About"
article_tags="$article_Tags"

# Include files
$(
cat "$article_tmp_db"
)

# Statistics
article_words=$article_words
article_titles=$stat_titles
article_paragraphs=$stat_paragraphs
article_links=$stat_links
article_quotes=$stat_quotes
article_lists=$stat_lists
article_bolds=$article_bolds
article_strongs=$article_strongs
article_emphasis=$article_emphasis
article_icodes=$article_icodes
article_cross=$article_cross
article_dels=$article_dels

files_images=$stat_images
files_links=$stat_link_files
files_codes=$stat_codes
files_bruts=$stat_bruts
EODBPRINT

  this_article="$db_file"
  log__add -i -db -W \
    "'$article_Title'. $log_db_written"
}

#======================================================================
# Print all databases in a table with article data
#======================================================================
db__list() {
  # Print every article database as one table row per article.
  # With db_sidebar=true, only articles having a sidebar_position are
  # shown, ordered by that position. Always exits the script.
  domain__get

  db_tmp_list=$(mktemp)
  db_nbr=0

  # Header row; server installs get an extra WWW column
  if [[ "$stl_install" == "server" ]]; then
    echo "0|ID|TITLE|ERROR|DATE|AUTHOR|CHECK|WIP|SIDEBAR|WWW" \
      >> "$db_tmp_list"
  else
    echo "0|ID|TITLE|ERROR|DATE|AUTHOR|CHECK|WIP|SIDEBAR" \
      >> "$db_tmp_list"
  fi

  # Glob instead of parsing `ls`; guard handles the no-match case
  for db_file in "$domain_db_articles/"*.db
  do
    [[ -f "$db_file" ]] || continue
    source "$db_file"

    # Sidebar mode: skip articles without a position and reuse the
    # position as the row number so sorting orders the sidebar
    if [[ $db_sidebar ]]; then
      if ! [[ $sidebar_position ]]; then
        continue
      else
        db_nbr=$sidebar_position
      fi
    else
      ((db_nbr++))
    fi

    # Highlight (colors) an outdated WIP hash and any recorded error
    [[ $article_chk_hash != $article_wip_hash ]] \
      && wip_status="${CY}$article_wip_hash${NC}" \
      || wip_status="$article_wip_hash"

    [[ $article_error == "yes" ]] \
      && err_status="${CR}$article_error${NC}" \
      || err_status="$article_error"

    printf '%s%s%s%b%s%s%s%b%s' \
      "$db_nbr|" \
      "$database_id|" \
      "$article_title|" \
      "$err_status|" \
      "$article_date|" \
      "$article_author|" \
      "$article_chk_hash|" \
      "$wip_status|" \
      "$sidebar_position|" \
      >> "$db_tmp_list"

    if [[ "$stl_install" == "server" ]]; then
      printf '%s' "$article_www_hash" >> "$db_tmp_list"
    fi

    # BUG FIX: the newline used to be printed only on server installs,
    # leaving all rows on a single line for local installs
    printf '\n' >> "$db_tmp_list"
  done

  # Header only (no data rows): nothing to display
  if ! (( $(wc -l < "$db_tmp_list") > 1 )); then
    rm -f "$db_tmp_list"
    exit
  fi

  if [[ $db_sidebar ]]; then
    column -t -s'|' -o' ' "$db_tmp_list" | sort -k1
  else
    column -t -s'|' -o' ' "$db_tmp_list"
  fi

  # BUG FIX: was `rm -f "$db_rmp_list"` (typo) — temp file never removed
  rm -f "$db_tmp_list"
  exit
}

#======================================================================
# Get articles from the database that have a WIP status
# Generic function to:
# - "set_www_hash": for `stl publish all`
# - "stats": create statistics
# $1: set_www_hash | stats
#======================================================================
db__get_wip_articles() {
  # Walk every article database that records a WIP hash.
  # $1: set_www_hash — mark each WIP article as published (WWW hash)
  #     stats        — accumulate site-wide counters
  # grep -H emits "path:article_wip_hash=<hash>"; IFS=: splits the
  # path from the assignment.
  while IFS=: read -r 'db_file' 'article_wip'
  do
    # Skip articles whose WIP hash is empty or zero (never built).
    # NOTE(review): the numeric -gt test assumes hashes are integers
    # (e.g. cksum output) — confirm the hash format used by article__hash
    [[ `awk -F= '{print $2}' <<<"$article_wip"` -gt 0 ]] || continue

    source "$db_file"
    this_article="$article_uri_src"

    case "$1" in
      # From sync_wip_to_www() ; manage__HTML
      set_www_hash)
        db__srv_status_hash WWW Set
        ;;

      # Generate statistics
      stats)
        case "$stl_install" in
          server)
            # Only published articles count toward the WWW totals
            [[ $article_www_hash -gt 0 ]] || continue

            # Fold this article's counters (sourced from its DB)
            # into the running site-wide totals
            ((stat_articles_www++))
            article_words_www=$((article_words + article_words_www))
            article_titles_www=$((article_titles + article_titles_www))
            article_paragraphs_www=$((article_paragraphs + article_paragraphs_www))
            article_links_www=$(( article_links + article_links_www))
            article_quotes_www=$((article_quotes + article_quotes_www))
            article_lists_www=$((article_lists + article_lists_www))
            article_bolds_www=$((article_bolds + article_bolds_www))
            article_strongs_www=$((article_strongs + article_strongs_www))
            article_emphasis_www=$((article_emphasis + article_emphasis_www))
            article_icodes_www=$((article_icodes+ article_icodes_www))
            article_cross_www=$((article_cross + article_cross_www))
            article_dels_www=$((article_dels + article_dels_www))
            files_images_www=$((files_images + files_images_www))
            files_links_www=$((files_links + files_links_www))
            files_codes_www=$((files_codes + files_codes_www))
            files_bruts_www=$((files_bruts + files_bruts_www))
            ;;

          local)
            # Local installs only count WIP articles
            ((stat_articles_wip++))
            ;;
        esac
        ;;
    esac

  done < <(grep -H "article_wip_hash" "$domain_db_articles/"*.db)
}

#======================================================================
# Set the WIP or WWW hash status in the article's database
# $1: WIP, WWW
# $2: Set, Unset
#======================================================================
db__srv_status_hash() {
  # Rewrite a hash status line (WIP or WWW) inside $db_file.
  # $1: WIP | WWW   $2: Set | Unset
  # "Unset" clears the source hashes first, so the sed below writes
  # empty values into the DB.
  if [[ "$2" == "Unset" ]]; then
    unset article_hash article_wip_hash
  fi

  if [[ "$1" == "WIP" ]]; then
    log_hash=" $article_hash"
    sed -i "s^article_wip_hash=.*^article_wip_hash=$article_hash^" \
      "$db_file"
  elif [[ "$1" == "WWW" ]]; then
    log_hash=" $article_wip_hash"
    sed -i "s^article_www_hash=.*^article_www_hash=$article_wip_hash^" \
      "$db_file"
  fi

  # Log
  log__add -i -db -A "$2 $1 status: $article_title.$log_hash"
}