diff --git a/src/var/lib/tyto/program/check.py b/src/var/lib/tyto/program/check.py
index 2bf064f..586c4bd 100644
--- a/src/var/lib/tyto/program/check.py
+++ b/src/var/lib/tyto/program/check.py
@@ -233,6 +233,9 @@ def check_article(post_uri,Force):
 
     # Check links (anchors)
     check_links_anchors(article)
+    # Check list contents in article
+    check_lists_contents(article.rsplit('\n'))
+
     # Check other markers
     check_article_markers(article)
 
@@ -378,10 +381,6 @@ def check_post_header(headers):
 
     if_mandat_marker('date:', date)
     if date: check_date(date)
-
-    #------------------------------
-    # Check valid Opitonal markers
-    #------------------------------
 
 
 #===================#
@@ -912,7 +911,6 @@ def check_links_anchors(article):
     #for ln, line in enumerate(article.rsplit('\n'), 1):
     anchors_link = re.findall(r'\>_(.*?)_\<', article)
     for anchor in anchors_link:
-        print('> Anchor found: %s'%(anchor))
         anchor_id = anchor.rsplit(':',1)[0]
         if not re.search(r'\>\> %s'%anchor_id, article):
             msg_log = 'Unused anchor ID ">> %s" from link "%s"'%(
@@ -920,8 +918,29 @@ def check_links_anchors(article):
                                                                 )
             log.append_f(post_logs,msg_log,1)
             Post_Err = True
-            Post_Err = True
+#====================================#
+# Check inside list for only + and = #
+# Multi-lines for one list marker    #
+# MUST have at least one space (\s)  #
+#------------------------------------#
+def check_lists_contents(article):
+    global Post_Err
+
+    inlist = False
+
+    for line in article:
+
+        if re.match(markers_reg[3][0], line):
+            inlist = True
+            continue
+        elif re.match(markers_reg[3][1], line):
+            inlist = False
+
+        if inlist and not re.match(r'^\+|^\=|\s', line):
+            msg_log = 'Content list not "+" or "=": %s'%line
+            log.append_f(post_logs,msg_log,1)
+            Post_Err = True
 
 
 #====================================#
 # Create Database file for this Post #
diff --git a/src/var/lib/tyto/program/log.py b/src/var/lib/tyto/program/log.py
index 2712169..cd92783 100644
--- a/src/var/lib/tyto/program/log.py
+++ b/src/var/lib/tyto/program/log.py
@@ -69,7 +69,7 @@ def nowdate():
 # Append line to specific log file #
 #----------------------------------#
 def append_f(f,line,n):
-    smiley = [':)',':(\033[1;31m','\033[1;33m:|']
+    smiley = [':D',':<\033[1;31m','\033[1;33m:|']
     now = nowdate()
 
     # Open file to append line
diff --git a/src/var/lib/tyto/program/wip.py b/src/var/lib/tyto/program/wip.py
index e342e83..4220b50 100644
--- a/src/var/lib/tyto/program/wip.py
+++ b/src/var/lib/tyto/program/wip.py
@@ -70,6 +70,9 @@ def manage_wip(file_post, Force):
     # All is good, converting...
     #---------------------------
 
+    # Source DB variables
+    post_db = exec(open(check.curr_post_db).read(),globals())
+
     # Send to log
     msg_log = 'Wip > Article: %s. logs: %s'%(
               check.post_uri, check.post_logs
@@ -91,11 +94,15 @@ def manage_wip(file_post, Force):
     wip_begin_markers(wip_html.rsplit('\n'))
     wip_titles(       wip_html.rsplit('\n'))
     wip_words_markers(wip_html)
+    wip_anchors(      wip_html)
+
+    # After all, convert protected contents
+    wip_links(        wip_html)
     wip_quotes(       wip_html.rsplit('\n'))
 
     print('> Article HTML:')
     print(wip_html)
-
+
 
 #============================#
 # HTML CONVERTERS            #
@@ -114,7 +121,7 @@ def wip_begin_markers(wip_lines):
     # Set marker (regex to find), HTML, Need CSS
     marks_html = [
             ['^\|$|^\|\s'       , '…'  , True ],
-            ['^>>\s'            , '…'  , True ],
+            ['^>>\s'            , '…'  , True ],
             [r'^\($|\(\s'       , '…'  , True ],
             ['^\)$|^\)\s'       , '…'  , False],
             ['^\[\[$|^\[\[\s'   , '…
@@ … @@
+#
+# Convert anchors
+#
+def wip_anchors(article):
+    global wip_html
+
+    anchor_fmt   = '…'
+    anchors_link = re.findall(r'\>_(.*?)_\<', article)
+
+    for anchor in anchors_link:
+        anchor_id   = anchor.rsplit(':',1)[0]
+        anchor_name = anchor.rsplit(':',1)[1]
+        article = article.replace('>_%s_<'%anchor,
+                                  anchor_fmt%(anchor_id, anchor_name)
+                                  )
+    wip_html = article
+
+#
+# Convert links
+#
+def wip_links(article):
+    global wip_html
+
+    link_fmt = '…%s…'
+    all_vars = set(globals())
+
+    for var in all_vars:
+        if var.startswith('link_'):
+            link = globals()[var]
+            article = article.replace('_%s'%link[0],
+                                      link_fmt%(
+                                      domain.domain_css, link[1],
+                                      link[2], link[0]
+                                      )
+                                      )
+
+    wip_html = article
+
 #======================================#
 # Words Markers (strongs, emphasis...) #
 #--------------------------------------#
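
Note on the new check_lists_contents(): the sketch below reproduces its rule outside Tyto so it can be tried directly. The [list] / [/list] regexes are only stand-ins for markers_reg[3][0] and markers_reg[3][1], which this patch does not show; the content test itself is copied from the hunk (re.match only tests the start of the line, so the bare \s alternative means "starts with whitespace").

import re

LIST_OPEN  = r'^\[list\]'    # stand-in for markers_reg[3][0] (assumed)
LIST_CLOSE = r'^\[/list\]'   # stand-in for markers_reg[3][1] (assumed)

def list_content_errors(lines):
    # Return lines inside a list block that do not start with '+', '=' or whitespace
    errors = []
    inlist = False
    for line in lines:
        if re.match(LIST_OPEN, line):
            inlist = True
            continue
        elif re.match(LIST_CLOSE, line):
            inlist = False
        if inlist and not re.match(r'^\+|^\=|\s', line):
            errors.append(line)
    return errors

sample = ['[list]', '+ first item', '  continuation line', 'stray text', '= other item', '[/list]']
print(list_content_errors(sample))   # ['stray text']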
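
On the new post_db = exec(open(check.curr_post_db).read(),globals()) line in manage_wip(): exec() always returns None in Python 3, so post_db itself ends up as None; the useful effect is that the DB file's assignments are executed into globals(). If the goal is to keep those values in a named object, executing into an explicit namespace does that, assuming the DB file is plain Python assignments as the exec() call implies (the db_source content below is invented for the example):

# Load post-DB style assignments into a dict instead of globals()
db_source = "post_title = 'Hello'\nlink_tyto = ['tyto', 'https://example.org/', 'Tyto']\n"

post_db = {}
exec(db_source, post_db)          # exec() returns None; it fills post_db as a side effect

print(post_db['post_title'])      # Hello
print(post_db['link_tyto'][1])    # https://example.org/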
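
The wip_anchors() hunk arrives truncated above and its HTML template string is unreadable, so the following is only a sketch of the same substitution with an assumed '<a href="#%s">%s</a>' template; the >_id:name_< syntax and the rsplit(':', 1) split come from the patch and from check_links_anchors() in check.py.

import re

def convert_anchor_links(article, anchor_fmt='<a href="#%s">%s</a>'):
    # Rewrite every >_id:name_< span using the (assumed) anchor template
    for anchor in re.findall(r'\>_(.*?)_\<', article):
        anchor_id, anchor_name = anchor.rsplit(':', 1)
        article = article.replace('>_%s_<' % anchor,
                                  anchor_fmt % (anchor_id, anchor_name))
    return article

print(convert_anchor_links('See >_intro:the introduction_< for details.'))
# See <a href="#intro">the introduction</a> for details.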
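
wip_links() walks globals() for variables named link_* (presumably defined by the exec() of the post DB) and rewrites _word occurrences; its link_fmt template was also lost in this diff. The sketch below swaps the globals() scan for a plain dict and assumes a (CSS class, href, title, text) layout for the template, matching the four values the patch passes in (domain.domain_css, link[1], link[2], link[0]):

# Assumed template; the real link_fmt string is not readable in the patch
LINK_FMT = '<a class="%s" href="%s" title="%s">%s</a>'

# Stand-in for the link_* variables found in globals(): word -> (href, title)
links = {'tyto': ('https://example.org/tyto', 'Tyto project page')}

def convert_word_links(article, css='site'):
    for word, (href, title) in links.items():
        article = article.replace('_%s' % word,
                                  LINK_FMT % (css, href, title, word))
    return article

print(convert_word_links('Read more about _tyto here.'))
# Read more about <a class="site" href="https://example.org/tyto" title="Tyto project page">tyto</a> here.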