-*- mode: org -*-
#+TITLE: spine (doc_reform) markup source raw
#+DESCRIPTION: documents - structuring, publishing in multiple formats & search
#+FILETAGS: :spine:sourcefile:read:
#+AUTHOR: Ralph Amissah
#+EMAIL: [[mailto:ralph.amissah@gmail.com][ralph.amissah@gmail.com]]
#+COPYRIGHT: Copyright (C) 2015 - 2019 Ralph Amissah
#+LANGUAGE: en
#+STARTUP: indent content hideblocks hidestars
#+OPTIONS: H:3 num:nil toc:t \n:nil @:t ::t |:t ^:nil _:nil -:t f:t *:t <:t
#+OPTIONS: TeX:t LaTeX:t skip:nil d:nil todo:t pri:nil tags:not-in-toc
#+OPTIONS: author:nil email:nil creator:nil timestamp:nil
#+PROPERTY: header-args :padline no :exports code :cache no :noweb yes
#+EXPORT_SELECT_TAGS: export
#+EXPORT_EXCLUDE_TAGS: noexport
#+TAGS: assert(a) class(c) debug(d) mixin(m) spine(s) tangle(T) template(t) WEB(W) noexport(n)
[[./spine.org][spine]] [[./][org/]]
* imports
#+name: imports_std
#+BEGIN_SRC d
import
doc_reform.meta,
doc_reform.io_in.paths_source,
std.file,
std.path;
#+END_SRC
* A. get _config file_ (read in)
** _module template_ :module:config_files:
#+BEGIN_SRC d :tangle "../src/doc_reform/io_in/read_config_files.d"
/++
  read configuration files (config_local_site & dr_document_make)
+/
module doc_reform.io_in.read_config_files;
<>
<>
#+END_SRC
*** 0. read config files (config_local_site & dr_document_make) (yaml)
**** 1. site configuration
#+name: meta_config_file_hub
#+BEGIN_SRC d
static template readConfigSite() {
import
doc_reform.meta.rgx;
  <<imports_std>>
mixin spineRgxInit;
final auto readConfigSite(C)(C _conf_file_details) {
static auto rgx = Rgx();
string conf_filename = "NONE";
string config_file_str;
string default_config_file_str = format(q"┃
flag:
act0: --html
act1: --html --epub
#output:
# path: ""
default:
language: "en"
papersize: "a4"
text_wrap: "80"
digest: "sha256"
search:
title: ""
flag: ""
action: ""
db: ""
webserv:
url_domain: "not-configured"
url_root: "doc"
doc_path: "doc"
images: ""
cgi: ""
cgi_host: ""
cgi_host_path: ""
cgi_port: ""
cgi_user: ""
┃");
foreach(conf_fn; [_conf_file_details.config_filename_site]) {
foreach(pth; _conf_file_details.possible_config_path_locations.config_local_site) {
char[] conf_file;
conf_filename = conf_fn;
if (exists(pth)) {
auto f_attrib = pth.getLinkAttributes;
if (
_conf_file_details.possible_config_path_locations.config_local_site.length == 1
&& f_attrib.attrIsFile
) {
conf_file = pth.to!(char[]);
conf_filename = pth.baseName;
} else if (f_attrib.attrIsDir) {
conf_file = ((chainPath(pth.to!string, conf_fn)).asNormalizedPath).array;
conf_filename = conf_fn;
}
try {
if (exists(conf_file)) {
if (conf_file.getLinkAttributes.attrIsFile) {
config_file_str = conf_file.readText;
break;
}
}
} catch (ErrnoException ex) {
} catch (FileException ex) {
}
}
}
if (config_file_str.length > 0) { break; }
}
if (config_file_str.length > 0) {
import dyaml;
Node yaml_root;
try {
yaml_root = Loader.fromString(config_file_str).load();
} catch {
import std.stdio;
writeln("ERROR failed to read config file content, not parsed as yaml, program default used");
conf_filename = "VIRTUAL";
config_file_str = default_config_file_str;
}
}
if (config_file_str.length == 0) { /+ create dummy default config file +/
writeln("WARNING config file NOT found, default provided");
conf_filename = "VIRTUAL";
config_file_str = default_config_file_str;
}
struct _ConfContent {
string filename() {
return conf_filename;
}
string filetype() {
string _ft = "";
if (content.match(rgx.yaml_config)) {
_ft = "yaml";
}
return _ft;
}
string content() {
return config_file_str;
}
}
return _ConfContent();
}
}
#+END_SRC
**** 2. document make/config
#+name: meta_config_file_hub
#+BEGIN_SRC d
static template readConfigDoc() {
import
doc_reform.meta.rgx;
  <<imports_std>>
mixin spineRgxInit;
final auto readConfigDoc(M,E)(M _manifested, E _env) {
static auto rgx = Rgx();
string config_file_str;
string conf_filename = "NONE";
auto _conf_file_details = ConfigFilePaths!()(_manifested, _env);
string[] possible_config_path_locations = _conf_file_details.possible_config_path_locations.dr_document_make;
foreach(conf_fn; [_conf_file_details.config_filename_document]) {
foreach(pth; possible_config_path_locations) {
auto conf_file = ((chainPath(pth.to!string, conf_fn)).asNormalizedPath).array;
conf_filename = conf_fn;
if (config_file_str.length > 0) {
break;
}
try {
if (exists(conf_file)) {
if (conf_file.getLinkAttributes.attrIsFile) {
config_file_str = conf_file.readText;
break;
}
}
} catch (ErrnoException ex) {
} catch (FileException ex) {
}
}
if (config_file_str.length > 0) { break; }
}
struct _ConfContent {
string filename() {
return conf_filename;
}
string content() {
return config_file_str;
}
string filetype() {
string _ft = "";
if (content.match(rgx.yaml_config)) {
_ft = "yaml";
}
return _ft;
}
}
return _ConfContent();
}
}
#+END_SRC
*** 1. YAML config files get
#+name: meta_config_file_yaml
#+BEGIN_SRC d
static template configYAML() {
import dyaml; //
<>
YAMLDocument configYAML(string configuration, string conf_yaml_filename) {
Node _yaml_conf;
try {
_yaml_conf = Loader.fromString(configuration).load();
} catch(ErrnoException e) {
stderr.writeln("Yaml problem with content for ", conf_yaml_filename);
stderr.writeln(e.msg);
}
return _yaml_conf;
}
}
#+END_SRC
*** 2. YAML config (config_local_site & dr_document_make) :file:config:hub:
#+name: meta_config_file_hub
#+BEGIN_SRC d
static template configReadSiteYAML() {
<>
final YAMLDocument configReadSiteYAML(M,E)(M _manifested, E _env) {
string _configuration = configReadInSiteYAML!()(_manifested, _env);
auto _conf_file_details = ConfigFilePaths!()(_manifested, _env);
string _conf_yaml_fn = _conf_file_details.config_filename_site;
YAMLDocument _yaml_conf = configYAML!()(_configuration, _conf_yaml_fn);
return _yaml_conf;
}
}
static template configReadDocYAML() {
<>
final YAMLDocument configReadDocYAML(M,E)(M _manifested, E _env) {
string _configuration = configReadInDocYAML!()(_manifested, _env);
auto _conf_file_details = ConfigFilePaths!()(_manifested, _env);
string _conf_yaml_fn = _conf_file_details.config_filename_document;
YAMLDocument _yaml_conf = configYAML!()(_configuration, _conf_yaml_fn);
return _yaml_conf;
}
}
#+END_SRC
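A minimal usage sketch (illustrative only, not tangled): once the site configuration has been parsed, values can be read from the returned dyaml node by key; the keys below are assumed from the default_config_file_str above.
#+BEGIN_SRC d :tangle no
/+ illustrative: read a couple of values from a parsed site config +/
import dyaml;
void exampleReadSiteConfig(Node _yaml_conf) {
  if (_yaml_conf.containsKey("default")
    && _yaml_conf["default"].containsKey("language")
  ) {
    string lang = _yaml_conf["default"]["language"].as!string; // e.g. "en"
  }
  if (_yaml_conf.containsKey("flag")
    && _yaml_conf["flag"].containsKey("act0")
  ) {
    string act0 = _yaml_conf["flag"]["act0"].as!string;        // e.g. "--html"
  }
}
#+END_SRC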
* B. get _markup source_, read file :module:source_files:
** _module template_ (includes tuple)
#+BEGIN_SRC d :tangle "../src/doc_reform/io_in/read_source_files.d"
/++
module source_read_source_files;
- open markup files
  - if master file, scan for additional files to import/insert
+/
module doc_reform.io_in.read_source_files;
static template spineRawMarkupContent() {
import
doc_reform.meta.rgx;
  <<imports_std>>
mixin spineRgxInit;
static auto rgx = Rgx();
string[] _images=[];
auto _extract_images(S)(S content_block) {
string[] images_;
string _content_block = content_block.to!string;
if (auto m = _content_block.matchAll(rgx.image)) {
images_ ~= m.captures[1].to!string;
}
return images_;
}
auto rawsrc = RawMarkupContent();
auto spineRawMarkupContent(O,Fn)(O _opt_action, Fn fn_src) {
auto _0_header_1_body_content_2_insert_filelist_tuple
= rawsrc.sourceContentSplitIntoHeaderAndBody(_opt_action, rawsrc.sourceContent(fn_src), fn_src);
return _0_header_1_body_content_2_insert_filelist_tuple;
}
struct RawMarkupContent {
final sourceContent(in string fn_src) {
auto raw = MarkupRawUnit();
auto source_txt_str
= raw.markupSourceReadIn(fn_src);
return source_txt_str;
}
final auto sourceContentSplitIntoHeaderAndBody(O)(O _opt_action, in string source_txt_str, in string fn_src="") {
auto raw = MarkupRawUnit();
string[] insert_file_list;
string[] images_list;
auto t
= raw.markupSourceHeaderContentRawLineTupleArray(source_txt_str);
auto header_raw = t[0];
auto sourcefile_body_content = t[1];
if (fn_src.match(rgx.src_fn_master)) { // filename with path is needed for master files (.ssm), not otherwise
auto ins = Inserts();
auto tu
= ins.scan_master_src_for_insert_files_and_import_content(_opt_action, sourcefile_body_content, fn_src);
static assert(!isTypeTuple!(tu));
sourcefile_body_content = tu[0];
insert_file_list = tu[1].dup;
images_list = tu[2].dup;
} else if (_opt_action.source || _opt_action.pod) {
auto ins = Inserts();
auto tu
= ins.scan_master_src_for_insert_files_and_import_content(_opt_action, sourcefile_body_content, fn_src);
static assert(!isTypeTuple!(tu));
images_list = tu[2].dup;
}
string header_type = "";
if (header_raw.match(rgx.yaml_config)) {
header_type = "yaml";
}
t = tuple(
header_raw,
sourcefile_body_content,
header_type,
insert_file_list,
images_list
);
static assert(t.length==5);
return t;
}
}
struct MarkupRawUnit {
import std.file;
  <<meta_markup_source_raw_read_file_source_string>>
  <<meta_markup_source_raw_doc_header_and_content_split>>
  <<meta_markup_source_raw_source_line_array>>
  <<meta_markup_source_raw_read_in_file>>
  <<meta_markup_source_raw_tuple_of_header_and_body>>
  <<meta_markup_source_raw_get_insert_source_line_array>>
}
struct Inserts {
auto scan_subdoc_source(O)(
O _opt_action,
char[][] markup_sourcefile_insert_content,
string fn_src
) {
mixin spineRgxInitFlags;
    <<meta_inserts_scan>>
foreach (line; markup_sourcefile_insert_content) {
      <<meta_inserts_scan_loop>>
} // end src subdoc (inserts) loop
    <<meta_inserts_scan_post>>
}
auto scan_master_src_for_insert_files_and_import_content(O)(
O _opt_action,
char[][] sourcefile_body_content,
string fn_src
) {
import std.algorithm;
mixin spineRgxInitFlags;
    <<meta_master_doc_scan_for_insert_filenames>>
foreach (line; sourcefile_body_content) {
      <<meta_master_doc_scan_for_insert_filenames_loop>>
} // end src doc loop
    <<meta_master_doc_scan_for_insert_filenames_post>>
}
}
}
#+END_SRC
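A hedged call sketch (illustrative only, not tangled): a caller instantiates the template and unpacks the returned tuple; _opt_action (the usual options struct) and fn_src (a .sst/.ssm filename) are assumed here.
#+BEGIN_SRC d :tangle no
/+ illustrative: unpack the (header, body, header type, insert list, image list) tuple +/
import doc_reform.io_in.read_source_files;
auto exampleReadMarkup(O)(O _opt_action, string fn_src) {
  auto t = spineRawMarkupContent!()(_opt_action, fn_src);
  auto header_raw   = t[0]; // raw document header
  auto body_content = t[1]; // body content as a line array
  auto header_type  = t[2]; // "yaml" if the header matched rgx.yaml_config
  auto insert_files = t[3]; // files pulled in by a master (.ssm) document
  auto images       = t[4]; // image filenames gathered while scanning
  return t;
}
#+END_SRC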
** get markup source, read file :source:markup:
*** read file, source string [#A] :string:
#+name: meta_markup_source_raw_read_file_source_string
#+BEGIN_SRC d
final private string readInMarkupSource(in char[] fn_src) {
enforce(
exists(fn_src)!=0,
"file not found: «" ~
fn_src ~ "»"
);
string source_txt_str;
try {
if (exists(fn_src)) {
if (fn_src.getLinkAttributes.attrIsFile) {
source_txt_str = fn_src.readText;
} else {
}
}
} catch (ErrnoException ex) {
} catch (UTFException ex) {
// Handle validation errors
} catch (FileException ex) {
// Handle errors
}
std.utf.validate(source_txt_str);
return source_txt_str;
}
#+END_SRC
*** document header & content, array.length == 2 [#A] :array:
here the document is split into header and body, giving an array of length 2;
the split is on the first match of level A~ (which is required); an illustrative sketch follows the code block
#+name: meta_markup_source_raw_doc_header_and_content_split
#+BEGIN_SRC d
final private char[][] header0Content1(in string src_text) {
/+ split string on _first_ match of "^:?A~\s" into [header, content] array/tuple +/
char[][] header_and_content;
auto m = (cast(char[]) src_text).matchFirst(rgx.heading_a);
header_and_content ~= m.pre;
header_and_content ~= m.hit ~ m.post;
assert(header_and_content.length == 2,
"document markup is broken, header body split == "
~ header_and_content.length.to!string
~ "; (header / body array split should == 2 (split is on level A~))"
);
return header_and_content;
}
#+END_SRC
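For illustration (not tangled), the same split can be sketched with a stand-in pattern for rgx.heading_a: everything before the first level A~ heading becomes the header, and the remainder, beginning at the A~ line, becomes the body.
#+BEGIN_SRC d :tangle no
/+ illustrative: mirror of the header/body split, stand-in for rgx.heading_a +/
import std.regex;
void exampleHeaderBodySplit() {
  string src_text =
    "title:\n  main: \"Example\"\n\nA~ Example\n\n1~intro Intro\n\nsome text\n";
  auto heading_a = regex(`^A~\s`, "m");      // stand-in pattern, assumed
  auto m = (cast(char[]) src_text).matchFirst(heading_a);
  char[][] header_and_content;
  header_and_content ~= m.pre;               // document header (metadata)
  header_and_content ~= m.hit ~ m.post;      // body, beginning at the A~ heading
  assert(header_and_content.length == 2);
}
#+END_SRC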
*** source line array :array:
#+name: meta_markup_source_raw_source_line_array
#+BEGIN_SRC d
final private char[][] markupSourceLineArray(in char[] src_text) {
char[][] source_line_arr
= (cast(char[]) src_text).split(rgx.newline_eol_strip_preceding);
return source_line_arr;
}
#+END_SRC
*** source content raw line array :array:
- used for regular .sst files, master .ssm files, and .ssi inserts
- the relevant filename regex is passed in for the enforce match (a small sketch follows)
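A small hedged sketch (not tangled) of that enforce-on-filename gate; the pattern is a stand-in for rgx.src_pth_sst_or_ssm (or rgx.src_fn_find_inserts for inserts), not the project's actual regex.
#+BEGIN_SRC d :tangle no
/+ illustrative: reject filenames that are not spine markup (.sst / .ssm) +/
import std.regex, std.exception;
void exampleFilenameGate(string fn_src) {
  auto src_pth_sst_or_ssm = regex(`.+\.ss[tm]$`); // stand-in, assumed
  enforce(
    fn_src.matchFirst(src_pth_sst_or_ssm),
    "not a dr markup filename: «" ~ fn_src ~ "»"
  );
}
#+END_SRC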
**** read in file
#+name: meta_markup_source_raw_read_in_file
#+BEGIN_SRC d
auto markupSourceReadIn(in string fn_src) {
static auto rgx = Rgx();
enforce(
fn_src.match(rgx.src_pth_sst_or_ssm),
"not a dr markup filename: «" ~
fn_src ~ "»"
);
auto source_txt_str = readInMarkupSource(fn_src);
return source_txt_str;
}
#+END_SRC
**** tuple (a) header, (b) body content, (c) file insert list & (d) image list?
- header
- body content
- file insert list
- [image list?]
#+name: meta_markup_source_raw_tuple_of_header_and_body
#+BEGIN_SRC d
auto markupSourceHeaderContentRawLineTupleArray(in string source_txt_str) {
string[] file_insert_list = [];
string[] images_list = [];
char[][] hc = header0Content1(source_txt_str);
char[] header = hc[0];
char[] source_txt = hc[1];
auto source_line_arr = markupSourceLineArray(source_txt);
string header_type = "";
if (header.match(rgx.yaml_config)) {
header_type = "yaml";
}
auto t = tuple(
header,
source_line_arr,
header_type,
file_insert_list,
images_list
);
return t;
}
#+END_SRC
**** get insert source line array
#+name: meta_markup_source_raw_get_insert_source_line_array
#+BEGIN_SRC d
final char[][] getInsertMarkupSourceContentRawLineArray(
in char[] fn_src_insert,
Regex!(char) rgx_file
) {
enforce(
fn_src_insert.match(rgx_file),
"not a dr markup filename: «" ~
fn_src_insert ~ "»"
);
auto source_txt_str = readInMarkupSource(fn_src_insert);
auto source_line_arr = markupSourceLineArray(source_txt_str);
return source_line_arr;
}
#+END_SRC
** get markup source, master file & inserts :masterfile:inserts:
[[./spine.org][spine]] [[./][org/]]
*** scan inserts (sub-document) source :scan_insert_src:
**** scan subdoc source
#+name: meta_inserts_scan
#+BEGIN_SRC d
char[][] contents_insert;
int[string] type1 = flags_type_init;
auto fn_pth_full = fn_src.match(rgx.src_pth_sst_or_ssm);
auto markup_src_file_path = fn_pth_full.captures[1];
#+END_SRC
**** loop insert (sub-document)
#+name: meta_inserts_scan_loop
#+BEGIN_SRC d
if (type1["curly_code"] == 1) {
type1["header_make"] = 0;
type1["header_meta"] = 0;
if (line.matchFirst(rgx.block_curly_code_close)) {
type1["curly_code"] = 0;
}
contents_insert ~= line;
} else if (line.matchFirst(rgx.block_curly_code_open)) {
type1["curly_code"] = 1;
type1["header_make"] = 0;
type1["header_meta"] = 0;
contents_insert ~= line;
} else if (type1["tic_code"] == 1) {
type1["header_make"] = 0;
type1["header_meta"] = 0;
if (line.matchFirst(rgx.block_tic_close)) {
type1["tic_code"] = 0;
}
contents_insert ~= line;
} else if (line.matchFirst(rgx.block_tic_code_open)) {
type1["tic_code"] = 1;
type1["header_make"] = 0;
type1["header_meta"] = 0;
contents_insert ~= line;
} else if (auto m = line.match(rgx.insert_src_fn_ssi_or_sst)) {
type1["header_make"] = 0;
type1["header_meta"] = 0;
auto insert_fn = m.captures[2];
auto insert_sub_pth = m.captures[1];
auto fn_src_insert
= chainPath(markup_src_file_path, insert_sub_pth ~ insert_fn).array;
auto raw = MarkupRawUnit();
auto markup_sourcesubfile_insert_content
= raw.getInsertMarkupSourceContentRawLineArray(fn_src_insert, rgx.src_fn_find_inserts);
debug(insert_file) {
writeln(line);
writeln(fn_src_insert);
writeln(
" length contents insert array: ",
markup_sourcesubfile_insert_content.length
);
}
if (_opt_action.source || _opt_action.pod) {
_images ~= _extract_images(markup_sourcesubfile_insert_content);
}
auto ins = Inserts();
/+
- 1. load file
- 2. read lines
- 3. scan lines
- a. if filename insert, and insert filename
- repeat 1
- b. else
- add line to new array;
- build image list, search for any image files to add to image list
+/
} else {
type1["header_make"] = 0;
type1["header_meta"] = 0;
contents_insert ~= line; // images to extract for image list?
if (_opt_action.source || _opt_action.pod) {
auto _image_linelist = _extract_images(line);
if (_image_linelist.length > 0) {
_images ~= _image_linelist;
}
}
}
#+END_SRC
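The commented steps above (load file, read lines, scan lines, recurse on insert references) can be sketched as a small self-contained recursion; this is illustrative only and not tangled, and the insert-line pattern is a stand-in for rgx.insert_src_fn_ssi_or_sst rather than the project's regex.
#+BEGIN_SRC d :tangle no
/+ illustrative: simplified recursive insert scan, replacing insert references with file content +/
import std.regex, std.file, std.string, std.path;
string[] exampleScanForInserts(string fn_src) {
  auto insert_line = regex(`^<<\s*(\S+\.ss[it])\s*$`); // stand-in, assumed
  string[] lines_out;
  foreach (line; fn_src.readText.splitLines) {
    if (auto m = line.matchFirst(insert_line)) {       // a. insert reference found
      auto fn_insert = buildPath(fn_src.dirName, m[1]);
      lines_out ~= exampleScanForInserts(fn_insert);   //    repeat from 1. (load file)
    } else {
      lines_out ~= line;                               // b. keep the line unchanged
    }
  }
  return lines_out;
}
#+END_SRC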
**** post loop
#+name: meta_inserts_scan_post
#+BEGIN_SRC d
auto t = tuple(
contents_insert,
_images
);
return t;
#+END_SRC
*** scan document source :scan_src:
**** scan doc source
#+name: meta_master_doc_scan_for_insert_filenames
#+BEGIN_SRC d
char[][] contents;
int[string] type = flags_type_init;
auto fn_pth_full = fn_src.match(rgx.src_pth_sst_or_ssm);
auto markup_src_file_path = fn_pth_full.captures[1];
char[][] contents_insert;
string[] _images =[];
string[] insert_file_list =[];
#+END_SRC
**** include inserts: _loop master_ scan for inserts (insert documents)
#+name: meta_master_doc_scan_for_insert_filenames_loop
#+BEGIN_SRC d
if (type["curly_code"] == 1) {
if (line.matchFirst(rgx.block_curly_code_close)) {
type["curly_code"] = 0;
}
contents ~= line;
} else if (line.matchFirst(rgx.block_curly_code_open)) {
type["curly_code"] = 1;
contents ~= line;
} else if (type["tic_code"] == 1) {
if (line.matchFirst(rgx.block_tic_close)) {
type["tic_code"] = 0;
}
contents ~= line;
} else if (line.matchFirst(rgx.block_tic_code_open)) {
type["tic_code"] = 1;
contents ~= line;
} else if (auto m = line.match(rgx.insert_src_fn_ssi_or_sst)) {
auto insert_fn = m.captures[2];
auto insert_sub_pth = m.captures[1];
auto fn_src_insert
= chainPath(markup_src_file_path, insert_sub_pth ~ insert_fn).array;
insert_file_list ~= fn_src_insert.to!string;
auto raw = MarkupRawUnit();
/+ TODO +/
auto markup_sourcefile_insert_content
= raw.getInsertMarkupSourceContentRawLineArray(fn_src_insert, rgx.src_fn_find_inserts);
debug(insert_file) {
writeln(line);
writeln(fn_src_insert);
writeln(
" length contents insert array: ",
markup_sourcefile_insert_content.length
);
}
auto ins = Inserts();
auto contents_insert_tu = ins.scan_subdoc_source(
_opt_action,
markup_sourcefile_insert_content,
fn_src_insert.to!string
);
contents ~= contents_insert_tu[0]; // images to extract for image list?
if (_opt_action.source || _opt_action.pod) {
auto _image_linelist = _extract_images(contents_insert_tu[0]);
if (_image_linelist.length > 0) {
_images ~= _image_linelist;
}
}
/+
- 1. load file
- 2. read lines
- 3. scan lines
- a. if filename insert, and insert filename
- repeat 1
- b. else
- add line to new array;
- build image list, search for any image files to add to image list
+/
} else {
contents ~= line;
if (_opt_action.source || _opt_action.pod) {
auto _image_linelist = _extract_images(line);
if (_image_linelist.length > 0) {
_images ~= _image_linelist;
}
}
}
#+END_SRC
**** post loop
#+name: meta_master_doc_scan_for_insert_filenames_post
#+BEGIN_SRC d
string[] images = [];
foreach(i; uniq(_images.sort())) {
images ~= i;
}
debug(insert_file) {
writeln(__LINE__);
writeln(contents.length);
}
auto t = tuple(
contents,
insert_file_list,
images
);
return t;
#+END_SRC
* __END__