From 8ab7e935913c102fb039110e20b71f698a68c6ee Mon Sep 17 00:00:00 2001
From: Ralph Amissah <ralph@amissah.com>
Date: Thu, 16 Jun 2016 01:49:06 -0400
Subject: step5 sdlang used for config files and doc headers

---
 org/ao_abstract_doc_source.org |  39 ++--
 org/ao_header_extract.org      | 438 -----------------------------------------
 org/ao_output_debugs.org       |   8 +-
 org/ao_read_source_files.org   | 178 ++++++++++++++++-
 org/output.org                 |  23 ---
 org/sdp.org                    | 190 ++++++++++++++++--
 6 files changed, 372 insertions(+), 504 deletions(-)
 delete mode 100644 org/ao_header_extract.org

diff --git a/org/ao_abstract_doc_source.org b/org/ao_abstract_doc_source.org
index 557cda0..9e9e9e5 100644
--- a/org/ao_abstract_doc_source.org
+++ b/org/ao_abstract_doc_source.org
@@ -463,24 +463,24 @@ if (matchFirst(line, rgx.block_open)) {
     && ((type["para"] == State.off)
     && (type["heading"] == State.off))) {
       /+ heading or para but neither flag nor line exists +/
-      if ((to!string(dochead_make_json["make"]["headings"]).length > 2)
-      && (type["make_headings"] == State.off)) {
-        /+ heading found +/
-        auto dochead_make_headings =
-          to!string(dochead_make_json["make"]["headings"]);
-        heading_found(line, dochead_make_headings, heading_match_str, heading_match_rgx, type);
-      }
+      // if ((to!string(dochead_make["make"]["headings"]).length > 2)
+      // && (type["make_headings"] == State.off)) {
+      //   /+ heading found +/
+      //   auto dochead_make_headings =
+      //     to!string(dochead_make["make"]["headings"]);
+      //   heading_found(line, dochead_make_headings, heading_match_str, heading_match_rgx, type);
+      // }
       if ((type["make_headings"] == State.on)
       && ((line_occur["para"] == State.off)
       && (line_occur["heading"] == State.off))
       && ((type["para"] == State.off)
       && (type["heading"] == State.off))) {
         /+ heading make set +/
-        heading_make_set(line, line_occur, heading_match_rgx, type);
+        // heading_make_set(line, line_occur, heading_match_rgx, type);
       }
       if (matchFirst(line, rgx.heading)) {
         /+ heading match +/
-        heading_matched(line, line_occur, an_object, lv, collapsed_lev, type, dochead_meta_json);
+        heading_matched(line, line_occur, an_object, lv, collapsed_lev, type);
       } else if (line_occur["para"] == State.off) {
         /+ para match +/
         para_match(line, an_object, indent, bullet, type, line_occur);
@@ -2000,7 +2000,7 @@ auto book_index(
 ** heading or paragraph                                   :heading:paragraph:
 *** heading found                                                   :heading:
 
-#+name: abs_functions
+##+name: abs_functions
 #+BEGIN_SRC d
 auto heading_found(
   char[] line,
@@ -2164,8 +2164,8 @@ auto heading_matched(
   ref string[string] an_object,
   ref int[string] lv,
   ref int[string] collapsed_lev,
-  ref int[string] type,
-  ref JSONValue[string] dochead_meta_json
+  ref int[string] type
+  // ref JSONValue[string] dochead_meta_json
 ) {
   if (auto m = match(line, rgx.heading)) {
     /+ heading match +/
@@ -2179,10 +2179,10 @@ auto heading_matched(
     assertions_doc_structure(an_object, lv); // includes most of the logic for collapsed levels
     switch (an_object["lev"]) {
     case "A":
-      an_object["obj"]=replaceFirst(an_object["obj"], rgx.head_value_title, to!string(dochead_meta_json["title"]["main"]));
-      an_object["obj"]=replaceFirst(an_object["obj"], rgx.head_value_author, to!string(dochead_meta_json["creator"]["author"]));
-      // an_object["obj"]=replaceFirst(an_object["obj"], rgx.head_value_title, to!string(parseJSON(dochead_meta_json["title"]["main"])));
-      // an_object["obj"]=replaceFirst(an_object["obj"], rgx.head_value_author, to!string(parseJSON(dochead_meta_json["creator"]["author"])));
+      // an_object["obj"]=replaceFirst(an_object["obj"], rgx.head_value_title, to!string(dochead_metadata["title"]["main"]));
+      // an_object["obj"]=replaceFirst(an_object["obj"], rgx.head_value_author, to!string(dochead_metadata["creator"]["author"]));
+      // // an_object["obj"]=replaceFirst(an_object["obj"], rgx.head_value_title, to!string(parseJSON(dochead_metadata["title"]["main"])));
+      // // an_object["obj"]=replaceFirst(an_object["obj"], rgx.head_value_author, to!string(parseJSON(dochead_metadata["creator"]["author"])));
       collapsed_lev["h0"] = 1;
       an_object["lev_collapsed_number"] =
         to!string(collapsed_lev["h0"]);
@@ -4039,11 +4039,8 @@ template SiSUdocAbstraction() {
     <<abs_init_struct>>
 
     /+ ↓ abstract marked up document +/
-    auto abstract_doc_source(
-      char[][] markup_sourcefile_content,
-      JSONValue[string] dochead_make_json,
-      JSONValue[string] dochead_meta_json
-    ) {
+    auto abstract_doc_source(char[][] markup_sourcefile_content) {
+
       /+ ↓ abstraction init +/
       <<abs_init_rest>>
       /+ abstraction init ↑ +/
diff --git a/org/ao_header_extract.org b/org/ao_header_extract.org
deleted file mode 100644
index d075c7c..0000000
--- a/org/ao_header_extract.org
+++ /dev/null
@@ -1,438 +0,0 @@
-#+TITLE: sdp header extract
-#+AUTHOR: Ralph Amissah
-#+EMAIL: ralph.amissah@gmail.com
-#+STARTUP: indent
-#+LANGUAGE: en
-#+OPTIONS: H:3 num:nil toc:t \n:nil @:t ::t |:t ^:nil _:nil -:t f:t *:t <:t
-#+OPTIONS: TeX:t LaTeX:t skip:nil d:nil todo:t pri:nil tags:not-in-toc
-#+OPTIONS: author:nil email:nil creator:nil timestamp:nil
-#+PROPERTY: header-args :padline no :exports code :noweb yes
-#+EXPORT_SELECT_TAGS: export
-#+EXPORT_EXCLUDE_TAGS: noexport
-#+FILETAGS: :sdp:niu:ao:
-#+TAGS: assert(a) class(c) debug(d) mixin(m) sdp(s) tangle(T) template(t) WEB(W) noexport(n)
-
-[[./sdp.org][sdp]]  [[./][org/]]
-* header
-
-// mixin SiSUheader;
-// auto set_header = HeaderDocMetadataMakeJson(); // reintroduce
-
-** header document metadata in json                                    :json:
-
-#+name: ao_markup_header_extract
-#+BEGIN_SRC d
-auto header_metadata_and_make_jsonstr(
-  string header,
-  JSONValue[string] dochead_meta,
-  JSONValue[string] dochead_make
-)
-in { }
-body {
-  scope(exit) {
-    destroy(header);
-    destroy(dochead_meta);
-    destroy(dochead_make);
-  }
-  if (auto t = match(header, rgx.head_main)) {
-    char[][] obj_spl = split(
-      cast(char[]) header,
-      rgx.line_delimiter_ws_strip
-    );
-    auto hm = to!string(t.captures[1]);
-    if (match(hm, rgx.main_headers)) {
-      foreach (line; obj_spl) {
-        if (auto m = match(line, rgx.head_main)) {
-          if (!empty(m.captures[2])) {
-            if (hm == "creator") {
-              dochead_meta[hm]["author"].str =
-                to!string(m.captures[2]);
-            } else if (hm == "title") {
-              dochead_meta[hm]["main"].str =
-                to!string(m.captures[2]);
-            } else if (hm == "publisher") {
-              dochead_meta[hm]["name"].str =
-                to!string(m.captures[2]);
-            }
-          }
-        } else if (auto s = match(line, rgx.head_sub)) {
-          if (!empty(s.captures[2])) {
-            auto hs = to!string(s.captures[1]);
-            if ((hm == "make" )
-            && (dochead_make[hm].type() == JSON_TYPE.OBJECT)) {
-              switch (hm) {
-              case "make":
-                if (match(hs, rgx.subhead_make)) {
-                  if (dochead_make[hm][hs].type() == JSON_TYPE.STRING) {
-                    dochead_make[hm][hs].str = to!string(s.captures[2]);
-                  }
-                } else {
-                  writeln("not a valid header type:", hm, ":", hs);
-                  destroy(hm);
-                  destroy(hs);
-                }
-                break;
-              default:
-                break;
-              }
-            } else if (dochead_meta[hm].type() == JSON_TYPE.OBJECT) {
-              switch (hm) {
-              case "creator":
-                if (match(hs, rgx.subhead_creator)) {
-                  if (dochead_meta[hm][hs].type() == JSON_TYPE.STRING) {
-                    dochead_meta[hm][hs].str =
-                      to!string(s.captures[2]);
-                  }
-                } else {
-                  writeln("not a valid header type:", hm, ":", hs);
-                  destroy(hm);
-                  destroy(hs);
-                }
-                break;
-              case "title":
-                if (match(hs, rgx.subhead_title)) {
-                  if ((hs == "subtitle")
-                  && (dochead_meta[hm]["sub"].type() == JSON_TYPE.STRING)) {
-                    dochead_meta[hm]["sub"].str =
-                      to!string(s.captures[2]);
-                  } else if (dochead_meta[hm][hs].type() == JSON_TYPE.STRING) {
-                    dochead_meta[hm][hs].str =
-                      to!string(s.captures[2]);
-                  }
-                } else {
-                  writeln("not a valid header type:", hm, ":", hs);
-                  destroy(hm);
-                  destroy(hs);
-                }
-                break;
-              case "rights":
-                if (match(hs, rgx.subhead_rights)) {
-                  if (dochead_meta[hm][hs].type() == JSON_TYPE.STRING) {
-                    dochead_meta[hm][hs].str =
-                      to!string(s.captures[2]);
-                  }
-                } else {
-                  writeln("not a valid header type:", hm, ":", hs);
-                  destroy(hm);
-                  destroy(hs);
-                }
-                break;
-              case "date":
-                if (match(hs, rgx.subhead_date)) {
-                  if (dochead_meta[hm][hs].type() == JSON_TYPE.STRING) {
-                    dochead_meta[hm][hs].str =
-                      to!string(s.captures[2]);
-                  }
-                } else {
-                  writeln("not a valid header type:", hm, ":", hs);
-                  destroy(hm);
-                  destroy(hs);
-                }
-                break;
-              case "original":
-                if (match(hs, rgx.subhead_original)) {
-                  if (dochead_meta[hm][hs].type() == JSON_TYPE.STRING) {
-                    dochead_meta[hm][hs].str =
-                      to!string(s.captures[2]);
-                  }
-                } else {
-                  writeln("not a valid header type:", hm, ":", hs);
-                  destroy(hm);
-                  destroy(hs);
-                }
-                break;
-              case "classify":
-                if (match(hs, rgx.subhead_classify)) {
-                  if (dochead_meta[hm][hs].type() == JSON_TYPE.STRING) {
-                    dochead_meta[hm][hs].str =
-                      to!string(s.captures[2]);
-                  }
-                } else {
-                  writeln("not a valid header type:", hm, ":", hs);
-                  destroy(hm);
-                  destroy(hs);
-                }
-                break;
-              case "identifier":
-                if (match(hs, rgx.subhead_identifier)) {
-                  if (dochead_meta[hm][hs].type() == JSON_TYPE.STRING) {
-                    dochead_meta[hm][hs].str =
-                      to!string(s.captures[2]);
-                  }
-                } else {
-                  writeln("not a valid header type:", hm, ":", hs);
-                  destroy(hm);
-                  destroy(hs);
-                }
-                break;
-              case "notes":
-                if (match(hs, rgx.subhead_notes)) {
-                  if (dochead_meta[hm][hs].type() == JSON_TYPE.STRING) {
-                    dochead_meta[hm][hs].str =
-                      to!string(s.captures[2]);
-                  }
-                } else {
-                  writeln("not a valid header type:", hm, ":", hs);
-                  destroy(hm);
-                  destroy(hs);
-                }
-                break;
-              case "publisher":
-                if (match(hs, rgx.subhead_publisher)) {
-                  if (dochead_meta[hm][hs].type() == JSON_TYPE.STRING) {
-                    dochead_meta[hm][hs].str =
-                      to!string(s.captures[2]);
-                  }
-                } else {
-                  writeln("not a valid header type:", hm, ":", hs);
-                  destroy(hm);
-                  destroy(hs);
-                }
-                break;
-              case "links":
-                destroy(hm);
-                destroy(hs);
-                // if (match(hs, rgx.subhead_links)) {
-                //   if (dochead_meta[hm][hs].type() == JSON_TYPE.STRING) {
-                //     dochead_meta[hm][hs].str = to!string(s.captures[2]);
-                //   }
-                // } else {
-                //   writeln("not a valid header type:", hm, ":", hs);
-                //   destroy(hm);
-                //   destroy(hs);
-                // }
-                break;
-              default:
-                break;
-              }
-            }
-          }
-        }
-      }
-    } else {
-      writeln("not a valid header type:", hm);
-    }
-  }
-  auto t = tuple(dochead_meta, dochead_make);
-  static assert(!isTypeTuple!(t));
-  return t;
-}
-#+END_SRC
-
-** header extract
-#+name: ao_markup_header_extract
-#+BEGIN_SRC d
-private auto header_extract(
-  char[] line,
-  ref int[string] line_occur,
-  ref string[string] an_object,
-  ref int[string] type
-) {
-  if (matchFirst(line, rgx.header_make)) {
-    /+ matched header_make +/
-    debug(header1) {                          // header
-      // tell_l("yellow", line);
-    }
-    type["header"] = State.on;
-    type["header_make"] = State.on;
-    type["header_meta"] = State.off;
-    ++line_occur["header_make"];
-    an_object["obj"] ~= line ~= "\n";
-  } else if (matchFirst(line, rgx.header_meta)) {
-    /+ matched header_metadata +/
-    debug(header1) {                          // header
-      // tell_l("yellow", line);
-    }
-    type["header"] = State.on;
-    type["header_make"] = State.off;
-    type["header_meta"] = State.on;
-    ++line_occur["header_meta"];
-    an_object["obj"] ~= line ~= "\n";
-  } else if (type["header_make"] == State.on
-  && (line_occur["header_make"] > State.off)) {
-    /+ header_make flag set +/
-    if (matchFirst(line, rgx.header_sub)) {
-      /+ sub-header +/
-      debug(header1) {
-        // tell_l("yellow", line);
-      }
-      // type["header"] = State.on;
-      ++line_occur["header_make"];
-      an_object["obj"] ~= line ~= "\n";
-    }
-  } else if (type["header_meta"] == State.on
-  && (line_occur["header_meta"] > State.off)) {
-    /+ header_metadata flag set +/
-    if (matchFirst(line, rgx.header_sub)) {
-      /+ sub-header +/
-      debug(header1) {
-        // tell_l("yellow", line);
-      }
-      ++line_occur["header_meta"];
-      an_object["obj"] ~= line ~= "\n";
-    }
-  }
-  // return 0;
-  return an_object;
-}
-#+END_SRC
-
-** header array                                                      :header:
-#+name: ao_markup_header_extract
-#+BEGIN_SRC d
-auto header_set_common(
-  ref int[string] line_occur,
-  ref string[string] an_object,
-  ref int[string] type
-) {
-  // line_occur["header"] = State.off;
-  line_occur["header_make"] = State.off;
-  line_occur["header_meta"] = State.off;
-  type["header"] = State.off;
-  // type["header_make"] = State.off;
-  // type["header_meta"] = State.off;
-  an_object.remove("obj");
-  an_object.remove("is");
-  an_object.remove("attrib");
-}
-private auto headerContentJSON(in char[] src_header) {
-  auto type = flags_type_init;
-  type = [
-   "header"          : State.off,
-   "header_make"     : State.off,
-   "header_meta" : State.off,
-  ];
-  string[string] an_object;
-  int[string] line_occur;
-  auto dochead_make = parseJSON(header_make_jsonstr).object;
-  auto dochead_meta = parseJSON(header_meta_jsonstr).object;
-  auto set_header = HeaderDocMetadataMakeJson();
-  char[][] source_header_arr =
-    split(cast(char[]) src_header, rgx.line_delimiter);
-  foreach(header_line; source_header_arr) {
-    if (auto m = matchFirst(header_line, rgx.comment)) {
-      /+ matched comment +/
-      debug(comment) {
-        // tell_l("blue", header_line);
-      }
-      header_set_common(line_occur, an_object, type);
-      // type["header_make"] = State.off;
-      // type["header_meta"] = State.off;
-    } else if ((matchFirst(header_line, rgx.header))
-    || (type["header_make"] == State.on
-    && (line_occur["header_make"] > State.off))
-    || (type["header_meta"] == State.on
-    && (line_occur["header_meta"] > State.off))) {
-      if (header_line.length == 0) {
-        /+ header_make instructions (current line empty) +/
-        auto dochead_metadata_and_make =
-          set_header.header_metadata_and_make_jsonstr(strip(an_object["obj"]), dochead_meta, dochead_make);
-        static assert(!isTypeTuple!(dochead_metadata_and_make));
-        dochead_meta = dochead_metadata_and_make[0];
-        dochead_make = dochead_metadata_and_make[1];
-        header_set_common(line_occur, an_object, type);
-        type["header_make"] = State.off;
-        type["header_meta"] = State.off;
-        writeln(dochead_metadata_and_make);
-      } else {
-        an_object = header_extract(header_line, line_occur, an_object, type);
-      }
-    } else {
-      // writeln(__LINE__);
-    }
-  }
-  auto t = tuple(
-    dochead_make,
-    dochead_meta,
-  );
-  return t;
-}
-#+END_SRC
-
-** +header document metadata+                             :document:metadata:
-*** +within abstraction loop+
-
-**** +line exist: header make+                                    :header:make:
-# #+name: abs_in_loop_body_not_block_obj
-# #+BEGIN_SRC d
-#     } else if (line_occur["header_make"] > State.off) {
-#       /+ header_make +/
-#       // should be caught by sub-header
-#       debug(header) {
-#         tell_l("red", line);
-#       }
-#       an_object["obj"] ~= line ~= "\n";
-#       ++line_occur["header_make"];
-# #+END_SRC
-
-**** +line exist: header metadata+                            :header:metadata:
-# #+name: abs_in_loop_body_not_block_obj
-# #+BEGIN_SRC d
-#     } else if (line_occur["header_meta"] > State.off) {
-#       /+ header_metadata +/
-#       // should be caught by sub-header
-#       debug(header) {                          // para
-#         tell_l("red", line);
-#       }
-#       an_object["obj"] ~= line ~= "\n";
-#       ++line_occur["header_meta"];
-# #+END_SRC
-
-**** +header_make instructions+                      :header:make:instructions:
-
-# #+name: abs_in_loop_body_not_block_obj_line_empty
-# #+BEGIN_SRC d
-# if ((type["header_make"] == State.on)
-# && (line_occur["header_make"] > State.off)) {
-#   /+ header_make instructions (current line empty) +/
-#   auto dochead_metadata_and_make =
-#     set_header.header_metadata_and_make_jsonstr(strip(an_object["obj"]), dochead_meta, dochead_make);
-#   static assert(!isTypeTuple!(dochead_metadata_and_make));
-#   dochead_meta = dochead_metadata_and_make[0];
-#   dochead_make = dochead_metadata_and_make[1];
-#   header_set_common(line_occur, an_object, type);
-#   processing.remove("verse");
-# #+END_SRC
-
-**** +header_metadata+                                        :header:metadata:
-
-# #+name: abs_in_loop_body_not_block_obj_line_empty
-# #+BEGIN_SRC d
-# } else if ((type["header_meta"] == State.on)
-# && (line_occur["header_meta"] > State.off)) {
-#   /+ header_meta (current line empty) +/
-#   auto dochead_metadata_and_make =
-#     set_header.header_metadata_and_make_jsonstr(strip(an_object["obj"]), dochead_meta, dochead_make);
-#   static assert(!isTypeTuple!(dochead_metadata_and_make));
-#   dochead_meta = dochead_metadata_and_make[0];
-#   dochead_make = dochead_metadata_and_make[1];
-#   header_set_common(line_occur, an_object, type);
-#   type["header_make"] = State.off;
-#   type["header_meta"] = State.off;
-#   processing.remove("verse");
-# #+END_SRC
-
-* tangles (code structure)                                           :tangle:
-** ao_markup_header_extract.d:                  :ao_markup_header_extract.d:
-#+BEGIN_SRC d :tangle ../src/sdp/ao_header_extract.d
-/+
-  extract header return json
-+/
-template SiSUheaderExtract() {
-  private import
-    std.exception,
-    std.regex,
-    std.utf,
-    std.conv : to;
-  private import
-    ao_rgx;       // ao_defaults.d
-  struct HeaderDocMetadataMakeJson {
-    mixin SiSUrgxInitFlags;
-    mixin RgxInit;
-    auto rgx = Rgx();
-    enum State { off, on }
-    string hm, hs;
-    <<ao_markup_header_extract>>
-  }
-}
-#+END_SRC
diff --git a/org/ao_output_debugs.org b/org/ao_output_debugs.org
index 6f6a6c8..99a3301 100644
--- a/org/ao_output_debugs.org
+++ b/org/ao_output_debugs.org
@@ -87,7 +87,7 @@ debug(objects) {
 }
 #+END_SRC
 ** (headermakejson)                                             :json:header:
-#+name: ao_output_debugs
+##+name: ao_output_debugs
 #+BEGIN_SRC d
 debug(headermakejson) {
   writefln(
@@ -117,7 +117,7 @@ debug(headermakejson) {
 }
 #+END_SRC
 ** (headermetadatajson)                                         :json:header:
-#+name: ao_output_debugs
+##+name: ao_output_debugs
 #+BEGIN_SRC d
 debug(headermetadatajson) {
   writefln(
@@ -460,8 +460,8 @@ template SiSUoutputDebugs() {
       auto ref const S         contents,
       string[][string][string] bookindex_unordered_hashes,
       JSONValue[]              biblio,
-      JSONValue[string]        dochead_make,
-      JSONValue[string]        dochead_meta,
+      // JSONValue[string]        dochead_make,
+      // JSONValue[string]        dochead_meta,
       string                   fn_src,
       bool[string]             opt_action_bool
     ) {
diff --git a/org/ao_read_source_files.org b/org/ao_read_source_files.org
index 2d41105..05e42ec 100644
--- a/org/ao_read_source_files.org
+++ b/org/ao_read_source_files.org
@@ -13,6 +13,64 @@
 #+TAGS: assert(a) class(c) debug(d) mixin(m) sdp(s) tangle(T) template(t) WEB(W) noexport(n)
 
 [[./sdp.org][sdp]]  [[./][org/]]
+* get config file                                                    :config:
+
+** [#A] read config file, source string                              :string:
+*** config file                                                 :file:config:
+#+name: ao_config_file
+#+BEGIN_SRC d
+final private string readInConfigFile() {
+  // enforce(
+  //   exists(fn_src)!=0,
+  //   "file not found"
+  // );
+  string[] possible_config_path_locations = [
+    environment["PWD"] ~ "/.sisu",
+    environment["PWD"] ~ "/_sisu",
+    environment["HOME"] ~ "/.sisu",
+    "/etc/sisu"
+  ];
+  string conf_sdl = "conf.sdl";
+  string config_file_str;
+  foreach(pth; possible_config_path_locations) {
+    auto conf_file = format(
+      "%s/%s",
+      pth,
+      conf_sdl,
+    );
+    // writeln(conf_file);
+    try {
+      if (exists(conf_file)) {
+        writeln(conf_file);
+        config_file_str = readText(conf_file);
+        break;
+      }
+    }
+    catch (ErrnoException ex) {
+    //// Handle errors
+    // switch(ex.errno) {
+    // case EPERM:
+    // case EACCES:
+    //   // Permission denied
+    //   break;
+    // case ENOENT:
+    //   // File does not exist
+    //   break;
+    // default:
+    //   // Handle other errors
+    //   break;
+    // }
+    }
+    // catch (UTFException ex) {
+    //   // Handle validation errors
+    // }
+    catch (FileException ex) {
+      // Handle errors
+    }
+  }
+  return config_file_str;
+}
+#+END_SRC
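+
+For reference, the intended call site (see the config load block in sdp.org)
+looks roughly like this sketch:
+
+#+BEGIN_SRC d :tangle no
+auto conf = Config();                          // struct wrapping <<ao_config_file>>
+auto configuration = conf.readInConfigFile();  // contents of the first conf.sdl found, as a string
+#+END_SRC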
 
 * get markup source, read file                                :source:markup:
 
@@ -91,6 +149,68 @@ final private char[][] header0Content1(in string src_text) {
 }
 #+END_SRC
 
+** header sdlang
+
+#+name: ao_header_extract_sdl
+#+BEGIN_SRC d
+final private auto headerMakeSDLang(in string src_header) {
+  scope(failure) {
+    stderr.writefln(
+      "%s\n%s\n%s:%s failed here:\n  src_header: %s",
+      __MODULE__, __FUNCTION__,
+      __FILE__, __LINE__,
+      src_header,
+    );
+  }
+  Tag sdl_root_header;
+  try {
+    sdl_root_header = parseSource(src_header);
+  }
+  catch(SDLangParseException e) {
+    stderr.writeln("SDLang problem with this document header:");
+    stderr.writeln(src_header);
+    // Error messages of the form:
+    // myFile.sdl(5:28): Error: Invalid integer suffix.
+    stderr.writeln(e.msg);
+  }
+  debug(sdlang) {
+    // // Value is a std.variant.Algebraic
+    // Value output_dir_structure_by = sdl_root_header.tags["output_dir_structure_by"][0].values[0];
+    // assert(output_dir_structure_by.type == typeid(string));
+    // writeln(output_dir_structure_by);
+
+    // Tag person = sdl_root_header.namespaces["myNamespace"].tags["person"][0];
+    // writeln("Name: ", person.attributes["name"][0].value);
+    //
+    // int age = person.tags["age"][0].values[0].get!int();
+    // writeln("Age: ", age);
+
+    writeln("header SDL:");
+    writeln(sdl_root_header.toSDLDocument());
+  }
+  return sdl_root_header;
+}
+#+END_SRC
+
+** header sdlang                                                       :header:
+#+name: ao_header_extract_sdl
+#+BEGIN_SRC d
+private auto headerSDLang(in char[] src_header) {
+  char[][] source_header_arr =
+    split(cast(char[]) src_header, rgx.line_delimiter);
+  char[] header_clean;
+  foreach(header_line; source_header_arr) {
+    if (!match(header_line, rgx.comments)) {
+      header_clean ~= header_line ~ "\n";
+      // writeln(header_line);
+    }
+  }
+  // writeln(header_clean); // consider
+  auto header_sdlang=headerMakeSDLang(to!string(header_clean));
+  return header_sdlang;
+}
+#+END_SRC
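+
+For reference, the call site in sdp.org looks roughly like this sketch
+(header being the raw document header split off from the markup source):
+
+#+BEGIN_SRC d :tangle no
+auto headsdl = HeaderExtractSDL();
+auto header_sdlang = headsdl.headerSDLang(header);  // comment-stripped header parsed as an sdlang Tag
+#+END_SRC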
+
 ** source line array                                                  :array:
 #+name: ao_markup_source_raw
 #+BEGIN_SRC d
@@ -332,7 +452,7 @@ template SiSUmarkupRaw() {
       auto raw = MarkupRawUnit();
       auto t =
         raw.markupSourceHeaderContentRawLineTupleArray(fn_src, rgx.src_pth);
-      auto header_content_raw = t[0];
+      auto header_raw = t[0];
       auto sourcefile_content = t[1];
       if (match(fn_src, rgx.src_fn_master)) {
         auto ins = Inserts();
@@ -341,16 +461,19 @@ template SiSUmarkupRaw() {
         // auto ins = SiSUdocInserts.Inserts();
       }
       t = tuple(
-        header_content_raw,
+        header_raw,
         sourcefile_content
       );
       return t;
     }
   }
   private
+  struct HeaderExtractSDL {
+    <<ao_header_extract_sdl>>
+  }
   struct MarkupRawUnit {
     private import std.file;
-    enum State { off, on }
+    // enum State { off, on }
     <<ao_markup_source_raw>>
   }
   struct Inserts {
@@ -385,3 +508,52 @@ template SiSUmarkupRaw() {
 INSERTS?
 [[./ao_scan_inserts.org][ao_scan_inserts]]
 WORK AREA
+
+** config files:                                     :ao_read_config_files.d:
+
+#+BEGIN_SRC d :tangle ../src/sdp/ao_read_config_files.d
+/+
+  ao_read_config_files.d
+  - read config files
++/
+template SiSUconfiguration() {
+  private import
+    std.exception,
+    std.format,    // format() used to build the conf file path
+    std.process,   // environment[] used for PWD & HOME lookups
+    // std.regex,
+    std.stdio,
+    std.utf,
+    std.conv : to;
+  // private import
+  //   ao_rgx;       // ao_defaults.d
+  // mixin RgxInit;
+  // auto rgx = Rgx();
+  private
+  struct Config {
+    private import std.file;
+    <<ao_config_file>>
+  }
+}
+#+END_SRC
+
+* figure out
+** break up file here to sisu markup content and header
+
+break up file here to sisu markup content and header
+
+*** header
+take the master or single .sst file and read it in as header until the required heading 0~
+keep the header separate (from content) for extraction of header metadata & make detail
+note: the header may now be either sdlang or the old sisu markup!
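+
+(a purely hypothetical sketch of what an sdlang document header might look
+like, reusing field names from the old json-based header - title/main,
+creator/author - the actual header schema is still to be settled:)
+
+#+BEGIN_SRC text :tangle no
+title {
+  main "Document Title"
+}
+creator {
+  author "Author Name"
+}
+#+END_SRC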
+
+*** content
+from 0~ onward, read in as content
+
+** what
+# #+NAME: sdp_each_file_do
+# #+BEGIN_SRC d
+/+ ↓ read file +/
+// auto conf = MarkupRaw();
+auto conf = Config();
+auto configfile_content =
+  conf.sourceConfig(fn_src);
+# #+END_SRC
diff --git a/org/output.org b/org/output.org
index 3575558..e8187d6 100644
--- a/org/output.org
+++ b/org/output.org
@@ -1537,29 +1537,6 @@ template SiSUoutputHub() {
 }
 #+END_SRC
 
-** dump
-
-template SiSUoutput() {
-  struct SDPoutput {
-    auto html(S)(
-      auto ref const S         contents,
-      JSONValue[string]        dochead_make,
-      JSONValue[string]        dochead_meta,
-      string[][string][string] bookindex_unordered_hashes,
-      JSONValue[]              biblio,
-      string                   fn_src,
-      bool[string]             opt_action_bool
-    ) {
-      mixin RgxInit;
-      mixin ScreenTxtColors;
-      auto rgx = Rgx();
-      <<ao_output_html>>
-        <<ao_output_html_summary>>
-      }
-    }
-  }
-}
-
 ** head
 <!DOCTYPE html>
 <html>
diff --git a/org/sdp.org b/org/sdp.org
index 80b7a45..4d221bb 100644
--- a/org/sdp.org
+++ b/org/sdp.org
@@ -28,7 +28,7 @@ struct Version {
   int minor;
   int patch;
 }
-enum ver = Version(0, 4, 1);
+enum ver = Version(0, 5, 0);
 #+END_SRC
 
 ** pre loop init
@@ -60,7 +60,7 @@ import
   compile_time_info,          // sdp/compile_time_info.d
   ao_abstract_doc_source,     // sdp/ao_abstract_doc_source.d
   ao_defaults,                // sdp/ao_defaults.d
-  ao_header_extract,          // sdp/ao_header_extract.d
+  ao_read_config_files,       // sdp/ao_read_config_files.d
   ao_read_source_files,       // sdp/ao_read_source_files.d
   ao_output_debugs,           // sdp/ao_output_debugs.d
   ao_rgx,                     // sdp/ao_rgx.d
@@ -69,6 +69,16 @@ import
   // std.conv;
 #+END_SRC
 
+**** sdlang                                                      :import:sdlang:
+#+NAME: sdlang_imports_use
+#+BEGIN_SRC d
+/+ sdlang http://sdlang.org +/
+import sdlang;                            // sdlang.d
+  // sdlang.parser,                       // sdlang/parser.d
+  // sdlang.exceptions;                   // sdlang/exceptions.d
+  // // std.conv;
+#+END_SRC
+
 **** std                                                         :import:std:
 #+NAME: sdp_imports
 #+BEGIN_SRC d
@@ -95,8 +105,9 @@ private import
   std.conv : to;
 #+END_SRC
 
-*** sdp output                                                      :output:
-#+NAME: sdp_args
+**** sdp output check selection                                      :output:
+
+#+NAME: sdp_output_selection
 #+BEGIN_SRC d
 struct SDPoutput {
   auto hub(S)(
@@ -163,10 +174,9 @@ mixin(import("version.txt"));
 #+NAME: sdp_args
 #+BEGIN_SRC d
 mixin SiSUheaderSkel;
-mixin SiSUheaderExtract;
 mixin SiSUbiblio;
 mixin SiSUrgxInitFlags;
-// mixin SiSUconfiguration;
+mixin SiSUconfiguration;
 mixin SiSUmarkupRaw;
 mixin SiSUdocAbstraction;
 mixin SiSUoutputDebugs;
@@ -179,7 +189,7 @@ mixin ScreenTxtColors;
 #+NAME: sdp_args
 #+BEGIN_SRC d
 auto raw = MarkupRaw();
-auto head = HeaderDocMetadataMakeJson();
+auto headsdl = HeaderExtractSDL();
 auto abs = Abstraction();
 auto dbg = SDPoutputDebugs();
 auto output = SDPoutput();
@@ -294,6 +304,62 @@ foreach(arg; args) {
 }
 #+END_SRC
 
+*** config files (load)                                        :config:files:
+#+BEGIN_SRC text
+./.sisu ./_sisu ~/.sisu /etc/sisu
+#+END_SRC
+
+#+BEGIN_SRC d
+// string[string] envVars = environment.toAA();
+// writeln(envVars);
+/+
+writefln(
+  "pwd: %s; home: %s",
+  environment["PWD"],
+  environment["HOME"]
+);
++/
+#+END_SRC
+
+**** config load
+#+NAME: sdp_config_files
+#+BEGIN_SRC d
+auto conf = Config();
+auto configuration = conf.readInConfigFile();
+#+END_SRC
+
+**** config read
+
+#+NAME: sdp_config_files
+#+BEGIN_SRC d
+/+ sdlang config +/
+Tag sdl_root_conf;
+try {
+  sdl_root_conf = parseSource(configuration);
+}
+catch(SDLangParseException e) {
+  stderr.writeln("SDLang problem with config.sdl content");
+  // Error messages of the form:
+  // myFile.sdl(5:28): Error: Invalid integer suffix.
+  stderr.writeln(e.msg);
+}
+debug(sdlang) {
+  // Value is a std.variant.Algebraic
+  Value output_dir_structure_by = sdl_root_conf.tags["output_dir_structure_by"][0].values[0];
+  assert(output_dir_structure_by.type == typeid(string));
+  writeln(output_dir_structure_by);
+
+  // Tag person = sdl_root_conf.namespaces["myNamespace"].tags["person"][0];
+  // writeln("Name: ", person.attributes["name"][0].value);
+  //
+  // int age = person.tags["age"][0].values[0].get!int();
+  // writeln("Age: ", age);
+
+  writeln("config SDL:");
+  writeln(sdl_root_conf.toSDLDocument());
+}
+#+END_SRC
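+
+For reference, a minimal conf.sdl that would satisfy the debug read above
+might contain something like the following (the tag name comes from the code
+above, the value is only an assumed example):
+
+#+BEGIN_SRC text :tangle no
+output_dir_structure_by "language"
+#+END_SRC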
+
 ** each file (loop) [+2]                                         :loop:files:
 *** filename provided [+1]                                     :file:process:
 **** loop scope                                                       :scope:
@@ -342,18 +408,15 @@ debug(header_and_content) {
 **** [#A] read doc header: metadata & make         :doc:header:metadata:make:
 #+NAME: sdp_each_file_do
 #+BEGIN_SRC d
-/+ ↓ headers metadata & make +/
-auto header_content = head.headerContentJSON(header);
-static assert(!isTypeTuple!(header_content));
-auto dochead_make_json = header_content[0];
-auto dochead_meta_json = header_content[1];
+/+ ↓ headers metadata & make sdlang +/
+auto header_sdlang = headsdl.headerSDLang(header);
 #+END_SRC
 
 **** [#A] processing: document abstraction, tuple                :processing:
 #+NAME: sdp_each_file_do
 #+BEGIN_SRC d
/+ ↓ process document, return abstraction as tuple +/
-auto t = abs.abstract_doc_source(sourcefile_content, dochead_make_json, dochead_meta_json);
+auto t = abs.abstract_doc_source(sourcefile_content);
 static assert(!isTypeTuple!(t));
 auto doc_ao_contents = t[0]; // contents ~ endnotes ~ bookindex;
 // static assert(!isIterable!(doc_ao_contents));
@@ -372,8 +435,8 @@ debug(checkdoc) { // checkbook & dumpdoc
     doc_ao_contents,
     doc_ao_bookindex_unordered_hashes,
     doc_ao_biblio,
-    dochead_make_json,
-    dochead_meta_json,
+    // doc_ao_make_json,
+    // doc_ao_metadata_json,
     fn_src,
     opt_action_bool
   );
@@ -436,13 +499,16 @@ break;
   sdp.d
 +/
 <<sdp_imports_use>>
+<<sdlang_imports_use>>
 <<sdp_imports>>
+<<sdp_output_selection>>
 <<sdp_version_mixin>>
 mixin CompileTimeInfo;
 mixin RgxInit;
 void main(string[] args) {
   <<sdp_compilation>>
   <<sdp_args>>
+  <<sdp_config_files>>
   foreach(fn_src; fns_src) {
   // foreach(fn_src; fns_src) {
     if (!empty(fn_src)) {
@@ -505,3 +571,97 @@ figure out best program dir structure for dub and compilers, issue with rdmd
 |---------------------+------------------------------------------+------------------------+--------|
 | metadata            |                                          | (from regular header)  | output |
 |---------------------+------------------------------------------+------------------------+--------|
+
+** config                                                            :config:
+using sdlang in sdp
+*** sdp config and header? file format? sdl ? yml ? json ?       :sdl:sdlang:
+
+[[https://sdlang.org/][SDL: Simple Declarative Language]]  [[http://sdl4r.rubyforge.org/syntaxhighlighter_brush.html][highlighter]]
+https://github.com/Abscissa/SDLang-D
+https://github.com/Abscissa/SDLang-D/blob/master/HOWTO.md
+
+**** build/ compile
+
+The recommended way to use SDLang-D is via DUB. Just add a dependency on
+sdlang-d to your project's dub.json or dub.sdl file (see the sketch below),
+then build your project with DUB as usual. dub package:
+http://code.dlang.org/packages/sdlang-d
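+
+A minimal dub.sdl dependency entry might look like the following sketch (the
+version constraint is an assumption, check code.dlang.org for the current
+release):
+
+#+BEGIN_SRC text :tangle no
+dependency "sdlang-d" version="~>0.9.3"
+#+END_SRC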
+
+
+Alternatively, you can git clone both SDLang-D and the latest version of
+libInputVisitor,
+
+#+BEGIN_SRC sh :tangle no
+git clone https://github.com/Abscissa/SDLang-D
+git clone https://github.com/abscissa/libInputVisitor
+#+END_SRC
+
+ and when running the compiler include:
+
+#+BEGIN_SRC sh :tangle no
+ -I{path to SDLang-D}/src -I{path to libInputVisitor}/src
+#+END_SRC
+
+**** Importing
+
+To use SDL, first import the module sdlang:
+
+#+BEGIN_SRC d :tangle no
+import sdlang;
+#+END_SRC
+
+If you're not using DUB, then you must also include the path to the SDLang-D sources when you compile:
+
+#+BEGIN_SRC sh :tangle no
+rdmd --build-only -I{path to sdlang}/src -I{path to libInputVisitor}/src {other flags} yourProgram.d
+#+END_SRC
+
+**** misc
+http://forum.dlang.org/thread/hphtqkkmrfnlcipnxzai@forum.dlang.org
+http://forum.dlang.org/thread/gnfctbuhiemidetngrzi@forum.dlang.org?page=23#post-rlxlfveyyzgewhkxhhta:40forum.dlang.org
+
+*** other links
+http://semitwist.com/sdlang-d-docs/v0.9.3/sdlang.html  http://semitwist.com/sdlang-d-docs/
+
+** read markup files
+**** regular .sst
+relatively straightforward
+**** master .ssm
+master files have been able to read in insert files (.ssi) and regular files (.sst)
+***** reading in .ssi files is straightforward
+***** reading in .sst files is more problematic
+.sst files have their own root (structure)
+either
+- the root needs to be disabled - not used
+or
+- the root tree needs to be demoted, which is only possible if heading level D
+  is not reached in the imported markup; A - C could then be demoted to B - D
+  (see the sketch below)
+- the other issue is that it is common not to write out heading level A text
+  but to rely on the metadata for title and author; at present the header of
+  an imported .sst file is simply lopped off and thrown away, so at least the
+  title and author information for each imported .sst file would have to be
+  read and made available for use in its heading A once that is demoted to B
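+
+(a sketch of the demotion described above:)
+
+#+BEGIN_SRC text :tangle no
+imported .sst heading    demoted within the .ssm master
+A~                       B~
+B~                       C~
+C~                       D~
+(only workable if the imported .sst does not itself reach heading level D)
+#+END_SRC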
+
+** processing files, currently using utf8
+** src dir structure & files
+#+BEGIN_SRC txt :tangle no
+tree  /home/ralph/sisu_www/current/src/democratizing_innovation.eric_von_hippel.sst
+
+/home/ralph/sisu_www/current/src/
+democratizing_innovation.eric_von_hippel.sst
+└── sisupod
+    ├── doc
+    │   ├── en
+    │   │   └── democratizing_innovation.eric_von_hippel.sst
+    │   └── _sisu
+    │       └── sisu_document_make  // [interesting as part of larger conf.sdl]
+    └── image
+        ├── di_evh_f10-1.png
+        ├── di_evh_f11-1.png
+        ├── di_evh_f11-2.png
+        ├── di_evh_f1-1.png
+        ├── di_evh_f5-1.png
+        └── di_evh.png
+
+#+END_SRC