990 lines
41 KiB
C#

using DynamicBible.Schemas;
using DynamicBibleUtility.Geolocation;
using JMW.Extensions.String;
using SF.Snowball.Ext;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Windows.Forms;
using System.Xml;
using System.Xml.Linq;
using VAE.Common.Serialization;
namespace DynamicBibleUtility
{
public partial class frmMain : Form
{
// Distinct stemmed word forms encountered while building the search index.
private readonly HashSet<string> _stems = new HashSet<string>();
// Distinct original (lower-cased) words that have been indexed.
private readonly HashSet<string> _words = new HashSet<string>();
// Words first seen starting with an upper-case letter (candidate proper nouns).
private readonly HashSet<string> _capitals = new HashSet<string>();
// Words seen starting with a lower-case letter.
private readonly HashSet<string> _lowercase = new HashSet<string>();
// Search index mapping each stem to its "book:chapter:verse" references.
private readonly Index _idx = new Index();
// Word -> stem pairs, serialized to word_to_stem_idx.json.
private readonly List<WordToStem> _wsIdx = new List<WordToStem>();
/// <summary>
/// One English word together with the set of "book:chapter:verse" references
/// where it renders a given Strong's number. Equality is by Word only, so
/// instances can be de-duplicated per word inside a HashSet/Dictionary.
/// </summary>
private class StrongCrossReference
{
    public string Word { get; set; }
    public HashSet<string> Refs { get; set; } = new HashSet<string>();
    public override int GetHashCode()
    {
        // Null-safe: Word is assigned after construction, so an uninitialized
        // instance must not throw when hashed.
        return Word?.GetHashCode() ?? 0;
    }
    public override bool Equals(object obj)
    {
        // Pattern match instead of blind cast: the previous
        // (obj as StrongCrossReference).Word dereference threw a
        // NullReferenceException for null or foreign-typed arguments.
        var other = obj as StrongCrossReference;
        return other != null && string.Equals(Word, other.Word);
    }
}
// Strong's number -> (word -> cross reference) for Hebrew entries (books 1-39, see ProcessText).
private Dictionary<string, Dictionary<string, StrongCrossReference>> hebCrossRefs = new Dictionary<string, Dictionary<string, StrongCrossReference>>();
// Strong's number -> (word -> cross reference) for Greek entries (books 40+).
private Dictionary<string, Dictionary<string, StrongCrossReference>> grkCrossRefs = new Dictionary<string, Dictionary<string, StrongCrossReference>>();
/// <summary>Initializes the main utility form and its designer-generated controls.</summary>
public frmMain()
{
    InitializeComponent();
}
// Stop words: very common words deliberately excluded from the search index.
private readonly List<string> _exclusions = new List<string> { "us", "these", "her", "saith", "shalt", "let", "do", "your", "we", "no", "go", "if", "at", "an", "so", "before", "also", "on", "had", "you", "there", "then", "up", "by", "upon", "were", "are", "this", "when", "thee", "their", "ye", "will", "as", "thy", "my", "me", "have", "from", "was", "but", "which", "thou", "all", "it", "with", "them", "him", "they", "is", "be", "not", "his", "i", "shall", "a", "for", "unto", "he", "in", "to", "that", "of", "and", "the" };
// Punctuation characters trimmed from both ends of a token before indexing.
private readonly char[] _trims = { '\'', ',', ':', ';', '"', '?', '.', '[', ']', '{', '}', '<', '>', '!', '@', '#', '$', '%', '^', '&', '*', '(', ')', '-', '_', '=', '+' };
// Used to detect capitalized (potential proper-noun) words.
private readonly HashSet<char> _uppers = new HashSet<char> { 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z' };
// Separator used when building "book:chapter:verse" reference strings.
private readonly string _referenceDelimiter = ":";
public delegate void UpdateStatusDelegate(string status);
/// <summary>
/// Appends a message to the status text box, marshaling onto the UI thread
/// when called from one of the background worker threads.
/// </summary>
/// <param name="statusMsg">Text to append (callers include their own newlines).</param>
private void UpdateStatus(string statusMsg)
{
    if (!this.txtStatus.InvokeRequired)
    {
        this.txtStatus.AppendText(statusMsg);
        this.txtStatus.ScrollToCaret();
        return;
    }
    // Re-invoke this same method on the UI thread.
    this.txtStatus.Invoke(new UpdateStatusDelegate(UpdateStatus), statusMsg);
}
#region Index
// One row of word_to_stem_idx.json. Field names are intentionally terse
// ("w" = word, "s" = stem) to keep the serialized JSON small — do not rename.
private class WordToStem
{
    public string w = ""; // original word, lower-cased
    public string s = ""; // stemmed form of the word
}
/// <summary>
/// Normalizes one word token and records it in the search index under its
/// stemmed form, keyed to the given book/chapter/verse reference. Stop words
/// and empty tokens are skipped; original casing is tracked so proper nouns
/// can be distinguished later (see CreateText).
/// </summary>
/// <param name="s">Raw word token, possibly carrying surrounding punctuation.</param>
/// <param name="bk">Book number as a string.</param>
/// <param name="ch">Chapter number as a string.</param>
/// <param name="vs">Verse number as a string.</param>
private void AddWordToIndex(string s, string bk, string ch, string vs)
{
    // Strip punctuation from both ends (single Trim replaces the old
    // TrimEnd+TrimStart pair) and drop embedded apostrophes.
    var cased = s.Trim().Trim(this._trims).Replace("'", "");
    s = cased.ToLower();
    // Remember whether the word first appeared capitalized or lower-cased.
    if (cased.Length > 0 && this._uppers.Contains(cased[0]))
        this._capitals.Add(cased);
    else if (cased.Length > 0)
        this._lowercase.Add(cased);
    if (s == "" || this._exclusions.Contains(s))
        return;
    var original = s;
    var st = new EnglishStemmer();
    st.SetCurrent(s);
    if (st.Stem())
    {
        s = st.GetCurrent();
    }
    if (!this._words.Contains(original))
    {
        this._wsIdx.Add(new WordToStem { s = s, w = original });
        this._words.Add(original);
    }
    // Build the reference once (the old code re-concatenated it at each use)
    // and use the shared delimiter field for consistency with CreateText.
    var reference = bk + this._referenceDelimiter + ch + this._referenceDelimiter + vs;
    // add the word to the index
    if (!this._stems.Contains(s))
    {
        this._stems.Add(s);
        var i = new IndexItem { w = s };
        i.r.Add(reference);
        this._idx.Add(i);
    }
    else
    {
        IndexItem i = this._idx.GetItem(s);
        if (!i.r.Contains(reference))
        {
            i.r.Add(reference);
        }
    }
}
/// <summary>
/// Resets index state, clears the status box, and launches the indexing
/// work on a background STA thread (the dialogs require STA).
/// </summary>
private void CreateIndex_Click(object sender, EventArgs e)
{
    this._idx.Clear();
    this._stems.Clear();
    this.txtStatus.Text = "";
    var worker = new Thread(CreateIndex) { IsBackground = true };
    worker.SetApartmentState(ApartmentState.STA);
    worker.Start();
}
/// <summary>
/// Prompts for a bible XML file, indexes every word via PopulateIndex, then
/// prompts for an output folder and writes the sorted index as chunked JSON
/// files (50 stems per file), plus word_to_stem_idx.json and a raw word_list.
/// </summary>
private void CreateIndex()
{
    var ofd = new OpenFileDialog();
    if (ofd.ShowDialog() == DialogResult.OK)
    {
        var b = (XMLBIBLE)XML.GetData(typeof(XMLBIBLE), ofd.FileName);
        // to index, you need to iterate through every word in the bible.
        PopulateIndex(b);
        File.WriteAllLines("word_list", this._stems);
        var fbd = new FolderBrowserDialog();
        if (fbd.ShowDialog() == DialogResult.OK)
        {
            this._idx.Sort(CompareIndexByWord);
            this._wsIdx.Sort((x, y) => x.w.CompareTo(y.w));
            File.WriteAllText(fbd.SelectedPath + @"\word_to_stem_idx.json", JSON.Serialize(this._wsIdx));
            var tmp = new Index();
            int i;
            for (i = 0; i < this._idx.Count; i++)
            {
                // Flush a chunk every 50 entries and at the final entry; each
                // chunk file is named after its last (alphabetically greatest)
                // word so the app can binary-search the right file.
                if (i % 50 == 49 || i == this._idx.Count - 1)
                {
                    tmp.Add(this._idx[i]);
                    // Status output doubles as a javascript snippet listing chunk boundary words.
                    UpdateStatus("words.unshift('" + this._idx[i].w + "');\r\n");
                    File.WriteAllText(fbd.SelectedPath + @"\" + this._idx[i].w + "idx.json", JSON.Serialize(tmp));
                    tmp.Clear();
                }
                else
                {
                    tmp.Add(this._idx[i]);
                }
            }
        }
    }
}
/// <summary>
/// Walks every word of the deserialized bible — plain verse text, "gr"
/// (Strong's-tagged) runs, and style-wrapped runs — and feeds each token to
/// AddWordToIndex. Progress is reported per book.
/// </summary>
/// <param name="b">Deserialized bible document.</param>
private void PopulateIndex(XMLBIBLE b)
{
    foreach (XMLBIBLE_BOOK bk in b.BIBLEBOOKS)
    {
        foreach (XMLBIBLE_CHAPTER ch in bk.CHAPTERS)
        {
            foreach (XMLBIBLE_VERSES vs in ch.VERSES)
            {
                // The reference parts are identical for every word in the
                // verse; convert them once instead of per token.
                var bkNum = bk.bnumber.ToString();
                var chNum = ch.cnumber.ToString();
                var vsNum = vs.vnumber.ToString();
                if (vs.Items != null)
                {
                    foreach (var w in vs.Items)
                    {
                        // for each word, add an entry.
                        if (w.GetType() == typeof(XMLBIBLE_GR))
                        {
                            IndexTextRuns(((XMLBIBLE_GR)w).Text, bkNum, chNum, vsNum);
                        }
                        else if (w.GetType() == typeof(XMLBIBLE_STYLE_ITEM))
                        {
                            var o = (XMLBIBLE_STYLE_ITEM)w;
                            IndexTextRuns(o.Text, bkNum, chNum, vsNum);
                            IndexTextRuns(o.gr?.Text, bkNum, chNum, vsNum);
                            if (o.STYLE == null) continue;
                            foreach (XMLBIBLE_STYLE_STYLE so in o.STYLE)
                            {
                                IndexTextRuns(so.Text, bkNum, chNum, vsNum);
                                // Nested style "gr" carries a single string value
                                // rather than an array of text runs.
                                if (so.gr?.Value == null) continue;
                                foreach (var s in so.gr.Value.Split(' '))
                                    AddWordToIndex(s, bkNum, chNum, vsNum);
                            }
                        }
                    }
                }
                if (vs.Text == null) continue;
                foreach (var w in vs.Text)
                {
                    foreach (var s in w.Split(' '))
                    {
                        if (s != null && s.Trim() != "")
                            AddWordToIndex(s, bkNum, chNum, vsNum);
                    }
                }
            }
        }
        UpdateStatus("Indexing Book: " + bk.bnumber + ", Word Count: " + this._stems.Count + "\r\n");
    }
}
/// <summary>
/// Splits each text run on spaces and indexes every word at the given
/// reference. Tolerates a null run collection (treated as empty).
/// </summary>
private void IndexTextRuns(IEnumerable<string> texts, string bk, string ch, string vs)
{
    if (texts == null) return;
    foreach (var t in texts)
    {
        foreach (var s in t.Split(' '))
            AddWordToIndex(s, bk, ch, vs);
    }
}
#endregion Index
#region Sorters
/// <summary>Orders index items alphabetically by their word.</summary>
private static int CompareIndexByWord(IndexItem x, IndexItem y) => x.w.CompareTo(y.w);
/// <summary>Orders index items by how many verse references they hold.</summary>
private static int CompareIndexByCount(IndexItem x, IndexItem y) => x.r.Count.CompareTo(y.r.Count);
#endregion Sorters
#region Bible
/// <summary>
/// Clears the status box and runs the bible-text JSON export on a
/// background STA thread (required for the file dialogs).
/// </summary>
private void btnCreateText_Click(object sender, EventArgs e)
{
    this.txtStatus.Text = "";
    var worker = new Thread(CreateText) { IsBackground = true };
    worker.SetApartmentState(ApartmentState.STA);
    worker.Start();
}
// Chapter number plus its last verse number.
// NOTE(review): appears unused in the visible portion of this file — confirm before removing.
private class ChapterRecord
{
    public int Number { get; set; }
    public int Last { get; set; }
}
// Summary row written to books.json by CreateText.
// NOTE(review): ShortName/LongName/LastChapter are never assigned in this file —
// presumably filled elsewhere or serialized as defaults; verify.
private class BookRecord
{
    public int Number { get; set; }
    public string ShortName { get; set; } = string.Empty;
    public string LongName { get; set; } = string.Empty;
    public int LastChapter { get; set; }
    // Index 0 is a placeholder; entry c holds the last verse number of chapter c.
    public List<int> Chapters { get; set; } = new List<int>();
}
// Sentence-ending punctuation, used to decide whether a verse continues the previous sentence.
private HashSet<string> endPunc = new HashSet<string> { ".", "?", "!" };
/// <summary>
/// Prompts for a bible XML file and writes one JSON file per chapter
/// ("bk-ch.json"), a books.json summary, and Strong's cross-reference files
/// (crheb*.json / crgrk*.json) accumulated as a side effect of ProcessText.
/// </summary>
private void CreateText()
{
    // iterate through text, output json format.
    var ofd = new OpenFileDialog();
    var bbl = new List<Book>();
    var book_records = new List<BookRecord>();
    if (ofd.ShowDialog() == DialogResult.OK)
    {
        var doc = XDocument.Load(ofd.FileName);
        // PopulateIndex is run for its side effects here: it fills the
        // _capitals/_lowercase sets used for sentence-case fixes below.
        var b = (XMLBIBLE)XML.GetData(typeof(XMLBIBLE), ofd.FileName);
        PopulateIndex(b);
        // Update the capital-word list: a word that also occurs lower-cased is
        // not a proper noun, so remove its capitalized variant.
        foreach (var word in this._lowercase)
        {
            var q = word[0].ToString().ToUpper() + word.Substring(1);
            if (this._capitals.Contains(q))
                this._capitals.Remove(q);
        }
        foreach (XNode n in doc.Root.Nodes())
        {
            if (n.NodeType != XmlNodeType.Element) continue;
            var el = (XElement)n;
            if (el.Name != "BIBLEBOOK") continue;
            var bk = new Book { bk = Convert.ToInt32(el.FirstAttribute.Value) };
            var br = new BookRecord { Number = bk.bk };
            book_records.Add(br);
            // Placeholder so Chapters[c] lines up with chapter number c.
            br.Chapters.Add(0);
            foreach (XElement chn in el.Nodes())
            {
                var ch = new Chapter { ch = Convert.ToInt32(chn.FirstAttribute.Value) };
                var last = 0;
                // Trailing text of the previous verse, used to detect whether the
                // next verse continues the same sentence.
                var last_vs_w = string.Empty;
                foreach (XElement vs in chn.Nodes())
                {
                    var v = new Verse { v = Convert.ToInt32(vs.FirstAttribute.Value) };
                    last = v.v;
                    var first = true;
                    foreach (XNode o in vs.Nodes())
                    {
                        List<Text> words = ProcessText(o, $"{bk.bk}{_referenceDelimiter}{ch.ch}{_referenceDelimiter}{v.v}");
                        // handle lowercasing the word if its not the start of a sentence
                        if (
                            words.Count > 0 &&
                            first &&
                            last_vs_w.Length > 0
                            && !this.endPunc.Contains(last_vs_w.Last().ToString()))
                        {
                            var x = words.First().t;
                            var fw = x.ParseToIndexOf(" ");
                            // Only lowercase if the first word is not a known proper noun.
                            if (fw.Length > 0 && !this._capitals.Contains(fw))
                            {
                                x = x.Substring(0, 1).ToLower() + x.Substring(1);
                                words.First().t = x;
                            }
                        }
                        first = false;
                        v.w.AddRange(words);
                        // If the newest fragment starts with punctuation, it belongs to
                        // the previous fragment; glue it on so punctuation never leads a word.
                        if (v.w.Count > 1 &&
                            (
                                v.w.Last().t.StartsWith("?")
                                || v.w.Last().t.StartsWith(";")
                                || v.w.Last().t.StartsWith(":")
                                || v.w.Last().t.StartsWith(".")
                                || v.w.Last().t.StartsWith(",")
                                || v.w.Last().t.StartsWith("!")
                                || v.w.Last().t.StartsWith("-")
                                || v.w.Last().t.StartsWith("'")
                            )
                        )
                        {
                            Text prev = v.w[v.w.Count - 2];
                            Text curr = v.w.Last();
                            if (curr.t.StartsWith("-") || curr.t.StartsWith("'"))
                            {
                                // Hyphen/apostrophe joins whole words, not just the punctuation mark.
                                if (curr.t.Trim().Contains(" "))
                                {
                                    prev.t += curr.t.Trim().Substring(0, curr.t.Trim().IndexOf(" "));
                                    curr.t = curr.t.Trim().Substring(curr.t.Trim().IndexOf(" ")); // you want to join the two words.
                                }
                                else
                                {
                                    prev.t += curr.t.Trim();
                                    curr.t = ""; // you want to join the two words.
                                }
                            }
                            else
                            {
                                // Move just the leading punctuation character onto the previous word.
                                prev.t += curr.t.Substring(0, 1);
                                curr.t = curr.t.Substring(1).Trim();
                            }
                            if (curr.t.Trim().Length == 0)
                                v.w.Remove(curr);
                        }
                        // NOTE(review): assumes every processed node leaves at least one
                        // word in v.w — Last() throws on an empty list; confirm inputs.
                        last_vs_w = v.w.Last().t;
                    }
                    ch.vss.Add(v);
                }
                bk.chs.Add(ch);
                br.Chapters.Add(last);
                // Strip empty strongs fields from the serialized chapter to shrink the file.
                File.WriteAllText(bk.bk + "-" + ch.ch + ".json", JSON.Serialize(ch).Replace(",\"s\":\"\"", ""));
            }
            bbl.Add(bk);
            UpdateStatus("Book: " + bk.bk + "\r\n");
        }
        // finished.
        File.WriteAllText("books.json", JSON.Serialize(book_records));
        // Export the Hebrew cross references gathered by ProcessText, flushing a
        // file per block of 100 Strong's numbers.
        var lst = this.hebCrossRefs.OrderBy(kvp => int.Parse(kvp.Key)).ToList();
        var Strongs = new List<Strongs>();
        var last_s = 0;
        for (var i = 0; i < lst.Count; i++)
        {
            var k = lst[i].Key;
            var s = new Strongs
            {
                id = "H" + k,
                t = "heb",
                ss = lst[i].Value.Values.Select(cr =>
                {
                    return new StrongRef
                    {
                        w = cr.Word,
                        rs = cr.Refs.Select(br => new BibleRef
                        {
                            r = br
                        }).ToList()
                    };
                }).OrderBy(o => o.w).ToList()
            };
            Strongs.Add(s);
            if (int.Parse(k) / 100 > last_s)
            {
                last_s = int.Parse(k) / 100;
                File.WriteAllText("cr" + s.t + "" + last_s + ".json", JSON.Serialize(Strongs));
                Strongs = new List<Strongs>();
                UpdateStatus("Set: " + last_s + "\r\n");
            }
        }
        // Flush whatever remains past the last full block of 100.
        File.WriteAllText("crheb" + (last_s + 1) + ".json", JSON.Serialize(Strongs));
        UpdateStatus("Set: " + (last_s + 1) + "\r\n");
        // Same export for the Greek cross references.
        lst = this.grkCrossRefs.OrderBy(kvp => int.Parse(kvp.Key)).ToList();
        Strongs = new List<Strongs>();
        last_s = 0;
        for (var i = 0; i < lst.Count; i++)
        {
            var k = lst[i].Key;
            var s = new Strongs
            {
                id = "G" + lst[i].Key,
                t = "grk",
                ss = lst[i].Value.Values.Select(cr =>
                {
                    return new StrongRef
                    {
                        w = cr.Word,
                        rs = cr.Refs.Select(br => new BibleRef
                        {
                            r = br
                        }).ToList()
                    };
                }).OrderBy(o => o.w).ToList()
            };
            Strongs.Add(s);
            if (int.Parse(k) / 100 > last_s)
            {
                last_s = int.Parse(k) / 100;
                File.WriteAllText("cr" + s.t + "" + last_s + ".json", JSON.Serialize(Strongs));
                Strongs = new List<Strongs>();
                UpdateStatus("Set: " + last_s + "\r\n");
            }
        }
        File.WriteAllText("crgrk" + (last_s + 1) + ".json", JSON.Serialize(Strongs));
        UpdateStatus("Set: " + (last_s + 1) + "\r\n");
    }
}
/// <summary>
/// Converts one XML node from a verse into Text entries. "gr" elements also
/// record Strong's-number cross references into hebCrossRefs/grkCrossRefs as
/// a side effect; "style" wrappers are flattened recursively.
/// </summary>
/// <param name="o">An XElement ("gr" or "style") or an XText node.</param>
/// <param name="location">"book:chapter:verse" reference of the node.</param>
/// <returns>The Text entries produced from the node.</returns>
/// <exception cref="Exception">Thrown when the node type or element name is unrecognized.</exception>
private List<Text> ProcessText(object o, string location)
{
    var Ts = new List<Text>();
    if (o.GetType() == typeof(XElement))
    {
        var el = (XElement)o;
        if (el.Name == "gr")
        {
            var strongs_number = el.FirstAttribute.Value;
            var text_value = el.Value.Trim();
            // tv is invariant across the split loop below, so compute it once.
            var tv = text_value.ToLower();
            // Log once per occurrence when one word carries several Strong's
            // numbers (previously this wrote one duplicate line per number).
            if (strongs_number.Contains(" "))
            {
                File.AppendAllLines("errata.txt", new List<string> { "Multiple strongs numbers found: " + location + ", " + tv });
            }
            // Books 1-39 are the Old Testament (Hebrew); everything after is Greek.
            Dictionary<string, Dictionary<string, StrongCrossReference>> crossRefs =
                int.Parse(location.ParseToIndexOf(_referenceDelimiter)) < 40
                    ? this.hebCrossRefs
                    : this.grkCrossRefs;
            foreach (var val in strongs_number.Split(' '))
            {
                var sn = val.Trim('*', ' ');
                // TryGetValue avoids the ContainsKey+indexer double lookups.
                Dictionary<string, StrongCrossReference> byWord;
                if (!crossRefs.TryGetValue(sn, out byWord))
                {
                    byWord = new Dictionary<string, StrongCrossReference>();
                    crossRefs.Add(sn, byWord);
                }
                StrongCrossReference cr;
                if (!byWord.TryGetValue(tv, out cr))
                {
                    cr = new StrongCrossReference();
                    byWord.Add(tv, cr);
                }
                cr.Word = tv;
                cr.Refs.Add(location);
            }
            Ts.Add(new Text(text_value, strongs_number));
        }
        else if (el.Name.ToString().ToLower() == "style")
        {
            // Style wrappers just contain more text/gr children; flatten them.
            foreach (object n in el.Nodes())
            {
                Ts.AddRange(ProcessText(n, location));
            }
        }
        else
        {
            throw new Exception("Unknown Element");
        }
    }
    else if (o.GetType() == typeof(XText))
    {
        var t = ((XText)o).Value.Trim();
        Ts.Add(new Text(t));
    }
    else
    {
        throw new Exception("Unknown Element");
    }
    return Ts;
}
#endregion Bible
#region Strongs
/// <summary>
/// Clears the status box and runs the Strong's export on a background
/// STA thread (required for the file dialog).
/// </summary>
private void btnCreateStrongs_Click(object sender, EventArgs e)
{
    this.txtStatus.Text = "";
    var worker = new Thread(CreateStrongs) { IsBackground = true };
    worker.SetApartmentState(ApartmentState.STA);
    worker.Start();
}
/// <summary>
/// Prompts for a Strong's dictionary XML file and writes chunked JSON files
/// (crheb*.json for "H" entries, crgrk*.json for "G" entries), flushing one
/// file per block of 100 Strong's numbers.
/// </summary>
private void CreateStrongs()
{
    // iterate through text, output json format.
    var ofd = new OpenFileDialog();
    var bbl = new List<Book>();
    if (ofd.ShowDialog() == DialogResult.OK)
    {
        var doc = XDocument.Load(ofd.FileName);
        var Strongs = new List<Strongs>();
        // "first" is true until the first Greek entry; at that boundary the
        // remaining Hebrew entries are flushed and counters reset.
        var first = true;
        var last = 0;
        foreach (XNode n in doc.Root.Nodes())
        {
            if (n.NodeType == XmlNodeType.Element)
            {
                var el = (XElement)n;
                if (el.Name == "item")
                {
                    var sr = new Strongs
                    {
                        id = el.FirstAttribute.Value
                    };
                    // Numeric part of the id ("H123" -> 123), used for chunking.
                    var c = Convert.ToInt32(sr.id.Substring(1));
                    if (sr.id.Contains("H")) sr.t = "heb";
                    else
                    {
                        if (first)
                        {
                            // Hebrew -> Greek transition: flush the Hebrew tail.
                            first = false;
                            File.WriteAllText("crheb" + (last + 1) + ".json", JSON.Serialize(Strongs));
                            Strongs = new List<Strongs>();
                            // NOTE(review): elsewhere "last" holds c/100 (a chunk index),
                            // but here it is set to c/100*100 — harmless only because the
                            // Greek section starts near G1 where both are 0; confirm.
                            last = c / 100 * 100;
                            UpdateStatus("Set: " + (last + 1) + "\r\n");
                        }
                        sr.t = "grk";
                    }
                    foreach (XElement chn in el.Nodes())
                    {
                        if (chn.Name == "title")
                        {
                            sr.d = chn.Value;
                        }
                        else if (chn.Name == "description")
                        {
                            // Each description holds one cross-referenced word plus its verse links.
                            var sr_ref = new StrongRef();
                            sr.ss.Add(sr_ref);
                            foreach (XElement i in chn.Nodes().Where(o => o is XElement))
                            {
                                if (i.Name == "title") sr_ref.w = i.Value;
                                else if (i.Name == "reflink")
                                {
                                    var l = new BibleRef
                                    {
                                        r = i.FirstAttribute.Value
                                    };
                                    sr_ref.rs.Add(l);
                                }
                            }
                        }
                    }
                    Strongs.Add(sr);
                    // Flush a JSON file each time a block of 100 numbers is crossed.
                    if (c / 100 > last)
                    {
                        last = c / 100;
                        File.WriteAllText("cr" + sr.t + "" + last + ".json", JSON.Serialize(Strongs));
                        Strongs = new List<Strongs>();
                        UpdateStatus("Set: " + last + "\r\n");
                    }
                }
            }
        }
        // Flush the Greek tail past the last full block.
        File.WriteAllText("crgrk" + (last + 1) + ".json", JSON.Serialize(Strongs));
        UpdateStatus("Set: " + (last + 1) + "\r\n");
        // finished.
    }
}
#endregion Strongs
#region Strongs dict
/// <summary>
/// Clears the status box and runs the Strong's dictionary build on a
/// background STA thread.
/// </summary>
private void btnCreateStrongsDict_Click(object sender, EventArgs e)
{
    this.txtStatus.Text = "";
    var worker = new Thread(CreateStrongsDict) { IsBackground = true };
    worker.SetApartmentState(ApartmentState.STA);
    worker.Start();
}
/// <summary>
/// Builds Strong's dictionary JSON files from the grk*/heb* XML files in
/// ../../xml, merges in lemmas from the strongs-*-dictionary.json files,
/// reports any gaps in the number sequence, and writes chunked output
/// (heb*.json / grk*.json, one file per block of 100 entries).
/// </summary>
private void CreateStrongsDict()
{
    // iterate through text, output json format.
    var master_dict = new Dictionary<string, StrongDictEntry>();
    IEnumerable<string> grk_filenames = Directory.EnumerateFiles(@"../../xml", "grk*", SearchOption.TopDirectoryOnly);
    IEnumerable<string> heb_filenames = Directory.EnumerateFiles(@"../../xml", "heb*", SearchOption.TopDirectoryOnly);
    IEnumerable<string> files = grk_filenames.Concat(heb_filenames);
    foreach (var f in files)
    {
        var doc = XDocument.Load(f);
        // NOTE(review): doc.Root?.Nodes() still throws in the foreach when Root
        // is null (foreach does not accept a null sequence); confirm inputs.
        foreach (XNode n in doc.Root?.Nodes())
        {
            if (n.NodeType != XmlNodeType.Element) continue;
            var el = (XElement)n;
            if (el.Name != "i") continue;
            var sr = new StrongDictEntry { i = el.FirstAttribute.Value };
            // First file wins when the same Strong's id appears in several files.
            if (master_dict.ContainsKey(sr.i)) continue;
            master_dict.Add(sr.i, sr);
            foreach (XElement d in el.Nodes())
            {
                if (d.Name == "d")
                {
                    // Flatten the definition markup to plain text, normalizing
                    // whitespace and stripping layout tags.
                    var s = d.ToString().Replace("<d>", "")
                        .Replace("</d>", "")
                        .Replace("><", "> <")
                        .Replace("<br>", "")
                        .Replace("[", "")
                        .Replace("]", "")
                        .Replace(";", "; ")
                        .Replace("<br>", "")
                        .Replace(" ", " ")
                        .Replace(" ", " ")
                        .Replace(" ", " ")
                        .Replace(" ", " ")
                        .Replace(" ", " ")
                        .Replace("\r\n", "")
                        .Replace("<br />", "");
                    // Split around <link> tags: link targets become Strong's-number
                    // parts, everything else becomes plain word parts.
                    var parts = s.Split(new[] { "<link", "</link>" }, StringSplitOptions.None);
                    foreach (var part in parts)
                    {
                        if (part.Contains("target="))
                            sr.de.Add(new StrongsDictEntryPart { sn = part.ParseAfterLastIndexOf_PlusLength(">") });
                        else
                            sr.de.Add(new StrongsDictEntryPart { w = part });
                    }
                }
                else if (d.Name == "p")
                {
                    sr.p = d.Value;
                }
                else if (d.Name == "tr")
                {
                    sr.tr = d.Value;
                }
            }
        }
    }
    // combine with other javascript
    foreach (var f in new List<string> { "../../strongs-greek-dictionary.json", "../../strongs-hebrew-dictionary.json" })
    {
        // NOTE(review): if deserialization fails, doc and dict are null and
        // dict.ContainsKey below throws; confirm the files always parse.
        var doc = JSON.Deserialize(File.ReadAllText(f), typeof(SDict)) as SDict;
        var dict = doc?.Entries.ToDictionary(k => k.Key, v => v.Value);
        foreach (KeyValuePair<string, StrongDictEntry> pair in master_dict)
        {
            // n = numeric part of the id ("H123" -> 123), used for ordering/chunking.
            pair.Value.n = Convert.ToInt32(pair.Key.Substring(1));
            if (dict.ContainsKey(pair.Key))
                pair.Value.lemma = dict[pair.Key].lemma;
        }
    }
    IOrderedEnumerable<StrongDictEntry> lst_heb = master_dict.Values.Where(o => o.i.StartsWith("H")).OrderBy(o => o.n);
    IOrderedEnumerable<StrongDictEntry> lst_grk = master_dict.Values.Where(o => o.i.StartsWith("G")).OrderBy(o => o.n);
    // Report any gaps in the Greek number sequence to the status box.
    var i = 1;
    foreach (StrongDictEntry s in lst_grk)
    {
        if (s.n != i)
        {
            UpdateStatus(s.i + ":" + i + "\r\n");
            i = s.n;
        }
        i++;
    }
    // Same gap check for the Hebrew sequence.
    i = 1;
    foreach (StrongDictEntry s in lst_heb)
    {
        if (s.n != i)
        {
            UpdateStatus(s.i + ":" + i + "\r\n");
            i = s.n;
        }
        i++;
    }
    var lsts = new List<IEnumerable<StrongDictEntry>> { lst_grk, lst_heb };
    foreach (IEnumerable<StrongDictEntry> lst in lsts)
    {
        var last = 0;
        var temp = new List<StrongDictEntry>();
        foreach (StrongDictEntry e in lst)
        {
            temp.Add(e);
            // Flush a file each time a block of 100 numbers is crossed.
            if (e.n / 100 <= last) continue;
            last = e.n / 100;
            File.WriteAllText((e.i.Contains("H") ? "heb" : "grk") + last + ".json", JSON.Serialize(temp));
            temp = new List<StrongDictEntry>();
            UpdateStatus("Set: " + last + "\r\n");
        }
        // handle the last set.
        last = temp.Last().n / 100 + 1;
        File.WriteAllText((temp.First().i.Contains("H") ? "heb" : "grk") + last + ".json", JSON.Serialize(temp));
        UpdateStatus("Set: " + last + "\r\n");
    }
}
#endregion Strongs dict
#region RMAC
/// <summary>Runs the RMAC XML-to-JSON conversion on a background STA thread.</summary>
private void btnCreateRMAC_Click(object sender, EventArgs e)
{
    var worker = new Thread(CreateRmac) { IsBackground = true };
    worker.SetApartmentState(ApartmentState.STA);
    worker.Start();
}
/// <summary>
/// Prompts for one or more RMAC XML files and converts each into a JSON file
/// named after the input file (extension replaced with .json).
/// </summary>
private void CreateRmac()
{
    // iterate through text, output json format.
    var ofd = new OpenFileDialog
    {
        Multiselect = true
    };
    if (ofd.ShowDialog() != DialogResult.OK) return;
    foreach (var f in ofd.FileNames)
    {
        var doc = XDocument.Load(f);
        var rmacs = new List<RMAC>();
        foreach (XNode n in doc.Root.Nodes())
        {
            if (n.NodeType != XmlNodeType.Element) continue;
            var el = (XElement)n;
            if (el.Name != "i") continue;
            var r = new RMAC();
            rmacs.Add(r);
            r.id = el.FirstAttribute.Value;
            r.d = new List<string>();
            foreach (XElement d in el.Nodes())
            {
                r.d.Add(d.Value);
            }
        }
        // Path.GetFileName is separator-agnostic, and guarding the dot lookup
        // fixes a crash on extensionless files (IndexOf(".") == -1 made the
        // old Substring(0, -1) throw ArgumentOutOfRangeException).
        var name = Path.GetFileName(f);
        var dot = name.IndexOf(".");
        if (dot >= 0) name = name.Substring(0, dot);
        File.WriteAllText(name + ".json", JSON.Serialize(rmacs));
        UpdateStatus("Set: " + f + "\r\n");
    }
}
/// <summary>Runs the RMAC cross-reference conversion on a background STA thread.</summary>
private void btnRmacCrossRefs_Click(object sender, EventArgs e)
{
    var worker = new Thread(CreateRmacCrossRefs) { IsBackground = true };
    worker.SetApartmentState(ApartmentState.STA);
    worker.Start();
}
/// <summary>
/// Prompts for one or more RMAC cross-reference XML files and converts each
/// "s" element (first attribute = id, last attribute = RMAC code) into JSON,
/// one output file per input, named after the input file.
/// </summary>
private void CreateRmacCrossRefs()
{
    // iterate through text, output json format.
    var ofd = new OpenFileDialog
    {
        Multiselect = true
    };
    if (ofd.ShowDialog() != DialogResult.OK) return;
    foreach (var f in ofd.FileNames)
    {
        var doc = XDocument.Load(f);
        var rmacs = new List<RMACCrossRef>();
        foreach (XNode n in doc.Root.Nodes())
        {
            if (n.NodeType != XmlNodeType.Element) continue;
            var el = (XElement)n;
            if (el.Name != "s") continue;
            var r = new RMACCrossRef();
            rmacs.Add(r);
            r.i = el.FirstAttribute.Value;
            r.r = el.LastAttribute.Value;
        }
        // Path.GetFileName is separator-agnostic, and guarding the dot lookup
        // fixes a crash on extensionless files (Substring(0, -1) previously threw).
        var name = Path.GetFileName(f);
        var dot = name.IndexOf(".");
        if (dot >= 0) name = name.Substring(0, dot);
        File.WriteAllText(name + ".json", JSON.Serialize(rmacs));
        UpdateStatus("Set: " + f + "\r\n");
    }
}
#endregion RMAC
#region Geolocation JSON
/// <summary>
/// Handles creating geolocation JSON files when the appropriate button is clicked.
/// </summary>
/// <param name="sender">Sender of the event; ignored.</param>
/// <param name="eventArguments">Event arguments; ignored.</param>
private void btnCreateGeolocationJson_Click(object sender, EventArgs eventArguments)
{
    var worker = new Thread(CreateGeolocationJson) { IsBackground = true };
    worker.SetApartmentState(ApartmentState.STA); // file dialog needs an STA thread
    worker.Start();
}
/// <summary>
/// Prompts the user for a geolocation data file (<see cref="OpenBibleDotInfoLocationParser"/>)
/// and converts a chosen file into the appropriate output JSON files for the Dynamic Bible app.
/// </summary>
private void CreateGeolocationJson()
{
    // LET THE USER CHOOSE THE FILE WITH GEOLOCATION DATA.
    var open_file_dialog = new OpenFileDialog();
    if (open_file_dialog.ShowDialog() != DialogResult.OK)
    {
        // The user chose not to create a file.
        return;
    }
    try
    {
        // READ THE LOCATION INFORMATION FROM THE FILE.
        IEnumerable<BibleLocationReference> locations = OpenBibleDotInfoLocationParser.Parse(open_file_dialog.FileName);
        UpdateStatus($"Parsed {locations.Count()} locations.\n");
        // CREATE MORE USEFUL INDICES FOR THE LOCATIONS.
        // Indexing in these different ways is useful for quick lookups in the Dynamic Bible app.
        var locations_by_name = new BibleLocationIndexByName(locations);
        UpdateStatus($"Finished indexing locations by name.\n");
        var locations_by_verse = new BibleLocationIndexByVerse(locations);
        UpdateStatus($"Finished indexing locations by verse.\n");
        var locations_by_strongs_numbers = new BibleLocationIndexByStrongsNumbers(locations);
        UpdateStatus($"Finished indexing locations by Strong's numbers.\n");
        // WRITE OUT THE GEOLOCATION DATA TO JSON FORMAT.
        const string LOCATIONS_BY_NAME_JSON_FILENAME = "locations_by_name.json";
        File.WriteAllText(LOCATIONS_BY_NAME_JSON_FILENAME, JSON.ToJSON(locations_by_name.NameToLocationLookup));
        UpdateStatus($"Wrote locations by name to {LOCATIONS_BY_NAME_JSON_FILENAME} in current working directory.\n");
        const string LOCATION_NAMES_BY_VERSE_JSON_FILENAME = "location_names_by_verse.json";
        File.WriteAllText(LOCATION_NAMES_BY_VERSE_JSON_FILENAME, JSON.ToJSON(locations_by_verse.VerseToLocationNameLookup));
        UpdateStatus($"Wrote location names by verse to {LOCATION_NAMES_BY_VERSE_JSON_FILENAME} in current working directory.\n");
        const string LOCATION_NAMES_BY_STRONGS_NUMBER = "location_names_by_strongs.json";
        File.WriteAllText(LOCATION_NAMES_BY_STRONGS_NUMBER, JSON.ToJSON(locations_by_strongs_numbers.StrongsNumberToLocationNameLookup));
        UpdateStatus($"Wrote location names by Strong's number to {LOCATION_NAMES_BY_STRONGS_NUMBER} in current working directory.\n");
        const string LOCATIONS_BY_STRONGS_NUMBER = "locations_by_strongs.json";
        File.WriteAllText(LOCATIONS_BY_STRONGS_NUMBER, JSON.ToJSON(locations_by_strongs_numbers.StrongsNumberToLocationLookup));
        UpdateStatus($"Wrote locations by Strong's number to {LOCATIONS_BY_STRONGS_NUMBER} in current working directory.\n");
        // INFORM THE USER THAT CREATING THE GEOLOCATION JSON FILES IS COMPLETE.
        UpdateStatus("Done.\n");
    }
    catch (Exception exception)
    {
        UpdateStatus($"Exception while processing geolocations: {exception}\n");
    }
}
#endregion Geolocation JSON
}
}