Imported Upstream version 5.2.0.196
Former-commit-id: a9bb725ccbe0b8bfe8370b968c9f33f1558e1a2b
parent fad71374d0
commit bdb6e93184
357  external/api-doc-tools/monodoc/Monodoc/HelpSource.cs (vendored)
@@ -1,357 +0,0 @@
using System;
using System.IO;
using System.Linq;
using System.Xml;
using System.Diagnostics;
using System.Collections.Generic;

using Mono.Utilities;
using Lucene.Net.Index;

namespace Monodoc
{
    public enum SortType {
        Caption,
        Element
    }

    //
    // The HelpSource class keeps track of the archived data, and its
    // tree
    //
    public
#if LEGACY_MODE
    partial
#endif
    class HelpSource
    {
        static int id;

        //
        // The unique ID for this HelpSource.
        //
        int source_id;

        // The name of the HelpSource, used by all the file (.tree, .zip, ...) used by it
        string name;
        // The full directory path where the HelpSource files are located
        string basePath;

        // The tree of this help source
        Tree tree;
        string treeFilePath;
        RootTree rootTree;

        IDocCache cache;
        IDocStorage storage;

        public HelpSource (string base_filename, bool create)
        {
            this.name = Path.GetFileName (base_filename);
            this.basePath = Path.GetDirectoryName (base_filename);
            this.treeFilePath = base_filename + ".tree";
            this.storage = new Monodoc.Storage.ZipStorage (base_filename + ".zip");
            this.cache = DocCacheHelper.GetDefaultCache (Name);

            tree = create ? new Tree (this, string.Empty, string.Empty) : new Tree (this, treeFilePath);

            source_id = id++;
        }

        public HelpSource ()
        {
            tree = new Tree (this, "Blah", "Blah");
            source_id = id++;
            this.cache = new Caches.NullCache ();
        }

        public int SourceID {
            get {
                return source_id;
            }
        }

        public string Name {
            get {
                return name;
            }
        }

        /* This gives the full path of the source/ directory */
        public string BaseFilePath {
            get {
                return basePath;
            }
        }

        public TraceLevel TraceLevel {
            get;
            set;
        }

        public string BaseDir {
            get {
                return basePath;
            }
        }

        public Tree Tree {
            get {
                return tree;
            }
        }

        public RootTree RootTree {
            get {
                return rootTree;
            }
            set {
                rootTree = value;
            }
        }

        public IDocCache Cache {
            get {
                return cache;
            }
        }

        public IDocStorage Storage {
            get {
                return storage;
            }
            protected set {
                storage = value;
            }
        }

        // A HelpSource may have a common prefix to its URL, give it here
        protected virtual string UriPrefix {
            get {
                return "dummy:";
            }
        }

        public virtual SortType SortType {
            get {
                return SortType.Caption;
            }
        }

        /// <summary>
        /// Returns a stream from the packaged help source archive
        /// </summary>
        public virtual Stream GetHelpStream (string id)
        {
            return storage.Retrieve (id);
        }

        public virtual Stream GetCachedHelpStream (string id)
        {
            if (string.IsNullOrEmpty (id))
                throw new ArgumentNullException ("id");
            if (!cache.CanCache (DocEntity.Text))
                return GetHelpStream (id);
            if (!cache.IsCached (id))
                cache.CacheText (id, GetHelpStream (id));
            return cache.GetCachedStream (id);
        }

        public XmlReader GetHelpXml (string id)
        {
            var url = "monodoc:///" + SourceID + "@" + Uri.EscapeDataString (id) + "@";
            var stream = cache.IsCached (id) ? cache.GetCachedStream (id) : storage.Retrieve (id);

            return stream == null ? null : new XmlTextReader (url, stream);
        }

        public virtual XmlDocument GetHelpXmlWithChanges (string id)
        {
            XmlDocument doc = new XmlDocument ();
            if (!storage.SupportRevision) {
                doc.Load (GetHelpXml (id));
            } else {
                var revManager = storage.RevisionManager;
                doc.Load (revManager.RetrieveLatestRevision (id));
            }
            return doc;
        }

        public virtual string GetCachedText (string id)
        {
            if (!cache.CanCache (DocEntity.Text))
                return GetText (id);
            if (!cache.IsCached (id))
                cache.CacheText (id, GetText (id));
            return cache.GetCachedString (id);
        }

        public virtual string GetText (string id)
        {
            return new StreamReader (GetHelpStream (id)).ReadToEnd ();
        }

        // Tells if the result for the provided id is generated dynamically
        // by the help source
        public virtual bool IsGeneratedContent (string id)
        {
            return false;
        }

        // Tells if the content of the provided id is meant to be returned raw
        public virtual bool IsRawContent (string id)
        {
            return false;
        }

        // Tells if provided id refers to a multi-content-type document if it's case
        // tells the ids it's formed of
        public virtual bool IsMultiPart (string id, out IEnumerable<string> parts)
        {
            parts = null;
            return false;
        }

        /// <summary>
        /// Saves the tree and the archive
        /// </summary>
        public void Save ()
        {
            tree.Save (treeFilePath);
            storage.Dispose ();
        }

        public virtual void RenderPreviewDocs (XmlNode newNode, XmlWriter writer)
        {
            throw new NotImplementedException ();
        }

        public virtual string GetPublicUrl (Node node)
        {
            return node.GetInternalUrl ();
        }

        public virtual bool CanHandleUrl (string url)
        {
            return url.StartsWith (UriPrefix, StringComparison.OrdinalIgnoreCase);
        }

        public virtual string GetInternalIdForUrl (string url, out Node node, out Dictionary<string, string> context)
        {
            context = null;
            node = MatchNode (url);
            return node == null ? null : url.Substring (UriPrefix.Length);
        }

        public virtual Node MatchNode (string url)
        {
            Node current = null;

            var matchCache = LRUCache<string, Node>.Default;
            if ((current = matchCache.Get (url)) != null)
                return current;

            current = Tree.RootNode;
            var strippedUrl = url.StartsWith (UriPrefix, StringComparison.OrdinalIgnoreCase) ? url.Substring (UriPrefix.Length) : url;
            var searchNode = new Node () { Element = strippedUrl };

            do {
                int index = current.ChildNodes.BinarySearch (searchNode, NodeElementComparer.Instance);
                if (index >= 0) {
                    Node n = current.ChildNodes[index];
                    matchCache.Put (url, n);
                    return n;
                }
                index = ~index;
                if (index == current.ChildNodes.Count) {
                    return SlowMatchNode (Tree.RootNode, matchCache, strippedUrl);
                }

                if (index == 0)
                    return null;

                current = current.ChildNodes [index - 1];
            } while (true);

            return null;
        }

        /* That slow path is mainly here to handle ecmaspec type of url which are composed of hard to sort numbers
         * because they don't have the same amount of digit. We could use a regex to harmonise the various number
         * parts but then it would be quite specific. Since in the case of ecmaspec the tree is well-formed enough
         * the "Slow" match should still be fast enough
         */
        Node SlowMatchNode (Node current, LRUCache<string, Node> matchCache, string url)
        {
            //Console.WriteLine ("Entering slow path for {0} starting from {1}", url, current.Element);
            while (current != null) {
                bool stop = true;
                foreach (Node n in current.ChildNodes) {
                    var element = n.Element.StartsWith (UriPrefix, StringComparison.OrdinalIgnoreCase) ? n.Element.Substring (UriPrefix.Length) : n.Element;
                    if (url.Equals (element, StringComparison.Ordinal)) {
                        matchCache.Put (url, n);
                        return n;
                    } else if (url.StartsWith (element + ".", StringComparison.OrdinalIgnoreCase) && !n.IsLeaf) {
                        current = n;
                        stop = false;
                        break;
                    }
                }
                if (stop)
                    current = null;
            }

            return null;
        }

        class NodeElementComparer : IComparer<Node>
        {
            public static NodeElementComparer Instance = new NodeElementComparer ();

            public int Compare (Node n1, Node n2)
            {
                return string.Compare (Cleanup (n1), Cleanup (n2), StringComparison.Ordinal);
            }

            string Cleanup (Node n)
            {
                var prefix = n.Tree != null && n.Tree.HelpSource != null ? n.Tree.HelpSource.UriPrefix : string.Empty;
                var element = n.Element.StartsWith (prefix, StringComparison.OrdinalIgnoreCase) ? n.Element.Substring (prefix.Length) : n.Element;
                if (char.IsDigit (element, 0)) {
                    var count = element.TakeWhile (char.IsDigit).Count ();
                    element = element.PadLeft (Math.Max (0, 3 - count) + element.Length, '0');
                }
                //Console.WriteLine ("Cleaned up {0} to {1}", n.Element, element);
                return element;
            }
        }

        public virtual DocumentType GetDocumentTypeForId (string id)
        {
            return DocumentType.PlainText;
        }

        public virtual Stream GetImage (string url)
        {
            Stream result = null;
            storage.TryRetrieve (url, out result);
            return result;
        }

        //
        // Populates the index.
        //
        public virtual void PopulateIndex (IndexMaker index_maker)
        {
        }

        //
        // Create different Documents for adding to Lucene search index
        // The default action is do nothing. Subclasses should add the docs
        //
        public virtual void PopulateSearchableIndex (IndexWriter writer)
        {

        }
    }
}
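A minimal sketch of how this class is typically consumed (not part of the commit; the source path and URL below are hypothetical, and HelpSource appends the .tree/.zip suffixes itself):

using System;
using System.Collections.Generic;
using System.IO;
using Monodoc;

class HelpSourceUsageSketch
{
    static void Main ()
    {
        // Hypothetical base filename of an installed doc source.
        var hs = new HelpSource ("/usr/lib/monodoc/sources/netdocs", create: false);

        // Resolve a public URL (prefixed with the source's UriPrefix) to a tree
        // node and the internal id that backs it.
        Node node;
        Dictionary<string, string> context;
        string internalId = hs.GetInternalIdForUrl ("dummy:somepage", out node, out context);

        if (internalId != null)
            using (var reader = new StreamReader (hs.GetCachedHelpStream (internalId)))
                Console.WriteLine (reader.ReadToEnd ());
    }
}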
@@ -1,52 +0,0 @@
using System;
using System.IO;
using System.Linq;
using System.Xml;
using System.Diagnostics;
using System.Collections.Generic;

using Mono.Utilities;
using Lucene.Net.Index;

#if LEGACY_MODE

namespace Monodoc
{
    using Generators;

    public partial class HelpSource
    {
        static HtmlGenerator htmlGenerator = new HtmlGenerator (null);

        [Obsolete]
        public static bool use_css;
        [Obsolete]
        public static bool FullHtml = true;
        [Obsolete]
        public static bool UseWebdocCache;

        [Obsolete ("Use Monodoc.Providers.HtmlGenerator.InlineCss")]
        public string InlineCss {
            get { return Monodoc.Generators.HtmlGenerator.InlineCss; }
        }

        [Obsolete]
        public string InlineJavaScript {
            get { return null; }
        }

        [Obsolete ("Use RenderUrl")]
        public string GetText (string url, out Node node)
        {
            return rootTree.RenderUrl (url, htmlGenerator, out node, this);
        }

        [Obsolete ("Use RenderUrl")]
        public string RenderNamespaceLookup (string url, out Node node)
        {
            return rootTree.RenderUrl (url, htmlGenerator, out node, this);
        }
    }
}

#endif
384  external/api-doc-tools/monodoc/Monodoc/Node.cs (vendored)
@@ -1,384 +0,0 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using System.Linq;
|
||||
using System.Xml;
|
||||
using System.Collections;
|
||||
using System.Collections.Generic;
|
||||
|
||||
namespace Monodoc
|
||||
{
|
||||
public
|
||||
#if LEGACY_MODE
|
||||
partial
|
||||
#endif
|
||||
class Node : IComparable<Node>, IComparable
|
||||
{
|
||||
readonly Tree parentTree;
|
||||
string caption, element, pubUrl;
|
||||
public bool Documented;
|
||||
bool loaded;
|
||||
Node parent;
|
||||
List<Node> nodes;
|
||||
#if LEGACY_MODE
|
||||
ArrayList legacyNodes;
|
||||
#endif
|
||||
Dictionary<string, Node> childrenLookup;
|
||||
bool elementSort;
|
||||
/* Address has three types of value,
|
||||
* _ 0 is for no on-disk representation
|
||||
* _ >0 is a valid address that is loaded immediately
|
||||
* _ <0 is a valid negated address to indicate lazy loading
|
||||
*/
|
||||
int address;
|
||||
|
||||
#if LEGACY_MODE
|
||||
[Obsolete ("Tree inheriting Node is being phased out. Use the `Tree.RootNode' property instead")]
|
||||
public Node (string caption, string element)
|
||||
{
|
||||
this.parentTree = (Tree) this;
|
||||
this.caption = caption;
|
||||
this.element = element;
|
||||
parent = null;
|
||||
}
|
||||
#endif
|
||||
|
||||
public Node (Node parent, string caption, string element) : this (parent.Tree, caption, element)
|
||||
{
|
||||
this.parent = parent;
|
||||
}
|
||||
|
||||
internal Node (Tree tree, string caption, string element)
|
||||
{
|
||||
this.parentTree = tree;
|
||||
this.caption = caption;
|
||||
this.element = element;
|
||||
this.elementSort = parentTree.HelpSource != null && parentTree.HelpSource.SortType == SortType.Element;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates a node from an on-disk representation
|
||||
/// </summary>
|
||||
internal Node (Node parent, int address) : this (parent.parentTree, address)
|
||||
{
|
||||
this.parent = parent;
|
||||
}
|
||||
|
||||
internal Node (Tree tree, int address)
|
||||
{
|
||||
this.address = address;
|
||||
this.parentTree = tree;
|
||||
this.elementSort = parentTree.HelpSource != null && parentTree.HelpSource.SortType == SortType.Element;
|
||||
if (address > 0)
|
||||
LoadNode ();
|
||||
}
|
||||
|
||||
/* This is solely used for MatchNode to check for equality */
|
||||
internal Node ()
|
||||
{
|
||||
}
|
||||
|
||||
void LoadNode ()
|
||||
{
|
||||
parentTree.InflateNode (this);
|
||||
if (parent != null)
|
||||
parent.RegisterFullNode (this);
|
||||
}
|
||||
|
||||
public void AddNode (Node n)
|
||||
{
|
||||
nodes.Add (n);
|
||||
n.parent = this;
|
||||
n.Documented = true;
|
||||
RegisterFullNode (n);
|
||||
}
|
||||
|
||||
public void DeleteNode (Node n)
|
||||
{
|
||||
nodes.Remove (n);
|
||||
if (!string.IsNullOrEmpty (n.element))
|
||||
childrenLookup.Remove (n.element);
|
||||
}
|
||||
|
||||
// When a child node is inflated, it calls this method
|
||||
// so that we can add it to our lookup for quick search
|
||||
void RegisterFullNode (Node child)
|
||||
{
|
||||
if (childrenLookup == null)
|
||||
childrenLookup = new Dictionary<string, Node> ();
|
||||
if (!string.IsNullOrEmpty (child.element))
|
||||
childrenLookup[child.element] = child;
|
||||
}
|
||||
|
||||
#if LEGACY_MODE
|
||||
[Obsolete ("Use ChildNodes")]
|
||||
public ArrayList Nodes {
|
||||
get {
|
||||
if (legacyNodes == null)
|
||||
legacyNodes = new ArrayList (ChildNodes as ICollection);
|
||||
return legacyNodes;
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
public IList<Node> ChildNodes {
|
||||
get {
|
||||
EnsureLoaded ();
|
||||
return nodes != null ? nodes : new List<Node> ();
|
||||
}
|
||||
}
|
||||
|
||||
public string Element {
|
||||
get {
|
||||
EnsureLoaded ();
|
||||
return element;
|
||||
}
|
||||
set {
|
||||
element = value;
|
||||
}
|
||||
}
|
||||
|
||||
public string Caption {
|
||||
get {
|
||||
EnsureLoaded ();
|
||||
return caption;
|
||||
}
|
||||
internal set {
|
||||
caption = value;
|
||||
}
|
||||
}
|
||||
|
||||
public Node Parent {
|
||||
get {
|
||||
return parent;
|
||||
}
|
||||
}
|
||||
|
||||
public Tree Tree {
|
||||
get {
|
||||
return parentTree;
|
||||
}
|
||||
}
|
||||
|
||||
internal int Address {
|
||||
get {
|
||||
return address;
|
||||
}
|
||||
#if LEGACY_MODE
|
||||
set {
|
||||
address = value;
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates a new node, in the locator entry point, and with
|
||||
/// a user visible caption of @caption
|
||||
/// </summary>
|
||||
public Node CreateNode (string c_caption, string c_element)
|
||||
{
|
||||
EnsureNodes ();
|
||||
if (string.IsNullOrEmpty (c_caption))
|
||||
throw new ArgumentNullException ("c_caption");
|
||||
if (string.IsNullOrEmpty (c_element))
|
||||
throw new ArgumentNullException ("c_element");
|
||||
|
||||
Node t = new Node (this, c_caption, c_element);
|
||||
nodes.Add (t);
|
||||
childrenLookup[c_element] = t;
|
||||
|
||||
return t;
|
||||
}
|
||||
|
||||
public Node GetOrCreateNode (string c_caption, string c_element)
|
||||
{
|
||||
if (nodes == null)
|
||||
return CreateNode (c_caption, c_element);
|
||||
if (childrenLookup.Count != nodes.Count || (nodes.Count == 0 && childrenLookup.Count != nodes.Capacity))
|
||||
UpdateLookup ();
|
||||
|
||||
Node result;
|
||||
if (!childrenLookup.TryGetValue (c_element, out result))
|
||||
result = CreateNode (c_caption, c_element);
|
||||
return result;
|
||||
}
|
||||
|
||||
public void EnsureNodes ()
|
||||
{
|
||||
if (nodes == null) {
|
||||
nodes = new List<Node> ();
|
||||
childrenLookup = new Dictionary<string, Node> ();
|
||||
}
|
||||
}
|
||||
|
||||
public void EnsureLoaded ()
|
||||
{
|
||||
if (address < 0 && !loaded) {
|
||||
LoadNode ();
|
||||
loaded = true;
|
||||
}
|
||||
}
|
||||
|
||||
void UpdateLookup ()
|
||||
{
|
||||
foreach (var node in nodes)
|
||||
childrenLookup[node.Element] = node;
|
||||
}
|
||||
|
||||
public bool IsLeaf {
|
||||
get {
|
||||
return nodes == null || nodes.Count == 0;
|
||||
}
|
||||
}
|
||||
|
||||
void EncodeInt (BinaryWriter writer, int value)
|
||||
{
|
||||
do {
|
||||
int high = (value >> 7) & 0x01ffffff;
|
||||
byte b = (byte)(value & 0x7f);
|
||||
|
||||
if (high != 0) {
|
||||
b = (byte)(b | 0x80);
|
||||
}
|
||||
|
||||
writer.Write(b);
|
||||
value = high;
|
||||
} while(value != 0);
|
||||
}
|
||||
|
||||
int DecodeInt (BinaryReader reader)
|
||||
{
|
||||
int ret = 0;
|
||||
int shift = 0;
|
||||
byte b;
|
||||
|
||||
do {
|
||||
b = reader.ReadByte();
|
||||
|
||||
ret = ret | ((b & 0x7f) << shift);
|
||||
shift += 7;
|
||||
} while ((b & 0x80) == 0x80);
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
internal void Deserialize (BinaryReader reader)
|
||||
{
|
||||
int count = DecodeInt (reader);
|
||||
element = reader.ReadString ();
|
||||
caption = reader.ReadString ();
|
||||
|
||||
if (count == 0)
|
||||
return;
|
||||
|
||||
nodes = new List<Node> (count);
|
||||
for (int i = 0; i < count; i++) {
|
||||
int child_address = DecodeInt (reader);
|
||||
|
||||
Node t = new Node (this, -child_address);
|
||||
nodes.Add (t);
|
||||
}
|
||||
|
||||
if (parentTree.ForceResort)
|
||||
nodes.Sort ();
|
||||
}
|
||||
|
||||
internal void Serialize (FileStream output, BinaryWriter writer)
|
||||
{
|
||||
if (nodes != null)
|
||||
foreach (Node child in nodes)
|
||||
child.Serialize (output, writer);
|
||||
|
||||
address = (int) output.Position;
|
||||
EncodeInt (writer, nodes == null ? 0 : (int) nodes.Count);
|
||||
writer.Write (element);
|
||||
writer.Write (caption);
|
||||
|
||||
if (nodes != null)
|
||||
foreach (Node child in nodes)
|
||||
EncodeInt (writer, child.address);
|
||||
}
|
||||
|
||||
public void Sort ()
|
||||
{
|
||||
if (nodes != null)
|
||||
nodes.Sort ();
|
||||
}
|
||||
|
||||
internal string GetInternalUrl ()
|
||||
{
|
||||
EnsureLoaded ();
|
||||
if (element.IndexOf (":") != -1 || parent == null)
|
||||
return element;
|
||||
|
||||
var parentUrl = parent.GetInternalUrl ();
|
||||
return parentUrl.EndsWith ("/") ? parentUrl + element : parentUrl + "/" + element;
|
||||
}
|
||||
|
||||
public string PublicUrl {
|
||||
get {
|
||||
if (pubUrl != null)
|
||||
return pubUrl;
|
||||
return pubUrl = parentTree.HelpSource != null ? parentTree.HelpSource.GetPublicUrl (this) : GetInternalUrl ();
|
||||
}
|
||||
}
|
||||
|
||||
int IComparable.CompareTo (object obj)
|
||||
{
|
||||
Node other = obj as Node;
|
||||
if (other == null)
|
||||
return -1;
|
||||
return CompareToInternal (other);
|
||||
}
|
||||
|
||||
int IComparable<Node>.CompareTo (Node obj)
|
||||
{
|
||||
return CompareToInternal (obj);
|
||||
}
|
||||
|
||||
int CompareToInternal (Node other)
|
||||
{
|
||||
EnsureLoaded ();
|
||||
other.EnsureLoaded ();
|
||||
|
||||
var cap1 = elementSort ? element : caption;
|
||||
var cap2 = elementSort ? other.element : other.caption;
|
||||
|
||||
/* Some node (notably from ecmaspec) have number prepended to them
|
||||
* which we need to sort better by padding them to the same number
|
||||
* of digits
|
||||
*/
|
||||
if (char.IsDigit (cap1[0]) && char.IsDigit (cap2[0])) {
|
||||
int c1 = cap1.TakeWhile (char.IsDigit).Count ();
|
||||
int c2 = cap2.TakeWhile (char.IsDigit).Count ();
|
||||
|
||||
if (c1 != c2) {
|
||||
cap1 = cap1.PadLeft (cap1.Length + Math.Max (0, c2 - c1), '0');
|
||||
cap2 = cap2.PadLeft (cap2.Length + Math.Max (0, c1 - c2), '0');
|
||||
}
|
||||
}
|
||||
|
||||
return string.Compare (cap1, cap2, StringComparison.Ordinal);
|
||||
}
|
||||
}
|
||||
|
||||
internal static class IListExtensions
|
||||
{
|
||||
// TODO: if the backing store ever change from List<T>, we need to tune these methods to have a fallback mechanism
|
||||
public static int BinarySearch<T> (this IList<T> ilist, T item)
|
||||
{
|
||||
var list = ilist as List<T>;
|
||||
if (list == null)
|
||||
throw new NotSupportedException ();
|
||||
return list.BinarySearch (item);
|
||||
}
|
||||
|
||||
public static int BinarySearch<T> (this IList<T> ilist, T item, IComparer<T> comparer)
|
||||
{
|
||||
var list = ilist as List<T>;
|
||||
if (list == null)
|
||||
throw new NotSupportedException ();
|
||||
return list.BinarySearch (item, comparer);
|
||||
}
|
||||
}
|
||||
}
|
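The EncodeInt/DecodeInt pair above stores node addresses as 7-bit variable-length integers: each byte carries seven payload bits and the high bit flags that another byte follows. A standalone sketch of the same scheme (not taken from the deleted file):

using System;
using System.IO;

static class VarIntSketch
{
    static void Encode (BinaryWriter writer, int value)
    {
        do {
            int high = (value >> 7) & 0x01ffffff; // bits still to be written
            byte b = (byte)(value & 0x7f);        // low seven bits
            if (high != 0)
                b |= 0x80;                        // continuation flag
            writer.Write (b);
            value = high;
        } while (value != 0);
    }

    static int Decode (BinaryReader reader)
    {
        int result = 0, shift = 0;
        byte b;
        do {
            b = reader.ReadByte ();
            result |= (b & 0x7f) << shift;
            shift += 7;
        } while ((b & 0x80) == 0x80);
        return result;
    }

    static void Main ()
    {
        var ms = new MemoryStream ();
        var writer = new BinaryWriter (ms);
        Encode (writer, 300);   // emits 0xAC 0x02
        writer.Flush ();
        ms.Position = 0;
        Console.WriteLine (Decode (new BinaryReader (ms))); // prints 300
    }
}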
@@ -1,31 +0,0 @@
using System;
using System.IO;
using System.Text;
using System.Linq;
using System.Xml;
using System.Collections.Generic;

#if LEGACY_MODE

namespace Monodoc
{
    public partial class Node
    {
        [Obsolete ("Use `Tree' instead of 'tree'")]
        public Tree tree {
            get {
                return this.Tree;
            }
        }

        [Obsolete ("Use TreeDumper")]
        public static void PrintTree (Tree t)
        {
            TreeDumper.PrintTree (t.RootNode);
        }


    }
}

#endif
@@ -1,27 +0,0 @@
using System;

namespace Monodoc
{
    public abstract class Provider
    {
        //
        // This code is used to "tag" all the different sources
        //
        static short serial;

        public int Code { get; set; }

        public Provider ()
        {
            Code = serial++;
        }

        public abstract void PopulateTree (Tree tree);

        //
        // Called at shutdown time after the tree has been populated to perform
        // any fixups or final tasks.
        //
        public abstract void CloseTree (HelpSource hs, Tree tree);
    }
}
545  external/api-doc-tools/monodoc/Monodoc/RootTree.cs (vendored)
(File diff suppressed because it is too large)
@@ -1,53 +0,0 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Xml;
|
||||
using System.Diagnostics;
|
||||
using System.Collections.Generic;
|
||||
|
||||
using Mono.Utilities;
|
||||
using Lucene.Net.Index;
|
||||
|
||||
#if LEGACY_MODE
|
||||
|
||||
namespace Monodoc
|
||||
{
|
||||
using Generators;
|
||||
|
||||
public partial class RootTree
|
||||
{
|
||||
static IDocGenerator<string> rawGenerator = new RawGenerator ();
|
||||
static HtmlGenerator htmlGenerator = new HtmlGenerator (null);
|
||||
|
||||
[Obsolete ("Use RawGenerator directly")]
|
||||
public XmlDocument GetHelpXml (string id)
|
||||
{
|
||||
var rendered = RenderUrl (id, rawGenerator);
|
||||
if (rendered == null)
|
||||
return null;
|
||||
var doc = new XmlDocument ();
|
||||
doc.LoadXml (RenderUrl (id, rawGenerator));
|
||||
return doc;
|
||||
}
|
||||
|
||||
[Obsolete ("Use the RenderUrl variant accepting a generator")]
|
||||
public string RenderUrl (string url, out Node n)
|
||||
{
|
||||
return RenderUrl (url, htmlGenerator, out n);
|
||||
}
|
||||
|
||||
[Obsolete ("Use GenerateIndex")]
|
||||
public static void MakeIndex (RootTree root)
|
||||
{
|
||||
root.GenerateIndex ();
|
||||
}
|
||||
|
||||
[Obsolete ("Use GenerateSearchIndex")]
|
||||
public static void MakeSearchIndex (RootTree root)
|
||||
{
|
||||
root.GenerateSearchIndex ();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#endif
|
@@ -1,66 +0,0 @@
|
||||
//
|
||||
//
|
||||
// SearchableDocument.cs: Abstracts our model of document from the Lucene Document
|
||||
//
|
||||
// Author: Mario Sopena
|
||||
//
|
||||
using Lucene.Net.Documents;
|
||||
|
||||
namespace Monodoc
|
||||
{
|
||||
struct SearchableDocument
|
||||
{
|
||||
public string Title {
|
||||
get; set;
|
||||
}
|
||||
|
||||
public string Url {
|
||||
get; set;
|
||||
}
|
||||
|
||||
public string FullTitle {
|
||||
get; set;
|
||||
}
|
||||
|
||||
public string HotText {
|
||||
get; set;
|
||||
}
|
||||
|
||||
public string Text {
|
||||
get; set;
|
||||
}
|
||||
|
||||
public string Examples {
|
||||
get; set;
|
||||
}
|
||||
|
||||
public SearchableDocument Reset ()
|
||||
{
|
||||
Title = Url = FullTitle = HotText = Text = Examples = null;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Document LuceneDoc {
|
||||
get {
|
||||
Document doc = new Document ();
|
||||
doc.Add (UnIndexed ("title", Title));
|
||||
doc.Add (UnIndexed ("url", Url));
|
||||
doc.Add (UnIndexed ("fulltitle", FullTitle ?? string.Empty));
|
||||
doc.Add (UnStored ("hottext", HotText));
|
||||
doc.Add (UnStored ("text", Text));
|
||||
doc.Add (UnStored ("examples", Examples));
|
||||
return doc;
|
||||
}
|
||||
}
|
||||
|
||||
static Field UnIndexed(System.String name, System.String value_Renamed)
|
||||
{
|
||||
return new Field(name, value_Renamed, Field.Store.YES, Field.Index.NO);
|
||||
}
|
||||
|
||||
static Field UnStored(System.String name, System.String value_Renamed)
|
||||
{
|
||||
return new Field(name, value_Renamed, Field.Store.NO, Field.Index.ANALYZED);
|
||||
}
|
||||
}
|
||||
}
|
@@ -1,179 +0,0 @@
|
||||
//
|
||||
//
|
||||
// SearchableIndex.cs: Index that uses Lucene to search through the docs
|
||||
//
|
||||
// Author: Mario Sopena
|
||||
//
|
||||
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Collections.Generic;
|
||||
// Lucene imports
|
||||
using Lucene.Net.Index;
|
||||
using Lucene.Net.Documents;
|
||||
using Lucene.Net.Analysis;
|
||||
using Lucene.Net.Analysis.Standard;
|
||||
using Lucene.Net.Search;
|
||||
using Lucene.Net.QueryParsers;
|
||||
using Lucene.Net.Store;
|
||||
|
||||
namespace Monodoc
|
||||
{
|
||||
public class SearchableIndex
|
||||
{
|
||||
const int maxSearchCount = 30;
|
||||
|
||||
IndexSearcher searcher;
|
||||
string dir;
|
||||
|
||||
public string Dir {
|
||||
get {
|
||||
if (dir == null)
|
||||
dir = "search_index";
|
||||
return dir;
|
||||
}
|
||||
set { dir = value; }
|
||||
}
|
||||
|
||||
public static SearchableIndex Load (string dir)
|
||||
{
|
||||
SearchableIndex s = new SearchableIndex ();
|
||||
s.dir = dir;
|
||||
try {
|
||||
//s.searcher = new IndexSearcher (dir);
|
||||
// TODO: parametrize that depending if we run on the desktop (low footprint) or the server (use RAMDirectory for instance)
|
||||
s.searcher = new IndexSearcher (FSDirectory.Open (dir));
|
||||
} catch (IOException) {
|
||||
Console.WriteLine ("Index nonexistent or in bad format");
|
||||
return null;
|
||||
}
|
||||
return s;
|
||||
}
|
||||
|
||||
public Result Search (string term)
|
||||
{
|
||||
return Search (term, maxSearchCount);
|
||||
}
|
||||
|
||||
public Result Search (string term, int count)
|
||||
{
|
||||
return Search (term, count, 0);
|
||||
}
|
||||
|
||||
public Result Search (string term, int count, int start) {
|
||||
try {
|
||||
term = term.ToLower ();
|
||||
Term htTerm = new Term ("hottext", term);
|
||||
Query qq1 = new FuzzyQuery (htTerm);
|
||||
Query qq2 = new TermQuery (htTerm);
|
||||
qq2.Boost = 10f;
|
||||
Query qq3 = new PrefixQuery (htTerm);
|
||||
qq3.Boost = 10f;
|
||||
DisjunctionMaxQuery q1 = new DisjunctionMaxQuery (0f);
|
||||
q1.Add (qq1);
|
||||
q1.Add (qq2);
|
||||
q1.Add (qq3);
|
||||
Query q2 = new TermQuery (new Term ("text", term));
|
||||
q2.Boost = 3f;
|
||||
Query q3 = new TermQuery (new Term ("examples", term));
|
||||
q3.Boost = 3f;
|
||||
DisjunctionMaxQuery q = new DisjunctionMaxQuery (0f);
|
||||
|
||||
q.Add (q1);
|
||||
q.Add (q2);
|
||||
q.Add (q3);
|
||||
|
||||
TopDocs top = SearchInternal (q, count, start);
|
||||
Result r = new Result (term, searcher, top.ScoreDocs);
|
||||
return r;
|
||||
} catch (IOException) {
|
||||
Console.WriteLine ("No index in {0}", dir);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
TopDocs SearchInternal (Query q, int count, int start)
|
||||
{
|
||||
// Easy path that doesn't involve creating a Collector ourselves
|
||||
// watch for Lucene.NET improvement on that (like searcher.SearchAfter)
|
||||
if (start == 0)
|
||||
return searcher.Search (q, count);
|
||||
|
||||
var weight = searcher.CreateWeight (q); // TODO: reuse weight instead of query
|
||||
var collector = TopScoreDocCollector.Create (start + count + 1, false);
|
||||
searcher.Search (q, collector);
|
||||
|
||||
return collector.TopDocs (start, count);
|
||||
}
|
||||
|
||||
public Result FastSearch (string term, int number)
|
||||
{
|
||||
try {
|
||||
term = term.ToLower ();
|
||||
Query q1 = new TermQuery (new Term ("hottext", term));
|
||||
Query q2 = new PrefixQuery (new Term ("hottext", term));
|
||||
q2.Boost = 0.5f;
|
||||
DisjunctionMaxQuery q = new DisjunctionMaxQuery (0f);
|
||||
q.Add (q1);
|
||||
q.Add (q2);
|
||||
TopDocs top = searcher.Search (q, number);
|
||||
return new Result (term, searcher, top.ScoreDocs);
|
||||
} catch (IOException) {
|
||||
Console.WriteLine ("No index in {0}", dir);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//
|
||||
// An object representing the search term with the results
|
||||
//
|
||||
public class Result {
|
||||
string term;
|
||||
Searcher searcher;
|
||||
ScoreDoc[] docs;
|
||||
|
||||
public string Term {
|
||||
get { return term;}
|
||||
}
|
||||
|
||||
public int Count {
|
||||
get { return docs.Length; }
|
||||
}
|
||||
|
||||
public Document this [int i] {
|
||||
get { return searcher.Doc (docs[i].Doc); }
|
||||
}
|
||||
|
||||
public string GetTitle (int i)
|
||||
{
|
||||
Document d = this[i];
|
||||
return d == null ? string.Empty : d.Get ("title");
|
||||
}
|
||||
|
||||
public string GetUrl (int i)
|
||||
{
|
||||
Document d = this[i];
|
||||
return d == null ? string.Empty : d.Get ("url");
|
||||
}
|
||||
|
||||
public string GetFullTitle (int i)
|
||||
{
|
||||
Document d = this[i];
|
||||
return d == null ? string.Empty : d.Get ("fulltitle");
|
||||
}
|
||||
|
||||
public float Score (int i)
|
||||
{
|
||||
return docs[i].Score;
|
||||
}
|
||||
|
||||
public Result (string Term, Searcher searcher, ScoreDoc[] docs)
|
||||
{
|
||||
this.term = Term;
|
||||
this.searcher = searcher;
|
||||
this.docs = docs;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
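A sketch of how the index above is queried in practice, assuming a previously generated search_index directory (the path is hypothetical; Load returns null when no index exists):

using System;
using Monodoc;

class SearchSketch
{
    static void Main ()
    {
        SearchableIndex index = SearchableIndex.Load ("/usr/lib/monodoc/search_index");
        if (index == null)
            return;

        // Terms are lower-cased internally; results are capped at the requested count.
        Result result = index.Search ("console.writeline", 10);
        for (int i = 0; i < result.Count; i++)
            Console.WriteLine ("{0:0.00}  {1}  {2}", result.Score (i), result.GetTitle (i), result.GetUrl (i));
    }
}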
225  external/api-doc-tools/monodoc/Monodoc/Tree.cs (vendored)
@@ -1,225 +0,0 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using System.Linq;
|
||||
using System.Xml;
|
||||
using System.Collections.Generic;
|
||||
|
||||
namespace Monodoc
|
||||
{
|
||||
/// <summary>
|
||||
/// This tree is populated by the documentation providers, or populated
|
||||
/// from a binary encoding of the tree. The format of the tree is designed
|
||||
/// to minimize the need to load it in full.
|
||||
/// </summary>
|
||||
|
||||
/* Ideally this class should also be abstracted to let user have something
|
||||
* else than a file as a backing store, a database for instance
|
||||
*/
|
||||
public class Tree
|
||||
#if LEGACY_MODE
|
||||
: Node
|
||||
#endif
|
||||
{
|
||||
public const long CurrentVersionNumber = 1;
|
||||
const int VersionNumberKey = -(int)'v';
|
||||
public readonly HelpSource HelpSource;
|
||||
|
||||
FileStream InputStream;
|
||||
BinaryReader InputReader;
|
||||
|
||||
#if !LEGACY_MODE
|
||||
// This is the node which contains all the other node of the tree
|
||||
Node rootNode;
|
||||
#endif
|
||||
|
||||
/// <summary>
|
||||
/// Load from file constructor
|
||||
/// </summary>
|
||||
public Tree (HelpSource hs, string filename)
|
||||
#if LEGACY_MODE
|
||||
: base (null, null)
|
||||
#endif
|
||||
{
|
||||
HelpSource = hs;
|
||||
Encoding utf8 = new UTF8Encoding (false, true);
|
||||
|
||||
if (!File.Exists (filename)){
|
||||
throw new FileNotFoundException ();
|
||||
}
|
||||
|
||||
InputStream = File.OpenRead (filename);
|
||||
InputReader = new BinaryReader (InputStream, utf8);
|
||||
byte [] sig = InputReader.ReadBytes (4);
|
||||
|
||||
if (!GoodSig (sig))
|
||||
throw new Exception ("Invalid file format");
|
||||
|
||||
InputStream.Position = 4;
|
||||
// Try to read old version information
|
||||
if (InputReader.ReadInt32 () == VersionNumberKey)
|
||||
VersionNumber = InputReader.ReadInt64 ();
|
||||
else {
|
||||
// We try to see if there is a version number at the end of the file
|
||||
InputStream.Seek (-(4 + 8), SeekOrigin.End); // VersionNumberKey + long
|
||||
try {
|
||||
if (InputReader.ReadInt32 () == VersionNumberKey)
|
||||
VersionNumber = InputReader.ReadInt64 ();
|
||||
} catch {}
|
||||
// We set the stream back at the beginning of the node definition list
|
||||
InputStream.Position = 4;
|
||||
}
|
||||
|
||||
var position = InputReader.ReadInt32 ();
|
||||
#if !LEGACY_MODE
|
||||
rootNode = new Node (this, position);
|
||||
#else
|
||||
Address = position;
|
||||
#endif
|
||||
InflateNode (RootNode);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Tree creation and merged tree constructor
|
||||
/// </summary>
|
||||
public Tree (HelpSource hs, string caption, string url)
|
||||
#if !LEGACY_MODE
|
||||
: this (hs, null, caption, url)
|
||||
{
|
||||
}
|
||||
#else
|
||||
: base (caption, url)
|
||||
{
|
||||
HelpSource = hs;
|
||||
}
|
||||
#endif
|
||||
|
||||
|
||||
public Tree (HelpSource hs, Node parent, string caption, string element)
|
||||
#if LEGACY_MODE
|
||||
: base (parent, caption, element)
|
||||
#endif
|
||||
{
|
||||
HelpSource = hs;
|
||||
#if !LEGACY_MODE
|
||||
rootNode = parent == null ? new Node (this, caption, element) : new Node (parent, caption, element);
|
||||
#endif
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Saves the tree into the specified file using the help file format.
|
||||
/// </summary>
|
||||
public void Save (string file)
|
||||
{
|
||||
Encoding utf8 = new UTF8Encoding (false, true);
|
||||
using (FileStream output = File.OpenWrite (file)){
|
||||
// Skip over the pointer to the first node.
|
||||
output.Position = 4 + 4;
|
||||
|
||||
using (BinaryWriter writer = new BinaryWriter (output, utf8)) {
|
||||
// Recursively dump
|
||||
RootNode.Serialize (output, writer);
|
||||
// We want to generate 2.10 compatible files so we write the version number at the end
|
||||
writer.Write (VersionNumberKey);
|
||||
writer.Write (CurrentVersionNumber);
|
||||
|
||||
output.Position = 0;
|
||||
writer.Write (new byte [] { (byte) 'M', (byte) 'o', (byte) 'H', (byte) 'P' });
|
||||
writer.Write (RootNode.Address);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public Node RootNode {
|
||||
get {
|
||||
#if LEGACY_MODE
|
||||
return this;
|
||||
#else
|
||||
return rootNode;
|
||||
#endif
|
||||
}
|
||||
}
|
||||
|
||||
public long VersionNumber {
|
||||
get;
|
||||
private set;
|
||||
}
|
||||
|
||||
static bool GoodSig (byte [] sig)
|
||||
{
|
||||
if (sig.Length != 4)
|
||||
return false;
|
||||
return sig [0] == (byte) 'M'
|
||||
&& sig [1] == (byte) 'o'
|
||||
&& sig [2] == (byte) 'H'
|
||||
&& sig [3] == (byte) 'P';
|
||||
}
|
||||
|
||||
public void InflateNode (Node baseNode)
|
||||
{
|
||||
var address = baseNode.Address;
|
||||
if (address < 0)
|
||||
address = -address;
|
||||
|
||||
InputStream.Position = address;
|
||||
baseNode.Deserialize (InputReader);
|
||||
}
|
||||
|
||||
// Nodes use this value to know if they should manually re-sort their child
|
||||
// if they come from an older generator version
|
||||
internal bool ForceResort {
|
||||
get {
|
||||
return VersionNumber == 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public static class TreeDumper
|
||||
{
|
||||
static int indent;
|
||||
|
||||
static void Indent ()
|
||||
{
|
||||
for (int i = 0; i < indent; i++)
|
||||
Console.Write (" ");
|
||||
}
|
||||
|
||||
public static void PrintTree (Node node)
|
||||
{
|
||||
Indent ();
|
||||
Console.WriteLine ("{0},{1}\t[PublicUrl: {2}]", node.Element, node.Caption, node.PublicUrl);
|
||||
if (node.ChildNodes.Count == 0)
|
||||
return;
|
||||
|
||||
indent++;
|
||||
foreach (Node n in node.ChildNodes)
|
||||
PrintTree (n);
|
||||
indent--;
|
||||
}
|
||||
|
||||
public static string ExportToTocXml (Node root, string title, string desc)
|
||||
{
|
||||
if (root == null)
|
||||
throw new ArgumentNullException ("root");
|
||||
// Return a toc index of sub-nodes
|
||||
StringBuilder buf = new StringBuilder ();
|
||||
var writer = XmlWriter.Create (buf);
|
||||
writer.WriteStartElement ("toc");
|
||||
writer.WriteAttributeString ("title", title ?? string.Empty);
|
||||
writer.WriteElementString ("description", desc ?? string.Empty);
|
||||
writer.WriteStartElement ("list");
|
||||
foreach (Node n in root.ChildNodes) {
|
||||
writer.WriteStartElement ("item");
|
||||
writer.WriteAttributeString ("url", n.Element);
|
||||
writer.WriteValue (n.Caption);
|
||||
writer.WriteEndElement ();
|
||||
}
|
||||
writer.WriteEndElement ();
|
||||
writer.WriteEndElement ();
|
||||
writer.Flush ();
|
||||
writer.Close ();
|
||||
|
||||
return buf.ToString ();
|
||||
}
|
||||
}
|
||||
}
|
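Putting Tree.Save and GoodSig above together, a .tree container starts with the four magic bytes 'M' 'o' 'H' 'P', followed by a 32-bit offset to the serialized root node, with a version key and 64-bit version number appended at the end. A minimal standalone signature check (not from the deleted file):

using System;
using System.IO;

class TreeSignatureSketch
{
    // Returns true when the file carries the "MoHP" magic used by .tree files.
    static bool HasTreeSignature (string path)
    {
        using (var reader = new BinaryReader (File.OpenRead (path))) {
            byte[] sig = reader.ReadBytes (4);
            return sig.Length == 4 &&
                   sig[0] == (byte) 'M' && sig[1] == (byte) 'o' &&
                   sig[2] == (byte) 'H' && sig[3] == (byte) 'P';
        }
    }

    static void Main (string[] args)
    {
        Console.WriteLine (HasTreeSignature (args[0]) ? "monodoc tree file" : "unknown format");
    }
}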
@@ -1,40 +0,0 @@
using System;

namespace Monodoc
{
    public static class TypeUtils
    {
        public static bool GetNamespaceAndType (string url, out string ns, out string type)
        {
            int nsidx = -1;
            int numLt = 0;
            for (int i = 0; i < url.Length; ++i) {
                char c = url [i];
                switch (c) {
                case '<':
                case '{':
                    ++numLt;
                    break;
                case '>':
                case '}':
                    --numLt;
                    break;
                case '.':
                    if (numLt == 0)
                        nsidx = i;
                    break;
                }
            }

            if (nsidx == -1) {
                ns = null;
                type = null;
                return false;
            }
            ns = url.Substring (0, nsidx);
            type = url.Substring (nsidx + 1);

            return true;
        }
    }
}
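For reference, a quick standalone illustration of what the splitter above returns (not part of the deleted file): the dot inside the generic argument is ignored because it sits between '<' and '>', so the split happens at the last top-level dot.

using System;
using Monodoc;

class TypeUtilsSketch
{
    static void Main ()
    {
        string ns, type;
        if (TypeUtils.GetNamespaceAndType ("System.Collections.Generic.List<System.Int32>", out ns, out type))
            Console.WriteLine ("{0} | {1}", ns, type);
        // prints: System.Collections.Generic | List<System.Int32>
    }
}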
55  external/api-doc-tools/monodoc/Monodoc/cache.cs (vendored)
@@ -1,55 +0,0 @@
using System;
using System.Linq;
using System.IO;
using System.Configuration;
using System.Collections.Specialized;
using Monodoc.Caches;

namespace Monodoc
{
    public enum DocEntity
    {
        Text,
        Blob
    }

    public interface IDocCache : IDisposable
    {
        bool IsCached (string id);
        bool CanCache (DocEntity entity);

        Stream GetCachedStream (string id);
        string GetCachedString (string id);

        void CacheText (string id, string content);
        void CacheText (string id, Stream stream);

        void CacheBlob (string id, byte[] data);
        void CacheBlob (string id, Stream stream);
    }

    public static class DocCacheHelper
    {
        static string cacheBaseDirectory;

        static DocCacheHelper ()
        {
            try {
                var cacheConfig = Config.Get ("cache");
                if (cacheConfig == null) return;
                var cacheValues = cacheConfig.Split (',');
                if (cacheValues.Length == 2 && cacheValues[0].Equals ("file", StringComparison.Ordinal))
                    cacheBaseDirectory = cacheValues[1].Replace ("~", Environment.GetFolderPath (Environment.SpecialFolder.Personal));
            } catch {}
        }

        // Use configuration option to query for cache directory, if it doesn't exist we instantiate a nullcache
        public static IDocCache GetDefaultCache (string name)
        {
            if (cacheBaseDirectory == null)
                return new NullCache ();

            return new FileCache (Path.Combine (cacheBaseDirectory, name));
        }
    }
}
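Cache selection above is driven by a single configuration value of the form "file,<directory>" (a '~' expands to the user's personal folder); anything else, or no value at all, falls back to NullCache. A hedged illustration — the exact configuration syntax read by Config.Get is an assumption, since it is not shown in this diff:

using System;
using Monodoc;
using Monodoc.Caches;

class CacheSelectionSketch
{
    static void Main ()
    {
        // With a "cache" setting such as "file,~/.config/monodoc" this returns a
        // FileCache rooted below that directory; without one it returns a NullCache.
        IDocCache cache = DocCacheHelper.GetDefaultCache ("netdocs");
        Console.WriteLine (cache is FileCache ? "file-backed cache" : "caching disabled");
    }
}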
@@ -1,75 +0,0 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
|
||||
namespace Monodoc.Caches
|
||||
{
|
||||
public class FileCache : IDocCache
|
||||
{
|
||||
string baseCacheDir;
|
||||
|
||||
public FileCache (string baseCacheDir)
|
||||
{
|
||||
this.baseCacheDir = baseCacheDir;
|
||||
if (!Directory.Exists (baseCacheDir))
|
||||
Directory.CreateDirectory (baseCacheDir);
|
||||
}
|
||||
|
||||
public bool IsCached (string id)
|
||||
{
|
||||
return File.Exists (MakePath (id));
|
||||
}
|
||||
|
||||
public bool CanCache (DocEntity entity)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
public Stream GetCachedStream (string id)
|
||||
{
|
||||
return File.OpenRead (MakePath (id));
|
||||
}
|
||||
|
||||
public string GetCachedString (string id)
|
||||
{
|
||||
return File.ReadAllText (MakePath (id));
|
||||
}
|
||||
|
||||
public void CacheText (string id, string content)
|
||||
{
|
||||
File.WriteAllText (MakePath (id), content);
|
||||
}
|
||||
|
||||
public void CacheText (string id, Stream stream)
|
||||
{
|
||||
using (var file = File.OpenWrite (MakePath (id)))
|
||||
stream.CopyTo (file);
|
||||
}
|
||||
|
||||
public void CacheBlob (string id, byte[] data)
|
||||
{
|
||||
File.WriteAllBytes (MakePath (id), data);
|
||||
}
|
||||
|
||||
public void CacheBlob (string id, Stream stream)
|
||||
{
|
||||
using (var file = File.OpenWrite (MakePath (id)))
|
||||
stream.CopyTo (file);
|
||||
}
|
||||
|
||||
string MakePath (string id)
|
||||
{
|
||||
id = id.Replace (Path.DirectorySeparatorChar, '_');
|
||||
return Path.Combine (baseCacheDir, id);
|
||||
}
|
||||
|
||||
public void Dispose ()
|
||||
{
|
||||
if (!Directory.Exists (baseCacheDir))
|
||||
return;
|
||||
|
||||
try {
|
||||
Directory.Delete (baseCacheDir, true);
|
||||
} catch {}
|
||||
}
|
||||
}
|
||||
}
|
@@ -1,54 +0,0 @@
using System;
using System.IO;

namespace Monodoc.Caches
{
    // This is basically a no-cache implementation
    public class NullCache : IDocCache
    {
        public bool IsCached (string id)
        {
            return false;
        }

        public bool CanCache (DocEntity entity)
        {
            return false;
        }

        public Stream GetCachedStream (string id)
        {
            return null;
        }

        public string GetCachedString (string id)
        {
            return null;
        }

        public void CacheText (string id, string content)
        {

        }

        public void CacheText (string id, Stream stream)
        {

        }

        public void CacheBlob (string id, byte[] data)
        {

        }

        public void CacheBlob (string id, Stream stream)
        {

        }

        public void Dispose ()
        {

        }
    }
}
@@ -1,31 +0,0 @@
using System;
using System.Collections.Generic;

namespace Monodoc
{
    // All type of documents that a generator may find as input
    public enum DocumentType {
        EcmaXml,      // Our main monodoc format
        EcmaSpecXml,
        Man,
        AddinXml,
        MonoBook,     // This is mostly XHTML already, just need a tiny bit of processing
        Html,
        TocXml,       // Used by help source displaying some kind of toc of the content they host
        PlainText,
        ErrorXml
    }

    /* This interface defines a set of transformation engine
     * that convert multiple documentation source to a single output format
     */
    public interface IDocGenerator<TOutput>
    {
        /* This method is responsible for finding out the documentation type
         * for the given ID and use the right engine internally
         * The id can be accompanied by a context dictionary giving away extra
         * informtion to the renderer
         */
        TOutput Generate (HelpSource hs, string internalId, Dictionary<string, string> context);
    }
}
@@ -1,156 +0,0 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using System.Linq;
|
||||
using System.Collections.Generic;
|
||||
|
||||
using Monodoc;
|
||||
|
||||
namespace Monodoc.Generators
|
||||
{
|
||||
using Html;
|
||||
|
||||
interface IHtmlExporter
|
||||
{
|
||||
string CssCode { get; }
|
||||
string Export (Stream input, Dictionary<string, string> extras);
|
||||
string Export (string input, Dictionary<string, string> extras);
|
||||
}
|
||||
|
||||
public class HtmlGenerator : IDocGenerator<string>
|
||||
{
|
||||
const string cachePrefix = "htmlcached#";
|
||||
|
||||
static string css_code;
|
||||
|
||||
IDocCache defaultCache;
|
||||
static Dictionary<DocumentType, IHtmlExporter> converters;
|
||||
|
||||
static HtmlGenerator ()
|
||||
{
|
||||
converters = new Dictionary<DocumentType, IHtmlExporter> {
|
||||
{ DocumentType.EcmaXml, new Ecma2Html () },
|
||||
{ DocumentType.Man, new Man2Html () },
|
||||
{ DocumentType.TocXml, new Toc2Html () },
|
||||
{ DocumentType.EcmaSpecXml, new Ecmaspec2Html () },
|
||||
{ DocumentType.ErrorXml, new Error2Html () },
|
||||
{ DocumentType.Html, new Idem () },
|
||||
{ DocumentType.MonoBook, new MonoBook2Html () },
|
||||
{ DocumentType.AddinXml, new Addin2Html () },
|
||||
{ DocumentType.PlainText, new Idem () },
|
||||
};
|
||||
}
|
||||
|
||||
public HtmlGenerator (IDocCache defaultCache)
|
||||
{
|
||||
this.defaultCache = defaultCache;
|
||||
}
|
||||
|
||||
public string Generate (HelpSource hs, string id, Dictionary<string, string> context)
|
||||
{
|
||||
string specialPage = null;
|
||||
if (context != null && context.TryGetValue ("specialpage", out specialPage) && specialPage == "master-root")
|
||||
return GenerateMasterRootPage (hs != null ? hs.RootTree : null);
|
||||
|
||||
if (id == "root:" && hs == null)
|
||||
return MakeEmptySummary ();
|
||||
|
||||
if (hs == null || string.IsNullOrEmpty (id))
|
||||
return MakeHtmlError (string.Format ("Your request has found no candidate provider [hs=\"{0}\", id=\"{1}\"]",
|
||||
hs == null ? "(null)" : hs.Name, id ?? "(null)"));
|
||||
|
||||
var cache = defaultCache ?? hs.Cache;
|
||||
if (cache != null && cache.IsCached (MakeCacheKey (hs, id, null)))
|
||||
return cache.GetCachedString (MakeCacheKey (hs, id, null));
|
||||
|
||||
IEnumerable<string> parts;
|
||||
if (hs.IsMultiPart (id, out parts))
|
||||
return GenerateMultiPart (hs, parts, id, context);
|
||||
|
||||
if (hs.IsRawContent (id))
|
||||
return hs.GetText (id) ?? string.Empty;
|
||||
|
||||
DocumentType type = hs.GetDocumentTypeForId (id);
|
||||
if (cache != null && context != null && cache.IsCached (MakeCacheKey (hs, id, context)))
|
||||
return cache.GetCachedString (MakeCacheKey (hs, id, context));
|
||||
|
||||
IHtmlExporter exporter;
|
||||
if (!converters.TryGetValue (type, out exporter))
|
||||
return MakeHtmlError (string.Format ("Input type '{0}' not supported",
|
||||
type.ToString ()));
|
||||
var result = hs.IsGeneratedContent (id) ?
|
||||
exporter.Export (hs.GetCachedText (id), context) :
|
||||
exporter.Export (hs.GetCachedHelpStream (id), context);
|
||||
|
||||
if (cache != null)
|
||||
cache.CacheText (MakeCacheKey (hs, id, context), result);
|
||||
return result;
|
||||
}
|
||||
|
||||
string GenerateMultiPart (HelpSource hs, IEnumerable<string> ids, string originalId, Dictionary<string, string> context)
|
||||
{
|
||||
var sb = new StringBuilder ();
|
||||
foreach (var id in ids)
|
||||
sb.AppendLine (Generate (hs, id, context));
|
||||
|
||||
var cache = defaultCache ?? hs.Cache;
|
||||
if (cache != null)
|
||||
cache.CacheText (MakeCacheKey (hs, originalId, null), sb.ToString ());
|
||||
return sb.ToString ();
|
||||
}
|
||||
|
||||
string GenerateMasterRootPage (RootTree rootTree)
|
||||
{
|
||||
if (rootTree == null)
|
||||
return string.Empty;
|
||||
var assembly = System.Reflection.Assembly.GetAssembly (typeof (HtmlGenerator));
|
||||
var hpStream = assembly.GetManifestResourceStream ("home.html");
|
||||
var home = new StreamReader (hpStream).ReadToEnd ();
|
||||
var links = string.Join (Environment.NewLine,
|
||||
rootTree.RootNode.ChildNodes.Select (n => string.Format ("<li><a href=\"{0}\">{1}</a></li>", n.Element, n.Caption)));
|
||||
return home.Replace ("@@API_DOCS@@", links);
|
||||
}
|
||||
|
||||
public static string InlineCss {
|
||||
get {
|
||||
if (css_code != null)
|
||||
return css_code;
|
||||
|
||||
System.Reflection.Assembly assembly = System.Reflection.Assembly.GetAssembly (typeof (HtmlGenerator));
|
||||
Stream str_css = assembly.GetManifestResourceStream ("base.css");
|
||||
StringBuilder sb = new StringBuilder ((new StreamReader (str_css)).ReadToEnd());
|
||||
sb.Replace ("@@FONT_FAMILY@@", "Sans Serif");
|
||||
sb.Replace ("@@FONT_SIZE@@", "100%");
|
||||
css_code = sb.ToString () + converters.Values
|
||||
.Select (c => c.CssCode)
|
||||
.Where (css => !string.IsNullOrEmpty (css))
|
||||
.DefaultIfEmpty (string.Empty)
|
||||
.Aggregate (string.Concat);
|
||||
return css_code;
|
||||
}
|
||||
set {
|
||||
css_code = value;
|
||||
}
|
||||
}
|
||||
|
||||
string MakeHtmlError (string error)
|
||||
{
|
||||
return string.Format ("<html><head></head><body><p><em>Error:</em> {0}</p></body></html>", error);
|
||||
}
|
||||
|
||||
string MakeEmptySummary ()
|
||||
{
|
||||
return @"<html><head></head><body><p><em>This node doesn't have a summary available</p></body></html>";
|
||||
}
|
||||
|
||||
string MakeCacheKey (HelpSource hs, string page, IDictionary<string,string> extraParams)
|
||||
{
|
||||
var key = cachePrefix + hs.SourceID + page;
|
||||
if (extraParams != null && extraParams.Count > 0) {
|
||||
var paramPart = string.Join ("-", extraParams.Select (kvp => kvp.Key + kvp.Value));
|
||||
key += '_' + paramPart;
|
||||
}
|
||||
return key;
|
||||
}
|
||||
}
|
||||
}
|
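A sketch of driving the generator above directly; in the real pipeline RootTree.RenderUrl is handed a generator and resolves the URL to a help source and id for you. The source path and id below are hypothetical:

using System;
using Monodoc;
using Monodoc.Generators;

class HtmlRenderSketch
{
    static void Main ()
    {
        // Passing a null cache makes the generator fall back to each HelpSource's own cache.
        var generator = new HtmlGenerator (null);
        var hs = new HelpSource ("/usr/lib/monodoc/sources/netdocs", false);

        // "some-internal-id" stands in for an id normally obtained from
        // HelpSource.GetInternalIdForUrl.
        string html = generator.Generate (hs, "some-internal-id", null);
        Console.WriteLine (html);
    }
}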
@@ -1,41 +0,0 @@
using System;
using System.IO;
using System.Text;
using System.Linq;
using System.Collections.Generic;

using Monodoc;

namespace Monodoc.Generators
{
    /// <summary>
    /// This generators returns the raw content of the HelpSource without any transformation
    /// </summary>
    public class RawGenerator : IDocGenerator<string>
    {
        public string Generate (HelpSource hs, string id, Dictionary<string, string> context)
        {
            if (hs == null || string.IsNullOrEmpty (id))
                return null;

            IEnumerable<string> parts;
            if (hs.IsMultiPart (id, out parts))
                return GenerateMultiPart (hs, parts, id, context);

            if (hs.IsRawContent (id))
                return hs.GetText (id) ?? string.Empty;

            var result = hs.IsGeneratedContent (id) ? hs.GetCachedText (id) : new StreamReader (hs.GetCachedHelpStream (id)).ReadToEnd ();

            return result;
        }

        string GenerateMultiPart (HelpSource hs, IEnumerable<string> ids, string originalId, Dictionary<string, string> context)
        {
            var sb = new StringBuilder ();
            foreach (var id in ids)
                sb.AppendLine (Generate (hs, id, context));
            return sb.ToString ();
        }
    }
}
@@ -1,197 +0,0 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using System.Xml;
|
||||
using System.Xml.Xsl;
|
||||
using System.Xml.XPath;
|
||||
using System.Collections.Generic;
|
||||
|
||||
namespace Monodoc.Generators.Html
|
||||
{
|
||||
public class Addin2Html : IHtmlExporter
|
||||
{
|
||||
public string CssCode {
|
||||
get {
|
||||
return string.Empty;
|
||||
}
|
||||
}
|
||||
|
||||
public string Export (Stream stream, Dictionary<string, string> extraArgs)
|
||||
{
|
||||
using (var reader = new StreamReader (stream))
|
||||
return Htmlize (GetAddin (reader, extraArgs["AddinID"]),
|
||||
extraArgs["show"],
|
||||
extraArgs["AddinID"],
|
||||
extraArgs["FileID"],
|
||||
extraArgs["NodeID"]);
|
||||
}
|
||||
|
||||
public string Export (string input, Dictionary<string, string> extraArgs)
|
||||
{
|
||||
return Htmlize (GetAddin (new StringReader (input), extraArgs["AddinID"]),
|
||||
extraArgs["show"],
|
||||
extraArgs["AddinID"],
|
||||
extraArgs["FileID"],
|
||||
extraArgs["NodeID"]);
|
||||
}
|
||||
|
||||
XmlElement GetAddin (TextReader reader, string addinId)
|
||||
{
|
||||
XmlDocument doc = new XmlDocument ();
|
||||
doc.Load (reader);
|
||||
XmlElement addin = (XmlElement) doc.SelectSingleNode ("Addins/Addin[@fullId='" + addinId + "']");
|
||||
return addin != null ? addin : null;
|
||||
}
|
||||
|
||||
public string Htmlize (XmlElement addin, string urlType, string addinId, string fileId, string path)
|
||||
{
|
||||
if (urlType == Monodoc.Providers.AddinsHelpSource.AddinPrefix)
|
||||
return GetAddinTextFromUrl (addin, addinId, fileId);
|
||||
else if (urlType == Monodoc.Providers.AddinsHelpSource.ExtensionPrefix)
|
||||
return GetExtensionTextFromUrl (addin, addinId, fileId, path);
|
||||
else if (urlType == Monodoc.Providers.AddinsHelpSource.ExtensionNodePrefix)
|
||||
return GetExtensionNodeTextFromUrl (addin, addinId, fileId, path);
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
protected string GetAddinTextFromUrl (XmlElement addin, string addinId, string fileId)
|
||||
{
|
||||
if (addin == null)
|
||||
return "<html>Add-in not found: " + addinId + "</html>";
|
||||
|
||||
StringBuilder sb = new StringBuilder ("<html>");
|
||||
sb.Append ("<h1>").Append (addin.GetAttribute ("name")).Append ("</h1>");
|
||||
XmlElement docs = (XmlElement) addin.SelectSingleNode ("Description");
|
||||
if (docs != null)
|
||||
sb.Append (docs.InnerText);
|
||||
|
||||
sb.Append ("<p><table border=\"1\" cellpadding=\"4\" cellspacing=\"0\">");
|
||||
sb.AppendFormat ("<tr><td><b>Id</b></td><td>{0}</td></tr>", addin.GetAttribute ("addinId"));
|
||||
sb.AppendFormat ("<tr><td><b>Namespace</b></td><td>{0}</td></tr>", addin.GetAttribute ("namespace"));
|
||||
sb.AppendFormat ("<tr><td><b>Version</b></td><td>{0}</td></tr>", addin.GetAttribute ("version"));
|
||||
sb.Append ("</table></p>");
|
||||
sb.Append ("<p><b>Extension Points</b>:</p>");
|
||||
sb.Append ("<ul>");
|
||||
|
||||
foreach (XmlElement ep in addin.SelectNodes ("ExtensionPoint")) {
|
||||
sb.AppendFormat ("<li><a href=\"extension-point:{0}#{1}#{2}\">{3}</li>", fileId, addinId, ep.GetAttribute ("path"), ep.GetAttribute ("name"));
|
||||
}
|
||||
sb.Append ("</ul>");
|
||||
|
||||
sb.Append ("</html>");
|
||||
return sb.ToString ();
|
||||
}
|
||||
|
||||
        protected string GetExtensionTextFromUrl (XmlElement addin, string addinId, string fileId, string path)
        {
            if (addin == null)
                return "<html>Add-in not found: " + addinId + "</html>";

            XmlElement ext = (XmlElement) addin.SelectSingleNode ("ExtensionPoint[@path='" + path + "']");
            if (ext == null)
                return "<html>Extension point not found: " + path + "</html>";

            StringBuilder sb = new StringBuilder ("<html>");
            sb.Append ("<h1>").Append (ext.GetAttribute ("name")).Append ("</h1>");

            path = path.Replace ("/", " <b>/</b> ");
            sb.Append ("<p><b>Path</b>: ").Append (path).Append ("</p>");
            XmlElement desc = (XmlElement) ext.SelectSingleNode ("Description");
            if (desc != null)
                sb.Append (desc.InnerText);

            sb.Append ("<p><b>Extension Nodes</b>:</p>");
            sb.Append ("<table border=\"1\" cellpadding=\"4\" cellspacing=\"0\">");

            // Each allowed node type is rendered as a link to its extension-node: page.
            foreach (XmlElement en in ext.SelectNodes ("ExtensionNode")) {
                string nid = en.GetAttribute ("id");
                string nname = en.GetAttribute ("name");
                string sdesc = "";
                desc = (XmlElement) en.SelectSingleNode ("Description");
                if (desc != null)
                    sdesc = desc.InnerText;

                sb.AppendFormat ("<tr><td><a href=\"extension-node:{0}#{1}#{2}\">{3}</a></td><td>{4}</td></tr>", fileId, addinId, nid, nname, sdesc);
            }
            sb.Append ("</table>");

            sb.Append ("</html>");
            return sb.ToString ();
        }

        protected string GetExtensionNodeTextFromUrl (XmlElement addin, string addinId, string fileId, string nodeId)
        {
            if (addin == null)
                return "<html>Add-in not found: " + addinId + "</html>";

            XmlElement node = (XmlElement) addin.SelectSingleNode ("ExtensionNodeType[@id='" + nodeId + "']");
            if (node == null)
                return "<html>Extension node not found: " + nodeId + "</html>";

            StringBuilder sb = new StringBuilder ("<html>");
            sb.Append ("<h1>").Append (node.GetAttribute ("name")).Append ("</h1>");
            XmlElement desc = (XmlElement) node.SelectSingleNode ("Description");
            if (desc != null)
                sb.Append (desc.InnerText);

            sb.Append ("<p><b>Attributes</b>:</p>");
            sb.Append ("<table border=\"1\" cellpadding=\"4\" cellspacing=\"0\"><tr>");
            sb.Append ("<td><b>Name</b></td>");
            sb.Append ("<td><b>Type</b></td>");
            sb.Append ("<td><b>Required</b></td>");
            sb.Append ("<td><b>Localizable</b></td>");
            sb.Append ("<td><b>Description</b></td>");
            // Built-in "id" attribute row; every extension node carries an id.
            sb.Append ("</tr><tr>");
            sb.Append ("<td>id</td>");
            sb.Append ("<td>System.String</td>");
            sb.Append ("<td></td>");
            sb.Append ("<td></td>");
            sb.Append ("<td>Identifier of the node.</td>");
            sb.Append ("</tr>");

            foreach (XmlElement at in node.SelectNodes ("Attributes/Attribute")) {
                sb.Append ("<tr>");
                sb.AppendFormat ("<td>{0}</td>", at.GetAttribute ("name"));
                sb.AppendFormat ("<td>{0}</td>", at.GetAttribute ("type"));
                if (at.GetAttribute ("required") == "True")
                    sb.Append ("<td>Yes</td>");
                else
                    sb.Append ("<td></td>");
                if (at.GetAttribute ("localizable") == "True")
                    sb.Append ("<td>Yes</td>");
                else
                    sb.Append ("<td></td>");
                string sdesc = "";
                desc = (XmlElement) at.SelectSingleNode ("Description");
                if (desc != null)
                    sdesc = desc.InnerText;

                sb.AppendFormat ("<td>{0}</td>", sdesc);
                sb.Append ("</tr>");
            }
            sb.Append ("</table>");

            XmlNodeList children = node.SelectNodes ("ChildNodes/ExtensionNode");
            if (children.Count > 0) {
                sb.Append ("<p><b>Child Nodes</b>:</p>");
                sb.Append ("<table border=\"1\" cellpadding=\"4\" cellspacing=\"0\">");

                foreach (XmlElement en in children) {
                    string nid = en.GetAttribute ("id");
                    string nname = en.GetAttribute ("name");
                    string sdesc = "";
                    desc = (XmlElement) en.SelectSingleNode ("Description");
                    if (desc != null)
                        sdesc = desc.InnerText;

                    sb.AppendFormat ("<tr><td><a href=\"extension-node:{0}#{1}#{2}\">{3}</a></td><td>{4}</td></tr>", fileId, addinId, nid, nname, sdesc);
                }
                sb.Append ("</table>");
            }

            sb.Append ("</html>");
            return sb.ToString ();
        }
    }
}
@ -1,333 +0,0 @@
using System;
using System.IO;
using System.Text;
using System.Linq;
using System.Xml;
using System.Xml.Xsl;
using System.Xml.XPath;
using System.Collections.Generic;

using Mono.Documentation;
using BF = System.Reflection.BindingFlags;

namespace Monodoc.Generators.Html
{
    public class Ecma2Html : IHtmlExporter
    {
        static string css_ecma;
        static string js;
        static XslCompiledTransform ecma_transform;
        readonly ExtensionObject ExtObject = new ExtensionObject ();

        public Ecma2Html ()
        {
        }

        public string CssCode {
            get {
                if (css_ecma != null)
                    return css_ecma;
                var assembly = typeof(Ecma2Html).Assembly;
                Stream str_css = assembly.GetManifestResourceStream ("mono-ecma.css");
                css_ecma = (new StreamReader (str_css)).ReadToEnd();
                return css_ecma;
            }
        }

        public string JsCode {
            get {
                if (js != null)
                    return js;
                var assembly = typeof(Ecma2Html).Assembly;
                Stream str_js = assembly.GetManifestResourceStream ("helper.js");
                js = (new StreamReader (str_js)).ReadToEnd();
                return js;
            }
        }

        public string Htmlize (XmlReader ecma_xml, Dictionary<string, string> extraArgs)
        {
            var args = new XsltArgumentList ();
            args.AddExtensionObject("monodoc:///extensions", ExtObject);
            string specialPage;
            if (extraArgs.TryGetValue ("specialpage", out specialPage) && specialPage == "root") {
                extraArgs.Remove ("specialpage");
                extraArgs["show"] = "masteroverview";
            }

            foreach (var kvp in extraArgs)
                args.AddParam (kvp.Key, string.Empty, kvp.Value);

            return Htmlize (ecma_xml, args);
        }

        public string Htmlize (XmlReader ecma_xml, XsltArgumentList args)
        {
            try {
                EnsureTransform ();

                var output = new StringBuilder ();
                ecma_transform.Transform (ecma_xml,
                                          args,
                                          XmlWriter.Create (output, ecma_transform.OutputSettings),
                                          CreateDocumentResolver ());
                return output.ToString ();
            }
            catch (Exception x)
            {
                var msg = x.ToString ();
                return msg;
            }
        }

        protected virtual XmlResolver CreateDocumentResolver ()
        {
            // results in using XmlUrlResolver
            return null;
        }

        public string Export (Stream stream, Dictionary<string, string> extraArgs)
        {
            return Htmlize (XmlReader.Create (new StreamReader(stream)), extraArgs);
        }

        public string Export (string input, Dictionary<string, string> extraArgs)
        {
            return Htmlize (XmlReader.Create (new StringReader(input)), extraArgs);
        }

        static void EnsureTransform ()
        {
            if (ecma_transform == null) {
                ecma_transform = new XslCompiledTransform ();
                var assembly = System.Reflection.Assembly.GetAssembly (typeof (Ecma2Html));

                Stream stream = assembly.GetManifestResourceStream ("mono-ecma-css.xsl");
                XmlReader xml_reader = new XmlTextReader (stream);
                XmlResolver r = new ManifestResourceResolver (".");
                ecma_transform.Load (xml_reader, XsltSettings.TrustedXslt, r);
            }
        }

        public class ExtensionObject
        {
            bool quiet = true;
            Dictionary<string, System.Reflection.Assembly> assemblyCache = new Dictionary<string, System.Reflection.Assembly> ();

            public string Colorize(string code, string lang)
            {
                return Mono.Utilities.Colorizer.Colorize(code,lang);
            }

            // Used by stylesheet to nicely reformat the <see cref=> tags.
            public string MakeNiceSignature(string sig, string contexttype)
            {
                if (sig.Length < 3)
                    return sig;
                if (sig[1] != ':')
                    return sig;

                char s = sig[0];
                sig = sig.Substring(2);

                switch (s) {
                case 'N': return sig;
                case 'T': return ShortTypeName (sig, contexttype);

                case 'C': case 'M': case 'P': case 'F': case 'E':
                    string type, mem, arg;

                    // Get arguments
                    int paren;
                    if (s == 'C' || s == 'M')
                        paren = sig.IndexOf("(");
                    else if (s == 'P')
                        paren = sig.IndexOf("[");
                    else
                        paren = 0;

                    if (paren > 0 && paren < sig.Length-1) {
                        string[] args = sig.Substring(paren+1, sig.Length-paren-2).Split(',');
                        for (int i = 0; i < args.Length; i++)
                            args[i] = ShortTypeName(args[i], contexttype);
                        arg = "(" + String.Join(", ", args) + ")";
                        sig = sig.Substring(0, paren);
                    } else {
                        arg = string.Empty;
                    }

                    // Get type and member names
                    int dot = sig.LastIndexOf(".");
                    if (s == 'C' || dot <= 0 || dot == sig.Length-1) {
                        mem = string.Empty;
                        type = sig;
                    } else {
                        type = sig.Substring(0, dot);
                        mem = sig.Substring(dot);
                    }

                    type = ShortTypeName(type, contexttype);

                    return type + mem + arg;

                default:
                    return sig;
                }
            }

            static string ShortTypeName(string name, string contexttype)
            {
                int dot = contexttype.LastIndexOf(".");
                if (dot < 0) return name;
                string contextns = contexttype.Substring(0, dot+1);

                if (name == contexttype)
                    return name.Substring(dot+1);

                if (name.StartsWith(contextns))
                    return name.Substring(contextns.Length);

                return name.Replace("+", ".");
            }

            string MonoImpInfo(string assemblyname, string typename, string membername, string arglist, bool strlong)
            {
                if (quiet)
                    return string.Empty;

                var a = new List<string> ();
                if (!string.IsNullOrEmpty (arglist)) a.Add (arglist);
                return MonoImpInfo(assemblyname, typename, membername, a, strlong);
            }

            string MonoImpInfo(string assemblyname, string typename, string membername, XPathNodeIterator itr, bool strlong)
            {
                if (quiet)
                    return string.Empty;

                var rgs = itr.Cast<XPathNavigator> ().Select (nav => nav.Value).ToList ();

                return MonoImpInfo (assemblyname, typename, membername, rgs, strlong);
            }

            string MonoImpInfo(string assemblyname, string typename, string membername, List<string> arglist, bool strlong)
            {
                try {
                    System.Reflection.Assembly assembly = null;

                    try {
                        if (!assemblyCache.TryGetValue (assemblyname, out assembly)) {
                            assembly = System.Reflection.Assembly.LoadWithPartialName(assemblyname);
                            if (assembly != null)
                                assemblyCache[assemblyname] = assembly;
                        }
                    } catch (Exception) {
                        // nothing.
                    }

                    if (assembly == null) {
                        /*if (strlong) return "The assembly " + assemblyname + " is not available to MonoDoc.";
                        else return string.Empty;*/
                        return string.Empty; // silently ignore
                    }

                    Type t = assembly.GetType(typename, false);
                    if (t == null) {
                        if (strlong)
                            return typename + " has not been implemented.";
                        else
                            return "Not implemented.";
                    }

                    // The following code is flakey and fails to find existing members
                    return string.Empty;
                } catch (Exception) {
                    return string.Empty;
                }
            }

            string MonoImpInfo(System.Reflection.MemberInfo mi, string itemtype, bool strlong)
            {
                if (quiet)
                    return string.Empty;

                string s = string.Empty;

                object[] atts = mi.GetCustomAttributes(true);
                int todoctr = 0;
                foreach (object att in atts) if (att.GetType().Name == "MonoTODOAttribute") todoctr++;

                if (todoctr > 0) {
                    if (strlong)
                        s = "This " + itemtype + " is marked as being unfinished.<BR/>\n";
                    else
                        s = "Unfinished.";
                }

                return s;
            }

            public string MonoImpInfo(string assemblyname, string typename, bool strlong)
            {
                if (quiet)
                    return string.Empty;

                try {
                    if (assemblyname == string.Empty)
                        return string.Empty;

                    System.Reflection.Assembly assembly;
                    if (!assemblyCache.TryGetValue (assemblyname, out assembly)) {
                        assembly = System.Reflection.Assembly.LoadWithPartialName(assemblyname);
                        if (assembly != null)
                            assemblyCache[assemblyname] = assembly;
                    }

                    if (assembly == null)
                        return string.Empty;

                    Type t = assembly.GetType(typename, false);
                    if (t == null) {
                        if (strlong)
                            return typename + " has not been implemented.";
                        else
                            return "Not implemented.";
                    }

                    string s = MonoImpInfo(t, "type", strlong);

                    if (strlong) {
                        var mis = t.GetMembers (BF.Static | BF.Instance | BF.Public | BF.NonPublic);

                        // Scan members for MonoTODO attributes
                        int mctr = 0;
                        foreach (var mi in mis) {
                            string mii = MonoImpInfo(mi, null, false);
                            if (mii != string.Empty) mctr++;
                        }
                        if (mctr > 0) {
                            s += "This type has " + mctr + " members that are marked as unfinished.<BR/>";
                        }
                    }

                    return s;

                } catch (Exception) {
                    return string.Empty;
                }
            }

            public bool MonoEditing ()
            {
                return false;
            }

            public bool IsToBeAdded(string text)
            {
                return text.StartsWith ("To be added");
            }
        }
    }

}
@ -1,66 +0,0 @@
using System;
using System.IO;
using System.Xml;
using System.Xml.Xsl;
using System.Xml.XPath;
using System.Collections.Generic;

namespace Monodoc.Generators.Html
{
    public class Ecmaspec2Html : IHtmlExporter
    {
        static string css_ecmaspec;
        static XslTransform ecma_transform;
        static XsltArgumentList args = new XsltArgumentList();

        public string CssCode {
            get {
                if (css_ecmaspec != null)
                    return css_ecmaspec;
                System.Reflection.Assembly assembly = System.Reflection.Assembly.GetAssembly (typeof (Ecmaspec2Html));
                Stream str_css = assembly.GetManifestResourceStream ("ecmaspec.css");
                css_ecmaspec = (new StreamReader (str_css)).ReadToEnd ();
                return css_ecmaspec;
            }
        }

        class ExtObj
        {
            public string Colorize (string code, string lang)
            {
                return Mono.Utilities.Colorizer.Colorize (code, lang);
            }
        }

        public string Export (Stream stream, Dictionary<string, string> extraArgs)
        {
            return Htmlize (new XPathDocument (stream));
        }

        public string Export (string input, Dictionary<string, string> extraArgs)
        {
            return Htmlize (new XPathDocument (new StringReader (input)));
        }

        static string Htmlize (XPathDocument ecma_xml)
        {
            if (ecma_transform == null) {
                ecma_transform = new XslTransform ();
                System.Reflection.Assembly assembly = System.Reflection.Assembly.GetAssembly (typeof (Ecmaspec2Html));
                Stream stream;
                stream = assembly.GetManifestResourceStream ("ecmaspec-html-css.xsl");

                XmlReader xml_reader = new XmlTextReader (stream);
                ecma_transform.Load (xml_reader, null, null);
                args.AddExtensionObject ("monodoc:///extensions", new ExtObj ());
            }

            if (ecma_xml == null) return "";

            StringWriter output = new StringWriter ();
            ecma_transform.Transform (ecma_xml, args, output, null);

            return output.ToString ();
        }
    }
}
Some files were not shown because too many files have changed in this diff.