Imported Upstream version 3.6.0

Former-commit-id: da6be194a6b1221998fc28233f2503bd61dd9d14
This commit is contained in: Jo Shields, 2014-08-13 10:39:27 +01:00
commit a575963da9
50588 changed files with 8155799 additions and 0 deletions

@@ -0,0 +1,133 @@
//
// CodePointIndexer.cs : indexing table optimizer
//
// Author:
// Atsushi Enomoto <atsushi@ximian.com>
//
// Copyright (C) 2005 Novell, Inc (http://www.novell.com)
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System;
using System.Globalization;
using System.Text;
namespace Mono.Globalization.Unicode
{
internal class CodePointIndexer
{
public static Array CompressArray (
Array source, Type type, CodePointIndexer indexer)
{
int totalCount = 0;
for (int i = 0; i < indexer.ranges.Length; i++)
totalCount += indexer.ranges [i].Count;
Array ret = Array.CreateInstance (type, totalCount);
for (int i = 0; i < indexer.ranges.Length; i++)
Array.Copy (
source,
indexer.ranges [i].Start,
ret,
indexer.ranges [i].IndexStart,
indexer.ranges [i].Count);
return ret;
}
// This class is used to compact indexes into limited ranges so that
// we can avoid storing long runs of extraneous zeros in the tables.
[Serializable]
internal struct TableRange
{
public TableRange (int start, int end, int indexStart)
{
Start = start;
End = end;
Count = End - Start;
IndexStart = indexStart;
IndexEnd = IndexStart + Count;
}
public readonly int Start;
public readonly int End;
public readonly int Count;
public readonly int IndexStart;
public readonly int IndexEnd;
}
readonly TableRange [] ranges;
public readonly int TotalCount;
int defaultIndex;
int defaultCP;
public CodePointIndexer (int [] starts, int [] ends, int defaultIndex, int defaultCP)
{
this.defaultIndex = defaultIndex;
this.defaultCP = defaultCP;
ranges = new TableRange [starts.Length];
for (int i = 0; i < ranges.Length; i++)
ranges [i] = new TableRange (starts [i],
ends [i], i == 0 ? 0 :
ranges [i - 1].IndexStart +
ranges [i - 1].Count);
for (int i = 0; i < ranges.Length; i++)
TotalCount += ranges [i].Count;
// for (int i = 0; i < ranges.Length; i++)
// Console.Error.WriteLine ("RANGES [{0}] : {1:x} to {2:x} index {3:x} to {4:x}. total {5:x}", i, ranges [i].Start, ranges [i].End, ranges [i].IndexStart, ranges [i].IndexEnd, ranges [i].Count);
// Console.Error.WriteLine ("Total items: {0:X} ({1})", TotalCount, TotalCount);
}
public int ToIndex (int cp)
{
for (int t = 0; t < ranges.Length; t++) {
if (cp < ranges [t].Start)
return defaultIndex;
else if (cp < ranges [t].End)
return cp - ranges [t].Start + ranges [t].IndexStart;
}
return defaultIndex;
// throw new SystemException (String.Format ("Should not happen: no map definition for cp {0:x}({1})", cp, (char) cp));
}
public int ToCodePoint (int i)
{
for (int t = 0; t < ranges.Length; t++) {
/*
if (t > 0 && i < ranges [t - 1].IndexEnd)
return defaultCP; // unexpected out of range
if (ranges [t].IndexStart <= i &&
i < ranges [t].IndexEnd)
return i - ranges [t].IndexStart
+ ranges [t].Start;
*/
if (i < ranges [t].IndexStart)
return defaultCP;
if (i < ranges [t].IndexEnd)
return i - ranges [t].IndexStart
+ ranges [t].Start;
}
return defaultCP;
// throw new SystemException (String.Format ("Should not happen: no map definition for index {0:x}({1})", i, i));
}
}
}
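
A minimal usage sketch for the indexer above, with made-up ranges rather than any real table (and assuming it is compiled into the same assembly, since the class is internal): ToIndex maps a code point into the compacted index space, ToCodePoint maps back, and CompressArray shrinks a full-size table down to just the covered ranges.

using System;

namespace Mono.Globalization.Unicode
{
	class CodePointIndexerSketch
	{
		static void Main ()
		{
			// Hypothetical ranges: U+0000-U+00FF and U+3000-U+30FF
			// are packed into indexes 0x000-0x1FF.
			CodePointIndexer indexer = new CodePointIndexer (
				new int [] {0x0000, 0x3000},   // starts
				new int [] {0x0100, 0x3100},   // ends (exclusive)
				-1, -1);                       // defaultIndex / defaultCP

			Console.WriteLine (indexer.ToIndex (0x3042));     // 0x3042 - 0x3000 + 0x100 = 0x142
			Console.WriteLine (indexer.ToCodePoint (0x142));  // back to 0x3042
			Console.WriteLine (indexer.ToIndex (0x2000));     // falls in a gap -> -1

			// A sparse 0x10000-entry table collapses to TotalCount (0x200) entries.
			byte [] full = new byte [0x10000];
			byte [] packed = (byte []) CodePointIndexer.CompressArray (
				full, typeof (byte), indexer);
			Console.WriteLine (packed.Length);                // 512
		}
	}
}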

@@ -0,0 +1,86 @@
//
// MSCompatUnicodeTable.cs : Utility for MSCompatUnicodeTable class.
//
// Author:
// Atsushi Enomoto <atsushi@ximian.com>
//
// Copyright (C) 2005 Novell, Inc (http://www.novell.com)
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System;
using System.Globalization;
using System.Text;
namespace Mono.Globalization.Unicode
{
internal /*static*/ class MSCompatUnicodeTableUtil
{
public const byte ResourceVersion = 3;
public static readonly CodePointIndexer Ignorable;
public static readonly CodePointIndexer Category;
public static readonly CodePointIndexer Level1;
public static readonly CodePointIndexer Level2;
public static readonly CodePointIndexer Level3;
// public static readonly CodePointIndexer WidthCompat;
public static readonly CodePointIndexer CjkCHS;
public static readonly CodePointIndexer Cjk;
static MSCompatUnicodeTableUtil ()
{
// FIXME: these ranges could be more compact, but since
// I haven't filled in all the tables yet, I keep them on the safe side.
int [] ignoreStarts = new int [] {
0, 0xA000, 0xF900};
int [] ignoreEnds = new int [] {
0x3400, 0xA500, 0x10000};
int [] catStarts = new int [] {
0, 0x1E00, 0x3000, 0x4E00, 0xAC00, 0xF900};
int [] catEnds = new int [] {
0x1200, 0x2800, 0x3400, 0xA000, 0xD7B0, 0x10000};
int [] lv1Starts = new int [] {
0, 0x1E00, 0x3000, 0x4E00, 0xAC00, 0xF900};
int [] lv1Ends = new int [] {
0x1200, 0x2800, 0x3400, 0xA000, 0xD7B0, 0x10000};
int [] lv2Starts = new int [] {0, 0x1E00, 0x3000, 0xFB00};
int [] lv2Ends = new int [] {0xF00, 0x2800, 0x3400, 0x10000};
int [] lv3Starts = new int [] {0, 0x1E00, 0x3000, 0xFB00};
int [] lv3Ends = new int [] {0x1200, 0x2800, 0x3400, 0x10000};
// int [] widthStarts = new int [] {0, 0x2000, 0x3100, 0xFF00};
// int [] widthEnds = new int [] {0x300, 0x2200, 0x3200, 0x10000};
int [] chsStarts = new int [] {
0x3100, 0x4E00, 0xE800}; // FIXME: really?
int [] chsEnds = new int [] {
0x3400, 0xA000, 0x10000};
int [] cjkStarts = new int [] {0x3100, 0x4E00, 0xF900};
int [] cjkEnds = new int [] {0x3400, 0xA000, 0xFB00};
Ignorable = new CodePointIndexer (ignoreStarts, ignoreEnds, -1, -1);
Category = new CodePointIndexer (catStarts, catEnds, 0, 0);
Level1 = new CodePointIndexer (lv1Starts, lv1Ends, 0, 0);
Level2 = new CodePointIndexer (lv2Starts, lv2Ends, 0, 0);
Level3 = new CodePointIndexer (lv3Starts, lv3Ends, 0, 0);
// WidthCompat = new CodePointIndexer (widthStarts, widthEnds, 0, 0);
CjkCHS = new CodePointIndexer (chsStarts, chsEnds, -1, -1);
Cjk = new CodePointIndexer (cjkStarts, cjkEnds, -1, -1);
}
}
}
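
As a rough sketch of how these indexers are meant to be consumed (the real lookups live in MSCompatUnicodeTable, which is not shown here, so the table and helper below are hypothetical): a weight table stored in compressed form is addressed through the matching indexer, and the -1 default flags code points outside the covered ranges.

namespace Mono.Globalization.Unicode
{
	class CjkLookupSketch
	{
		// cjkTable is a stand-in for a compressed weight table whose layout
		// matches the Cjk indexer (one byte per covered code point).
		static byte GetCjkWeight (byte [] cjkTable, int cp)
		{
			int idx = MSCompatUnicodeTableUtil.Cjk.ToIndex (cp);
			return idx < 0 ? (byte) 0 : cjkTable [idx];   // -1 => not a CJK code point
		}
	}
}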

@@ -0,0 +1,120 @@
using System;
using System.Globalization;
using System.Text;
namespace Mono.Globalization.Unicode
{
internal class NormalizationTableUtil
{
public static readonly CodePointIndexer Prop;
public static readonly CodePointIndexer Map;
public static readonly CodePointIndexer Combining;
public static readonly CodePointIndexer Composite;
public static readonly CodePointIndexer Helper;
static NormalizationTableUtil ()
{
int [] propStarts = new int [] {
0, 0x0910, 0x1B00, 0x2460, 0x2980,
0x2C70, 0x2D60, 0x2E90, 0xA770, 0xA7F0, 0xF900,
// 0x1D100, 0x2f800, 0x2fa10
};
int [] propEnds = new int [] {
0x06E0, 0x1200, 0x2330, 0x2600, 0x2AE0,
0x2C80, 0x2D70, 0x3400, 0xA780, 0xA800, 0x10000,
// 0x1D800, 0x2f810, 0x2fa20
};
int [] mapStarts = new int [] {
0x90, 0x0920, 0x1D20, 0x2460, 0x24A0, 0x2A00,
0x2D60, 0x2E90, 0xF900,
// 0x1d150, 0x2f800
};
int [] mapEnds = new int [] {
0x06E0, 0x1100, 0x2330, 0x24A0, 0x24F0, 0x2AE0,
0x2D70, 0x3400, 0x10000,
// 0x1d800, 0x2fb00
};
int [] combiningStarts = new int [] {
0x02F0, 0x0480, 0x0590, 0x0930, 0x09B0,
0x0A30, 0x0AB0, 0x0B30, 0x0BC0, 0x0C40,
0x0CB0, 0x0D40, 0x0DC0, 0x0E30, 0x0EB0,
0x0F00, 0x1030, 0x1350, 0x1710, 0x17D0,
0x18A0, 0x1930, 0x1A10, 0x1DC0, 0x20D0,
0x3020, 0x3090, 0xA800, 0xFB10, 0xFE20,
// 0x10A00, 0x1D160, 0x1D240
};
int [] combiningEnds = new int [] {
0x0360, 0x0490, 0x0750, 0x0960, 0x09D0,
0x0A50, 0x0AD0, 0x0B50, 0x0BD0, 0x0C60,
0x0CD0, 0x0D50, 0x0DD0, 0x0E50, 0x0ED0,
0x0FD0, 0x1040, 0x1360, 0x1740, 0x17E0,
0x18B0, 0x1940, 0x1A20, 0x1DD0, 0x20F0,
0x3030, 0x30A0, 0xA810, 0xFB20, 0xFE30,
// 0x10A40, 0x1D1B0, 0x1D250
};
// Since mapToCompositeIndex only holds canonical
// mappings, these indexes could still be shortened.
int [] compositeStarts = new int [] {
0x480, 0x1410, 0x1670
};
int [] compositeEnds = new int [] {
0x1080, 0x1580, 0x21B0
};
int [] helperStarts = new int [] {
0, 0x900, 0x1D00, 0x2500, 0x3000, 0x3B90,
0x4010, 0x4E00, 0xFB40,
// 0x1D150, 0x20100, 0x20510,
// 0x20630, 0x20800, 0x20A20, 0x20B60, 0x214E0,
};
int [] helperEnds = new int [] {
0x700, 0x1200, 0x2300, 0x2600, 0x3160, 0x3BA0,
0x4030, 0xA000, 0xFB50,
// 0x1D1C0, 0x20130, 0x20550,
// 0x20640, 0x208E0, 0x20A30, 0x20B70, 0x214F0,
};
Prop = new CodePointIndexer (propStarts, propEnds, 0, 0);
Map = new CodePointIndexer (mapStarts, mapEnds, 0, 0);
Combining = new CodePointIndexer (combiningStarts,
combiningEnds, 0, 0);
Composite = new CodePointIndexer (compositeStarts,
compositeEnds, 0, 0);
Helper = new CodePointIndexer (helperStarts, helperEnds,
0, 0);
}
public static int PropIdx (int cp)
{
return Prop.ToIndex (cp);
}
public static int PropCP (int index)
{
return Prop.ToCodePoint (index);
}
public static int PropCount { get { return Prop.TotalCount; } }
public static int MapIdx (int cp)
{
return Map.ToIndex (cp);
}
public static int MapCP (int index)
{
return Map.ToCodePoint (index);
}
public static int CbIdx (int cp)
{
return Combining.ToIndex (cp);
}
public static int CbCP (int index)
{
return Combining.ToCodePoint (index);
}
public static int MapCount { get { return Map.TotalCount; } }
}
}
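
The static wrappers above are thin aliases over the indexers; a small sketch of the expected round-trip behaviour, with a hypothetical flat property table standing in for the real normalization data:

using System;

namespace Mono.Globalization.Unicode
{
	class NormalizationIndexSketch
	{
		static void Main ()
		{
			int idx = NormalizationTableUtil.PropIdx (0x00C0);   // inside the first range
			Console.WriteLine (NormalizationTableUtil.PropCP (idx) == 0x00C0);  // True
			Console.WriteLine (NormalizationTableUtil.PropIdx (0x0800));        // gap -> 0

			// A hypothetical flat property table, compressed to PropCount entries
			// and always addressed through PropIdx.
			byte [] propTable = new byte [NormalizationTableUtil.PropCount];
			byte prop = propTable [NormalizationTableUtil.PropIdx (0x00C0)];
			Console.WriteLine (prop);
		}
	}
}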

@@ -0,0 +1,133 @@
//
// System.Globalization.SortKey.cs
//
// Author:
// Atsushi Enomoto <atsushi@ximian.com>
//
// Copyright (C) 2005 Novell, Inc (http://www.novell.com)
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
//
// This file works with our managed collation implementation.
//
using System;
using System.IO;
using System.Globalization;
using System.Runtime.InteropServices;
namespace System.Globalization
{
[System.Runtime.InteropServices.ComVisible (true)]
[Serializable]
[StructLayout (LayoutKind.Sequential)]
public class SortKey
{
#region Static members
public static int Compare (SortKey sortkey1, SortKey sortkey2)
{
if (sortkey1 == null)
throw new ArgumentNullException ("sortkey1");
if (sortkey2 == null)
throw new ArgumentNullException ("sortkey2");
if (Object.ReferenceEquals (sortkey1, sortkey2)
|| Object.ReferenceEquals (sortkey1.OriginalString,
sortkey2.OriginalString))
return 0;
byte [] d1 = sortkey1.KeyData;
byte [] d2 = sortkey2.KeyData;
int len = d1.Length > d2.Length ? d2.Length : d1.Length;
for (int i = 0; i < len; i++)
if (d1 [i] != d2 [i])
return d1 [i] < d2 [i] ? -1 : 1;
return d1.Length == d2.Length ? 0 : d1.Length < d2.Length ? -1 : 1;
}
#endregion
readonly string source;
readonly byte [] key;
readonly CompareOptions options;
readonly int lcid;
// for the legacy unmanaged implementation
internal SortKey (int lcid, string source, CompareOptions opt)
{
this.lcid = lcid;
this.source = source;
this.options = opt;
}
internal SortKey (int lcid, string source, byte [] buffer, CompareOptions opt,
int lv1Length, int lv2Length, int lv3Length,
int kanaSmallLength, int markTypeLength,
int katakanaLength, int kanaWidthLength,
int identLength)
{
this.lcid = lcid;
this.source = source;
this.key = buffer;
this.options = opt;
}
public virtual string OriginalString {
get { return source; }
}
public virtual byte [] KeyData {
get { return key; }
}
// copy from original SortKey.cs
public override bool Equals (object value)
{
SortKey other = (value as SortKey);
if (other != null) {
if ((this.lcid == other.lcid) &&
(this.options == other.options) &&
(Compare (this, other) == 0)) {
return true;
}
}
return false;
}
public override int GetHashCode ()
{
if (key.Length == 0)
return 0; // should not happen though.
int val = key [0];
for (int i = 1; i < key.Length; i++)
val ^= (int) key [i] << (i & 3);
return (int) val;
}
// copy from original SortKey.cs
public override string ToString()
{
return "SortKey - " + lcid + ", " + options + ", " + source;
}
}
}
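
A short usage sketch for the class above: sort keys are normally produced by CompareInfo.GetSortKey rather than constructed directly, and the byte-wise Compare shown above then orders them consistently with CompareInfo.Compare for the same options.

using System;
using System.Globalization;

class SortKeySketch
{
	static void Main ()
	{
		CompareInfo ci = CultureInfo.InvariantCulture.CompareInfo;
		SortKey k1 = ci.GetSortKey ("cafe");
		SortKey k2 = ci.GetSortKey ("caffe");

		// Byte-wise comparison of the two key blobs.
		Console.WriteLine (SortKey.Compare (k1, k2) < 0);   // True: "cafe" sorts before "caffe"
		Console.WriteLine (k1.OriginalString);              // "cafe"
		Console.WriteLine (k1.KeyData.Length);              // length of the raw key
	}
}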

@@ -0,0 +1,297 @@
//
// SortKeyBuffer.cs : buffer implementation for GetSortKey()
//
// Author:
// Atsushi Enomoto <atsushi@ximian.com>
//
// Copyright (C) 2005 Novell, Inc (http://www.novell.com)
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System;
using System.IO;
using System.Globalization;
namespace Mono.Globalization.Unicode
{
// Internal sort key storage that is reused during GetSortKey.
internal class SortKeyBuffer
{
// l4s = small kana sensitivity, l4t = mark type,
// l4k = katakana flag, l4w = kana width sensitivity
byte [] l1b, l2b, l3b, l4sb, l4tb, l4kb, l4wb, l5b;
// int level5LastPos;
string source;
int l1, l2, l3, l4s, l4t, l4k, l4w, l5;
int lcid;
CompareOptions options;
bool processLevel2;
bool frenchSort;
bool frenchSorted;
public SortKeyBuffer (int lcid)
{
}
public void Reset ()
{
l1 = l2 = l3 = l4s = l4t = l4k = l4w = l5 = 0;
// level5LastPos = 0;
frenchSorted = false;
}
// It is used for CultureInfo.ClearCachedData().
internal void ClearBuffer ()
{
l1b = l2b = l3b = l4sb = l4tb = l4kb = l4wb = l5b = null;
}
internal void Initialize (CompareOptions options, int lcid, string s, bool frenchSort)
{
this.source = s;
this.lcid = lcid;
this.options = options;
int len = s.Length;
processLevel2 = (options & CompareOptions.IgnoreNonSpace) == 0;
this.frenchSort = frenchSort;
// For Korean text the primary buffer is likely to be much
// bigger (because of Jamo), but even in ko-KR most of the
// compared strings won't be Hangul.
if (l1b == null || l1b.Length < len)
l1b = new byte [len * 2 + 10];
if (processLevel2 && (l2b == null || l2b.Length < len))
l2b = new byte [len + 10];
if (l3b == null || l3b.Length < len)
l3b = new byte [len + 10];
// These weights are used only for Japanese text.
// We could expand the initial length as we do for the
// primary buffer (which is actually x3), but even in ja-JP
// most of the compared strings won't be Japanese.
if (l4sb == null)
l4sb = new byte [10];
if (l4tb == null)
l4tb = new byte [10];
if (l4kb == null)
l4kb = new byte [10];
if (l4wb == null)
l4wb = new byte [10];
if (l5b == null)
l5b = new byte [10];
}
internal void AppendCJKExtension (byte lv1msb, byte lv1lsb)
{
AppendBufferPrimitive (0xFE, ref l1b, ref l1);
AppendBufferPrimitive (0xFF, ref l1b, ref l1);
AppendBufferPrimitive (lv1msb, ref l1b, ref l1);
AppendBufferPrimitive (lv1lsb, ref l1b, ref l1);
if (processLevel2)
AppendBufferPrimitive (2, ref l2b, ref l2);
AppendBufferPrimitive (2, ref l3b, ref l3);
}
// LAMESPEC: Windows handles some Hangul Jamo as having
// more than two primary weight values. However this causes
// incorrect zero-termination, so I just ignore that and
// treat them as usual characters.
/*
internal void AppendJamo (byte category, byte lv1msb, byte lv1lsb)
{
AppendNormal (category, lv1msb, 0, 0);
AppendBufferPrimitive (0xFF, ref l1b, ref l1);
AppendBufferPrimitive (lv1lsb, ref l1b, ref l1);
AppendBufferPrimitive (0xFF, ref l1b, ref l1);
// FIXME: those values look extraneous but might serve
// some advanced use. Worth digging into.
AppendBufferPrimitive (0, ref l1b, ref l1);
AppendBufferPrimitive (0xFF, ref l1b, ref l1);
AppendBufferPrimitive (0, ref l1b, ref l1);
}
*/
// Append a sort key value from the table, plus kana-specific weights.
internal void AppendKana (byte category, byte lv1, byte lv2, byte lv3, bool isSmallKana, byte markType, bool isKatakana, bool isHalfWidth)
{
AppendNormal (category, lv1, lv2, lv3);
AppendBufferPrimitive ((byte) (isSmallKana ? 0xC4 : 0xE4), ref l4sb, ref l4s);
AppendBufferPrimitive (markType, ref l4tb, ref l4t);
AppendBufferPrimitive ((byte) (isKatakana ? 0xC4 : 0xE4), ref l4kb, ref l4k);
AppendBufferPrimitive ((byte) (isHalfWidth ? 0xC4 : 0xE4), ref l4wb, ref l4w);
}
// Append sort key value from table normally.
internal void AppendNormal (byte category, byte lv1, byte lv2, byte lv3)
{
if (lv2 == 0)
lv2 = 2;
if (lv3 == 0)
lv3 = 2;
// Special weight processing
if (category == 6 && (options & CompareOptions.StringSort) == 0) {
AppendLevel5 (category, lv1);
return;
}
// A non-primary diacritical weight is added to that of
// the previous character (and does not reset the level 3
// weight).
if (processLevel2 && category == 1 && l1 > 0) {
lv2 = (byte) (lv2 + l2b [--l2]);
lv3 = l3b [--l3];
}
if (category != 1) {
AppendBufferPrimitive (category, ref l1b, ref l1);
AppendBufferPrimitive (lv1, ref l1b, ref l1);
}
if (processLevel2)
AppendBufferPrimitive (lv2, ref l2b, ref l2);
AppendBufferPrimitive (lv3, ref l3b, ref l3);
}
// Append a variable-weight character.
// The level 2 index is used for counting offsets (since the
// level 1 weight might be longer than one byte).
private void AppendLevel5 (byte category, byte lv1)
{
// offset
#if false
// To strictly match Windows, offsetValue would always be l2.
int offsetValue = l2 - level5LastPos;
// To strictly match Windows, there would be no 0xFF here.
for (; offsetValue > 8192; offsetValue -= 8192)
AppendBufferPrimitive (0xFF, ref l5b, ref l5);
#else
// LAMESPEC: Windows cannot compute lv5 values for
// strings whose length is larger than 8064
// (it reminds me of the SQL Server varchar length limit).
int offsetValue = (l2 + 1) % 8192;
#endif
AppendBufferPrimitive ((byte) ((offsetValue / 64) + 0x80), ref l5b, ref l5);
AppendBufferPrimitive ((byte) (offsetValue % 64 * 4 + 3), ref l5b, ref l5);
// level5LastPos = l2;
// sortkey value
AppendBufferPrimitive (category, ref l5b, ref l5);
AppendBufferPrimitive (lv1, ref l5b, ref l5);
}
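// Worked example (illustrative): with l2 == 5 the offset value is
// (5 + 1) % 8192 == 6, emitted as the two bytes
//   6 / 64 + 0x80 == 0x80   and   6 % 64 * 4 + 3 == 0x1B,
// followed by the category and level-1 bytes of the
// variable-weight character.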
private void AppendBufferPrimitive (byte value, ref byte [] buf, ref int bidx)
{
buf [bidx++] = value;
if (bidx == buf.Length) {
byte [] tmp = new byte [bidx * 2];
Array.Copy (buf, tmp, buf.Length);
buf = tmp;
}
}
public SortKey GetResultAndReset ()
{
SortKey ret = GetResult ();
Reset ();
return ret;
}
// For levels 2-5, the default value can be cut when trailing (it is implied).
// With a default of 02:
// 02 02 02 -> 0
// 02 03 02 -> 2
// 03 04 05 -> 3
private int GetOptimizedLength (byte [] data, int len, byte defaultValue)
{
int cur = -1;
for (int i = 0; i < len; i++)
if (data [i] != defaultValue)
cur = i;
return cur + 1;
}
public SortKey GetResult ()
{
if (source.Length == 0)
return new SortKey (lcid, source, new byte [0], options, 0, 0, 0, 0, 0, 0, 0, 0);
if (frenchSort && !frenchSorted && l2b != null) {
int i = 0;
for (; i < l2b.Length; i++)
if (l2b [i] == 0)
break;
Array.Reverse (l2b, 0, i);
frenchSorted = true;
}
l2 = GetOptimizedLength (l2b, l2, 2);
l3 = GetOptimizedLength (l3b, l3, 2);
bool hasJapaneseWeight = (l4s > 0); // snapshot before being optimized
l4s = GetOptimizedLength (l4sb, l4s, 0xE4);
l4t = GetOptimizedLength (l4tb, l4t, 3);
l4k = GetOptimizedLength (l4kb, l4k, 0xE4);
l4w = GetOptimizedLength (l4wb, l4w, 0xE4);
l5 = GetOptimizedLength (l5b, l5, 2);
int length = l1 + l2 + l3 + l5 + 5;
int jpLength = l4s + l4t + l4k + l4w;
if (hasJapaneseWeight)
length += jpLength + 4;
byte [] ret = new byte [length];
Array.Copy (l1b, ret, l1);
ret [l1] = 1; // end-of-level mark
int cur = l1 + 1;
if (l2 > 0)
Array.Copy (l2b, 0, ret, cur, l2);
cur += l2;
ret [cur++] = 1; // end-of-level mark
if (l3 > 0)
Array.Copy (l3b, 0, ret, cur, l3);
cur += l3;
ret [cur++] = 1; // end-of-level mark
if (hasJapaneseWeight) {
Array.Copy (l4sb, 0, ret, cur, l4s);
cur += l4s;
ret [cur++] = 0xFF; // end-of-jp-subsection
Array.Copy (l4tb, 0, ret, cur, l4t);
cur += l4t;
ret [cur++] = 2; // end-of-jp-middle-subsection
Array.Copy (l4kb, 0, ret, cur, l4k);
cur += l4k;
ret [cur++] = 0xFF; // end-of-jp-subsection
Array.Copy (l4wb, 0, ret, cur, l4w);
cur += l4w;
ret [cur++] = 0xFF; // end-of-jp-subsection
}
ret [cur++] = 1; // end-of-level mark
if (l5 > 0)
Array.Copy (l5b, 0, ret, cur, l5);
cur += l5;
ret [cur++] = 0; // end-of-data mark
return new SortKey (lcid, source, ret, options, l1, l2, l3, l4s, l4t, l4k, l4w, l5);
}
}
}
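
SortKeyBuffer is internal and driven by the managed collator rather than used directly, but the key layout GetResult assembles above (01 between levels, FF and 02 around the Japanese sub-levels, 00 as the terminator) can be observed from the public API when the managed collation implementation is in effect; a small sketch:

using System;
using System.Globalization;

class KeyLayoutSketch
{
	static void Main ()
	{
		// Dump a key so the level separators assembled in GetResult are visible:
		// ... 01 ... 01 ... 01 ... 00 (01 = end-of-level, 00 = end-of-data).
		byte [] key = CultureInfo.InvariantCulture.CompareInfo
			.GetSortKey ("Abc").KeyData;
		Console.WriteLine (BitConverter.ToString (key));
	}
}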