lucenenet-commits mailing list archives

From: synhers...@apache.org
Subject: [2/5] lucenenet git commit: Getting rid of most sbyte usage in public API
Date: Mon, 10 Nov 2014 15:25:24 GMT
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/139ad812/src/Lucene.Net.Core/Util/Packed/BulkOperationPackedSingleBlock.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Util/Packed/BulkOperationPackedSingleBlock.cs b/src/Lucene.Net.Core/Util/Packed/BulkOperationPackedSingleBlock.cs
index 16e524a..9e49efa 100644
--- a/src/Lucene.Net.Core/Util/Packed/BulkOperationPackedSingleBlock.cs
+++ b/src/Lucene.Net.Core/Util/Packed/BulkOperationPackedSingleBlock.cs
@@ -55,9 +55,12 @@ namespace Lucene.Net.Util.Packed
             return ValueCount;
         }
 
-        private static long ReadLong(sbyte[] blocks, int blocksOffset)
+        private static long ReadLong(byte[] blocks, int blocksOffset)
         {
-            return (blocks[blocksOffset++] & 0xFFL) << 56 | (blocks[blocksOffset++] & 0xFFL) << 48 | (blocks[blocksOffset++] & 0xFFL) << 40 | (blocks[blocksOffset++] & 0xFFL) << 32 | (blocks[blocksOffset++] & 0xFFL) << 24 | (blocks[blocksOffset++] & 0xFFL) << 16 | (blocks[blocksOffset++] & 0xFFL) << 8 | blocks[blocksOffset++] & 0xFFL;
+            return (((sbyte)blocks[blocksOffset++]) & 0xFFL) << 56 | (((sbyte)blocks[blocksOffset++]) & 0xFFL) << 48 | 
+                (((sbyte)blocks[blocksOffset++]) & 0xFFL) << 40 | (((sbyte)blocks[blocksOffset++]) & 0xFFL) << 32 |
+                (((sbyte)blocks[blocksOffset++]) & 0xFFL) << 24 | (((sbyte)blocks[blocksOffset++]) & 0xFFL) << 16 |
+                (((sbyte)blocks[blocksOffset++]) & 0xFFL) << 8 | ((sbyte)blocks[blocksOffset++]) & 0xFFL;
         }
 
         private int Decode(long block, long[] values, int valuesOffset)
@@ -111,7 +114,7 @@ namespace Lucene.Net.Util.Packed
             }
         }
 
-        public override void Decode(sbyte[] blocks, int blocksOffset, long[] values, int valuesOffset, int iterations)
+        public override void Decode(byte[] blocks, int blocksOffset, long[] values, int valuesOffset, int iterations)
         {
             for (int i = 0; i < iterations; ++i)
             {
@@ -134,7 +137,7 @@ namespace Lucene.Net.Util.Packed
             }
         }
 
-        public override void Decode(sbyte[] blocks, int blocksOffset, int[] values, int valuesOffset, int iterations)
+        public override void Decode(byte[] blocks, int blocksOffset, int[] values, int valuesOffset, int iterations)
         {
             if (BitsPerValue > 32)
             {
@@ -166,7 +169,7 @@ namespace Lucene.Net.Util.Packed
             }
         }
 
-        public override void Encode(long[] values, int valuesOffset, sbyte[] blocks, int blocksOffset, int iterations)
+        public override void Encode(long[] values, int valuesOffset, byte[] blocks, int blocksOffset, int iterations)
         {
             for (int i = 0; i < iterations; ++i)
             {
@@ -176,7 +179,7 @@ namespace Lucene.Net.Util.Packed
             }
         }
 
-        public override void Encode(int[] values, int valuesOffset, sbyte[] blocks, int blocksOffset, int iterations)
+        public override void Encode(int[] values, int valuesOffset, byte[] blocks, int blocksOffset, int iterations)
         {
             for (int i = 0; i < iterations; ++i)
             {

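The sbyte casts added to ReadLong above are functionally redundant but harmless: for a byte value b, ((sbyte)b) & 0xFFL and b & 0xFFL select the same low 8 bits, so the old sbyte-based arithmetic is preserved while the public parameter type becomes byte[]. A standalone sketch (not part of this patch) that checks the equivalence and shows the same big-endian assembly:

    using System;

    internal static class ReadLongSketch
    {
        // Big-endian assembly of a long from 8 bytes, same shape as ReadLong above.
        internal static long ReadLongBigEndian(byte[] blocks, int off)
        {
            long result = 0;
            for (int i = 0; i < 8; i++)
            {
                result = (result << 8) | (blocks[off + i] & 0xFFL);
            }
            return result;
        }

        internal static void Main()
        {
            for (int v = 0; v <= 255; v++)
            {
                var b = (byte)v;
                if ((((sbyte)b) & 0xFFL) != (b & 0xFFL))
                {
                    throw new InvalidOperationException("mask mismatch"); // never hit
                }
            }
            var bytes = new byte[] { 0x01, 0x23, 0x45, 0x67, 0x89, 0xAB, 0xCD, 0xEF };
            Console.WriteLine(ReadLongBigEndian(bytes, 0).ToString("X16")); // 0123456789ABCDEF
        }
    }
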
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/139ad812/src/Lucene.Net.Core/Util/Packed/PackedInts.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Util/Packed/PackedInts.cs b/src/Lucene.Net.Core/Util/Packed/PackedInts.cs
index 5b6d90d..dd365b7 100644
--- a/src/Lucene.Net.Core/Util/Packed/PackedInts.cs
+++ b/src/Lucene.Net.Core/Util/Packed/PackedInts.cs
@@ -491,7 +491,7 @@ namespace Lucene.Net.Util.Packed
             /// <param name="values">       the values buffer </param>
             /// <param name="valuesOffset"> the offset where to start writing values </param>
             /// <param name="iterations">   controls how much data to decode </param>
-            void Decode(sbyte[] blocks, int blocksOffset, long[] values, int valuesOffset, int iterations);
+            void Decode(byte[] blocks, int blocksOffset, long[] values, int valuesOffset, int iterations);
 
             /// <summary>
             /// Read <code>iterations * blockCount()</code> blocks from <code>blocks</code>,
@@ -515,7 +515,7 @@ namespace Lucene.Net.Util.Packed
             /// <param name="values">       the values buffer </param>
             /// <param name="valuesOffset"> the offset where to start writing values </param>
             /// <param name="iterations">   controls how much data to decode </param>
-            void Decode(sbyte[] blocks, int blocksOffset, int[] values, int valuesOffset, int iterations);
+            void Decode(byte[] blocks, int blocksOffset, int[] values, int valuesOffset, int iterations);
         }
 
         /// <summary>
@@ -569,7 +569,7 @@ namespace Lucene.Net.Util.Packed
             /// <param name="values">       the values buffer </param>
             /// <param name="valuesOffset"> the offset where to start reading values </param>
             /// <param name="iterations">   controls how much data to encode </param>
-            void Encode(long[] values, int valuesOffset, sbyte[] blocks, int blocksOffset, int iterations);
+            void Encode(long[] values, int valuesOffset, byte[] blocks, int blocksOffset, int iterations);
 
             /// <summary>
             /// Read <code>iterations * valueCount()</code> values from <code>values</code>,
@@ -593,7 +593,7 @@ namespace Lucene.Net.Util.Packed
             /// <param name="values">       the values buffer </param>
             /// <param name="valuesOffset"> the offset where to start reading values </param>
             /// <param name="iterations">   controls how much data to encode </param>
-            void Encode(int[] values, int valuesOffset, sbyte[] blocks, int blocksOffset, int iterations);
+            void Encode(int[] values, int valuesOffset, byte[] blocks, int blocksOffset, int iterations);
         }
 
         /// <summary>

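The Decoder/Encoder doc comments above pin down the bulk contract: each call consumes iterations * blockCount() blocks and produces iterations * valueCount() values (Encode is the reverse). A toy sketch of that contract, not the library implementation, for the degenerate 8-bits-per-value case where one byte block holds exactly one value:

    // Toy decoder (illustration only): with 8 bits per value, blockCount() == 1 and
    // valueCount() == 1, so each iteration reads one byte block and writes one value.
    static void Decode8Bit(byte[] blocks, int blocksOffset, long[] values, int valuesOffset, int iterations)
    {
        for (int i = 0; i < iterations; i++)
        {
            values[valuesOffset++] = blocks[blocksOffset++] & 0xFFL;
        }
    }

Real formats pack several values per block; the interface only fixes the blocks-in/values-out ratio per iteration.
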
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/139ad812/src/Lucene.Net.Core/Util/Packed/PackedReaderIterator.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Util/Packed/PackedReaderIterator.cs b/src/Lucene.Net.Core/Util/Packed/PackedReaderIterator.cs
index 68f9fcb..eaa27a0 100644
--- a/src/Lucene.Net.Core/Util/Packed/PackedReaderIterator.cs
+++ b/src/Lucene.Net.Core/Util/Packed/PackedReaderIterator.cs
@@ -29,7 +29,7 @@ namespace Lucene.Net.Util.Packed
         internal readonly int PackedIntsVersion;
         internal readonly PackedInts.Format Format;
         internal readonly BulkOperation BulkOperation;
-        internal readonly sbyte[] NextBlocks;
+        internal readonly byte[] NextBlocks;
         internal readonly LongsRef NextValues;
         internal readonly int Iterations_Renamed;
         internal int Position;
@@ -42,7 +42,7 @@ namespace Lucene.Net.Util.Packed
             BulkOperation = BulkOperation.Of(format, bitsPerValue);
             Iterations_Renamed = Iterations(mem);
             Debug.Assert(valueCount == 0 || Iterations_Renamed > 0);
-            NextBlocks = new sbyte[Iterations_Renamed * BulkOperation.ByteBlockCount()];
+            NextBlocks = new byte[Iterations_Renamed * BulkOperation.ByteBlockCount()];
             NextValues = new LongsRef(new long[Iterations_Renamed * BulkOperation.ByteValueCount()], 0, 0);
             NextValues.Offset = NextValues.Longs.Length;
             Position = -1;
@@ -81,7 +81,7 @@ namespace Lucene.Net.Util.Packed
                 @in.ReadBytes(NextBlocks, 0, blocksToRead);
                 if (blocksToRead < NextBlocks.Length)
                 {
-                    Arrays.Fill(NextBlocks, blocksToRead, NextBlocks.Length, (sbyte)0);
+                    Arrays.Fill(NextBlocks, blocksToRead, NextBlocks.Length, (byte)0);
                 }
 
                 BulkOperation.Decode(NextBlocks, 0, NextValues.Longs, 0, Iterations_Renamed);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/139ad812/src/Lucene.Net.Core/Util/Packed/PackedWriter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Util/Packed/PackedWriter.cs b/src/Lucene.Net.Core/Util/Packed/PackedWriter.cs
index fa41e38..68e0c8c 100644
--- a/src/Lucene.Net.Core/Util/Packed/PackedWriter.cs
+++ b/src/Lucene.Net.Core/Util/Packed/PackedWriter.cs
@@ -31,7 +31,7 @@ namespace Lucene.Net.Util.Packed
         internal bool Finished;
         internal readonly PackedInts.Format Format_Renamed;
         internal readonly BulkOperation Encoder;
-        internal readonly sbyte[] NextBlocks;
+        internal readonly byte[] NextBlocks;
         internal readonly long[] NextValues;
         internal readonly int Iterations;
         internal int Off;
@@ -43,7 +43,7 @@ namespace Lucene.Net.Util.Packed
             this.Format_Renamed = format;
             Encoder = BulkOperation.Of(format, bitsPerValue);
             Iterations = Encoder.ComputeIterations(valueCount, mem);
-            NextBlocks = new sbyte[Iterations * Encoder.ByteBlockCount()];
+            NextBlocks = new byte[Iterations * Encoder.ByteBlockCount()];
             NextValues = new long[Iterations * Encoder.ByteValueCount()];
             Off = 0;
             Written = 0;

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/139ad812/src/Lucene.Net.Core/Util/PagedBytes.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Util/PagedBytes.cs b/src/Lucene.Net.Core/Util/PagedBytes.cs
index 348754c..c806ebb 100644
--- a/src/Lucene.Net.Core/Util/PagedBytes.cs
+++ b/src/Lucene.Net.Core/Util/PagedBytes.cs
@@ -38,7 +38,7 @@ namespace Lucene.Net.Util
     // other "shift/mask big arrays". there are too many of these classes!
     public sealed class PagedBytes
     {
-        private readonly IList<sbyte[]> Blocks = new List<sbyte[]>();
+        private readonly IList<byte[]> Blocks = new List<byte[]>();
 
         // TODO: these are unused?
         private readonly IList<int> BlockEnd = new List<int>();
@@ -49,10 +49,10 @@ namespace Lucene.Net.Util
         private bool DidSkipBytes;
         private bool Frozen;
         private int Upto;
-        private sbyte[] CurrentBlock;
+        private byte[] CurrentBlock;
         private readonly long BytesUsedPerBlock;
 
-        private static readonly sbyte[] EMPTY_BYTES = new sbyte[0];
+        private static readonly byte[] EMPTY_BYTES = new byte[0];
 
         /// <summary>
         /// Provides methods to read BytesRefs from a frozen
@@ -61,7 +61,7 @@ namespace Lucene.Net.Util
         /// <seealso cref= #freeze  </seealso>
         public sealed class Reader
         {
-            internal readonly sbyte[][] Blocks;
+            internal readonly byte[][] Blocks;
             internal readonly int[] BlockEnds;
             internal readonly int BlockBits;
             internal readonly int BlockMask;
@@ -69,8 +69,8 @@ namespace Lucene.Net.Util
 
             internal Reader(PagedBytes pagedBytes)
             {
-                Blocks = new sbyte[pagedBytes.Blocks.Count][];
-                for (int i = 0; i < Blocks.Length; i++)
+                Blocks = new byte[pagedBytes.Blocks.Count][];
+                for (var i = 0; i < Blocks.Length; i++)
                 {
                     Blocks[i] = pagedBytes.Blocks[i];
                 }
@@ -103,8 +103,8 @@ namespace Lucene.Net.Util
                 {
                     return;
                 }
-                int index = (int)(start >> BlockBits);
-                int offset = (int)(start & BlockMask);
+                var index = (int)(start >> BlockBits);
+                var offset = (int)(start & BlockMask);
                 if (BlockSize - offset >= length)
                 {
                     // Within block
@@ -114,7 +114,7 @@ namespace Lucene.Net.Util
                 else
                 {
                     // Split
-                    b.Bytes = new sbyte[length];
+                    b.Bytes = new byte[length];
                     b.Offset = 0;
                     Array.Copy(Blocks[index], offset, b.Bytes, 0, BlockSize - offset);
                     Array.Copy(Blocks[1 + index], 0, b.Bytes, BlockSize - offset, length - (BlockSize - offset));
@@ -134,9 +134,9 @@ namespace Lucene.Net.Util
             // TODO: this really needs to be refactored into fieldcacheimpl
             public void Fill(BytesRef b, long start)
             {
-                int index = (int)(start >> BlockBits);
-                int offset = (int)(start & BlockMask);
-                sbyte[] block = b.Bytes = Blocks[index];
+                var index = (int)(start >> BlockBits);
+                var offset = (int)(start & BlockMask);
+                var block = b.Bytes = Blocks[index];
 
                 if ((block[offset] & 128) == 0)
                 {
@@ -187,7 +187,7 @@ namespace Lucene.Net.Util
                         Blocks.Add(CurrentBlock);
                         BlockEnd.Add(Upto);
                     }
-                    CurrentBlock = new sbyte[BlockSize];
+                    CurrentBlock = new byte[BlockSize];
                     Upto = 0;
                     left = BlockSize;
                 }
@@ -222,7 +222,7 @@ namespace Lucene.Net.Util
                     BlockEnd.Add(Upto);
                     DidSkipBytes = true;
                 }
-                CurrentBlock = new sbyte[BlockSize];
+                CurrentBlock = new byte[BlockSize];
                 Upto = 0;
                 left = BlockSize;
                 Debug.Assert(bytes.Length <= BlockSize);
@@ -251,7 +251,7 @@ namespace Lucene.Net.Util
             }
             if (trim && Upto < BlockSize)
             {
-                sbyte[] newBlock = new sbyte[Upto];
+                var newBlock = new byte[Upto];
                 Array.Copy(CurrentBlock, 0, newBlock, 0, Upto);
                 CurrentBlock = newBlock;
             }
@@ -311,7 +311,7 @@ namespace Lucene.Net.Util
                     Blocks.Add(CurrentBlock);
                     BlockEnd.Add(Upto);
                 }
-                CurrentBlock = new sbyte[BlockSize];
+                CurrentBlock = new byte[BlockSize];
                 Upto = 0;
             }
 
@@ -319,12 +319,12 @@ namespace Lucene.Net.Util
 
             if (bytes.Length < 128)
             {
-                CurrentBlock[Upto++] = (sbyte)bytes.Length;
+                CurrentBlock[Upto++] = (byte)bytes.Length;
             }
             else
             {
-                CurrentBlock[Upto++] = unchecked((sbyte)(0x80 | (bytes.Length >> 8)));
-                CurrentBlock[Upto++] = unchecked((sbyte)(bytes.Length & 0xff));
+                CurrentBlock[Upto++] = unchecked((byte)(0x80 | (bytes.Length >> 8)));
+                CurrentBlock[Upto++] = unchecked((byte)(bytes.Length & 0xff));
             }
             Array.Copy(bytes.Bytes, bytes.Offset, CurrentBlock, Upto, bytes.Length);
             Upto += bytes.Length;
@@ -338,7 +338,7 @@ namespace Lucene.Net.Util
 
             internal int CurrentBlockIndex;
             internal int CurrentBlockUpto;
-            internal sbyte[] CurrentBlock;
+            internal byte[] CurrentBlock;
 
             internal PagedBytesDataInput(PagedBytes outerInstance)
             {
@@ -428,10 +428,10 @@ namespace Lucene.Net.Util
                         OuterInstance.Blocks.Add(OuterInstance.CurrentBlock);
                         OuterInstance.BlockEnd.Add(OuterInstance.Upto);
                     }
-                    OuterInstance.CurrentBlock = new sbyte[OuterInstance.BlockSize];
+                    OuterInstance.CurrentBlock = new byte[OuterInstance.BlockSize];
                     OuterInstance.Upto = 0;
                 }
-                OuterInstance.CurrentBlock[OuterInstance.Upto++] = (sbyte)b;
+                OuterInstance.CurrentBlock[OuterInstance.Upto++] = (byte)b;
             }
 
             public override void WriteBytes(byte[] b, int offset, int length)
@@ -449,7 +449,7 @@ namespace Lucene.Net.Util
                         OuterInstance.Blocks.Add(OuterInstance.CurrentBlock);
                         OuterInstance.BlockEnd.Add(OuterInstance.Upto);
                     }
-                    OuterInstance.CurrentBlock = new sbyte[OuterInstance.BlockSize];
+                    OuterInstance.CurrentBlock = new byte[OuterInstance.BlockSize];
                     OuterInstance.Upto = 0;
                 }
 
@@ -463,7 +463,7 @@ namespace Lucene.Net.Util
                         System.Buffer.BlockCopy(b, offset, OuterInstance.CurrentBlock, OuterInstance.Upto, blockLeft);
                         OuterInstance.Blocks.Add(OuterInstance.CurrentBlock);
                         OuterInstance.BlockEnd.Add(OuterInstance.BlockSize);
-                        OuterInstance.CurrentBlock = new sbyte[OuterInstance.BlockSize];
+                        OuterInstance.CurrentBlock = new byte[OuterInstance.BlockSize];
                         OuterInstance.Upto = 0;
                         offset += blockLeft;
                     }

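The writer above stores each BytesRef behind a 1- or 2-byte length prefix: lengths below 128 go in a single byte, longer lengths in two bytes with the high bit of the first byte set. The Fill hunk only shows the high-bit test, so the decode side below is a reconstruction, given as a standalone sketch rather than as PagedBytes code (maximum encodable length 0x7FFF):

    // Sketch of the 1-or-2-byte length prefix used above.
    static int WriteLengthPrefix(byte[] block, int upto, int length)
    {
        if (length < 128)
        {
            block[upto++] = (byte)length;                  // high bit clear: 1-byte form
        }
        else
        {
            block[upto++] = (byte)(0x80 | (length >> 8));  // high bit set: 2-byte form
            block[upto++] = (byte)(length & 0xff);
        }
        return upto;
    }

    // Reconstruction of the matching read (assumed, not copied from Fill).
    static int ReadLengthPrefix(byte[] block, int offset, out int length)
    {
        if ((block[offset] & 0x80) == 0)
        {
            length = block[offset];
            return offset + 1;
        }
        length = ((block[offset] & 0x7f) << 8) | block[offset + 1];
        return offset + 2;
    }
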
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/139ad812/src/Lucene.Net.Core/Util/RecyclingByteBlockAllocator.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Util/RecyclingByteBlockAllocator.cs b/src/Lucene.Net.Core/Util/RecyclingByteBlockAllocator.cs
index f099580..aaefe07 100644
--- a/src/Lucene.Net.Core/Util/RecyclingByteBlockAllocator.cs
+++ b/src/Lucene.Net.Core/Util/RecyclingByteBlockAllocator.cs
@@ -33,7 +33,7 @@ namespace Lucene.Net.Util
     /// </summary>
     public sealed class RecyclingByteBlockAllocator : ByteBlockPool.Allocator
     {
-        private sbyte[][] FreeByteBlocks;
+        private byte[][] FreeByteBlocks;
         private readonly int MaxBufferedBlocks_Renamed;
         private int FreeBlocks_Renamed = 0;
         private readonly Counter BytesUsed_Renamed;
@@ -51,7 +51,7 @@ namespace Lucene.Net.Util
         public RecyclingByteBlockAllocator(int blockSize, int maxBufferedBlocks, Counter bytesUsed)
             : base(blockSize)
         {
-            FreeByteBlocks = new sbyte[maxBufferedBlocks][];
+            FreeByteBlocks = new byte[maxBufferedBlocks][];
             this.MaxBufferedBlocks_Renamed = maxBufferedBlocks;
             this.BytesUsed_Renamed = bytesUsed;
         }
@@ -79,28 +79,28 @@ namespace Lucene.Net.Util
         {
         }
 
-        public override sbyte[] ByteBlock
+        public override byte[] ByteBlock
         {
             get
             {
                 if (FreeBlocks_Renamed == 0)
                 {
                     BytesUsed_Renamed.AddAndGet(BlockSize);
-                    return new sbyte[BlockSize];
+                    return new byte[BlockSize];
                 }
-                sbyte[] b = FreeByteBlocks[--FreeBlocks_Renamed];
+                var b = FreeByteBlocks[--FreeBlocks_Renamed];
                 FreeByteBlocks[FreeBlocks_Renamed] = null;
                 return b;
             }
         }
 
-        public override void RecycleByteBlocks(sbyte[][] blocks, int start, int end)
+        public override void RecycleByteBlocks(byte[][] blocks, int start, int end)
         {
             int numBlocks = Math.Min(MaxBufferedBlocks_Renamed - FreeBlocks_Renamed, end - start);
             int size = FreeBlocks_Renamed + numBlocks;
             if (size >= FreeByteBlocks.Length)
             {
-                sbyte[][] newBlocks = new sbyte[ArrayUtil.Oversize(size, RamUsageEstimator.NUM_BYTES_OBJECT_REF)][];
+                var newBlocks = new byte[ArrayUtil.Oversize(size, RamUsageEstimator.NUM_BYTES_OBJECT_REF)][];
                 Array.Copy(FreeByteBlocks, 0, newBlocks, 0, FreeBlocks_Renamed);
                 FreeByteBlocks = newBlocks;
             }

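RecyclingByteBlockAllocator's getter above pops a pooled block when one is free and otherwise allocates a fresh one, charging bytesUsed only for fresh allocations. A minimal pop-or-allocate sketch of that pattern, independent of the Lucene types:

    // Illustration only: reuse a pooled block if available, otherwise allocate and
    // account for the new memory.
    static byte[] GetBlock(byte[][] freeBlocks, ref int freeCount, int blockSize, ref long bytesUsed)
    {
        if (freeCount == 0)
        {
            bytesUsed += blockSize;       // only fresh allocations add to the tally
            return new byte[blockSize];
        }
        var b = freeBlocks[--freeCount];
        freeBlocks[freeCount] = null;     // drop the pool's reference to the handed-out block
        return b;
    }
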
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/139ad812/src/Lucene.Net.Core/Util/StringHelper.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Util/StringHelper.cs b/src/Lucene.Net.Core/Util/StringHelper.cs
index 506f28d..fee7def 100644
--- a/src/Lucene.Net.Core/Util/StringHelper.cs
+++ b/src/Lucene.Net.Core/Util/StringHelper.cs
@@ -38,9 +38,9 @@ namespace Lucene.Net.Util
         public static int BytesDifference(BytesRef left, BytesRef right)
         {
             int len = left.Length < right.Length ? left.Length : right.Length;
-            sbyte[] bytesLeft = left.Bytes;
+            var bytesLeft = left.Bytes;
             int offLeft = left.Offset;
-            sbyte[] bytesRight = right.Bytes;
+            var bytesRight = right.Bytes;
             int offRight = right.Offset;
             for (int i = 0; i < len; i++)
             {
@@ -213,7 +213,7 @@ namespace Lucene.Net.Util
         /// Returns the MurmurHash3_x86_32 hash.
         /// Original source/tests at https://github.com/yonik/java_util/
         /// </summary>
-        public static int Murmurhash3_x86_32(sbyte[] data, int offset, int len, int seed)
+        public static int Murmurhash3_x86_32(byte[] data, int offset, int len, int seed)
         {
             const int c1 = unchecked((int)0xcc9e2d51);
             const int c2 = 0x1b873593;
@@ -224,7 +224,7 @@ namespace Lucene.Net.Util
             for (int i = offset; i < roundedEnd; i += 4)
             {
                 // little endian load order
-                int k1 = (data[i] & 0xff) | ((data[i + 1] & 0xff) << 8) | ((data[i + 2] & 0xff) << 16) | (data[i + 3] << 24);
+                int k1 = (((sbyte)data[i]) & 0xff) | ((((sbyte)data[i + 1]) & 0xff) << 8) | ((((sbyte)data[i + 2]) & 0xff) << 16) | (((sbyte)data[i + 3]) << 24);
                 k1 *= c1;
                 k1 = Number.RotateLeft(k1, 15);
                 k1 *= c2;
@@ -240,15 +240,15 @@ namespace Lucene.Net.Util
             switch (len & 0x03)
             {
                 case 3:
-                    k2 = (data[roundedEnd + 2] & 0xff) << 16;
+                    k2 = (((sbyte)data[roundedEnd + 2]) & 0xff) << 16;
                     // fallthrough
                     goto case 2;
                 case 2:
-                    k2 |= (data[roundedEnd + 1] & 0xff) << 8;
+                    k2 |= (((sbyte)data[roundedEnd + 1]) & 0xff) << 8;
                     // fallthrough
                     goto case 1;
                 case 1:
-                    k2 |= (data[roundedEnd] & 0xff);
+                    k2 |= (((sbyte)data[roundedEnd]) & 0xff);
                     k2 *= c1;
                     k2 = Number.RotateLeft(k2, 15);
                     k2 *= c2;

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/139ad812/src/Lucene.Net.Core/Util/ToStringUtils.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Util/ToStringUtils.cs b/src/Lucene.Net.Core/Util/ToStringUtils.cs
index 12d2f69..d3d97c4 100644
--- a/src/Lucene.Net.Core/Util/ToStringUtils.cs
+++ b/src/Lucene.Net.Core/Util/ToStringUtils.cs
@@ -44,7 +44,7 @@ namespace Lucene.Net.Util
             }
         }
 
-        public static void ByteArray(StringBuilder buffer, sbyte[] bytes)
+        public static void ByteArray(StringBuilder buffer, byte[] bytes)
         {
             for (int i = 0; i < bytes.Length; i++)
             {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/139ad812/src/Lucene.Net.Core/Util/UnicodeUtil.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Core/Util/UnicodeUtil.cs b/src/Lucene.Net.Core/Util/UnicodeUtil.cs
index aa7604e..931142f 100644
--- a/src/Lucene.Net.Core/Util/UnicodeUtil.cs
+++ b/src/Lucene.Net.Core/Util/UnicodeUtil.cs
@@ -105,7 +105,7 @@ namespace Lucene.Net.Util
         ///  WARNING: this is not a valid UTF8 Term
         ///
         /// </summary>
-        public static readonly BytesRef BIG_TERM = new BytesRef(new sbyte[] { -1, -1, -1, -1, -1, -1, -1, -1, -1, -1 }); // TODO this is unrelated here find a better place for it
+        public static readonly BytesRef BIG_TERM = new BytesRef(new byte[] { 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff }); // TODO this is unrelated here find a better place for it
 
         public const int UNI_SUR_HIGH_START = 0xD800;
         public const int UNI_SUR_HIGH_END = 0xDBFF;
@@ -131,12 +131,12 @@ namespace Lucene.Net.Util
             int upto = 0;
             int i = offset;
             int end = offset + length;
-            sbyte[] @out = result.Bytes;
+            var @out = result.Bytes;
             // Pre-allocate for worst case 4-for-1
             int maxLen = length * 4;
             if (@out.Length < maxLen)
             {
-                @out = result.Bytes = new sbyte[maxLen];
+                @out = result.Bytes = new byte[maxLen];
             }
             result.Offset = 0;
 
@@ -146,18 +146,18 @@ namespace Lucene.Net.Util
 
                 if (code < 0x80)
                 {
-                    @out[upto++] = (sbyte)code;
+                    @out[upto++] = (byte)code;
                 }
                 else if (code < 0x800)
                 {
-                    @out[upto++] = (sbyte)(0xC0 | (code >> 6));
-                    @out[upto++] = (sbyte)(0x80 | (code & 0x3F));
+                    @out[upto++] = (byte)(0xC0 | (code >> 6));
+                    @out[upto++] = (byte)(0x80 | (code & 0x3F));
                 }
                 else if (code < 0xD800 || code > 0xDFFF)
                 {
-                    @out[upto++] = (sbyte)(0xE0 | (code >> 12));
-                    @out[upto++] = (sbyte)(0x80 | ((code >> 6) & 0x3F));
-                    @out[upto++] = (sbyte)(0x80 | (code & 0x3F));
+                    @out[upto++] = (byte)(0xE0 | (code >> 12));
+                    @out[upto++] = (byte)(0x80 | ((code >> 6) & 0x3F));
+                    @out[upto++] = (byte)(0x80 | (code & 0x3F));
                 }
                 else
                 {
@@ -171,18 +171,18 @@ namespace Lucene.Net.Util
                         {
                             utf32 = (code << 10) + utf32 + SURROGATE_OFFSET;
                             i++;
-                            @out[upto++] = (sbyte)(0xF0 | (utf32 >> 18));
-                            @out[upto++] = (sbyte)(0x80 | ((utf32 >> 12) & 0x3F));
-                            @out[upto++] = (sbyte)(0x80 | ((utf32 >> 6) & 0x3F));
-                            @out[upto++] = (sbyte)(0x80 | (utf32 & 0x3F));
+                            @out[upto++] = (byte)(0xF0 | (utf32 >> 18));
+                            @out[upto++] = (byte)(0x80 | ((utf32 >> 12) & 0x3F));
+                            @out[upto++] = (byte)(0x80 | ((utf32 >> 6) & 0x3F));
+                            @out[upto++] = (byte)(0x80 | (utf32 & 0x3F));
                             continue;
                         }
                     }
                     // replace unpaired surrogate or out-of-order low surrogate
                     // with substitution character
-                    @out[upto++] = unchecked((sbyte)0xEF);
-                    @out[upto++] = unchecked((sbyte)0xBF);
-                    @out[upto++] = unchecked((sbyte)0xBD);
+                    @out[upto++] = unchecked((byte)0xEF);
+                    @out[upto++] = unchecked((byte)0xBF);
+                    @out[upto++] = unchecked((byte)0xBD);
                 }
             }
             //assert matches(source, offset, length, out, upto);
@@ -198,34 +198,33 @@ namespace Lucene.Net.Util
         {
             int end = offset + length;
 
-            sbyte[] @out = result.Bytes;
+            var @out = result.Bytes;
             result.Offset = 0;
             // Pre-allocate for worst case 4-for-1
             int maxLen = length * 4;
             if (@out.Length < maxLen)
             {
-                @out = result.Bytes = new sbyte[maxLen];
+                @out = result.Bytes = new byte[maxLen];
             }
 
             int upto = 0;
             for (int i = offset; i < end; i++)
             {
-                int code = (int)s[i];
-
+                var code = (int)s[i];
                 if (code < 0x80)
                 {
-                    @out[upto++] = (sbyte)code;
+                    @out[upto++] = (byte)code;
                 }
                 else if (code < 0x800)
                 {
-                    @out[upto++] = (sbyte)(0xC0 | (code >> 6));
-                    @out[upto++] = (sbyte)(0x80 | (code & 0x3F));
+                    @out[upto++] = (byte)(0xC0 | (code >> 6));
+                    @out[upto++] = (byte)(0x80 | (code & 0x3F));
                 }
                 else if (code < 0xD800 || code > 0xDFFF)
                 {
-                    @out[upto++] = (sbyte)(0xE0 | (code >> 12));
-                    @out[upto++] = (sbyte)(0x80 | ((code >> 6) & 0x3F));
-                    @out[upto++] = (sbyte)(0x80 | (code & 0x3F));
+                    @out[upto++] = (byte)(0xE0 | (code >> 12));
+                    @out[upto++] = (byte)(0x80 | ((code >> 6) & 0x3F));
+                    @out[upto++] = (byte)(0x80 | (code & 0x3F));
                 }
                 else
                 {
@@ -239,18 +238,18 @@ namespace Lucene.Net.Util
                         {
                             utf32 = (code << 10) + utf32 + SURROGATE_OFFSET;
                             i++;
-                            @out[upto++] = (sbyte)(0xF0 | (utf32 >> 18));
-                            @out[upto++] = (sbyte)(0x80 | ((utf32 >> 12) & 0x3F));
-                            @out[upto++] = (sbyte)(0x80 | ((utf32 >> 6) & 0x3F));
-                            @out[upto++] = (sbyte)(0x80 | (utf32 & 0x3F));
+                            @out[upto++] = (byte)(0xF0 | (utf32 >> 18));
+                            @out[upto++] = (byte)(0x80 | ((utf32 >> 12) & 0x3F));
+                            @out[upto++] = (byte)(0x80 | ((utf32 >> 6) & 0x3F));
+                            @out[upto++] = (byte)(0x80 | (utf32 & 0x3F));
                             continue;
                         }
                     }
                     // replace unpaired surrogate or out-of-order low surrogate
                     // with substitution character
-                    @out[upto++] = unchecked((sbyte)0xEF);
-                    @out[upto++] = unchecked((sbyte)0xBF);
-                    @out[upto++] = unchecked((sbyte)0xBD);
+                    @out[upto++] = unchecked((byte)0xEF);
+                    @out[upto++] = unchecked((byte)0xBF);
+                    @out[upto++] = unchecked((byte)0xBD);
                 }
             }
             //assert matches(s, offset, length, out, upto);
@@ -416,7 +415,7 @@ namespace Lucene.Net.Util
         {
             int pos = utf8.Offset;
             int limit = pos + utf8.Length;
-            sbyte[] bytes = utf8.Bytes;
+            var bytes = utf8.Bytes;
 
             int codePointCount = 0;
             for (; pos < limit; codePointCount++)
@@ -461,7 +460,7 @@ namespace Lucene.Net.Util
             int utf32Count = 0;
             int utf8Upto = utf8.Offset;
             int[] ints = utf32.Ints;
-            sbyte[] bytes = utf8.Bytes;
+            var bytes = utf8.Bytes;
             int utf8Limit = utf8.Offset + utf8.Length;
             while (utf8Upto < utf8Limit)
             {
@@ -584,7 +583,7 @@ namespace Lucene.Net.Util
         // for debugging
         public static string ToHexString(string s)
         {
-            StringBuilder sb = new StringBuilder();
+            var sb = new StringBuilder();
             for (int i = 0; i < s.Length; i++)
             {
                 char ch = s[i];
@@ -633,14 +632,14 @@ namespace Lucene.Net.Util
         /// Explicit checks for valid UTF-8 are not performed.
         /// </summary>
         // TODO: broken if chars.offset != 0
-        public static void UTF8toUTF16(sbyte[] utf8, int offset, int length, CharsRef chars)
+        public static void UTF8toUTF16(byte[] utf8, int offset, int length, CharsRef chars)
         {
             int out_offset = chars.Offset = 0;
             char[] @out = chars.Chars = ArrayUtil.Grow(chars.Chars, length);
             int limit = offset + length;
             while (offset < limit)
             {
-                int b = utf8[offset++] & 0xff;
+                int b = ((sbyte)utf8[offset++]) & 0xff;
                 if (b < 0xc0)
                 {
                     Debug.Assert(b < 0x80);

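The UTF16toUTF8 hunks above keep the standard UTF-8 byte layout and only change the element type of the output buffer. For reference, a self-contained sketch of the same byte construction for a single non-surrogate code point below U+10000 (the 1-, 2- and 3-byte forms in the diff):

    // Standalone sketch: UTF-8 bytes for a BMP, non-surrogate code point.
    static int EncodeBmpCodePoint(int code, byte[] @out, int upto)
    {
        if (code < 0x80)
        {
            @out[upto++] = (byte)code;
        }
        else if (code < 0x800)
        {
            @out[upto++] = (byte)(0xC0 | (code >> 6));
            @out[upto++] = (byte)(0x80 | (code & 0x3F));
        }
        else
        {
            @out[upto++] = (byte)(0xE0 | (code >> 12));
            @out[upto++] = (byte)(0x80 | ((code >> 6) & 0x3F));
            @out[upto++] = (byte)(0x80 | (code & 0x3F));
        }
        return upto;
    }

For example, U+00E9 encodes to 0xC3 0xA9, and the 0xEF 0xBF 0xBD triple written for unpaired surrogates is U+FFFD, the Unicode replacement character.
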
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/139ad812/src/Lucene.Net.Queries/TermsFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Queries/TermsFilter.cs b/src/Lucene.Net.Queries/TermsFilter.cs
index 5731ec1..8355d58 100644
--- a/src/Lucene.Net.Queries/TermsFilter.cs
+++ b/src/Lucene.Net.Queries/TermsFilter.cs
@@ -47,7 +47,7 @@ namespace Lucene.Net.Queries
          * be efficient for GC and cache-lookups
          */
         private readonly int[] offsets;
-        private readonly sbyte[] termsBytes;
+        private readonly byte[] termsBytes;
         private readonly TermsAndField[] termsAndFields;
         private readonly int hashCode_Renamed; // cached hashcode for fast cache lookups
         private const int PRIME = 31;
@@ -154,7 +154,7 @@ namespace Lucene.Net.Queries
             // an automaton an call intersect on the termsenum if the density is high
 
             int hash = 9;
-            sbyte[] serializedTerms = new sbyte[0];
+            var serializedTerms = new byte[0];
             this.offsets = new int[length + 1];
             int lastEndOffset = 0;
             int index = 0;

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/139ad812/src/Lucene.Net.TestFramework/Analysis/MockFixedLengthPayloadFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Analysis/MockFixedLengthPayloadFilter.cs b/src/Lucene.Net.TestFramework/Analysis/MockFixedLengthPayloadFilter.cs
index d4a6646..95c8db9 100644
--- a/src/Lucene.Net.TestFramework/Analysis/MockFixedLengthPayloadFilter.cs
+++ b/src/Lucene.Net.TestFramework/Analysis/MockFixedLengthPayloadFilter.cs
@@ -29,7 +29,7 @@ namespace Lucene.Net.Analysis
     {
         private readonly IPayloadAttribute PayloadAtt;
         private readonly Random Random;
-        private readonly sbyte[] Bytes;
+        private readonly byte[] Bytes;
         private readonly BytesRef Payload;
 
         public MockFixedLengthPayloadFilter(Random random, TokenStream @in, int length)
@@ -40,7 +40,7 @@ namespace Lucene.Net.Analysis
                 throw new System.ArgumentException("length must be >= 0");
             }
             this.Random = random;
-            this.Bytes = new sbyte[length];
+            this.Bytes = new byte[length];
             this.Payload = new BytesRef(Bytes);
             this.PayloadAtt = AddAttribute<IPayloadAttribute>();
         }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/139ad812/src/Lucene.Net.TestFramework/Analysis/MockUTF16TermAttributeImpl.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Analysis/MockUTF16TermAttributeImpl.cs b/src/Lucene.Net.TestFramework/Analysis/MockUTF16TermAttributeImpl.cs
index 5716f1c..1f637eb 100644
--- a/src/Lucene.Net.TestFramework/Analysis/MockUTF16TermAttributeImpl.cs
+++ b/src/Lucene.Net.TestFramework/Analysis/MockUTF16TermAttributeImpl.cs
@@ -35,7 +35,7 @@ namespace Lucene.Net.Analysis
         public override void FillBytesRef()
         {
             BytesRef bytes = BytesRef;
-            sbyte[] utf16 = (sbyte[])(Array)Charset.GetBytes(Charset.ToString());
+            var utf16 = Charset.GetBytes(Charset.ToString());
             bytes.Bytes = utf16;
             bytes.Offset = 0;
             bytes.Length = utf16.Length;

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/139ad812/src/Lucene.Net.TestFramework/Analysis/MockVariableLengthPayloadFilter.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Analysis/MockVariableLengthPayloadFilter.cs b/src/Lucene.Net.TestFramework/Analysis/MockVariableLengthPayloadFilter.cs
index 07631bf..99e4201 100644
--- a/src/Lucene.Net.TestFramework/Analysis/MockVariableLengthPayloadFilter.cs
+++ b/src/Lucene.Net.TestFramework/Analysis/MockVariableLengthPayloadFilter.cs
@@ -31,7 +31,7 @@ namespace Lucene.Net.Analysis
 
         private readonly IPayloadAttribute PayloadAtt;
         private readonly Random Random;
-        private readonly sbyte[] Bytes = new sbyte[MAXLENGTH];
+        private readonly byte[] Bytes = new byte[MAXLENGTH];
         private readonly BytesRef Payload;
 
         public MockVariableLengthPayloadFilter(Random random, TokenStream @in)

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/139ad812/src/Lucene.Net.TestFramework/Codecs/compressing/dummy/DummyCompressingCodec.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/compressing/dummy/DummyCompressingCodec.cs b/src/Lucene.Net.TestFramework/Codecs/compressing/dummy/DummyCompressingCodec.cs
index c17a20d..e2b415b 100644
--- a/src/Lucene.Net.TestFramework/Codecs/compressing/dummy/DummyCompressingCodec.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/compressing/dummy/DummyCompressingCodec.cs
@@ -68,7 +68,7 @@ namespace Lucene.Net.Codecs.Compressing.dummy
                 Debug.Assert(offset + length <= originalLength);
                 if (bytes.Bytes.Length < originalLength)
                 {
-                    bytes.Bytes = new sbyte[ArrayUtil.Oversize(originalLength, 1)];
+                    bytes.Bytes = new byte[ArrayUtil.Oversize(originalLength, 1)];
                 }
                 @in.ReadBytes(bytes.Bytes, 0, offset + length);
                 bytes.Offset = offset;
@@ -89,7 +89,7 @@ namespace Lucene.Net.Codecs.Compressing.dummy
             {
             }
 
-            public override void Compress(sbyte[] bytes, int off, int len, DataOutput @out)
+            public override void Compress(byte[] bytes, int off, int len, DataOutput @out)
             {
                 @out.WriteBytes(bytes, off, len);
             }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/139ad812/src/Lucene.Net.TestFramework/Codecs/lucene42/Lucene42DocValuesConsumer.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/lucene42/Lucene42DocValuesConsumer.cs b/src/Lucene.Net.TestFramework/Codecs/lucene42/Lucene42DocValuesConsumer.cs
index 772927a..3ef3cbf 100644
--- a/src/Lucene.Net.TestFramework/Codecs/lucene42/Lucene42DocValuesConsumer.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/lucene42/Lucene42DocValuesConsumer.cs
@@ -388,7 +388,7 @@ namespace Lucene.Net.Codecs.Lucene42
         // per-document vint-encoded byte[]
         internal class SortedSetIterator : IEnumerator<BytesRef>
         {
-            internal sbyte[] Buffer = new sbyte[10];
+            internal byte[] Buffer = new byte[10];
             internal ByteArrayDataOutput @out = new ByteArrayDataOutput();
             internal BytesRef @ref = new BytesRef();
 

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/139ad812/src/Lucene.Net.TestFramework/Codecs/ramonly/RAMOnlyPostingsFormat.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Codecs/ramonly/RAMOnlyPostingsFormat.cs b/src/Lucene.Net.TestFramework/Codecs/ramonly/RAMOnlyPostingsFormat.cs
index 455e8dc..eaa0a58 100644
--- a/src/Lucene.Net.TestFramework/Codecs/ramonly/RAMOnlyPostingsFormat.cs
+++ b/src/Lucene.Net.TestFramework/Codecs/ramonly/RAMOnlyPostingsFormat.cs
@@ -62,8 +62,8 @@ namespace Lucene.Net.Codecs.ramonly
 
             public virtual int Compare(BytesRef t1, BytesRef t2)
             {
-                sbyte[] b1 = t1.Bytes;
-                sbyte[] b2 = t2.Bytes;
+                var b1 = t1.Bytes;
+                var b2 = t2.Bytes;
                 int b1Stop;
                 int b1Upto = t1.Offset;
                 int b2Upto = t2.Offset;
@@ -258,7 +258,7 @@ namespace Lucene.Net.Codecs.ramonly
         {
             internal readonly int DocID;
             internal readonly int[] Positions;
-            internal sbyte[][] Payloads;
+            internal byte[][] Payloads;
 
             public RAMDoc(int docID, int freq)
             {
@@ -275,7 +275,7 @@ namespace Lucene.Net.Codecs.ramonly
 
                 if (Payloads != null)
                 {
-                    foreach (sbyte[] payload in Payloads)
+                    foreach (var payload in Payloads)
                     {
                         sizeInBytes += (payload != null) ? RamUsageEstimator.SizeOf(payload) : 0;
                     }
@@ -383,9 +383,9 @@ namespace Lucene.Net.Codecs.ramonly
                 {
                     if (Current.Payloads == null)
                     {
-                        Current.Payloads = new sbyte[Current.Positions.Length][];
+                        Current.Payloads = new byte[Current.Positions.Length][];
                     }
-                    sbyte[] bytes = Current.Payloads[PosUpto] = new sbyte[payload.Length];
+                    var bytes = Current.Payloads[PosUpto] = new byte[payload.Length];
                     Array.Copy(payload.Bytes, payload.Offset, bytes, 0, payload.Length);
                 }
                 PosUpto++;

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/139ad812/src/Lucene.Net.TestFramework/Index/BaseDocValuesFormatTestCase.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Index/BaseDocValuesFormatTestCase.cs b/src/Lucene.Net.TestFramework/Index/BaseDocValuesFormatTestCase.cs
index 0bdbdbf..4e940ee 100644
--- a/src/Lucene.Net.TestFramework/Index/BaseDocValuesFormatTestCase.cs
+++ b/src/Lucene.Net.TestFramework/Index/BaseDocValuesFormatTestCase.cs
@@ -990,7 +990,7 @@ namespace Lucene.Net.Index
             conf.SetMergePolicy(NewLogMergePolicy());
             RandomIndexWriter iwriter = new RandomIndexWriter(Random(), directory, conf);
             Document doc = new Document();
-            sbyte[] bytes = new sbyte[32766];
+            var bytes = new byte[32766];
             BytesRef b = new BytesRef(bytes);
             Random().NextBytes((byte[])(Array)bytes);
             doc.Add(new BinaryDocValuesField("dv", b));
@@ -1019,9 +1019,9 @@ namespace Lucene.Net.Index
             conf.SetMergePolicy(NewLogMergePolicy());
             RandomIndexWriter iwriter = new RandomIndexWriter(Random(), directory, conf);
             Document doc = new Document();
-            sbyte[] bytes = new sbyte[32766];
+            var bytes = new byte[32766];
             BytesRef b = new BytesRef(bytes);
-            Random().NextBytes((byte[])(Array)bytes);
+            Random().NextBytes(bytes);
             doc.Add(new SortedDocValuesField("dv", b));
             iwriter.AddDocument(doc);
             iwriter.Dispose();
@@ -1055,7 +1055,7 @@ namespace Lucene.Net.Index
             IndexReader ireader = DirectoryReader.Open(directory); // read-only=true
             Debug.Assert(ireader.Leaves.Count == 1);
             BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetBinaryDocValues("dv");
-            sbyte[] mybytes = new sbyte[20];
+            var mybytes = new byte[20];
             BytesRef scratch = new BytesRef(mybytes);
             dv.Get(0, scratch);
             Assert.AreEqual("boo!", scratch.Utf8ToString());
@@ -1083,7 +1083,7 @@ namespace Lucene.Net.Index
             IndexReader ireader = DirectoryReader.Open(directory); // read-only=true
             Debug.Assert(ireader.Leaves.Count == 1);
             BinaryDocValues dv = ((AtomicReader)ireader.Leaves[0].Reader).GetSortedDocValues("dv");
-            sbyte[] mybytes = new sbyte[20];
+            var mybytes = new byte[20];
             BytesRef scratch = new BytesRef(mybytes);
             dv.Get(0, scratch);
             Assert.AreEqual("boo!", scratch.Utf8ToString());
@@ -1585,7 +1585,7 @@ namespace Lucene.Net.Index
             RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, conf);
             Document doc = new Document();
             Field idField = new StringField("id", "", Field.Store.NO);
-            Field storedField = new StoredField("stored", new sbyte[0]);
+            Field storedField = new StoredField("stored", new byte[0]);
             Field dvField = new BinaryDocValuesField("dv", new BytesRef());
             doc.Add(idField);
             doc.Add(storedField);
@@ -1605,8 +1605,8 @@ namespace Lucene.Net.Index
                 {
                     length = TestUtil.NextInt(Random(), minLength, maxLength);
                 }
-                sbyte[] buffer = new sbyte[length];
-                Random().NextBytes((byte[])(Array)buffer);
+                var buffer = new byte[length];
+                Random().NextBytes(buffer);
                 storedField.BytesValue = new BytesRef(buffer);
                 dvField.BytesValue = new BytesRef(buffer);
                 writer.AddDocument(doc);
@@ -1671,7 +1671,7 @@ namespace Lucene.Net.Index
             RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, conf);
             Document doc = new Document();
             Field idField = new StringField("id", "", Field.Store.NO);
-            Field storedField = new StoredField("stored", new sbyte[0]);
+            Field storedField = new StoredField("stored", new byte[0]);
             Field dvField = new SortedDocValuesField("dv", new BytesRef());
             doc.Add(idField);
             doc.Add(storedField);
@@ -1691,8 +1691,8 @@ namespace Lucene.Net.Index
                 {
                     length = TestUtil.NextInt(Random(), minLength, maxLength);
                 }
-                sbyte[] buffer = new sbyte[length];
-                Random().NextBytes((byte[])(Array)buffer);
+                var buffer = new byte[length];
+                Random().NextBytes(buffer);
                 storedField.BytesValue = new BytesRef(buffer);
                 dvField.BytesValue = new BytesRef(buffer);
                 writer.AddDocument(doc);
@@ -2971,7 +2971,7 @@ namespace Lucene.Net.Index
                 numDocs = TestUtil.NextInt(Random(), 100, 200);
             }
             IndexWriter w = new IndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
-            IList<sbyte[]> docBytes = new List<sbyte[]>();
+            var docBytes = new List<byte[]>();
             long totalBytes = 0;
             for (int docID = 0; docID < numDocs; docID++)
             {
@@ -2998,8 +2998,8 @@ namespace Lucene.Net.Index
                 {
                     break;
                 }
-                sbyte[] bytes = new sbyte[numBytes];
-                Random().NextBytes((byte[])(Array)bytes);
+                var bytes = new byte[numBytes];
+                Random().NextBytes(bytes);
                 docBytes.Add(bytes);
                 Document doc = new Document();
                 BytesRef b = new BytesRef(bytes);
@@ -3058,7 +3058,7 @@ namespace Lucene.Net.Index
                 Document doc = ar.Document(docID);
                 BytesRef bytes = new BytesRef();
                 s.Get(docID, bytes);
-                sbyte[] expected = docBytes[Convert.ToInt32(doc.Get("id"))];
+                var expected = docBytes[Convert.ToInt32(doc.Get("id"))];
                 Assert.AreEqual(expected.Length, bytes.Length);
                 Assert.AreEqual(new BytesRef(expected), bytes);
             }
@@ -3094,7 +3094,7 @@ namespace Lucene.Net.Index
                 numDocs = TestUtil.NextInt(Random(), 100, 200);
             }
             IndexWriter w = new IndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
-            IList<sbyte[]> docBytes = new List<sbyte[]>();
+            var docBytes = new List<byte[]>();
             long totalBytes = 0;
             for (int docID = 0; docID < numDocs; docID++)
             {
@@ -3121,8 +3121,8 @@ namespace Lucene.Net.Index
                 {
                     break;
                 }
-                sbyte[] bytes = new sbyte[numBytes];
-                Random().NextBytes((byte[])(Array)bytes);
+                var bytes = new byte[numBytes];
+                Random().NextBytes(bytes);
                 docBytes.Add(bytes);
                 Document doc = new Document();
                 BytesRef b = new BytesRef(bytes);
@@ -3143,7 +3143,7 @@ namespace Lucene.Net.Index
                 Document doc = ar.Document(docID);
                 BytesRef bytes = new BytesRef();
                 s.Get(docID, bytes);
-                sbyte[] expected = docBytes[Convert.ToInt32(doc.Get("id"))];
+                var expected = docBytes[Convert.ToInt32(doc.Get("id"))];
                 Assert.AreEqual(expected.Length, bytes.Length);
                 Assert.AreEqual(new BytesRef(expected), bytes);
             }
@@ -3163,7 +3163,7 @@ namespace Lucene.Net.Index
             RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, conf);
             Document doc = new Document();
             Field idField = new StringField("id", "", Field.Store.NO);
-            Field storedBinField = new StoredField("storedBin", new sbyte[0]);
+            Field storedBinField = new StoredField("storedBin", new byte[0]);
             Field dvBinField = new BinaryDocValuesField("dvBin", new BytesRef());
             Field dvSortedField = new SortedDocValuesField("dvSorted", new BytesRef());
             Field storedNumericField = new StoredField("storedNum", "");
@@ -3181,8 +3181,8 @@ namespace Lucene.Net.Index
             {
                 idField.StringValue = Convert.ToString(i);
                 int length = TestUtil.NextInt(Random(), 0, 8);
-                sbyte[] buffer = new sbyte[length];
-                Random().NextBytes((byte[])(Array)buffer);
+                var buffer = new byte[length];
+                Random().NextBytes(buffer);
                 storedBinField.BytesValue = new BytesRef(buffer);
                 dvBinField.BytesValue = new BytesRef(buffer);
                 dvSortedField.BytesValue = new BytesRef(buffer);
@@ -3283,7 +3283,7 @@ namespace Lucene.Net.Index
             IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
             RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, conf);
             Field idField = new StringField("id", "", Field.Store.NO);
-            Field storedBinField = new StoredField("storedBin", new sbyte[0]);
+            Field storedBinField = new StoredField("storedBin", new byte[0]);
             Field dvBinField = new BinaryDocValuesField("dvBin", new BytesRef());
             Field dvSortedField = new SortedDocValuesField("dvSorted", new BytesRef());
             Field storedNumericField = new StoredField("storedNum", "");
@@ -3295,8 +3295,8 @@ namespace Lucene.Net.Index
             {
                 idField.StringValue = Convert.ToString(i);
                 int length = TestUtil.NextInt(Random(), 0, 8);
-                sbyte[] buffer = new sbyte[length];
-                Random().NextBytes((byte[])(Array)buffer);
+                var buffer = new byte[length];
+                Random().NextBytes(buffer);
                 storedBinField.BytesValue = new BytesRef(buffer);
                 dvBinField.BytesValue = new BytesRef(buffer);
                 dvSortedField.BytesValue = new BytesRef(buffer);
@@ -3478,7 +3478,7 @@ namespace Lucene.Net.Index
                 Directory dir = NewDirectory();
                 RandomIndexWriter w = new RandomIndexWriter(Random(), dir);
                 BytesRef bytes = new BytesRef();
-                bytes.Bytes = new sbyte[1 << i];
+                bytes.Bytes = new byte[1 << i];
                 bytes.Length = 1 << i;
                 for (int j = 0; j < 4; j++)
                 {

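Several test hunks above also drop the (byte[])(Array) reinterpret cast in front of Random().NextBytes. That cast was a workaround for sbyte[] buffers (the CLR accepts the sbyte[]-to-byte[] array reinterpretation at run time even though C# rejects the direct cast); with byte[] buffers, System.Random.NextBytes takes the array as-is. A minimal illustration:

    using System;

    internal static class NextBytesSketch
    {
        internal static void Main()
        {
            var random = new Random();
            var buffer = new byte[32766];
            random.NextBytes(buffer);   // fills the buffer directly; no cast trick needed
            Console.WriteLine(buffer[0]);
        }
    }
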
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/139ad812/src/Lucene.Net.TestFramework/Index/BasePostingsFormatTestCase.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Index/BasePostingsFormatTestCase.cs b/src/Lucene.Net.TestFramework/Index/BasePostingsFormatTestCase.cs
index 1fa531e..202be43 100644
--- a/src/Lucene.Net.TestFramework/Index/BasePostingsFormatTestCase.cs
+++ b/src/Lucene.Net.TestFramework/Index/BasePostingsFormatTestCase.cs
@@ -154,7 +154,7 @@ namespace Lucene.Net.Index
                 }
 
                 FixedPayloads = Random.NextBoolean();
-                sbyte[] payloadBytes = new sbyte[PayloadSize];
+                var payloadBytes = new byte[PayloadSize];
                 Payload_Renamed = new BytesRef(payloadBytes);
                 this.Options = options;
                 DoPositions = FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS.CompareTo(options) <= 0;

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/139ad812/src/Lucene.Net.TestFramework/Index/BaseStoredFieldsFormatTestCase.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Index/BaseStoredFieldsFormatTestCase.cs b/src/Lucene.Net.TestFramework/Index/BaseStoredFieldsFormatTestCase.cs
index 9eef7af..c885649 100644
--- a/src/Lucene.Net.TestFramework/Index/BaseStoredFieldsFormatTestCase.cs
+++ b/src/Lucene.Net.TestFramework/Index/BaseStoredFieldsFormatTestCase.cs
@@ -231,15 +231,15 @@ namespace Lucene.Net.Index
         {
             Directory dir = NewDirectory();
             IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
-            sbyte[] b = new sbyte[50];
+            var b = new byte[50];
             for (int i = 0; i < 50; i++)
             {
-                b[i] = (sbyte)(i + 77);
+                b[i] = (byte)(i + 77);
             }
 
             Document doc = new Document();
             Field f = new StoredField("binary", b, 10, 17);
-            sbyte[] bx = f.BinaryValue().Bytes;
+            var bx = f.BinaryValue().Bytes;
             Assert.IsTrue(bx != null);
             Assert.AreEqual(50, bx.Length);
             Assert.AreEqual(10, f.BinaryValue().Offset);
@@ -375,7 +375,7 @@ namespace Lucene.Net.Index
             ft.Freeze();
 
             string @string = TestUtil.RandomSimpleString(Random(), 50);
-            sbyte[] bytes = @string.GetBytes(IOUtils.CHARSET_UTF_8);
+            var bytes = @string.GetBytes(IOUtils.CHARSET_UTF_8);
             long l = Random().NextBoolean() ? Random().Next(42) : Random().NextLong();
             int i = Random().NextBoolean() ? Random().Next(42) : Random().Next();
             float f = Random().NextFloat();
@@ -554,12 +554,12 @@ namespace Lucene.Net.Index
             }
         }
 
-        private static sbyte[] RandomByteArray(int length, int max)
+        private static byte[] RandomByteArray(int length, int max)
         {
-            var result = new sbyte[length];
+            var result = new byte[length];
             for (int i = 0; i < length; ++i)
             {
-                result[i] = (sbyte)Random().Next(max);
+                result[i] = (byte)Random().Next(max);
             }
             return result;
         }
@@ -583,11 +583,11 @@ namespace Lucene.Net.Index
             RandomIndexWriter iw = new RandomIndexWriter(Random(), dir, (IndexWriterConfig)iwConf.Clone());
 
             int docCount = AtLeast(200);
-            sbyte[][][] data = new sbyte[docCount][][];
+            var data = new byte[docCount][][];
             for (int i = 0; i < docCount; ++i)
             {
                 int fieldCount = Rarely() ? RandomInts.NextIntBetween(Random(), 1, 500) : RandomInts.NextIntBetween(Random(), 1, 5);
-                data[i] = new sbyte[fieldCount][];
+                data[i] = new byte[fieldCount][];
                 for (int j = 0; j < fieldCount; ++j)
                 {
                     int length = Rarely() ? Random().Next(1000) : Random().Next(10);
@@ -653,9 +653,9 @@ namespace Lucene.Net.Index
                 Assert.AreEqual(data[docId].Length + 1, doc.Fields.Count);
                 for (int j = 0; j < data[docId].Length; ++j)
                 {
-                    sbyte[] arr = data[docId][j];
+                    var arr = data[docId][j];
                     BytesRef arr2Ref = doc.GetBinaryValue("bytes" + j);
-                    sbyte[] arr2 = Arrays.CopyOfRange(arr2Ref.Bytes, arr2Ref.Offset, arr2Ref.Offset + arr2Ref.Length);
+                    var arr2 = Arrays.CopyOfRange(arr2Ref.Bytes, arr2Ref.Offset, arr2Ref.Offset + arr2Ref.Length);
                     Assert.AreEqual(arr, arr2);
                 }
             }
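
The cast change above ((sbyte)(i + 77) to (byte)(i + 77)) only changes which numeric view .NET puts on the low 8 bits; the bit pattern written to the stored field is unchanged. A minimal standalone C# sketch of that equivalence (CastBitsDemo is a made-up name for illustration; nothing below is part of the commit):

    using System;

    class CastBitsDemo
    {
        static void Main()
        {
            for (int i = 120; i < 135; i++)
            {
                byte b = (byte)i;               // 0..255 view of the low 8 bits
                sbyte s = unchecked((sbyte)i);  // -128..127 view of the same bits
                Console.WriteLine("{0,3}: byte={1,3} sbyte={2,4} bits=0x{3:X2}", i, b, s, b);
            }
            // e.g. i = 130 prints "130: byte=130 sbyte=-126 bits=0x82"
        }
    }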

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/139ad812/src/Lucene.Net.TestFramework/Index/DocHelper.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Index/DocHelper.cs b/src/Lucene.Net.TestFramework/Index/DocHelper.cs
index 483d174..986de18 100644
--- a/src/Lucene.Net.TestFramework/Index/DocHelper.cs
+++ b/src/Lucene.Net.TestFramework/Index/DocHelper.cs
@@ -87,7 +87,7 @@ namespace Lucene.Net.Index
         public static Field UnStoredField2;
 
         public const string LAZY_FIELD_BINARY_KEY = "lazyFieldBinary";
-        public static sbyte[] LAZY_FIELD_BINARY_BYTES;
+        public static byte[] LAZY_FIELD_BINARY_BYTES;
         public static Field LazyFieldBinary;
 
         public const string LAZY_FIELD_KEY = "lazyField";

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/139ad812/src/Lucene.Net.TestFramework/Index/FieldFilterAtomicReader.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Index/FieldFilterAtomicReader.cs b/src/Lucene.Net.TestFramework/Index/FieldFilterAtomicReader.cs
index 9965297..ccd5ade 100644
--- a/src/Lucene.Net.TestFramework/Index/FieldFilterAtomicReader.cs
+++ b/src/Lucene.Net.TestFramework/Index/FieldFilterAtomicReader.cs
@@ -86,7 +86,7 @@ namespace Lucene.Net.Index
         {
             private readonly FieldFilterAtomicReader OuterInstance;
 
-            private StoredFieldVisitor Visitor;
+            private readonly StoredFieldVisitor Visitor;
 
             public StoredFieldVisitorAnonymousInnerClassHelper(FieldFilterAtomicReader outerInstance, StoredFieldVisitor visitor)
             {
@@ -94,7 +94,7 @@ namespace Lucene.Net.Index
                 this.Visitor = visitor;
             }
 
-            public override void BinaryField(FieldInfo fieldInfo, sbyte[] value)
+            public override void BinaryField(FieldInfo fieldInfo, byte[] value)
             {
                 Visitor.BinaryField(fieldInfo, value);
             }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/139ad812/src/Lucene.Net.TestFramework/Util/LuceneTestCase.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Util/LuceneTestCase.cs b/src/Lucene.Net.TestFramework/Util/LuceneTestCase.cs
index 4139f9d..b706748 100644
--- a/src/Lucene.Net.TestFramework/Util/LuceneTestCase.cs
+++ b/src/Lucene.Net.TestFramework/Util/LuceneTestCase.cs
@@ -2107,7 +2107,7 @@ namespace Lucene.Net.Util
                     else if (code == 2)
                     {
                         // term, but ensure a non-zero offset
-                        sbyte[] newbytes = new sbyte[term.Length + 5];
+                        var newbytes = new byte[term.Length + 5];
                         Array.Copy(term.Bytes, term.Offset, newbytes, 5, term.Length);
                         tests.Add(new BytesRef(newbytes, 5, term.Length));
                     }
@@ -2120,7 +2120,7 @@ namespace Lucene.Net.Util
                                 break;
 
                             case 1:
-                                tests.Add(new BytesRef(new sbyte[] { unchecked((sbyte)0xFF), unchecked((sbyte)0xFF) })); // past the last term
+                                tests.Add(new BytesRef(new byte[] { unchecked((byte)0xFF), unchecked((byte)0xFF) })); // past the last term
                                 break;
 
                             case 2:
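
The "non-zero offset" branch above copies the term into a padded buffer so the resulting BytesRef only refers to a slice, which flushes out code that ignores Offset. A minimal sketch of that pattern as a helper (OffsetBytesRefSketch is a made-up name; BytesRef and its Bytes, Offset and Length members are the ones shown in the diff):

    using System;
    using Lucene.Net.Util;

    internal static class OffsetBytesRefSketch
    {
        internal static BytesRef WithNonZeroOffset(BytesRef term)
        {
            var padded = new byte[term.Length + 5];
            Array.Copy(term.Bytes, term.Offset, padded, 5, term.Length);
            return new BytesRef(padded, 5, term.Length); // same bytes, offset 5
        }
    }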

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/139ad812/src/Lucene.Net.TestFramework/Util/fst/FSTTester.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.TestFramework/Util/fst/FSTTester.cs b/src/Lucene.Net.TestFramework/Util/fst/FSTTester.cs
index 3800ec9..df1ce35 100644
--- a/src/Lucene.Net.TestFramework/Util/fst/FSTTester.cs
+++ b/src/Lucene.Net.TestFramework/Util/fst/FSTTester.cs
@@ -84,7 +84,7 @@ namespace Lucene.Net.Util.Fst
             {
                 int x = ir.Ints[ir.Offset + i];
                 Debug.Assert(x >= 0 && x <= 255);
-                br.Bytes[i] = (sbyte)x;
+                br.Bytes[i] = (byte)x;
             }
             br.Length = ir.Length;
             return br;

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/139ad812/src/Lucene.Net.Tests/core/Analysis/TestToken.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Analysis/TestToken.cs b/src/Lucene.Net.Tests/core/Analysis/TestToken.cs
index d0a1930..0cd67b4 100644
--- a/src/Lucene.Net.Tests/core/Analysis/TestToken.cs
+++ b/src/Lucene.Net.Tests/core/Analysis/TestToken.cs
@@ -197,7 +197,7 @@ namespace Lucene.Net.Analysis
             Assert.AreEqual(t.ToString(), copy.ToString());
             Assert.AreNotSame(buf, copy.Buffer());
 
-            BytesRef pl = new BytesRef(new sbyte[] { 1, 2, 3, 4 });
+            BytesRef pl = new BytesRef(new byte[] { 1, 2, 3, 4 });
             t.Payload = pl;
             copy = AssertCloneIsEqual(t);
             Assert.AreEqual(pl, copy.Payload);
@@ -220,7 +220,7 @@ namespace Lucene.Net.Analysis
             Assert.AreEqual(t.ToString(), copy.ToString());
             Assert.AreNotSame(buf, copy.Buffer());
 
-            BytesRef pl = new BytesRef(new sbyte[] { 1, 2, 3, 4 });
+            BytesRef pl = new BytesRef(new byte[] { 1, 2, 3, 4 });
             t.Payload = pl;
             copy = AssertCopyIsEqual(t);
             Assert.AreEqual(pl, copy.Payload);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/139ad812/src/Lucene.Net.Tests/core/Codecs/Compressing/AbstractTestCompressionMode.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Codecs/Compressing/AbstractTestCompressionMode.cs b/src/Lucene.Net.Tests/core/Codecs/Compressing/AbstractTestCompressionMode.cs
index 4a5902d..d1f08ce 100644
--- a/src/Lucene.Net.Tests/core/Codecs/Compressing/AbstractTestCompressionMode.cs
+++ b/src/Lucene.Net.Tests/core/Codecs/Compressing/AbstractTestCompressionMode.cs
@@ -34,56 +34,56 @@ namespace Lucene.Net.Codecs.Compressing
     {
         internal CompressionMode Mode;
 
-        internal static sbyte[] RandomArray()
+        internal static byte[] RandomArray()
         {
             int max = Random().NextBoolean() ? Random().Next(4) : Random().Next(256);
             int length = Random().NextBoolean() ? Random().Next(20) : Random().Next(192 * 1024);
             return RandomArray(length, max);
         }
 
-        internal static sbyte[] RandomArray(int length, int max)
+        internal static byte[] RandomArray(int length, int max)
         {
-            sbyte[] arr = new sbyte[length];
+            var arr = new byte[length];
             for (int i = 0; i < arr.Length; ++i)
             {
-                arr[i] = (sbyte)RandomInts.NextIntBetween(Random(), 0, max);
+                arr[i] = (byte)RandomInts.NextIntBetween(Random(), 0, max);
             }
             return arr;
         }
 
-        internal virtual sbyte[] Compress(sbyte[] decompressed, int off, int len)
+        internal virtual byte[] Compress(byte[] decompressed, int off, int len)
         {
             Compressor compressor = Mode.NewCompressor();
             return Compress(compressor, decompressed, off, len);
         }
 
-        internal static sbyte[] Compress(Compressor compressor, sbyte[] decompressed, int off, int len)
+        internal static byte[] Compress(Compressor compressor, byte[] decompressed, int off, int len)
         {
-            sbyte[] compressed = new sbyte[len * 2 + 16]; // should be enough
-            ByteArrayDataOutput @out = new ByteArrayDataOutput((byte[])(Array)compressed);
+            var compressed = new byte[len * 2 + 16]; // should be enough
+            ByteArrayDataOutput @out = new ByteArrayDataOutput(compressed);
             compressor.Compress(decompressed, off, len, @out);
             int compressedLen = @out.Position;
             return Arrays.CopyOf(compressed, compressedLen);
         }
 
-        internal virtual sbyte[] Decompress(sbyte[] compressed, int originalLength)
+        internal virtual byte[] Decompress(byte[] compressed, int originalLength)
         {
             Decompressor decompressor = Mode.NewDecompressor();
             return Decompress(decompressor, compressed, originalLength);
         }
 
-        internal static sbyte[] Decompress(Decompressor decompressor, sbyte[] compressed, int originalLength)
+        internal static byte[] Decompress(Decompressor decompressor, byte[] compressed, int originalLength)
         {
             BytesRef bytes = new BytesRef();
-            decompressor.Decompress(new ByteArrayDataInput((byte[])(Array)compressed), originalLength, 0, originalLength, bytes);
+            decompressor.Decompress(new ByteArrayDataInput(compressed), originalLength, 0, originalLength, bytes);
             return Arrays.CopyOfRange(bytes.Bytes, bytes.Offset, bytes.Offset + bytes.Length);
         }
 
-        internal virtual sbyte[] Decompress(sbyte[] compressed, int originalLength, int offset, int length)
+        internal virtual byte[] Decompress(byte[] compressed, int originalLength, int offset, int length)
         {
             Decompressor decompressor = Mode.NewDecompressor();
             BytesRef bytes = new BytesRef();
-            decompressor.Decompress(new ByteArrayDataInput((byte[])(Array)compressed), originalLength, offset, length, bytes);
+            decompressor.Decompress(new ByteArrayDataInput(compressed), originalLength, offset, length, bytes);
             return Arrays.CopyOfRange(bytes.Bytes, bytes.Offset, bytes.Offset + bytes.Length);
         }
 
@@ -93,11 +93,11 @@ namespace Lucene.Net.Codecs.Compressing
             int iterations = AtLeast(10);
             for (int i = 0; i < iterations; ++i)
             {
-                sbyte[] decompressed = RandomArray();
+                var decompressed = RandomArray();
                 int off = Random().NextBoolean() ? 0 : TestUtil.NextInt(Random(), 0, decompressed.Length);
                 int len = Random().NextBoolean() ? decompressed.Length - off : TestUtil.NextInt(Random(), 0, decompressed.Length - off);
-                sbyte[] compressed = Compress(decompressed, off, len);
-                sbyte[] restored = Decompress(compressed, len);
+                var compressed = Compress(decompressed, off, len);
+                var restored = Decompress(compressed, len);
                 Assert.AreEqual(Arrays.CopyOfRange(decompressed, off, off + len), restored);//was AssertArrayEquals
             }
         }
@@ -108,8 +108,8 @@ namespace Lucene.Net.Codecs.Compressing
             int iterations = AtLeast(10);
             for (int i = 0; i < iterations; ++i)
             {
-                sbyte[] decompressed = RandomArray();
-                sbyte[] compressed = Compress(decompressed, 0, decompressed.Length);
+                var decompressed = RandomArray();
+                var compressed = Compress(decompressed, 0, decompressed.Length);
                 int offset, length;
                 if (decompressed.Length == 0)
                 {
@@ -120,20 +120,20 @@ namespace Lucene.Net.Codecs.Compressing
                     offset = Random().Next(decompressed.Length);
                     length = Random().Next(decompressed.Length - offset);
                 }
-                sbyte[] restored = Decompress(compressed, decompressed.Length, offset, length);
+                var restored = Decompress(compressed, decompressed.Length, offset, length);
                 Assert.AreEqual(Arrays.CopyOfRange(decompressed, offset, offset + length), restored); //was AssertArrayEquals
             }
         }
 
-        public virtual sbyte[] Test(sbyte[] decompressed)
+        public virtual byte[] Test(byte[] decompressed)
         {
             return Test(decompressed, 0, decompressed.Length);
         }
 
-        public virtual sbyte[] Test(sbyte[] decompressed, int off, int len)
+        public virtual byte[] Test(byte[] decompressed, int off, int len)
         {
-            sbyte[] compressed = Compress(decompressed, off, len);
-            sbyte[] restored = Decompress(compressed, len);
+            var compressed = Compress(decompressed, off, len);
+            var restored = Decompress(compressed, len);
             Assert.AreEqual(len, restored.Length);
             return compressed;
         }
@@ -141,22 +141,22 @@ namespace Lucene.Net.Codecs.Compressing
         [Test]
         public virtual void TestEmptySequence()
         {
-            Test(new sbyte[0]);
+            Test(new byte[0]);
         }
 
         [Test]
         public virtual void TestShortSequence()
         {
-            Test(new sbyte[] { (sbyte)Random().Next(256) });
+            Test(new[] { (byte)Random().Next(256) });
         }
 
         [Test]
         public virtual void TestIncompressible()
         {
-            sbyte[] decompressed = new sbyte[RandomInts.NextIntBetween(Random(), 20, 256)];
+            var decompressed = new byte[RandomInts.NextIntBetween(Random(), 20, 256)];
             for (int i = 0; i < decompressed.Length; ++i)
             {
-                decompressed[i] = (sbyte)i;
+                decompressed[i] = (byte)i;
             }
             Test(decompressed);
         }
@@ -164,8 +164,8 @@ namespace Lucene.Net.Codecs.Compressing
         [Test]
         public virtual void TestConstant()
         {
-            sbyte[] decompressed = new sbyte[TestUtil.NextInt(Random(), 1, 10000)];
-            Arrays.Fill(decompressed, (sbyte)Random().Next());
+            var decompressed = new byte[TestUtil.NextInt(Random(), 1, 10000)];
+            Arrays.Fill(decompressed, (byte)Random().Next());
             Test(decompressed);
         }
 
@@ -173,7 +173,7 @@ namespace Lucene.Net.Codecs.Compressing
         public virtual void TestLUCENE5201()
         {
             sbyte[] data = { 14, 72, 14, 85, 3, 72, 14, 85, 3, 72, 14, 72, 14, 72, 14, 85, 3, 72, 14, 72, 14, 72, 14, 72, 14, 72, 14, 72, 14, 85, 3, 72, 14, 85, 3, 72, 14, 85, 3, 72, 14, 85, 3, 72, 14, 85, 3, 72, 14, 85, 3, 72, 14, 50, 64, 0, 46, -1, 0, 0, 0, 29, 3, 85, 8, -113, 0, 68, -97, 3, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, 85, 8, -113, 0, 68, -97, 3, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 50, 64, 0, 47, -105, 0, 0, 0, 30, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, 85, 8, -113, 0, 68, -97, 3, 0, 2, 3, 85, 8, -113, 0, 68, -97, 3, 0, 2, 3, 85, 8, -113, 0, 68, -97, 3, 0, 2, -97, 6, 0, 2, 3, 85, 8, -113, 0, 68, -97, 3, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68
 , -113, 0, 120, 64, 0, 48, 4, 0, 0, 0, 31, 34, 72, 29, 72, 37, 72, 35, 72, 45, 72, 23, 72, 46, 72, 20, 72, 40, 72, 33, 72, 25, 72, 39, 72, 38, 72, 26, 72, 28, 72, 42, 72, 24, 72, 27, 72, 36, 72, 41, 72, 32, 72, 18, 72, 30, 72, 22, 72, 31, 72, 43, 72, 19, 72, 34, 72, 29, 72, 37, 72, 35, 72, 45, 72, 23, 72, 46, 72, 20, 72, 40, 72, 33, 72, 25, 72, 39, 72, 38, 72, 26, 72, 28, 72, 42, 72, 24, 72, 27, 72, 36, 72, 41, 72, 32, 72, 18, 72, 30, 72, 22, 72, 31, 72, 43, 72, 19, 72, 34, 72, 29, 72, 37, 72, 35, 72, 45, 72, 23, 72, 46, 72, 20, 72, 40, 72, 33, 72, 25, 72, 39, 72, 38, 72, 26, 72, 28, 72, 42, 72, 24, 72, 27, 72, 36, 72, 41, 72, 32, 72, 18, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 39, 24, 32, 34, 124, 0, 120, 64, 0, 48, 80, 0, 0, 0, 31, 30, 72, 22, 72, 31, 72, 43, 72, 19, 72, 34, 72, 29, 72, 37, 72, 35, 72, 45, 72, 23, 72, 46, 72, 20, 72, 40, 72, 33, 72, 25, 72, 39, 72, 38, 72, 26, 72, 28, 72, 42, 72, 24, 72, 27, 72, 36, 72, 41, 72, 32, 72, 
 18, 72, 30, 72, 22, 72, 31, 72, 43, 72, 19, 72, 34, 72, 29, 72, 37, 72, 35, 72, 45, 72, 23, 72, 46, 72, 20, 72, 40, 72, 33, 72, 25, 72, 39, 72, 38, 72, 26, 72, 28, 72, 42, 72, 24, 72, 27, 72, 36, 72, 41, 72, 32, 72, 18, 72, 30, 72, 22, 72, 31, 72, 43, 72, 19, 72, 34, 72, 29, 72, 37, 72, 35, 72, 45, 72, 23, 72, 46, 72, 20, 72, 40, 72, 33, 72, 25, 72, 39, 72, 38, 72, 26, 72, 28, 72, 42, 72, 24, 72, 27, 72, 36, 72, 41, 72, 32, 72, 18, 72, 30, 72, 22, 72, 31, 72, 43, 72, 19, 72, 34, 72, 29, 72, 37, 72, 35, 72, 45, 72, 23, 72, 46, 72, 20, 72, 40, 72, 33, 72, 25, 72, 39, 72, 38, 72, 26, 72, 28, 72, 42, 72, 24, 72, 27, 72, 36, 72, 41, 72, 32, 72, 18, 72, 30, 72, 22, 72, 31, 72, 43, 72, 19, 72, 34, 72, 29, 72, 37, 72, 35, 72, 45, 72, 23, 72, 46, 72, 20, 72, 40, 72, 33, 72, 25, 72, 39, 72, 38, 72, 26, 72, 28, 72, 42, 72, 24, 72, 27, 72, 36, 72, 41, 72, 32, 72, 18, 72, 30, 72, 22, 72, 31, 72, 43, 72, 19, 72, 34, 72, 29, 72, 37, 72, 35, 72, 45, 72, 23, 72, 46, 72, 20, 72, 40, 72, 33, 72, 25, 7
 2, 39, 72, 38, 72, 26, 72, 28, 72, 42, 72, 24, 72, 27, 72, 36, 72, 41, 72, 32, 72, 18, 72, 30, 72, 22, 72, 31, 72, 43, 72, 19, 72, 34, 72, 29, 72, 37, 72, 35, 72, 45, 72, 23, 72, 46, 72, 20, 72, 40, 72, 33, 72, 25, 72, 39, 72, 38, 72, 26, 72, 28, 72, 42, 72, 24, 72, 27, 72, 36, 72, 41, 72, 32, 72, 18, 72, 30, 72, 22, 72, 31, 72, 43, 72, 19, 50, 64, 0, 49, 20, 0, 0, 0, 32, 3, -97, 6, 0, 68, -113, 0, 2, 3, 85, 8, -113, 0, 68, -97, 3, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, 85, 8, -113, 0, 68, -97, 3, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 50, 64, 0, 50, 53, 0, 0, 0, 34, 3, -97, 6, 0, 68, -113, 0, 2, 3, 85, 8, -113, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, 85, 8, -113, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0
 , 68, -113, 0, 2, 3, 85, 8, -113, 0, 68, -97, 3, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, 85, 8, -113, 0, 68, -97, 3, 0, 2, 3, 85, 8, -113, 0, 68, -97, 3, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, 85, 8, -113, 0, 68, -97, 3, 0, 2, 3, 85, 8, -113, 0, 68, -97, 3, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, 85, 8, -113, 0, 68, -97, 3, 0, 2, 3, 85, 8, -113, 0, 68, -97, 3, 0, 2, 3, 85, 8, -113, 0, 68, -97, 3, 0, 2, 3, -97, 6, 0, 50, 64, 0, 51, 85, 0, 0, 0, 36, 3, 85, 8, -113, 0, 68, -97, 3, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, -97, 5, 0, 2, 3, 85, 8, -113, 0, 68, -97, 3, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 50, -64, 0, 51, -45, 0, 0, 0, 37, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, -9
 7, 6, 0, 68, -113, 0, 2, 3, 85, 8, -113, 0, 68, -113, 0, 2, 3, -97, 6, 0, 68, -113, 0, 2, 3, 85, 8, -113, 0, 68, -97, 3, 0, 2, 3, 85, 8, -113, 0, 68, -97, 3, 0, 120, 64, 0, 52, -88, 0, 0, 0, 39, 13, 85, 5, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 72, 13, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 72, 13, 72, 13, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 72, 13, 72, 13, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 85, 5, 72, 13, 72, 13, 85, 5, 72, 13, 72, 13, 85, 5, 72, 13, 72, 13, 85, 5, 72, 13, -19, -24, -101, -35 };
-            Test(data, 9, data.Length - 9);
+            Test((byte[])(Array)data, 9, data.Length - 9);
         }
     }
 }
\ No newline at end of file
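
TestLUCENE5201 above keeps its data table as sbyte[] (the literals are negative) and bridges it with (byte[])(Array)data. That works because the CLR treats sbyte[] and byte[] as assignment-compatible at runtime, so casting through Array reinterprets the same buffer without copying. A short standalone C# sketch (SByteToByteBridge is a made-up name; not part of the commit):

    using System;

    internal static class SByteToByteBridge
    {
        private static void Main()
        {
            sbyte[] signed = { 14, 72, -113, -97, -1 };

            // A direct (byte[]) cast is rejected by the C# compiler, but going
            // through Array is legal and the CLR accepts it at runtime:
            // no allocation, no copy, same object.
            byte[] unsigned = (byte[])(Array)signed;

            Console.WriteLine(ReferenceEquals(signed, unsigned)); // True
            Console.WriteLine(unsigned[2]);                       // 143 (was -113)
            Console.WriteLine(unsigned[4]);                       // 255 (was -1)
        }
    }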

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/139ad812/src/Lucene.Net.Tests/core/Codecs/Compressing/AbstractTestLZ4CompressionMode.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Codecs/Compressing/AbstractTestLZ4CompressionMode.cs b/src/Lucene.Net.Tests/core/Codecs/Compressing/AbstractTestLZ4CompressionMode.cs
index 35b3ce5..8add739 100644
--- a/src/Lucene.Net.Tests/core/Codecs/Compressing/AbstractTestLZ4CompressionMode.cs
+++ b/src/Lucene.Net.Tests/core/Codecs/Compressing/AbstractTestLZ4CompressionMode.cs
@@ -28,9 +28,9 @@ namespace Lucene.Net.Codecs.Compressing
     {
         private LZ4 lz4;
 
-        public override sbyte[] Test(sbyte[] decompressed)
+        public override byte[] Test(byte[] decompressed)
         {
-            sbyte[] compressed = base.Test(decompressed);
+            var compressed = base.Test(decompressed);
             int off = 0;
             int decompressedOff = 0;
             for (; ; )
@@ -94,7 +94,7 @@ namespace Lucene.Net.Codecs.Compressing
         public virtual void TestShortLiteralsAndMatchs()
         {
             // literals and matchs lengths <= 15
-            sbyte[] decompressed = "1234562345673456745678910123".GetBytes(Encoding.UTF8);
+            var decompressed = "1234562345673456745678910123".GetBytes(Encoding.UTF8);
             Test(decompressed);
         }
 
@@ -102,10 +102,10 @@ namespace Lucene.Net.Codecs.Compressing
         public virtual void TestLongMatchs()
         {
             // match length >= 20
-            sbyte[] decompressed = new sbyte[RandomInts.NextIntBetween(Random(), 300, 1024)];
+            var decompressed = new byte[RandomInts.NextIntBetween(Random(), 300, 1024)];
             for (int i = 0; i < decompressed.Length; ++i)
             {
-                decompressed[i] = (sbyte)i;
+                decompressed[i] = (byte)i;
             }
             Test(decompressed);
         }
@@ -114,7 +114,7 @@ namespace Lucene.Net.Codecs.Compressing
         public virtual void TestLongLiterals()
         {
             // long literals (length >= 16) which are not the last literals
-            sbyte[] decompressed = RandomArray(RandomInts.NextIntBetween(Random(), 400, 1024), 256);
+            var decompressed = RandomArray(RandomInts.NextIntBetween(Random(), 400, 1024), 256);
             int matchRef = Random().Next(30);
             int matchOff = RandomInts.NextIntBetween(Random(), decompressed.Length - 40, decompressed.Length - 20);
             int matchLength = RandomInts.NextIntBetween(Random(), 4, 10);
@@ -125,7 +125,7 @@ namespace Lucene.Net.Codecs.Compressing
         [Test]
         public virtual void TestMatchRightBeforeLastLiterals()
         {
-            Test(new sbyte[] { 1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4, 5 });
+            Test(new byte[] { 1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4, 5 });
         }
     }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/139ad812/src/Lucene.Net.Tests/core/Codecs/Compressing/TestFastDecompressionMode.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Codecs/Compressing/TestFastDecompressionMode.cs b/src/Lucene.Net.Tests/core/Codecs/Compressing/TestFastDecompressionMode.cs
index bdcbb95..d7fc0b0 100644
--- a/src/Lucene.Net.Tests/core/Codecs/Compressing/TestFastDecompressionMode.cs
+++ b/src/Lucene.Net.Tests/core/Codecs/Compressing/TestFastDecompressionMode.cs
@@ -29,10 +29,10 @@ namespace Lucene.Net.Codecs.Compressing
             Mode = CompressionMode.FAST_DECOMPRESSION;
         }
 
-        public override sbyte[] Test(sbyte[] decompressed, int off, int len)
+        public override byte[] Test(byte[] decompressed, int off, int len)
         {
-            sbyte[] compressed = base.Test(decompressed, off, len);
-            sbyte[] compressed2 = Compress(CompressionMode.FAST.NewCompressor(), decompressed, off, len);
+            var compressed = base.Test(decompressed, off, len);
+            var compressed2 = Compress(CompressionMode.FAST.NewCompressor(), decompressed, off, len);
             // because of the way this compression mode works, its output is necessarily
             // smaller than the output of CompressionMode.FAST
             Assert.IsTrue(compressed.Length <= compressed2.Length);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/139ad812/src/Lucene.Net.Tests/core/Codecs/Lucene41/TestBlockPostingsFormat3.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Codecs/Lucene41/TestBlockPostingsFormat3.cs b/src/Lucene.Net.Tests/core/Codecs/Lucene41/TestBlockPostingsFormat3.cs
index 138d941..a519b86 100644
--- a/src/Lucene.Net.Tests/core/Codecs/Lucene41/TestBlockPostingsFormat3.cs
+++ b/src/Lucene.Net.Tests/core/Codecs/Lucene41/TestBlockPostingsFormat3.cs
@@ -261,7 +261,7 @@ namespace Lucene.Net.Codecs.Lucene41
                     else if (code == 2)
                     {
                         // term, but ensure a non-zero offset
-                        sbyte[] newbytes = new sbyte[term.Length + 5];
+                        var newbytes = new byte[term.Length + 5];
                         Array.Copy(term.Bytes, term.Offset, newbytes, 5, term.Length);
                         tests.Add(new BytesRef(newbytes, 5, term.Length));
                     }

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/139ad812/src/Lucene.Net.Tests/core/Codecs/Lucene41/TestForUtil.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Codecs/Lucene41/TestForUtil.cs b/src/Lucene.Net.Tests/core/Codecs/Lucene41/TestForUtil.cs
index 8139c6e..944e57f 100644
--- a/src/Lucene.Net.Tests/core/Codecs/Lucene41/TestForUtil.cs
+++ b/src/Lucene.Net.Tests/core/Codecs/Lucene41/TestForUtil.cs
@@ -68,7 +68,7 @@ namespace Lucene.Net.Codecs.Lucene41
 
                 for (int i = 0; i < iterations; ++i)
                 {
-                    forUtil.WriteBlock(Arrays.CopyOfRange(values, i * Lucene41PostingsFormat.BLOCK_SIZE, values.Length), new sbyte[Lucene41.ForUtil.MAX_ENCODED_SIZE], @out);
+                    forUtil.WriteBlock(Arrays.CopyOfRange(values, i * Lucene41PostingsFormat.BLOCK_SIZE, values.Length), new byte[Lucene41.ForUtil.MAX_ENCODED_SIZE], @out);
                 }
                 endPointer = @out.FilePointer;
                 @out.Dispose();
@@ -86,7 +86,7 @@ namespace Lucene.Net.Codecs.Lucene41
                         continue;
                     }
                     int[] restored = new int[Lucene41.ForUtil.MAX_DATA_SIZE];
-                    forUtil.ReadBlock(@in, new sbyte[Lucene41.ForUtil.MAX_ENCODED_SIZE], restored);
+                    forUtil.ReadBlock(@in, new byte[Lucene41.ForUtil.MAX_ENCODED_SIZE], restored);
                     Assert.AreEqual(Arrays.CopyOfRange(values, i * Lucene41PostingsFormat.BLOCK_SIZE, (i + 1) * Lucene41PostingsFormat.BLOCK_SIZE), Arrays.CopyOf(restored, Lucene41PostingsFormat.BLOCK_SIZE));
                 }
                 Assert.AreEqual(endPointer, @in.FilePointer);

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/139ad812/src/Lucene.Net.Tests/core/Document/TestBinaryDocument.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Document/TestBinaryDocument.cs b/src/Lucene.Net.Tests/core/Document/TestBinaryDocument.cs
index d50d055..74d3e93 100644
--- a/src/Lucene.Net.Tests/core/Document/TestBinaryDocument.cs
+++ b/src/Lucene.Net.Tests/core/Document/TestBinaryDocument.cs
@@ -44,7 +44,7 @@ namespace Lucene.Net.Document
         {
             FieldType ft = new FieldType();
             ft.Stored = true;
-            IndexableField binaryFldStored = new StoredField("binaryStored", (sbyte[])(Array)System.Text.UTF8Encoding.UTF8.GetBytes(BinaryValStored));
+            IndexableField binaryFldStored = new StoredField("binaryStored", System.Text.UTF8Encoding.UTF8.GetBytes(BinaryValStored));
             IndexableField stringFldStored = new Field("stringStored", BinaryValStored, ft);
 
             Documents.Document doc = new Documents.Document();
@@ -92,8 +92,8 @@ namespace Lucene.Net.Document
         [Test]
         public virtual void TestCompressionTools()
         {
-            IndexableField binaryFldCompressed = new StoredField("binaryCompressed", (sbyte[])(Array)CompressionTools.Compress(BinaryValCompressed.GetBytes(Encoding.UTF8)));
-            IndexableField stringFldCompressed = new StoredField("stringCompressed", (sbyte[])(Array)CompressionTools.CompressString(BinaryValCompressed));
+            IndexableField binaryFldCompressed = new StoredField("binaryCompressed", CompressionTools.Compress(BinaryValCompressed.GetBytes(Encoding.UTF8)));
+            IndexableField stringFldCompressed = new StoredField("stringCompressed", CompressionTools.CompressString(BinaryValCompressed));
 
             var doc = new Documents.Document {binaryFldCompressed, stringFldCompressed};
 

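With StoredField now accepting byte[] directly, the (sbyte[])(Array) bridge casts above simply disappear. A hedged sketch of the resulting call shape (the Lucene.Net.Documents using and the CompressedFieldSketch/CompressedField names are assumptions for illustration; only the StoredField(string, byte[]) constructor and the CompressionTools.CompressString call are taken from the hunk above):

    using Lucene.Net.Documents; // assumed namespace for StoredField / CompressionTools

    internal static class CompressedFieldSketch
    {
        // CompressionTools.CompressString returns byte[] after this commit, so its
        // result feeds the byte[] StoredField constructor with no bridge cast.
        internal static StoredField CompressedField(string name, string value)
        {
            return new StoredField(name, CompressionTools.CompressString(value));
        }
    }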
http://git-wip-us.apache.org/repos/asf/lucenenet/blob/139ad812/src/Lucene.Net.Tests/core/Document/TestDocument.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Document/TestDocument.cs b/src/Lucene.Net.Tests/core/Document/TestDocument.cs
index dba80cc..8d31bfc 100644
--- a/src/Lucene.Net.Tests/core/Document/TestDocument.cs
+++ b/src/Lucene.Net.Tests/core/Document/TestDocument.cs
@@ -376,7 +376,7 @@ namespace Lucene.Net.Document
             doc.Add(new Field("tokenized", "abc xyz", Field.Store.NO, Field.Index.ANALYZED));
             doc.Add(new Field("tokenized_reader", new StringReader("abc xyz")));
             doc.Add(new Field("tokenized_tokenstream", w.w.Analyzer.TokenStream("tokenized_tokenstream", new StringReader("abc xyz"))));
-            doc.Add(new Field("binary", new sbyte[10]));
+            doc.Add(new Field("binary", new byte[10]));
             doc.Add(new Field("tv", "abc xyz", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.YES));
             doc.Add(new Field("tv_pos", "abc xyz", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS));
             doc.Add(new Field("tv_off", "abc xyz", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_OFFSETS));

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/139ad812/src/Lucene.Net.Tests/core/Document/TestField.cs
----------------------------------------------------------------------
diff --git a/src/Lucene.Net.Tests/core/Document/TestField.cs b/src/Lucene.Net.Tests/core/Document/TestField.cs
index 45bc0b2..8a0bde7 100644
--- a/src/Lucene.Net.Tests/core/Document/TestField.cs
+++ b/src/Lucene.Net.Tests/core/Document/TestField.cs
@@ -453,7 +453,7 @@ namespace Lucene.Net.Document
         {
             try
             {
-                f.BytesValue = new BytesRef(new sbyte[] { 5, 5 });
+                f.BytesValue = new BytesRef(new byte[] { 5, 5 });
                 Assert.Fail();
             }
             catch (System.ArgumentException expected)

