26 commits
04c2733
WIP - matching files for rename
alansingfield Feb 8, 2020
8b0bc64
Doesn't regress, but does it work?
alansingfield Feb 8, 2020
ea91103
Renaming test working
alansingfield Feb 8, 2020
78c9777
Removed dead code
alansingfield Feb 8, 2020
960a52b
Store state in HashSet not dictionary. Removed serialization for now …
alansingfield Feb 8, 2020
b792d86
Refactoring to tidy up
alansingfield Feb 8, 2020
4f68c37
Added ability to see files moved from one directory to another.
alansingfield Feb 8, 2020
850c19e
Lifting FileSystemEntry through to the collection
alansingfield Feb 11, 2020
befd080
Hash directory and file names
alansingfield Feb 13, 2020
ef4d24e
Attempt to intern strings
alansingfield Feb 14, 2020
4150991
Reduce string length by cutting off root folder
alansingfield Feb 28, 2020
dabe842
PathRedux first trial version
alansingfield Feb 28, 2020
14616e6
Attempt to do chained lookup
alansingfield Mar 6, 2020
41fdd2c
Attempt on hash bucket storage
alansingfield Mar 7, 2020
95859bc
Created hashed character buffer
alansingfield Mar 7, 2020
7116d72
Got compressed hash buffer working
alansingfield Mar 7, 2020
67f3c79
Renamed to HashBucket
alansingfield Mar 10, 2020
bbceb2b
Removed RelativeDir thingy
alansingfield Mar 12, 2020
6b207bb
Removed use of StringIntern
alansingfield Mar 14, 2020
9dbd61d
Allow wraparound on hashbucket.
alansingfield Mar 14, 2020
00c535f
Comment changes
alansingfield Mar 15, 2020
cb1cb36
Working on rebuild of buffers
alansingfield Apr 6, 2020
67a5e60
Working on hashing ReadOnlySequence
alansingfield Apr 8, 2020
1786b62
Switched over from HashFunction to HashCode
alansingfield Apr 11, 2020
62e3f23
Removed 3x 64-bit version flags
alansingfield Apr 25, 2020
6206541
Improved comments slightly
alansingfield Apr 25, 2020
44 changes: 44 additions & 0 deletions PathReduxTests/HashCodes/ControllableHashCode.cs
@@ -0,0 +1,44 @@
using System;
using System.Collections.Generic;
using System.Text;
using YellowCounter.FileSystemState.HashCodes;

namespace PathReduxTests.HashCodes
{
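// Test double for IHashCode: the caller feeds in text of the form
// "<number>,<anything>" and ToHashCode() returns the integer before the
// comma, so a test can dictate exact hash values (e.g. to force collisions).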
public class ControllableHashCode : IHashCode
{
private StringBuilder stringBuilder = new StringBuilder();
private bool dead = false;

public void Add(char value)
{
stringBuilder.Append(value);
}

public int ToHashCode()
{
deadCheck();

string arg = stringBuilder.ToString();

// Use comma as delimiter between desired hash number and remaining text.
int commaPos = arg.IndexOf(',');

if(commaPos == -1)
throw new Exception($"{nameof(ControllableHashCode)} requires , in each string");

if(int.TryParse(arg.Substring(0, commaPos), out int result))
return result;

throw new Exception("Text before , must be an integer");
}

private void deadCheck()
{
if(dead)
throw new Exception("Cannot call ToHashCode() twice");

dead = true;
}
}
}
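A quick sketch of how this helper might be used, assuming an MSTest/Shouldly test in the same style as the rest of this PR (the class and method names below are hypothetical, not part of the change):

using Microsoft.VisualStudio.TestTools.UnitTesting;
using Shouldly;

namespace PathReduxTests.HashCodes
{
    [TestClass]
    public class ControllableHashCodeUsageSketch
    {
        [TestMethod]
        public void HashIsTheNumberBeforeTheComma()
        {
            var hashCode = new ControllableHashCode();

            // Feed in "42,abc"; the integer before the comma dictates the result,
            // so a test can force two different strings onto the same hash.
            foreach(char c in "42,abc")
                hashCode.Add(c);

            hashCode.ToHashCode().ShouldBe(42);
        }
    }
}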
55 changes: 55 additions & 0 deletions PathReduxTests/HashCodes/DeterministicHashCode.cs
@@ -0,0 +1,55 @@
using System;
using System.Collections.Generic;
using System.Text;
using YellowCounter.FileSystemState.HashCodes;

namespace PathReduxTests.HashCodes
{
// Want a deterministic hash function so our tests are repeatable.
// https://andrewlock.net/why-is-string-gethashcode-different-each-time-i-run-my-program-in-net-core/

public class DeterministicHashCode : IHashCode
{
private bool dead = false;
private bool odd = false;
private int hash1 = 352654597; //(5381 << 16) + 5381;
private int hash2 = 352654597;

public void Add(char value)
{
unchecked
{
if(!odd)
{
hash1 = ((hash1 << 5) + hash1) ^ value;
}
else
{
hash2 = ((hash2 << 5) + hash2) ^ value;
}
}

odd = !odd;
}

public int ToHashCode()
{
deadCheck();

unchecked
{
return hash1 + (hash2 * 1566083941);
}
}

private void deadCheck()
{
if(dead)
throw new Exception("Cannot call ToHashCode() twice");

dead = true;
}
}
}
109 changes: 109 additions & 0 deletions PathReduxTests/PathRedux/CharBufferTests.cs
@@ -0,0 +1,109 @@
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Collections.Generic;
using System.Text;
using YellowCounter.FileSystemState.PathRedux;
using Shouldly;

namespace YellowCounter.FileSystemState.Tests.PathRedux
{
[TestClass]
public class CharBufferTests
{
[TestMethod]
public void CharBuffer1()
{
var charBuffer = new CharBuffer(100);

int idx1 = charBuffer.Store("Hello");
int idx2 = charBuffer.Store("World");

charBuffer.Retrieve(idx1).ToString().ShouldBe("Hello");
charBuffer.Retrieve(idx2).ToString().ShouldBe("World");
}

[TestMethod]
public void CharBuffer2()
{
var charBuffer = new CharBuffer(100);

int idx1 = charBuffer.Store("Hello");
int idx2 = charBuffer.Store("World");

charBuffer.Retrieve(new[] { idx1, idx2 }).ToString().ShouldBe("HelloWorld");
}

[TestMethod]
public void CharBufferRealloc()
{
var charBuffer = new CharBuffer(13);

int idx1 = charBuffer.Store("Hello");
int idx2 = charBuffer.Store("World");

var helloSpan = charBuffer.Retrieve(idx1);

var worldSpan = charBuffer.Retrieve(idx2);

charBuffer.Resize(25);

// These spans still point at the old buffer. The GC keeps the old array
// alive for as long as these spans reference it, so the contents stay
// readable even after the resize swaps in a new array.
helloSpan.ToString().ShouldBe("Hello");
worldSpan.ToString().ShouldBe("World");

var hello2Span = charBuffer.Retrieve(idx1);
var world2Span = charBuffer.Retrieve(idx2);

hello2Span.ToString().ShouldBe("Hello");
world2Span.ToString().ShouldBe("World");
}

[TestMethod]
public void CharBufferEnumerate()
{
var charBuffer = new CharBuffer(100);

int idx1 = charBuffer.Store("Hello");
int idx2 = charBuffer.Store("World");

var results = new List<string>();
foreach(var item in charBuffer)
{
results.Add(item.Span.ToString());
}

results.ShouldBe(new[] { "Hello", "World" });
}

[TestMethod]
public void CharBufferMaxCapacity()
{
// To store the text "Hello" without expanding, we need 5 chars for Hello,
// 1 char for the null terminator of Hello, and 1 char for the null terminator
// of the overall buffer.
var charBuffer = new CharBuffer(7);

int idx1 = charBuffer.Store("Hello");
idx1.ShouldNotBe(-1);
charBuffer.Capacity.ShouldBe(7);

charBuffer.Retrieve(idx1).ToString().ShouldBe("Hello");

int c = 0;
foreach(var itm in charBuffer)
{
if(c == 0)
{
itm.Pos.ShouldBe(0);
itm.Span.ToString().ShouldBe("Hello");
}
else
{
throw new Exception("Should only have one item");
}
c++;
}
}
}
}
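The Store/Retrieve(int[]) pair above hints at the point of PathRedux: keep each directory string once and rebuild full paths on demand by concatenating stored segments. A minimal sketch of that idea, using only the CharBuffer members exercised in these tests (the wrapper class is hypothetical, and how the production code actually splits paths is not shown here):

using System;
using YellowCounter.FileSystemState.PathRedux;

public static class CharBufferPathSketch
{
    public static void Main()
    {
        var charBuffer = new CharBuffer(100);

        // Store the directory once, then each filename separately.
        int dirIdx = charBuffer.Store(@"C:\Source\Project\");
        int readmeIdx = charBuffer.Store("readme.txt");
        int programIdx = charBuffer.Store("program.cs");

        // Rebuild full paths by concatenating the stored segments.
        Console.WriteLine(charBuffer.Retrieve(new[] { dirIdx, readmeIdx }).ToString());
        Console.WriteLine(charBuffer.Retrieve(new[] { dirIdx, programIdx }).ToString());
    }
}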
106 changes: 106 additions & 0 deletions PathReduxTests/PathRedux/HashBucketTests.cs
@@ -0,0 +1,106 @@
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Collections.Generic;
using System.Text;
using YellowCounter.FileSystemState.PathRedux;
using Shouldly;

namespace PathReduxTests.PathRedux
{
[TestClass]
public class HashBucketTests
{
[TestMethod]
public void HashBucketStoreRetrieve()
{
var m = new HashBucket(2, 2);

m.Store(0, 123456).ShouldBe(true);
m.Store(0, 765432).ShouldBe(true);

var result = m.Retrieve(0);

result.ToArray().ShouldBe(new[] { 123456, 765432 });
}

[TestMethod]
public void HashBucketStoreFlowpast()
{
var m = new HashBucket(2, 2);

m.Store(1, 123456).ShouldBe(true);
m.Store(1, 765432).ShouldBe(true);

var result = m.Retrieve(1);

result.ToArray().ShouldBe(new[] { 123456, 765432 });
}

[TestMethod]
public void HashBucketStoreZero()
{
var m = new HashBucket(2, 2);

// It can store a zero
m.Store(0, 0).ShouldBe(true);

var result = m.Retrieve(0);
result.ToArray().ShouldBe(new[] { 0 });
}

[TestMethod]
public void HashBucketChainLimit()
{
var m = new HashBucket(8, 2);

m.Store(0, 100).ShouldBe(true);
m.Store(0, 200).ShouldBe(true);
m.Store(0, 300).ShouldBe(false);

var result = m.Retrieve(0);

result.ToArray().ShouldBe(new[] { 100, 200 });
}

[TestMethod]
public void HashBucketOverlap()
{
var m = new HashBucket(8, 8);

// The chain for hash 0 extends past slot 1, so Retrieve(0) also picks up
// the value stored under hash 1 because the chains overlap.
m.Store(0, 100).ShouldBe(true);
m.Store(1, 200).ShouldBe(true);
m.Store(0, 300).ShouldBe(true);

var result = m.Retrieve(0);

result.ToArray().ShouldBe(new[] { 100, 200, 300 });
}

[TestMethod]
public void HashBucketOverlapLimited()
{
var m = new HashBucket(8, 2);

// With a chain limit of 2, the third value cannot be placed: slots 0 and 1
// are already taken, so Store() reports failure instead of extending the chain.
m.Store(0, 100).ShouldBe(true);
m.Store(1, 200).ShouldBe(true);
m.Store(0, 300).ShouldBe(false);

m.Retrieve(0).ToArray().ShouldBe(new[] { 100, 200 });
m.Retrieve(1).ToArray().ShouldBe(new[] { 200 });
}

[TestMethod]
public void HashBucketWraparound()
{
var m = new HashBucket(4, 2);

m.Store(3, 100).ShouldBe(true);
m.Store(3, 200).ShouldBe(true);

m.Retrieve(3).ToArray().ShouldBe(new[] { 100, 200 });
}
}
}
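Taken together, these tests suggest how the pieces compose: hash a string, use the hash to pick a HashBucket slot, and store a CharBuffer index there; on lookup, Retrieve() gives back candidate indices that must still be compared to the actual text, because chains can overlap. A rough sketch under those assumptions (the FindOrAdd helper is hypothetical, it assumes HashBucket maps an arbitrary hash onto its slots as the wraparound test implies, and it borrows the test DeterministicHashCode for repeatability):

using System;
using PathReduxTests.HashCodes;
using YellowCounter.FileSystemState.PathRedux;

public static class HashBucketLookupSketch
{
    // Returns the CharBuffer index of text, storing it if not already present.
    public static int FindOrAdd(HashBucket hashBucket, CharBuffer charBuffer, string text)
    {
        var hashCode = new DeterministicHashCode();
        foreach(char c in text)
            hashCode.Add(c);

        int hash = hashCode.ToHashCode();

        // Chains can overlap, so every candidate must be verified against the
        // stored text before trusting it. ToString() keeps the sketch simple;
        // a real implementation would compare spans to avoid the allocation.
        foreach(int idx in hashBucket.Retrieve(hash).ToArray())
        {
            if(charBuffer.Retrieve(idx).ToString() == text)
                return idx;
        }

        int newIdx = charBuffer.Store(text);

        // Store() reports failure when the chain limit is hit; a fuller
        // implementation would resize and rehash at this point.
        if(!hashBucket.Store(hash, newIdx))
            throw new InvalidOperationException("Chain limit reached.");

        return newIdx;
    }
}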