Compare commits


22 Commits
0.5.0 ... main

Author  SHA1  Message  Date
f8c00da2b8  release: version 0.10.0 🚀 (all checks successful)  2024-12-13 16:04:23 +01:00
d4ac62c592  feat: add support for custom token types and longs, ref: NOISSUE  2024-12-13 16:04:19 +01:00
0f533c2018  release: version 0.9.3 🚀 (all checks successful)  2024-12-13 13:50:04 +01:00
2067fe06fc  fix: remove duplicate TokenConverter definition, ref: NOISSUE  2024-12-13 13:50:01 +01:00
cc0f0a24d9  release: version 0.9.2 🚀 (all checks successful)  2024-12-13 13:30:50 +01:00
c41d665ab8  fix: more bugfixes, ref: NOISSUE  2024-12-13 13:30:46 +01:00
2fbdafa0e9  release: version 0.9.1 🚀 (all checks successful)  2024-12-13 13:23:06 +01:00
f942954678  fix: fix build issues, ref: NOISSUE  2024-12-13 13:23:02 +01:00
4c67e8efb0  release: version 0.9.0 🚀 (some checks failed: CI tests_linux and deploy failing)  2024-12-13 13:13:50 +01:00
e83e99758a  feat: add filter option to TokenConverter, ref: NOISSUE  2024-12-13 13:13:43 +01:00
81ac797b4c  release: version 0.8.0 🚀 (all checks successful)  2024-12-12 20:03:14 +01:00
e9aa60524c  feat: adding sensible index constructors refs: NOISSUE  2024-12-12 20:03:07 +01:00
7e5ab9f799  release: version 0.7.2 🚀 (all checks successful)  2024-12-06 00:33:43 +01:00
fc137ebd03  fix: add some missing API methods, ref: NOISSUE  2024-12-06 00:33:37 +01:00
1b2e9ad1ee  release: version 0.7.1 🚀 (all checks successful)  2024-12-06 00:03:23 +01:00
550c8280a6  fix: allow for parsing single chars as input, ref: NOISSUE  2024-12-05 23:58:11 +01:00
Simon Diesenreiter  b261773b13  release: version 0.7.0 🚀 (some checks failed: Create Release failed, deploy skipped)  2024-12-05 23:36:29 +01:00
Simon Diesenreiter  85d94eca2f  feat: implement generic data set manipulator, ref: NOISSUE  2024-12-05 23:36:23 +01:00
2ed103abbf  release: version 0.6.0 🚀 (all checks successful)  2024-12-03 23:09:15 +01:00
fab5d2eee7  feat: enable named literals, ref: NOISSUE  2024-12-03 23:09:08 +01:00
7be09140e2  release: version 0.5.1 🚀 (all checks successful)  2024-12-03 21:32:05 +01:00
09bbba1293  fix: some bugfixes with fragment parser logic, ref: NOISSUE  2024-12-03 21:32:02 +01:00
34 changed files with 1120 additions and 119 deletions

View File

@@ -17,23 +17,7 @@ on:
workflow_dispatch:
jobs:
linter:
strategy:
fail-fast: false
matrix:
dotnet-version: [9.0.X]
os: [ubuntu-latest]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v3
- uses: actions/setup-dotnet@v4
with:
dotnet-version: ${{ matrix.dotnet-version }}
- name: Run linter
run: make lint
tests_linux:
needs: linter
strategy:
fail-fast: false
matrix:

View File

@@ -4,6 +4,104 @@ Changelog
(unreleased)
------------
- Feat: add support for custom token types and longs, ref: NOISSUE.
[Simon Diesenreiter]
0.9.3 (2024-12-13)
------------------
Fix
~~~
- Remove duplicate TokenConverter definition, ref: NOISSUE. [Simon
Diesenreiter]
Other
~~~~~
0.9.2 (2024-12-13)
------------------
Fix
~~~
- More bugfixes, ref: NOISSUE. [Simon Diesenreiter]
Other
~~~~~
0.9.1 (2024-12-13)
------------------
Fix
~~~
- Fix build issues, ref: NOISSUE. [Simon Diesenreiter]
Other
~~~~~
0.9.0 (2024-12-13)
------------------
- Feat: add filter option to TokenConverter, ref: NOISSUE. [Simon
Diesenreiter]
0.8.0 (2024-12-12)
------------------
- Feat: adding sensible index constructors refs: NOISSUE. [Simon
Diesenreiter]
0.7.2 (2024-12-05)
------------------
Fix
~~~
- Add some missing API methods, ref: NOISSUE. [Simon Diesenreiter]
Other
~~~~~
0.7.1 (2024-12-05)
------------------
Fix
~~~
- Allow for parsing single chars as input, ref: NOISSUE. [Simon
Diesenreiter]
Other
~~~~~
0.7.0 (2024-12-05)
------------------
- Feat: implement generic data set manipulator, ref: NOISSUE. [Simon
Diesenreiter]
0.6.0 (2024-12-03)
------------------
- Feat: enable named literals, ref: NOISSUE. [Simon Diesenreiter]
0.5.1 (2024-12-03)
------------------
Fix
~~~
- Some bugfixes with fragment parser logic, ref: NOISSUE. [Simon
Diesenreiter]
Other
~~~~~
0.5.0 (2024-12-03)
------------------
Fix
~~~

View File

@@ -1,8 +1,8 @@
namespace TextParser.Tests;
using Parsing;
using Parsing.Data;
using Parsing.Schema;
using Parsing.Schema.BuildingBlocks;
using Parsing.Tokenization;
public class TextParserTests
@@ -18,8 +18,22 @@ public class TextParserTests
private const string testInput5 = @"asdfnums(2,5,3)ght
cv strs(test) jh 4,3,2
34,54,2nums(2,8) strs(aa,ab,ba,bb)aa,bb";
private const string testInput6 = @"adfdf1()324ddf3()svsdvs
davnsldkvjs2()m23423()
mcsodkcn owdjnfj 1() asdfnad 23234 2() sdvsdv";
private const string testInput7 = @"adfdf1()324ddf3()()()svsdvs
davnsldkvjs2()()m23423()()()
mcsodkcn owdjnfj 1() asdfnad 23234 2()() sdvsdv";
private const string testInput8 = @"2 4 6 4 1 3 5 4 7 2 4 6 8 3";
private const string testInput9 = @"2 4 6 4 1
3 5 4 7 6
4 6 8 3 9";
private const string testInput10 = @"abc
bca
cab";
private const string testInput11 = @"2 x y 4 x y 6 x y 4 x y 1 x y";
[Fact]
public void LineParser_TestSimpleRepetition()
@@ -230,7 +244,7 @@ public class TextParserTests
.EndOptions()
.Build();
var parser = new TextParser<FragmentSchemaContext>(schema);
var parser = TextParser.Create(schema);
var fragmentData = parser
.SetInputText(testInput5)
.Parse()
@@ -257,4 +271,176 @@ public class TextParserTests
Assert.Equal(12, convertedData[2]);
Assert.Equal(4, convertedData[3]);
}
[Fact]
public void FragmentParser_LiteralTest()
{
var schemaBuilder = new FragmentSchemaBuilder();
var schema = schemaBuilder
.StartOptions()
.Option()
.Expect("1()", "option1")
.Option()
.Expect("2()", "option2")
.Option()
.Expect("3()", "option3")
.EndOptions()
.Build();
var parser = TextParser.Create(schema);
var fragmentData = parser
.SetInputText(testInput6)
.Parse()
.AsFragments();
var convertedData = fragmentData
.ConvertAll((Fragment f) =>
{
bool saw1 = f.ContainsKey("option1") ? f["option1"].Count > 0 : false;
bool saw2 = f.ContainsKey("option2") ? f["option2"].Count() > 0 : false;
bool saw3 = f.ContainsKey("option3") ? f["option3"].Count() > 0 : false;
int indicator = 0;
if (saw1)
{
indicator += 1;
}
if (saw2)
{
indicator += 2;
}
if (saw3)
{
indicator += 4;
}
return indicator;
});
Assert.Equal(6, convertedData.Count);
Assert.Equal(1, convertedData[0]);
Assert.Equal(4, convertedData[1]);
Assert.Equal(2, convertedData[2]);
Assert.Equal(4, convertedData[3]);
Assert.Equal(1, convertedData[4]);
Assert.Equal(2, convertedData[5]);
}
[Fact]
public void DataManipulator_SimpleOneDimensionalTest()
{
var schemaBuilder = new InputSchemaBuilder();
var schema = schemaBuilder
.Repeat()
.Expect(InputType.Integer)
.EndRepetition()
.Build();
var parser = new TextParser<InputSchemaContext>(schema);
var row = parser
.SetInputText(testInput8)
.Parse()
.AsSingleStream<int>();
var searchSequence = new List<int> { 4, 6 };
var manipulator = DefaultOneDimensionalManipulator.Create(row);
var searchResults = manipulator.FindInSet(searchSequence);
Assert.Equal(3, searchResults.Count);
Assert.Equal(1, searchResults[0].DataIndex.GetIndices()[0]);
Assert.Equal(3, searchResults[1].DataIndex.GetIndices()[0]);
Assert.Equal(10, searchResults[2].DataIndex.GetIndices()[0]);
Assert.Equal(Direction.Forward, searchResults[0].Direction);
Assert.Equal(Direction.Backward, searchResults[1].Direction);
Assert.Equal(Direction.Forward, searchResults[2].Direction);
}
[Fact]
public void DataManipulator_SimpleTwoDimensionalTest()
{
var schemaBuilder = new InputSchemaBuilder();
var schema = schemaBuilder
.Repeat()
.Expect(InputType.Integer)
.EndRepetition()
.Build();
var parser = new TextParser<InputSchemaContext>(schema);
var row = parser
.SetInputText(testInput9)
.Parse()
.AsListRows<int>();
var searchSequence = new List<int> { 4, 6 };
var manipulator = DefaultTwoDimensionalManipulator.Create(row);
var searchResults = manipulator.FindInSet(searchSequence);
Assert.Equal(6, searchResults.Count);
Assert.Equal(0, searchResults[0].DataIndex.GetIndices()[0]);
Assert.Equal(0, searchResults[0].DataIndex.GetIndices()[1]);
Assert.Equal(2, searchResults[1].DataIndex.GetIndices()[0]);
Assert.Equal(1, searchResults[1].DataIndex.GetIndices()[1]);
Assert.Equal(2, searchResults[2].DataIndex.GetIndices()[0]);
Assert.Equal(1, searchResults[2].DataIndex.GetIndices()[1]);
Assert.Equal(1, searchResults[3].DataIndex.GetIndices()[0]);
Assert.Equal(2, searchResults[3].DataIndex.GetIndices()[1]);
Assert.Equal(3, searchResults[4].DataIndex.GetIndices()[0]);
Assert.Equal(2, searchResults[4].DataIndex.GetIndices()[1]);
Assert.Equal(3, searchResults[5].DataIndex.GetIndices()[0]);
Assert.Equal(2, searchResults[5].DataIndex.GetIndices()[1]);
Assert.Equal(Direction.E, searchResults[0].Direction);
Assert.Equal(Direction.N, searchResults[1].Direction);
Assert.Equal(Direction.SW, searchResults[2].Direction);
Assert.Equal(Direction.E, searchResults[3].Direction);
Assert.Equal(Direction.SE, searchResults[4].Direction);
Assert.Equal(Direction.W, searchResults[5].Direction);
}
[Fact]
public void TextParser_TestReadingChars()
{
var schemaBuilder = new InputSchemaBuilder();
var schema = schemaBuilder
.Repeat()
.Expect(InputType.Char)
.EndRepetition()
.Build();
var parser = new TextParser<InputSchemaContext>(schema);
var row = parser
.SetInputText(testInput10)
.Parse()
.AsListRows<string>();
Assert.Equal(3, row.Count);
Assert.Equal("a", row[0][0]);
Assert.Equal(3, row[0].Count);
Assert.Equal(3, row[1].Count);
Assert.Equal(3, row[2].Count);
}
[Fact]
public void TextParser_TestFilter()
{
var schemaBuilder = new InputSchemaBuilder();
var schema = schemaBuilder
.Repeat()
.Expect(InputType.Integer)
.Expect(InputType.Char)
.Expect(InputType.Char)
.EndRepetition()
.Build();
var parser = new TextParser<InputSchemaContext>(schema);
var numbers = parser
.SetInputText(testInput11)
.Parse()
.Filter(InputType.Integer)
.AsSingleStream<int>();
Assert.Equal(5, numbers.Count);
Assert.Equal(2, numbers[0]);
Assert.Equal(4, numbers[1]);
Assert.Equal(6, numbers[2]);
Assert.Equal(4, numbers[3]);
Assert.Equal(1, numbers[4]);
}
}

View File

@@ -0,0 +1,49 @@
namespace Parsing.Data;
using Parsing;
using Parsing.Tokenization;
public static class DataConversionHelpers
{
public static List<TNewType> ConvertData<TTokenType, TNewType, TOldType>(this List<IToken> tokenList, Func<TOldType, TNewType> converter) where TTokenType : IValueToken<TOldType>
{
var newList = new List<TNewType>();
foreach (var token in tokenList)
{
var typedToken = token as IValueToken<TOldType>;
if (typedToken == null)
{
throw new Exception("Invalid Token type encountered during value conversion");
}
newList.Add(converter(typedToken.GetValue()));
}
return newList;
}
public static List<TNewType> ConvertData<TTokenType, TNewType, TOldType>(this List<IToken> tokenList, Func<TOldType, List<TNewType>> converter) where TTokenType : IValueToken<TOldType>
{
var newList = new List<TNewType>();
foreach (var token in tokenList)
{
var typedToken = token as IValueToken<TOldType>;
if (typedToken == null)
{
throw new Exception("Invalid Token type encountered during value conversion");
}
newList.AddRange(converter(typedToken.GetValue()));
}
return newList;
}
public static List<List<TNewType>> ConvertData<TTokenType, TNewType, TOldType>(this List<List<IToken>> tokenListList, Func<TOldType, TNewType> converter) where TTokenType : IValueToken<TOldType>
{
var newListList = new List<List<TNewType>>();
foreach (var tokenList in tokenListList)
{
newListList.Add(tokenList.ConvertData<TTokenType, TNewType, TOldType>(converter));
}
return newListList;
}
}
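A minimal usage sketch of these conversion helpers (illustrative only, not part of the diff). It assumes IntegerToken implements IValueToken<int>, which the tokenization code elsewhere in this compare suggests but does not show:
    // Hypothetical example: double the value of each integer token in a flat token list.
    var tokens = new List<IToken> { new IntegerToken("2"), new IntegerToken("5") };
    List<int> doubled = tokens.ConvertData<IntegerToken, int, int>(value => value * 2);
    // doubled now holds [4, 10]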

View File

@@ -0,0 +1,43 @@
namespace Parsing.Data;
public static class DataManipulationHelpers
{
public static TType ReduceData<TType>(this List<TType> data, Func<TType, TType, TType> reducer)
{
if (data.Count < 2)
{
return data[0];
}
TType result = data[0];
for (int i = 1; i < data.Count; i++)
{
result = reducer(result, data[i]);
}
return result;
}
public static TType ReduceData<TType>(this List<TType> data, Func<List<TType>, TType> reducer)
{
return reducer(data);
}
public static List<TNewType> TransformData<TType, TNewType>(this List<TType> data, Func<TType, TNewType> transformer)
{
var newList = new List<TNewType>();
foreach (TType dataItem in data)
{
newList.Add(transformer(dataItem));
}
return newList;
}
public static List<TNewType> TransformData<TType, TNewType>(this List<List<TType>> data, Func<List<TType>, TNewType> transformer)
{
var newList = new List<TNewType>();
foreach (List<TType> dataItemList in data)
{
newList.Add(transformer(dataItemList));
}
return newList;
}
}
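A short illustrative sketch (not part of the diff) of how the reduce and transform helpers compose on plain integer data:
    // Hypothetical example: reduce a parsed row to its sum and map values to labels.
    var values = new List<int> { 2, 4, 6, 8 };
    int sum = values.ReduceData((a, b) => a + b);              // 20
    List<string> labels = values.TransformData(v => "#" + v);  // "#2", "#4", "#6", "#8"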

View File

@@ -0,0 +1,154 @@
using Parsing.Data;
namespace Parsing.Data;
public class SearchResult<TIndexType>
{
public SearchResult(IDataIndex<TIndexType> dataIndex)
{
this.DataIndex = dataIndex;
}
public IDataIndex<TIndexType>? DataIndex { get; set; }
}
public class DirectionalSearchResult<TIndexType> : SearchResult<TIndexType>
{
public DirectionalSearchResult(IDataIndex<TIndexType> dataIndex, Direction direction, int length): base(dataIndex)
{
this.Direction = direction;
this.Length = length;
}
public Direction Direction { get; set; }
public int Length { get; set; }
}
public abstract class DataSetManipulatorBase<TCollectedType, TDataType, TIndexType> where TDataType : IEquatable<TDataType>
{
protected IDataSetIndexer<TDataType, TIndexType> indexer;
protected List<TCollectedType> dataSet;
public DataSetManipulatorBase(List<TCollectedType> dataSet, IDataSetIndexer<TDataType, TIndexType> indexer)
{
this.indexer = indexer;
this.dataSet = dataSet;
}
// we do not know how to iterate a specific data set exactly, the implementation has to take care of validating directional input
protected abstract Direction ValidDirections();
protected void ValidateDirection(Direction d)
{
var allValidDirections = this.ValidDirections();
var isValid = ((d | allValidDirections) == allValidDirections) && ((d & allValidDirections) > 0);
if (!isValid)
{
throw new ArgumentException("Invalid search direction provided for given data set!");
}
}
protected List<Direction> SimplifyDirections(Direction d)
{
this.ValidateDirection(d);
var allDirections = DirectionProvider.GetAllDirections();
var singleDirections = new List<Direction>();
foreach (Direction direction in allDirections)
{
if ((direction & d) > 0)
{
singleDirections.Add(direction);
}
}
return singleDirections;
}
public List<Direction> GetValidDirectionList(Direction d)
{
return SimplifyDirections(this.ValidDirections());
}
// we do not know how to iterate a specific data set exactly, the implementation has to take care of ending traversal in any direction
public abstract bool IsValidIndex(IDataIndex<TIndexType> queryPosition);
// we do not know how to iterate a specific data set exactly, the implementation has to take care of traversing the set
public abstract IDataIndex<TIndexType> Move(IDataIndex<TIndexType> currentPosition, Direction direction);
public List<IDataIndex<TIndexType>> GetNeighborIndices(IDataIndex<TIndexType> currentPosition, Direction directions)
{
var singleDirections = this.SimplifyDirections(directions);
var neighbors = new List<IDataIndex<TIndexType>>();
foreach (var direction in singleDirections)
{
var newPosition = this.Move(currentPosition, direction);
if (this.IsValidIndex(newPosition))
{
neighbors.Add(newPosition);
}
}
return neighbors;
}
// we do not know how to iterate a specific data set exactly, but we only need to find specific items to be able to continue with any other algorithm
public abstract List<SearchResult<TIndexType>> FindInSet(TDataType data);
public List<DirectionalSearchResult<TIndexType>> FindAtPosition(IDataIndex<TIndexType> currentPosition, List<TDataType> data)
{
return this.FindAtPosition(currentPosition, data, this.ValidDirections());
}
public List<DirectionalSearchResult<TIndexType>> FindAtPosition(IDataIndex<TIndexType> currentPosition, List<TDataType> data, Direction directions)
{
var results = new List<DirectionalSearchResult<TIndexType>>();
var givenDirections = this.SimplifyDirections(directions);
if (EqualityComparer<TDataType>.Default.Equals(this.indexer.Get(this.dataSet, currentPosition), data[0]))
{
// found valid search start point, now validate each given direction
foreach (var direction in givenDirections)
{
int searchIndex = 1;
var searchPosition = this.Move(currentPosition, direction);
while (searchIndex < data.Count && this.IsValidIndex(searchPosition)
&& EqualityComparer<TDataType>.Default.Equals(this.indexer.Get(this.dataSet, searchPosition), data[searchIndex]))
{
searchPosition = this.Move(searchPosition, direction);
searchIndex++;
}
if (searchIndex == data.Count)
{
var result = new DirectionalSearchResult<TIndexType>(currentPosition, direction, searchIndex);
results.Add(result);
}
}
}
return results;
}
public List<DirectionalSearchResult<TIndexType>> FindInSet(List<TDataType> data, Direction directions)
{
var result = new List<DirectionalSearchResult<TIndexType>>();
// find valid starting points in set and perform search from there
var startingPoints = this.FindInSet(data[0]);
foreach (var startingPoint in startingPoints)
{
foreach (var positionResult in this.FindAtPosition(startingPoint.DataIndex, data, directions))
{
result.Add(positionResult);
}
}
return result;
}
public List<DirectionalSearchResult<TIndexType>> FindInSet(List<TDataType> data)
{
return this.FindInSet(data, this.ValidDirections());
}
}
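ValidateDirection above leans on the [Flags] encoding of Direction defined later in this compare: the OR clause rejects any bit outside the manipulator's valid set, and the AND clause rejects an empty query. A small sketch of the same checks, assuming the one-dimensional valid set of Left | Right:
    // Illustrative only: mirrors the checks inside ValidateDirection.
    Direction valid = Direction.Left | Direction.Right;   // what a 1-D manipulator allows
    Direction query = Direction.Horizontal;               // E | W, i.e. exactly the valid bits
    Direction bad = Direction.N | Direction.E;            // N lies outside the valid set
    bool queryOk = ((query | valid) == valid) && ((query & valid) > 0); // true
    bool badOk = ((bad | valid) == valid) && ((bad & valid) > 0);       // false: OR adds the N bit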

View File

@@ -0,0 +1,62 @@
public class DefaultDataSetIndexer<TDataType> : IDataSetIndexer<TDataType, int>
{
public TDataType Get<TGenericCollectionContentType>(List<TGenericCollectionContentType> collection, IDataIndex<int> index)
{
var indices = index.GetIndices();
return this.GetInternal(collection, indices.ToArray());
}
private TDataType GetInternal<TGenericCollectionContentType>(List<TGenericCollectionContentType> collection, int[] indices)
{
if (indices.Length == 3)
{
return this.GetAtIndex((collection as List<List<List<TDataType>>>), indices[0], indices[1], indices[2]);
}
else if (indices.Length == 2)
{
return this.GetAtIndex((collection as List<List<TDataType>>), indices[0], indices[1]);
}
else if (indices.Length == 1)
{
return this.GetAtIndex((collection as List<TDataType>), indices[0]);
}
else
{
throw new ArgumentException("Invalid Data Set access!");
}
}
public TDataType Get<TGenericCollectionContentType>(List<TGenericCollectionContentType> collection, params int[] indices)
{
return this.GetInternal(collection, indices);
}
public TDataType GetAtIndex(List<TDataType> collection, int index)
{
if (collection == null)
{
throw new ArgumentException("Invalid data set provided for access");
}
return collection[index];
}
public TDataType GetAtIndex(List<List<TDataType>> collection, int x, int y)
{
if (collection == null)
{
throw new ArgumentException("Invalid data set provided for access");
}
return collection[collection.Count - y - 1][x];
}
public TDataType GetAtIndex(List<List<List<TDataType>>> collection, int x, int y, int z)
{
if (collection == null)
{
throw new ArgumentException("Invalid data set provided for access");
}
return collection[z][y][x];
}
}
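Note the inverted row lookup in the two-dimensional GetAtIndex: y appears to count from the bottom of the outer list, so Direction.N in the manipulators below increases y. A hedged sketch of that assumed convention:
    // Hypothetical example of the bottom-up y convention used by GetAtIndex(collection, x, y).
    var grid = new List<List<char>>
    {
        new List<char> { 'a', 'b' },   // stored first, addressed as y == 1 (top row)
        new List<char> { 'c', 'd' },   // stored last, addressed as y == 0 (bottom row)
    };
    var indexer = new DefaultDataSetIndexer<char>();
    char bottomLeft = indexer.GetAtIndex(grid, 0, 0);  // 'c'
    char topRight = indexer.GetAtIndex(grid, 1, 1);    // 'b'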

View File

@@ -0,0 +1,59 @@
using System.Runtime.InteropServices;
using Parsing.Data;
namespace Parsing.Data;
public static class DefaultOneDimensionalManipulator
{
public static DefaultOneDimensionalManipulator<TDataType> Create<TDataType>(List<TDataType> dataSet) where TDataType : IEquatable<TDataType>
{
return new DefaultOneDimensionalManipulator<TDataType>(dataSet);
}
}
public class DefaultOneDimensionalManipulator<TDataType> : DataSetManipulatorBase<TDataType, TDataType, int> where TDataType : IEquatable<TDataType>
{
public DefaultOneDimensionalManipulator(List<TDataType> dataSet) : base(dataSet, new DefaultDataSetIndexer<TDataType>())
{
}
protected override Direction ValidDirections()
{
return (Direction.Left | Direction.Right);
}
public override bool IsValidIndex(IDataIndex<int> queryPosition)
{
var index = queryPosition.GetIndices()[0];
return (index >= 0) && (index < this.dataSet.Count);
}
public override IDataIndex<int> Move(IDataIndex<int> currentPosition, Direction direction)
{
switch (direction)
{
case Direction.Forward:
return new DefaultPositionalDataIndex(currentPosition.GetIndices()[0] + 1);
case Direction.Backward:
return new DefaultPositionalDataIndex(currentPosition.GetIndices()[0] - 1);
default:
throw new ArgumentOutOfRangeException("Direction is not supported for moves in the current data set!");
}
}
public override List<SearchResult<int>> FindInSet(TDataType data)
{
var results = new List<SearchResult<int>>();
for (int i = 0; i < this.dataSet.Count; i++)
{
if (EqualityComparer<TDataType>.Default.Equals(this.dataSet[i], data))
{
var singleResult = new SearchResult<int>(new DefaultPositionalDataIndex(i));
results.Add(singleResult);
}
}
return results;
}
}

View File

@@ -0,0 +1,14 @@
public class DefaultPositionalDataIndex : IDataIndex<int>
{
private List<int> indices = new List<int>();
public DefaultPositionalDataIndex(params int[] indices)
{
this.indices.AddRange(indices);
}
public IList<int> GetIndices()
{
return indices;
}
}

View File

@@ -0,0 +1,82 @@
using System.Runtime.InteropServices;
using Parsing.Data;
namespace Parsing.Data;
public static class DefaultTwoDimensionalManipulator
{
public static DefaultTwoDimensionalManipulator<TDataType> Create<TDataType>(List<List<TDataType>> dataSet) where TDataType : IEquatable<TDataType>
{
return new DefaultTwoDimensionalManipulator<TDataType>(dataSet);
}
}
public class DefaultTwoDimensionalManipulator<TDataType> : DataSetManipulatorBase<List<TDataType>, TDataType, int> where TDataType : IEquatable<TDataType>
{
public DefaultTwoDimensionalManipulator(List<List<TDataType>> dataSet) : base(dataSet, new DefaultDataSetIndexer<TDataType>())
{
}
protected override Direction ValidDirections()
{
return (Direction.N
| Direction.NE
| Direction.E
| Direction.SE
| Direction.S
| Direction.SW
| Direction.W
| Direction.NW);
}
public override bool IsValidIndex(IDataIndex<int> queryPosition)
{
var xIndex = queryPosition.GetIndices()[0];
var yIndex = queryPosition.GetIndices()[1];
return (yIndex >= 0) && (yIndex < this.dataSet.Count) && (xIndex >= 0) && (xIndex < this.dataSet[yIndex].Count);
}
public override IDataIndex<int> Move(IDataIndex<int> currentPosition, Direction direction)
{
switch (direction)
{
case Direction.N:
return new DefaultPositionalDataIndex(currentPosition.GetIndices()[0], currentPosition.GetIndices()[1] + 1);
case Direction.NE:
return new DefaultPositionalDataIndex(currentPosition.GetIndices()[0] + 1, currentPosition.GetIndices()[1] + 1);
case Direction.E:
return new DefaultPositionalDataIndex(currentPosition.GetIndices()[0] + 1, currentPosition.GetIndices()[1]);
case Direction.SE:
return new DefaultPositionalDataIndex(currentPosition.GetIndices()[0] + 1, currentPosition.GetIndices()[1] - 1);
case Direction.S:
return new DefaultPositionalDataIndex(currentPosition.GetIndices()[0], currentPosition.GetIndices()[1] - 1);
case Direction.SW:
return new DefaultPositionalDataIndex(currentPosition.GetIndices()[0] - 1, currentPosition.GetIndices()[1] - 1);
case Direction.W:
return new DefaultPositionalDataIndex(currentPosition.GetIndices()[0] - 1, currentPosition.GetIndices()[1]);
case Direction.NW:
return new DefaultPositionalDataIndex(currentPosition.GetIndices()[0] - 1, currentPosition.GetIndices()[1] + 1);
default:
throw new ArgumentOutOfRangeException("Direction is not supported for moves in the current data set!");
}
}
public override List<SearchResult<int>> FindInSet(TDataType data)
{
var results = new List<SearchResult<int>>();
for (int y = 0; y < this.dataSet.Count; y++)
{
for (int x = 0; x < this.dataSet[this.dataSet.Count - y - 1].Count; x++)
{
if (EqualityComparer<TDataType>.Default.Equals(this.dataSet[this.dataSet.Count - y - 1][x], data))
{
var singleResult = new SearchResult<int>(new DefaultPositionalDataIndex(x, y));
results.Add(singleResult);
}
}
}
return results;
}
}

View File

@@ -0,0 +1,43 @@
[Flags]
public enum Direction
{
N = 1,
NE = 2,
E = 4,
SE = 8,
S = 16,
SW = 32,
W = 64,
NW = 128,
Horizontal = E | W,
Vertical = N | S,
Cardinal = Horizontal | Vertical,
RisingDiagonal = NE | SW,
FallingDiagonal = NW | SE,
Diagonal = RisingDiagonal | FallingDiagonal,
All = Cardinal | Diagonal,
Left = W,
Right = E,
Up = N,
Down = S,
Forward = Right,
Backward = Left
}
public static class DirectionProvider
{
public static Direction[] GetAllDirections()
{
var directions = new Direction[] {
Direction.N,
Direction.NE,
Direction.E,
Direction.SE,
Direction.S,
Direction.SW,
Direction.W,
Direction.NW
};
return directions;
}
}
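Because Direction is a [Flags] enum, composite values such as Cardinal or Diagonal can be passed straight to the search APIs above to restrict which directions are considered. A minimal sketch (illustrative, not part of the diff):
    // Hypothetical example: search a grid for the sequence 4,6 along N/E/S/W only.
    var rows = new List<List<int>>
    {
        new List<int> { 4, 6, 8 },
        new List<int> { 1, 4, 6 },
    };
    var manipulator = DefaultTwoDimensionalManipulator.Create(rows);
    var straightHits = manipulator.FindInSet(new List<int> { 4, 6 }, Direction.Cardinal);
    // Diagonal matches are excluded because the NE/SE/SW/NW bits are not set in Direction.Cardinal.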

View File

@@ -0,0 +1,4 @@
public interface IDataIndex<TIndexType>
{
public IList<TIndexType> GetIndices();
}

View File

@@ -0,0 +1,10 @@
public interface IDataSetIndexer<TDataType, TIndexType>
{
public TDataType Get<TGenericCollectionContentType>(List<TGenericCollectionContentType> collection, IDataIndex<TIndexType> index);
public TDataType Get<TGenericCollectionContentType>(List<TGenericCollectionContentType> collection, params TIndexType[] indices);
public TDataType GetAtIndex(List<TDataType> collection, TIndexType index);
public TDataType GetAtIndex(List<List<TDataType>> collection, TIndexType x, TIndexType y);
public TDataType GetAtIndex(List<List<List<TDataType>>> collection, TIndexType x, TIndexType y, TIndexType z);
}

View File

@@ -1,77 +1,10 @@
namespace Parsing;
namespace Parsing.Data;
using System;
using System.Collections.Generic;
using Parsing.Schema;
using Parsing.Tokenization;
public static class DataConversionHelpers
{
public static List<TNewType> ConvertData<TTokenType, TNewType, TOldType>(this List<IToken> tokenList, Func<TOldType, TNewType> converter) where TTokenType : IValueToken<TOldType>
{
var newList = new List<TNewType>();
foreach (var token in tokenList)
{
var typedToken = token as IValueToken<TOldType>;
if (typedToken == null)
{
throw new Exception("Invalid Token type encountered during value conversion");
}
newList.Add(converter(typedToken.GetValue()));
}
return newList;
}
public static List<TNewType> ConvertData<TTokenType, TNewType, TOldType>(this List<IToken> tokenList, Func<TOldType, List<TNewType>> converter) where TTokenType : IValueToken<TOldType>
{
var newList = new List<TNewType>();
foreach (var token in tokenList)
{
var typedToken = token as IValueToken<TOldType>;
if (typedToken == null)
{
throw new Exception("Invalid Token type encountered during value conversion");
}
newList.AddRange(converter(typedToken.GetValue()));
}
return newList;
}
public static List<List<TNewType>> ConvertData<TTokenType, TNewType, TOldType>(this List<List<IToken>> tokenListList, Func<TOldType, TNewType> converter) where TTokenType : IValueToken<TOldType>
{
var newListList = new List<List<TNewType>>();
foreach (var tokenList in tokenListList)
{
newListList.Add(tokenList.ConvertData<TTokenType, TNewType, TOldType>(converter));
}
return newListList;
}
}
public static class DataManipulationHelpers
{
public static TType ReduceData<TType>(this List<TType> data, Func<TType, TType, TType> reducer)
{
if (data.Count < 2)
{
return data[0] ?? default(TType);
}
TType result = data[0];
for (int i = 1; i < data.Count; i++)
{
result = reducer(result, data[i]);
}
return result;
}
public static TType ReduceData<TType>(this List<TType> data, Func<List<TType>, TType> reducer)
{
return reducer(data);
}
}
public class TokenConverter
{
protected List<List<IToken>> rawTokens = new List<List<IToken>>();
@@ -80,7 +13,7 @@ public class TokenConverter
{
}
private List<T> AsGenericCollection<T, U>() where T : ICollection<U>, new()
private List<T> AsGenericCollection<T, U>() where T : List<U>, new()
{
List<T> returnData = new List<T>();
foreach (var tokenRow in this.rawTokens)
@@ -92,11 +25,15 @@ public class TokenConverter
{
throw new Exception("No token was provided, but token was expected!");
}
IValueToken<U>? valueToken = token as IValueToken<U>;
if (valueToken == null)
if (!token.GetType().IsAssignableTo(typeof(IValueToken<U>)))
{
throw new Exception("Provided token is not a ValueToken");
Console.WriteLine(token.GetText());
Type t = token.GetType();
throw new Exception("Provided token is not a ValueToken - type: " + t.ToString());
}
IValueToken<U> valueToken = token as IValueToken<U>;
newRow.Add(valueToken.GetValue());
}
@@ -223,4 +160,26 @@ public class TokenConverter
return newList;
}
public TokenConverter Filter(params InputType[] inputTypes)
{
var newTokenListList = new List<List<IToken>>();
foreach(var tokenList in this.rawTokens)
{
var newTokenList = new List<IToken>();
foreach(var token in tokenList)
{
if(inputTypes.Contains(token.GetInputType()))
{
newTokenList.Add(token);
}
}
newTokenListList.Add(newTokenList);
}
this.rawTokens = newTokenListList;
return this;
}
}

View File

@@ -4,10 +4,13 @@
public enum BlockType
{
Integer = 1,
String = 2,
Char = 2,
String = 4,
// technically not a block type but keeping it here for consistency/having all types in one place
Fragment = 4,
FixedRepetition = 8,
GreedyRepetition = 16,
NonZeroRepetition = 32,
Fragment = 8,
FixedRepetition = 16,
GreedyRepetition = 32,
NonZeroRepetition = 64,
Custom = 128,
Long = 256,
}

View File

@@ -8,7 +8,7 @@ abstract class BuildingBlockBase : IBuildingBlock
{
}
public abstract IToken ParseWord(InputProvider inputs);
public abstract List<IToken> ParseWord(InputProvider inputs);
public abstract bool CanParseWord(InputProvider inputs);

View File

@@ -0,0 +1,49 @@
namespace Parsing.Schema.BuildingBlocks;
using Parsing.Tokenization;
class CharBlock : BuildingBlockBase
{
public CharBlock()
{
}
public override List<IToken> ParseWord(InputProvider inputs)
{
var tokenList = new List<IToken>();
foreach (char c in inputs.YieldWord())
{
tokenList.Add(new StringToken(c.ToString()));
}
return tokenList;
}
public override bool CanParseWord(InputProvider inputs)
{
string word = string.Empty;
using (inputs.GetLookaheadContext())
{
word = inputs.YieldWord();
}
return this.CanParseWord(word);
}
public override bool CanParseWord(string word)
{
// Here we need to ensure we are not matching any non-string tokens, since string can match pretty much anything
IntegerBlock intBlock = new IntegerBlock();
if (intBlock.CanParseWord(word))
{
return false;
}
return true;
}
public override BlockType GetBlockType()
{
return BlockType.String;
}
}

View File

@@ -0,0 +1,42 @@
namespace Parsing.Schema.BuildingBlocks;
using Parsing.Tokenization;
class CustomInputBlock<T> : BuildingBlockBase
{
private InputType definedInputType;
private Func<string, T> wordConverter;
public CustomInputBlock(InputType definedInputType, Func<string, T> wordConverter)
{
this.definedInputType = definedInputType;
this.wordConverter = wordConverter;
}
public override List<IToken> ParseWord(InputProvider inputs)
{
return new List<IToken>() { new CustomToken<T>(inputs.YieldWord(), this.definedInputType, this.wordConverter) };
}
public override bool CanParseWord(InputProvider inputs)
{
string word = string.Empty;
using (inputs.GetLookaheadContext())
{
word = inputs.YieldWord();
}
return this.CanParseWord(word);
}
public override bool CanParseWord(string word)
{
return true;
}
public override BlockType GetBlockType()
{
return BlockType.Custom;
}
}
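This block pairs with the new InputSchemaBuilder.Expect<T> overload further down in this compare. A hedged sketch of how a custom converter might be wired up (the input text and the choice of InputType.Custom as the defined type are illustrative assumptions):
    // Hypothetical example: treat every word as a hexadecimal number via a custom converter.
    var schema = new InputSchemaBuilder()
        .Repeat()
            .Expect(InputType.Custom, InputType.Custom, word => Convert.ToInt32(word, 16))
        .EndRepetition()
        .Build();
    var parser = TextParser.Create(schema);
    var values = parser
        .SetInputText("ff 10 a0")
        .Parse()
        .AsSingleStream<int>();   // 255, 16, 160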

View File

@@ -20,7 +20,7 @@ class FixedRepetitionBlock : BuildingBlockBase
this.context = this.inputSchema.CreateContext();
}
public override IToken ParseWord(InputProvider inputs)
public override List<IToken> ParseWord(InputProvider inputs)
{
var result = inputSchema.ProcessNextWord(context, inputs);
if (context.HasFinished)
@@ -31,7 +31,7 @@ class FixedRepetitionBlock : BuildingBlockBase
this.context = this.inputSchema.CreateContext();
}
}
return result.SingleOrDefault();
return result;
}
public override bool CanParseWord(InputProvider inputs)

View File

@@ -15,14 +15,14 @@ class GreedyRepetitionBlock : BuildingBlockBase
this.context = this.inputSchema.CreateContext();
}
public override IToken ParseWord(InputProvider inputs)
public override List<IToken> ParseWord(InputProvider inputs)
{
var result = inputSchema.ProcessNextWord(context, inputs);
if (!this.CanParseWord(inputs))
{
this.context = this.inputSchema.CreateContext();
}
return result.SingleOrDefault();
return result;
}
public override bool CanParseWord(InputProvider inputs)

View File

@@ -4,7 +4,7 @@ using Parsing.Tokenization;
public interface IBuildingBlock
{
public IToken ParseWord(InputProvider inputs);
public List<IToken> ParseWord(InputProvider inputs);
public bool CanParseWord(InputProvider inputs);

View File

@@ -9,9 +9,9 @@ class IntegerBlock : BuildingBlockBase
{
}
public override IToken ParseWord(InputProvider inputs)
public override List<IToken> ParseWord(InputProvider inputs)
{
return new IntegerToken(inputs.YieldWord());
return new List<IToken>() { new IntegerToken(inputs.YieldWord()) };
}
public override bool CanParseWord(InputProvider inputs)

View File

@@ -0,0 +1,35 @@
namespace Parsing.Schema.BuildingBlocks;
using Parsing.Tokenization;
class LongBlock : BuildingBlockBase
{
public LongBlock()
{
}
public override List<IToken> ParseWord(InputProvider inputs)
{
return new List<IToken>() { new LongToken(inputs.YieldWord()) };
}
public override bool CanParseWord(InputProvider inputs)
{
using (inputs.GetLookaheadContext())
{
return this.CanParseWord(inputs.YieldWord());
}
}
public override bool CanParseWord(string word)
{
long number = 0;
return long.TryParse(word, out number);
}
public override BlockType GetBlockType()
{
return BlockType.Long;
}
}

View File

@@ -9,9 +9,9 @@ class StringBlock : BuildingBlockBase
{
}
public override IToken ParseWord(InputProvider inputs)
public override List<IToken> ParseWord(InputProvider inputs)
{
return new StringToken(inputs.YieldWord());
return new List<IToken>() { new StringToken(inputs.YieldWord()) };
}
public override bool CanParseWord(InputProvider inputs)
@@ -28,6 +28,12 @@ class StringBlock : BuildingBlockBase
public override bool CanParseWord(string word)
{
// Here we need to ensure we are not matching any non-string tokens, since string can match pretty much anything
LongBlock longBlock = new LongBlock();
if (longBlock.CanParseWord(word))
{
return false;
}
IntegerBlock intBlock = new IntegerBlock();
if (intBlock.CanParseWord(word))
{

View File

@@ -16,11 +16,13 @@ public class FragmentSchema : ISchema<FragmentSchemaContext>
{
private string fragmentRegex;
private List<string> namedGroups = new List<string>();
private List<string> namedLiterals = new List<string>();
public FragmentSchema(string fragmentRegex, List<string> namedGroups)
public FragmentSchema(string fragmentRegex, List<string> namedGroups, List<string> namedLiterals)
{
this.fragmentRegex = fragmentRegex;
this.namedGroups = namedGroups;
this.namedLiterals = namedLiterals;
}
public List<IToken> ProcessNextWord(FragmentSchemaContext currentContext, InputProvider inputs)
@@ -42,6 +44,15 @@ public class FragmentSchema : ISchema<FragmentSchemaContext>
}
newToken.AddMatch(groupName, captureList);
}
foreach (var literalName in this.namedLiterals)
{
var captureList = new List<string>();
if (match.Groups.Keys.Contains(literalName) && match.Groups[literalName].Length > 0)
{
captureList.Add(match.Groups[literalName].Length.ToString());
}
newToken.AddMatch(literalName, captureList);
}
tokenList.Add(newToken);
}

View File

@ -8,6 +8,7 @@ public class FragmentSchemaBuilder : RepetitionSchemaBuilder<FragmentSchemaBuild
protected string fragmentRegex = @"";
private List<string> namedGroups = new List<string>();
private List<string> namedLiterals = new List<string>();
public FragmentSchemaBuilder()
{
@@ -57,9 +58,15 @@ public class FragmentSchemaBuilder : RepetitionSchemaBuilder<FragmentSchemaBuild
return this;
}
public FragmentSchemaBuilder Expect(string literal)
public FragmentSchemaBuilder Expect(string literal, string name = "")
{
this.fragmentRegex += Regex.Escape(literal);
string groupNamePrefix = "";
if (!string.IsNullOrEmpty(name))
{
groupNamePrefix = "?<" + name + ">";
namedLiterals.Add(name);
}
this.fragmentRegex += "(" + groupNamePrefix + Regex.Escape(literal) + ")";
return this;
}
@@ -100,6 +107,10 @@ public class FragmentSchemaBuilder : RepetitionSchemaBuilder<FragmentSchemaBuild
throw new Exception("Invalid repetition definitions!");
}
var oldSchemaBuilder = currentBuilder.UpperLayerBuilder;
if (oldSchemaBuilder == null)
{
throw new Exception("Something went terribly wrong!");
}
var currentRegex = "(" + currentBuilder.fragmentRegex + ")";
switch (currentBuilder.RepetitionType)
@@ -125,7 +136,7 @@ public class FragmentSchemaBuilder : RepetitionSchemaBuilder<FragmentSchemaBuild
public FragmentSchema Build()
{
var schema = new FragmentSchema(this.fragmentRegex, this.namedGroups);
var schema = new FragmentSchema(this.fragmentRegex, this.namedGroups, this.namedLiterals);
return schema;
}
}

View File

@@ -28,15 +28,13 @@ public class InputSchema : ISchema<InputSchemaContext>
public List<IToken> ProcessNextWord(InputSchemaContext currentContext, InputProvider inputs)
{
var nextBlock = this.buildingBlocks[currentContext.lastProcessedBlockIndex];
var token = nextBlock.ParseWord(inputs);
var tokens = nextBlock.ParseWord(inputs);
if (!nextBlock.IsRepetitionType() || nextBlock.CheckIsDoneParsingAndReset(inputs))
{
currentContext.lastProcessedBlockIndex++;
currentContext.HasFinished = currentContext.lastProcessedBlockIndex >= this.buildingBlocks.Count;
}
var newTokenList = new List<IToken>();
newTokenList.Add(token);
return newTokenList;
return tokens;
}
public bool CanProcessNextWord(InputSchemaContext currentContext, InputProvider inputs)

View File

@ -21,6 +21,27 @@ public class InputSchemaBuilder : RepetitionSchemaBuilder<InputSchemaBuilder, In
case InputType.Integer:
block = new IntegerBlock();
break;
case InputType.Long:
block = new LongBlock();
break;
case InputType.Char:
block = new CharBlock();
break;
default:
throw new Exception("Unrecognized InputType");
}
schema.AddBuildingBlock(block);
return this;
}
public InputSchemaBuilder Expect<T>(InputType type, InputType definedInputType, Func<string, T> wordConverter)
{
IBuildingBlock block;
switch (type)
{
case InputType.Custom:
block = new CustomInputBlock<T>(definedInputType, wordConverter);
break;
default:
throw new Exception("Unrecognized InputType");
}
@@ -55,22 +76,26 @@ public class InputSchemaBuilder : RepetitionSchemaBuilder<InputSchemaBuilder, In
{
throw new Exception("Invalid repetition definitions!");
}
var oldInputSchemaBuilder = currentBuilder.UpperLayerBuilder;
var oldSchemaBuilder = currentBuilder.UpperLayerBuilder;
if (oldSchemaBuilder == null)
{
throw new Exception("Something went terribly wrong!");
}
var currentSchema = currentBuilder.Build();
switch (currentBuilder.RepetitionType)
{
case RepetitionType.FixedRepetition:
oldInputSchemaBuilder.schema.AddBuildingBlock(new FixedRepetitionBlock(currentSchema, currentBuilder.NumRepetition));
oldSchemaBuilder.schema.AddBuildingBlock(new FixedRepetitionBlock(currentSchema, currentBuilder.NumRepetition));
break;
case RepetitionType.GreedyRepetition:
oldInputSchemaBuilder.schema.AddBuildingBlock(new GreedyRepetitionBlock(currentSchema));
oldSchemaBuilder.schema.AddBuildingBlock(new GreedyRepetitionBlock(currentSchema));
break;
default:
throw new Exception("Unrecognized RepetitionType");
}
return oldInputSchemaBuilder;
return oldSchemaBuilder;
}
public InputSchema Build()

View File

@ -6,4 +6,7 @@ public enum InputType
Integer = BlockType.Integer,
String = BlockType.String,
Fragment = BlockType.Fragment,
Char = BlockType.Char,
Custom = BlockType.Custom,
Long = BlockType.Long,
}

View File

@@ -2,9 +2,18 @@
using System;
using System.Collections.Generic;
using Parsing.Data;
using Parsing.Schema;
using Parsing.Tokenization;
public static class TextParser
{
public static TextParser<TContext> Create<TContext>(ISchema<TContext> schema, string[]? delimiters = null, bool removeEmptyEntries = true) where TContext : ISchemaContext
{
return new TextParser<TContext>(schema, delimiters, removeEmptyEntries);
}
}
public class TextParser<T> : TokenConverter where T : ISchemaContext
{
private LineParser<T> lineParser;

View File

@@ -0,0 +1,34 @@
namespace Parsing.Tokenization;
using Parsing.Schema;
public class CustomToken<T> : IValueToken<T>
{
private string word;
private InputType definedInputType;
private Func<string, T> wordConverter;
public CustomToken(string word, InputType definedInputType, Func<string, T> wordConverter)
{
this.word = word;
this.wordConverter = wordConverter;
this.definedInputType = definedInputType;
}
public string GetText()
{
return word;
}
public T GetValue()
{
return wordConverter(word);
}
public InputType GetInputType()
{
return this.definedInputType;
}
}

View File

@@ -0,0 +1,28 @@
namespace Parsing.Tokenization;
using Parsing.Schema;
public class LongToken : IValueToken<long>
{
private string word;
public LongToken(string word)
{
this.word = word;
}
public string GetText()
{
return word;
}
public long GetValue()
{
return long.Parse(word);
}
public InputType GetInputType()
{
return InputType.Long;
}
}

View File

@@ -1 +1 @@
0.5.0
0.10.0