Compare commits

...

14 Commits
0.7.1 ... main

Author SHA1 Message Date
f8c00da2b8 release: version 0.10.0 🚀
All checks were successful
Upload Python Package / Create Release (push) Successful in 1m45s
CI / tests_linux (9.0.X, ubuntu-latest) (push) Successful in 2m26s
SonarQube Scan / SonarQube Trigger (push) Successful in 2m24s
Upload Python Package / deploy (push) Successful in 1m48s
2024-12-13 16:04:23 +01:00
d4ac62c592 feat: add support for custom token types and longs, ref: NOISSUE 2024-12-13 16:04:19 +01:00
0f533c2018 release: version 0.9.3 🚀
All checks were successful
CI / tests_linux (9.0.X, ubuntu-latest) (push) Successful in 2m39s
SonarQube Scan / SonarQube Trigger (push) Successful in 2m39s
Upload Python Package / Create Release (push) Successful in 13s
Upload Python Package / deploy (push) Successful in 1m56s
2024-12-13 13:50:04 +01:00
2067fe06fc fix: remove duplicate TokenConverter definition, ref: NOISSUE 2024-12-13 13:50:01 +01:00
cc0f0a24d9 release: version 0.9.2 🚀
All checks were successful
Upload Python Package / Create Release (push) Successful in 1m12s
SonarQube Scan / SonarQube Trigger (push) Successful in 2m20s
CI / tests_linux (9.0.X, ubuntu-latest) (push) Successful in 2m22s
Upload Python Package / deploy (push) Successful in 1m42s
2024-12-13 13:30:50 +01:00
c41d665ab8 fix: more bugfixes, ref: NOISSUE 2024-12-13 13:30:46 +01:00
2fbdafa0e9 release: version 0.9.1 🚀
All checks were successful
Upload Python Package / Create Release (push) Successful in 1m45s
CI / tests_linux (9.0.X, ubuntu-latest) (push) Successful in 2m18s
SonarQube Scan / SonarQube Trigger (push) Successful in 2m16s
Upload Python Package / deploy (push) Successful in 1m50s
2024-12-13 13:23:06 +01:00
f942954678 fix: fix build issues, ref: NOISSUE 2024-12-13 13:23:02 +01:00
4c67e8efb0 release: version 0.9.0 🚀
Some checks failed
SonarQube Scan / SonarQube Trigger (push) Successful in 2m22s
Upload Python Package / Create Release (push) Successful in 2m16s
CI / tests_linux (9.0.X, ubuntu-latest) (push) Failing after 2m23s
Upload Python Package / deploy (push) Failing after 1m55s
2024-12-13 13:13:50 +01:00
e83e99758a feat: add filter option to TokenConverter, ref: NOISSUE 2024-12-13 13:13:43 +01:00
81ac797b4c release: version 0.8.0 🚀
All checks were successful
Upload Python Package / Create Release (push) Successful in 1m39s
CI / tests_linux (9.0.X, ubuntu-latest) (push) Successful in 2m15s
SonarQube Scan / SonarQube Trigger (push) Successful in 2m18s
Upload Python Package / deploy (push) Successful in 1m42s
2024-12-12 20:03:14 +01:00
e9aa60524c feat: adding sensible index constructors refs: NOISSUE 2024-12-12 20:03:07 +01:00
7e5ab9f799 release: version 0.7.2 🚀
All checks were successful
Upload Python Package / Create Release (push) Successful in 1m45s
CI / tests_linux (9.0.X, ubuntu-latest) (push) Successful in 2m7s
SonarQube Scan / SonarQube Trigger (push) Successful in 2m8s
Upload Python Package / deploy (push) Successful in 1m43s
2024-12-06 00:33:43 +01:00
fc137ebd03 fix: add some missing API methods, ref: NOISSUE 2024-12-06 00:33:37 +01:00
17 changed files with 334 additions and 242 deletions

View File

@ -4,12 +4,78 @@ Changelog
(unreleased) (unreleased)
------------ ------------
- Feat: add support for custom token types and longs, ref: NOISSUE.
[Simon Diesenreiter]
0.9.3 (2024-12-13)
------------------
Fix
~~~
- Remove duplicate TokenConverter definition, ref: NOISSUE. [Simon
Diesenreiter]
Other
~~~~~
0.9.2 (2024-12-13)
------------------
Fix
~~~
- More bugfixes, ref: NOISSUE. [Simon Diesenreiter]
Other
~~~~~
0.9.1 (2024-12-13)
------------------
Fix
~~~
- Fix build issues, ref: NOISSUE. [Simon Diesenreiter]
Other
~~~~~
0.9.0 (2024-12-13)
------------------
- Feat: add filter option to TokenConverter, ref: NOISSUE. [Simon
Diesenreiter]
0.8.0 (2024-12-12)
------------------
- Feat: adding sensible index constructors refs: NOISSUE. [Simon
Diesenreiter]
0.7.2 (2024-12-05)
------------------
Fix
~~~
- Add some missing API methods, ref: NOISSUE. [Simon Diesenreiter]
Other
~~~~~
0.7.1 (2024-12-05)
------------------
Fix Fix
~~~ ~~~
- Allow for parsing single chars as input, ref: NOISSUE. [Simon - Allow for parsing single chars as input, ref: NOISSUE. [Simon
Diesenreiter] Diesenreiter]
Other
~~~~~
0.7.0 (2024-12-05) 0.7.0 (2024-12-05)
------------------ ------------------

View File

@ -33,6 +33,7 @@ public class TextParserTests
private const string testInput10 = @"abc private const string testInput10 = @"abc
bca bca
cab"; cab";
private const string testInput11 = @"2 x y 4 x y 6 x y 4 x y 1 x y";
[Fact] [Fact]
public void LineParser_TestSimpleRepetition() public void LineParser_TestSimpleRepetition()
@ -394,7 +395,7 @@ public class TextParserTests
} }
[Fact] [Fact]
public void TextPArser_TestReadingChars() public void TextParser_TestReadingChars()
{ {
var schemaBuilder = new InputSchemaBuilder(); var schemaBuilder = new InputSchemaBuilder();
var schema = schemaBuilder var schema = schemaBuilder
@ -415,4 +416,31 @@ public class TextParserTests
Assert.Equal(3, row[1].Count); Assert.Equal(3, row[1].Count);
Assert.Equal(3, row[2].Count); Assert.Equal(3, row[2].Count);
} }
[Fact]
public void TextParser_TestFilter()
{
var schemaBuilder = new InputSchemaBuilder();
var schema = schemaBuilder
.Repeat()
.Expect(InputType.Integer)
.Expect(InputType.Char)
.Expect(InputType.Char)
.EndRepetition()
.Build();
var parser = new TextParser<InputSchemaContext>(schema);
var numbers = parser
.SetInputText(testInput11)
.Parse()
.Filter(InputType.Integer)
.AsSingleStream<int>();
Assert.Equal(5, numbers.Count);
Assert.Equal(2, numbers[0]);
Assert.Equal(4, numbers[1]);
Assert.Equal(6, numbers[2]);
Assert.Equal(4, numbers[3]);
Assert.Equal(1, numbers[4]);
}
} }

View File

@ -20,4 +20,24 @@ public static class DataManipulationHelpers
{ {
return reducer(data); return reducer(data);
} }
public static List<TNewType> TransformData<TType, TNewType>(this List<TType> data, Func<TType, TNewType> transformer)
{
var newList = new List<TNewType>();
foreach (TType dataItem in data)
{
newList.Add(transformer(dataItem));
}
return newList;
}
public static List<TNewType> TransformData<TType, TNewType>(this List<List<TType>> data, Func<List<TType>, TNewType> transformer)
{
var newList = new List<TNewType>();
foreach (List<TType> dataItemList in data)
{
newList.Add(transformer(dataItemList));
}
return newList;
}
} }

View File

@ -4,11 +4,22 @@ namespace Parsing.Data;
public class SearchResult<TIndexType> public class SearchResult<TIndexType>
{ {
public SearchResult(IDataIndex<TIndexType> dataIndex)
{
this.DataIndex = dataIndex;
}
public IDataIndex<TIndexType>? DataIndex { get; set; } public IDataIndex<TIndexType>? DataIndex { get; set; }
} }
public class DirectionalSearchResult<TIndexType> : SearchResult<TIndexType> public class DirectionalSearchResult<TIndexType> : SearchResult<TIndexType>
{ {
public DirectionalSearchResult(IDataIndex<TIndexType> dataIndex, Direction direction, int length): base(dataIndex)
{
this.Direction = direction;
this.Length = length;
}
public Direction Direction { get; set; } public Direction Direction { get; set; }
public int Length { get; set; } public int Length { get; set; }
} }
@ -111,10 +122,7 @@ public abstract class DataSetManipulatorBase<TCollectedType, TDataType, TIndexTy
} }
if (searchIndex == data.Count) if (searchIndex == data.Count)
{ {
var result = new DirectionalSearchResult<TIndexType>(); var result = new DirectionalSearchResult<TIndexType>(currentPosition, direction, searchIndex);
result.DataIndex = currentPosition;
result.Direction = direction;
result.Length = searchIndex;
results.Add(result); results.Add(result);
} }
} }
@ -123,7 +131,7 @@ public abstract class DataSetManipulatorBase<TCollectedType, TDataType, TIndexTy
return results; return results;
} }
public List<DirectionalSearchResult<TIndexType>> FindInSet(List<TDataType> data) public List<DirectionalSearchResult<TIndexType>> FindInSet(List<TDataType> data, Direction directions)
{ {
var result = new List<DirectionalSearchResult<TIndexType>>(); var result = new List<DirectionalSearchResult<TIndexType>>();
@ -131,11 +139,16 @@ public abstract class DataSetManipulatorBase<TCollectedType, TDataType, TIndexTy
var startingPoints = this.FindInSet(data[0]); var startingPoints = this.FindInSet(data[0]);
foreach (var startingPoint in startingPoints) foreach (var startingPoint in startingPoints)
{ {
foreach (var results in this.FindAtPosition(startingPoint.DataIndex, data)) foreach (var results in this.FindAtPosition(startingPoint.DataIndex, data, directions))
{ {
result.AddRange(results); result.AddRange(results);
} }
} }
return result; return result;
} }
public List<DirectionalSearchResult<TIndexType>> FindInSet(List<TDataType> data)
{
return this.FindInSet(data, this.ValidDirections());
}
} }

View File

@ -49,8 +49,7 @@ public class DefaultOneDimensionalManipulator<TDataType> : DataSetManipulatorBas
{ {
if (EqualityComparer<TDataType>.Default.Equals(this.dataSet[i], data)) if (EqualityComparer<TDataType>.Default.Equals(this.dataSet[i], data))
{ {
var singleResult = new SearchResult<int>(); var singleResult = new SearchResult<int>(new DefaultPositionalDataIndex(i));
singleResult.DataIndex = new DefaultPositionalDataIndex(i);
results.Add(singleResult); results.Add(singleResult);
} }
} }

View File

@ -71,8 +71,7 @@ public class DefaultTwoDimensionalManipulator<TDataType> : DataSetManipulatorBas
{ {
if (EqualityComparer<TDataType>.Default.Equals(this.dataSet[this.dataSet.Count - y - 1][x], data)) if (EqualityComparer<TDataType>.Default.Equals(this.dataSet[this.dataSet.Count - y - 1][x], data))
{ {
var singleResult = new SearchResult<int>(); var singleResult = new SearchResult<int>(new DefaultPositionalDataIndex(x, y));
singleResult.DataIndex = new DefaultPositionalDataIndex(x, y);
results.Add(singleResult); results.Add(singleResult);
} }
} }

View File

@ -13,7 +13,7 @@ public class TokenConverter
{ {
} }
private List<T> AsGenericCollection<T, U>() where T : ICollection<U>, new() private List<T> AsGenericCollection<T, U>() where T : List<U>, new()
{ {
List<T> returnData = new List<T>(); List<T> returnData = new List<T>();
foreach (var tokenRow in this.rawTokens) foreach (var tokenRow in this.rawTokens)
@ -25,11 +25,15 @@ public class TokenConverter
{ {
throw new Exception("No token was provided, but token was expected!"); throw new Exception("No token was provided, but token was expected!");
} }
IValueToken<U>? valueToken = token as IValueToken<U>;
if (valueToken == null) if (!token.GetType().IsAssignableTo(typeof(IValueToken<U>)))
{ {
throw new Exception("Provided token is not a ValueToken"); Console.WriteLine(token.GetText());
Type t = token.GetType();
throw new Exception("Provided token is not a ValueToken - type: " + t.ToString());
} }
IValueToken<U> valueToken = token as IValueToken<U>;
newRow.Add(valueToken.GetValue()); newRow.Add(valueToken.GetValue());
} }
@ -156,4 +160,26 @@ public class TokenConverter
return newList; return newList;
} }
public TokenConverter Filter(params InputType[] inputTypes)
{
var newTokenListList = new List<List<IToken>>();
foreach(var tokenList in this.rawTokens)
{
var newTokenList = new List<IToken>();
foreach(var token in tokenList)
{
if(inputTypes.Contains(token.GetInputType()))
{
newTokenList.Add(token);
}
}
newTokenListList.Add(newTokenList);
}
this.rawTokens = newTokenListList;
return this;
}
} }

View File

@ -11,4 +11,6 @@ public enum BlockType
FixedRepetition = 16, FixedRepetition = 16,
GreedyRepetition = 32, GreedyRepetition = 32,
NonZeroRepetition = 64, NonZeroRepetition = 64,
Custom = 128,
Long = 256,
} }

View File

@ -0,0 +1,42 @@
namespace Parsing.Schema.BuildingBlocks;
using Parsing.Tokenization;
/// <summary>
/// Schema building block that accepts every word and tokenizes it as a
/// <see cref="CustomToken{T}"/>, using a caller-supplied converter function
/// and a caller-defined <see cref="InputType"/> tag.
/// </summary>
class CustomInputBlock<T> : BuildingBlockBase
{
    // Input type reported by every token this block produces.
    private readonly InputType tokenType;

    // Converts the raw word text into the custom value type T.
    private readonly Func<string, T> converter;

    public CustomInputBlock(InputType definedInputType, Func<string, T> wordConverter)
    {
        this.tokenType = definedInputType;
        this.converter = wordConverter;
    }

    /// <summary>Consumes the next word and wraps it in a custom token.</summary>
    public override List<IToken> ParseWord(InputProvider inputs)
    {
        var token = new CustomToken<T>(inputs.YieldWord(), this.tokenType, this.converter);
        return new List<IToken>() { token };
    }

    /// <summary>
    /// Peeks at the next word without consuming it (the lookahead context
    /// rewinds the input on dispose) and checks whether it can be parsed.
    /// </summary>
    public override bool CanParseWord(InputProvider inputs)
    {
        using (inputs.GetLookaheadContext())
        {
            return this.CanParseWord(inputs.YieldWord());
        }
    }

    /// <summary>Custom blocks place no constraint on the word text.</summary>
    public override bool CanParseWord(string word)
    {
        return true;
    }

    public override BlockType GetBlockType()
    {
        return BlockType.Custom;
    }
}

View File

@ -0,0 +1,35 @@
namespace Parsing.Schema.BuildingBlocks;
using Parsing.Tokenization;
/// <summary>
/// Schema building block that matches words representing 64-bit signed
/// integers and tokenizes them as <see cref="LongToken"/>s.
/// </summary>
class LongBlock : BuildingBlockBase
{
    public LongBlock()
    {
    }

    /// <summary>Consumes the next word and wraps it in a <see cref="LongToken"/>.</summary>
    public override List<IToken> ParseWord(InputProvider inputs)
    {
        return new List<IToken>() { new LongToken(inputs.YieldWord()) };
    }

    /// <summary>
    /// Peeks at the next word without consuming it (the lookahead context
    /// rewinds the input on dispose) and checks whether it parses as a long.
    /// </summary>
    public override bool CanParseWord(InputProvider inputs)
    {
        using (inputs.GetLookaheadContext())
        {
            return this.CanParseWord(inputs.YieldWord());
        }
    }

    /// <summary>A word is parseable when the whole word is a valid long value.</summary>
    public override bool CanParseWord(string word)
    {
        // Only parseability matters here, so discard the parsed value.
        return long.TryParse(word, out _);
    }

    public override BlockType GetBlockType()
    {
        return BlockType.Long;
    }
}

View File

@ -28,6 +28,12 @@ class StringBlock : BuildingBlockBase
public override bool CanParseWord(string word) public override bool CanParseWord(string word)
{ {
// Here we need to ensure we are not matching any non-string tokens, since string can match pretty much anything // Here we need to ensure we are not matching any non-string tokens, since string can match pretty much anything
LongBlock longBlock = new LongBlock();
if (longBlock.CanParseWord(word))
{
return false;
}
IntegerBlock intBlock = new IntegerBlock(); IntegerBlock intBlock = new IntegerBlock();
if (intBlock.CanParseWord(word)) if (intBlock.CanParseWord(word))
{ {

View File

@ -21,6 +21,9 @@ public class InputSchemaBuilder : RepetitionSchemaBuilder<InputSchemaBuilder, In
case InputType.Integer: case InputType.Integer:
block = new IntegerBlock(); block = new IntegerBlock();
break; break;
case InputType.Long:
block = new LongBlock();
break;
case InputType.Char: case InputType.Char:
block = new CharBlock(); block = new CharBlock();
break; break;
@ -31,6 +34,21 @@ public class InputSchemaBuilder : RepetitionSchemaBuilder<InputSchemaBuilder, In
return this; return this;
} }
public InputSchemaBuilder Expect<T>(InputType type, InputType definedInputType, Func<string, T> wordConverter)
{
IBuildingBlock block;
switch (type)
{
case InputType.Custom:
block = new CustomInputBlock<T>(definedInputType, wordConverter);
break;
default:
throw new Exception("Unrecognized InputType");
}
schema.AddBuildingBlock(block);
return this;
}
public InputSchemaBuilder Repeat(int repetitionCount) public InputSchemaBuilder Repeat(int repetitionCount)
{ {
// add another layer of parsing // add another layer of parsing

View File

@ -7,4 +7,6 @@ public enum InputType
String = BlockType.String, String = BlockType.String,
Fragment = BlockType.Fragment, Fragment = BlockType.Fragment,
Char = BlockType.Char, Char = BlockType.Char,
Custom = BlockType.Custom,
Long = BlockType.Long,
} }

View File

@ -1,226 +0,0 @@
namespace Parsing;
using System;
using System.Collections.Generic;
using Parsing.Schema;
using Parsing.Tokenization;
public static class DataConversionHelpers
{
public static List<TNewType> ConvertData<TTokenType, TNewType, TOldType>(this List<IToken> tokenList, Func<TOldType, TNewType> converter) where TTokenType : IValueToken<TOldType>
{
var newList = new List<TNewType>();
foreach (var token in tokenList)
{
var typedToken = token as IValueToken<TOldType>;
if (typedToken == null)
{
throw new Exception("Invalid Token type encountered during value conversion");
}
newList.Add(converter(typedToken.GetValue()));
}
return newList;
}
public static List<TNewType> ConvertData<TTokenType, TNewType, TOldType>(this List<IToken> tokenList, Func<TOldType, List<TNewType>> converter) where TTokenType : IValueToken<TOldType>
{
var newList = new List<TNewType>();
foreach (var token in tokenList)
{
var typedToken = token as IValueToken<TOldType>;
if (typedToken == null)
{
throw new Exception("Invalid Token type encountered during value conversion");
}
newList.AddRange(converter(typedToken.GetValue()));
}
return newList;
}
public static List<List<TNewType>> ConvertData<TTokenType, TNewType, TOldType>(this List<List<IToken>> tokenListList, Func<TOldType, TNewType> converter) where TTokenType : IValueToken<TOldType>
{
var newListList = new List<List<TNewType>>();
foreach (var tokenList in tokenListList)
{
newListList.Add(tokenList.ConvertData<TTokenType, TNewType, TOldType>(converter));
}
return newListList;
}
}
public static class DataManipulationHelpers
{
public static TType ReduceData<TType>(this List<TType> data, Func<TType, TType, TType> reducer)
{
if (data.Count < 2)
{
return data[0];
}
TType result = data[0];
for (int i = 1; i < data.Count; i++)
{
result = reducer(result, data[i]);
}
return result;
}
public static TType ReduceData<TType>(this List<TType> data, Func<List<TType>, TType> reducer)
{
return reducer(data);
}
}
public class TokenConverter
{
protected List<List<IToken>> rawTokens = new List<List<IToken>>();
public TokenConverter()
{
}
private List<T> AsGenericCollection<T, U>() where T : ICollection<U>, new()
{
List<T> returnData = new List<T>();
foreach (var tokenRow in this.rawTokens)
{
T newRow = new T();
foreach (IToken token in tokenRow)
{
if (token == null)
{
throw new Exception("No token was provided, but token was expected!");
}
IValueToken<U>? valueToken = token as IValueToken<U>;
if (valueToken == null)
{
throw new Exception("Provided token is not a ValueToken");
}
newRow.Add(valueToken.GetValue());
}
returnData.Add(newRow);
}
return returnData;
}
private void CheckConversionPrerequisites()
{
// in order to convert rows to columns or grid we require every row to have the same length
int rowLength = this.rawTokens[0].Count;
foreach (var tokenRow in this.rawTokens)
{
if (tokenRow.Count != rowLength)
{
throw new Exception("Attempted to convert token dataset that is not able to be converted!");
}
}
}
public List<T> AsSingleStream<T>()
{
List<T> returnData = new List<T>();
foreach (var tokenRow in this.rawTokens)
{
foreach (IToken token in tokenRow)
{
if (token == null)
{
throw new Exception("No token was provided, but token was expected!");
}
IValueToken<T>? valueToken = token as IValueToken<T>;
if (valueToken == null)
{
throw new Exception("Provided token is not a ValueToken");
}
returnData.Add(valueToken.GetValue());
}
}
return returnData;
}
public List<T[]> AsRows<T>()
{
var listRows = this.AsListRows<T>();
var newList = new List<T[]>();
foreach (var rowList in listRows)
{
newList.Add(rowList.ToArray());
}
return newList;
}
public List<List<T>> AsListRows<T>()
{
return this.AsGenericCollection<List<T>, T>();
}
public List<T[]> AsColumns<T>()
{
var listColumns = this.AsListColumns<T>();
var newList = new List<T[]>();
foreach (var columnList in listColumns)
{
newList.Add(columnList.ToArray());
}
return newList;
}
public List<List<T>> AsListColumns<T>()
{
this.CheckConversionPrerequisites();
var rows = AsListRows<T>();
var columns = new List<List<T>>();
for (int i = 0; i < rows[0].Count; i++)
{
columns.Add(new List<T>());
}
foreach (var row in rows)
{
for (int i = 0; i < row.Count; i++)
{
columns[i].Add(row[i]);
}
}
return columns;
}
public T[][] AsGrid<T>()
{
this.CheckConversionPrerequisites();
var rowsList = AsRows<T>();
return rowsList.ToArray();
}
public List<List<IToken>> AsRawData()
{
return this.rawTokens;
}
public List<Fragment> AsFragments()
{
var items = this.AsSingleStream<Fragment>();
var newList = new List<Fragment>();
foreach (var item in items)
{
var typedItem = item as Fragment;
if (typedItem == null)
{
throw new Exception("Invalid token type encountered");
}
newList.Add(typedItem);
}
return newList;
}
}

View File

@ -0,0 +1,34 @@
namespace Parsing.Tokenization;
using Parsing.Schema;
/// <summary>
/// Token carrying a raw word plus a caller-supplied conversion function;
/// the word is converted to <typeparamref name="T"/> on each
/// <see cref="GetValue"/> call, and the token reports a caller-defined
/// <see cref="InputType"/>.
/// </summary>
public class CustomToken<T> : IValueToken<T>
{
    private readonly string text;
    private readonly InputType inputType;
    private readonly Func<string, T> converter;

    public CustomToken(string word, InputType definedInputType, Func<string, T> wordConverter)
    {
        this.text = word;
        this.inputType = definedInputType;
        this.converter = wordConverter;
    }

    /// <summary>Raw word text this token was created from.</summary>
    public string GetText() => this.text;

    /// <summary>Converted value; the converter is invoked on every call.</summary>
    public T GetValue() => this.converter(this.text);

    /// <summary>The caller-defined input type assigned to this token.</summary>
    public InputType GetInputType() => this.inputType;
}

View File

@ -0,0 +1,28 @@
namespace Parsing.Tokenization;
using Parsing.Schema;
/// <summary>
/// Token wrapping a word that represents a 64-bit signed integer; parsing is
/// deferred until <see cref="GetValue"/> is called.
/// </summary>
public class LongToken : IValueToken<long>
{
    private readonly string word;

    public LongToken(string word)
    {
        this.word = word;
    }

    /// <summary>Raw word text this token was created from.</summary>
    public string GetText()
    {
        return word;
    }

    /// <summary>
    /// Parses the word as a long. Uses the invariant culture so the result
    /// does not depend on the machine's regional settings (CA1305).
    /// Throws <see cref="FormatException"/> if the word is not a valid long.
    /// </summary>
    public long GetValue()
    {
        return long.Parse(word, System.Globalization.CultureInfo.InvariantCulture);
    }

    public InputType GetInputType()
    {
        return InputType.Long;
    }
}

View File

@ -1 +1 @@
0.7.1 0.10.0