From 43acd3e54c6b7217c96fa14ec99c31eef9642636 Mon Sep 17 00:00:00 2001 From: mikeclayton Date: Thu, 10 Sep 2020 22:18:41 +0100 Subject: [PATCH 01/11] #65 - added some initial domain model classes and updated unit tests --- src/Kingsland.MofParser.Sample/Program.cs | 38 +- .../Helpers/ModelAssert.cs | 47 +++ .../Helpers/TestUtils.cs | 2 - .../Kingsland.MofParser.UnitTests.csproj | 1 + .../Lexer/LexerHelper.cs | 42 +-- .../Lexer/LexerTests.cs | 3 +- .../Parsing/ParserTests.cs | 99 +++++ .../Ast/PropertyValueAst.cs | 3 +- .../CodeGen/AstMofGenerator.cs | 154 +++----- .../Kingsland.MofParser.csproj | 1 + src/Kingsland.MofParser/Model/Class.cs | 63 ++++ src/Kingsland.MofParser/Model/Instance.cs | 125 +++++++ .../Model/ModelConverter.cs | 341 ++++++++++++++++++ src/Kingsland.MofParser/Model/Module.cs | 60 +++ src/Kingsland.MofParser/Model/Property.cs | 90 +++++ src/Kingsland.MofParser/Parsing/Parser.cs | 23 ++ 16 files changed, 954 insertions(+), 138 deletions(-) create mode 100644 src/Kingsland.MofParser.UnitTests/Helpers/ModelAssert.cs create mode 100644 src/Kingsland.MofParser/Model/Class.cs create mode 100644 src/Kingsland.MofParser/Model/Instance.cs create mode 100644 src/Kingsland.MofParser/Model/ModelConverter.cs create mode 100644 src/Kingsland.MofParser/Model/Module.cs create mode 100644 src/Kingsland.MofParser/Model/Property.cs diff --git a/src/Kingsland.MofParser.Sample/Program.cs b/src/Kingsland.MofParser.Sample/Program.cs index 38041656..2bf930ab 100644 --- a/src/Kingsland.MofParser.Sample/Program.cs +++ b/src/Kingsland.MofParser.Sample/Program.cs @@ -1,5 +1,9 @@ using Kingsland.MofParser; +using Kingsland.MofParser.CodeGen; +using Kingsland.MofParser.Model; +using Kingsland.MofParser.Parsing; using System; +using System.Linq; namespace Kingsland.FileFormat.Mof.Tests { @@ -10,28 +14,32 @@ class Program static void Main(string[] args) { - const string filename = "dsc\\MyServer.mof"; + const string mof = @" + instance of MSFT_RoleResource as $MSFT_RoleResource1ref + { + ResourceID = ""[WindowsFeature]IIS""; + Ensure = ""Present""; + SourceInfo = ""D:\\dsc\\MyServerConfig.ps1::6::9::WindowsFeature""; + Name = ""Web-Server""; + ModuleName = ""PSDesiredStateConfiguration""; + ModuleVersion = ""1.0""; + };"; // parse the mof file - var instances = PowerShellDscHelper.ParseMofFileInstances(filename); + var module = Parser.ParseText(mof); // display the instances - foreach (var instance in instances) + foreach (var instance in module.Instances) { - Console.WriteLine("--------------------------"); - if (string.IsNullOrEmpty(instance.Alias)) - { - Console.WriteLine(string.Format("instance of {0}", instance.ClassName)); - } - else - { - Console.WriteLine(string.Format("instance of {0} as ${1}", instance.ClassName, instance.Alias)); - } - foreach(var property in instance.Properties) + Console.WriteLine($"----------------------------------"); + Console.WriteLine($"typename = {instance.TypeName}"); + Console.WriteLine($"alias = {instance.Alias}"); + Console.WriteLine($"properties:"); + foreach (var property in instance.Properties) { - Console.WriteLine(" {0} = {1}", property.Key.PadRight(14), property.Value.ToString()); + Console.WriteLine(" {0} = {1}", property.Name.PadRight(13), property.Value); } - Console.WriteLine("--------------------------"); + Console.WriteLine($"----------------------------------"); } } diff --git a/src/Kingsland.MofParser.UnitTests/Helpers/ModelAssert.cs b/src/Kingsland.MofParser.UnitTests/Helpers/ModelAssert.cs new file mode 100644 index 00000000..7bd6720c --- 
/dev/null +++ b/src/Kingsland.MofParser.UnitTests/Helpers/ModelAssert.cs @@ -0,0 +1,47 @@ +using Kingsland.MofParser.Model; +using NUnit.Framework; +using System.Linq; + +namespace Kingsland.MofParser.UnitTests.Helpers +{ + + internal static class ModelAssert + { + + public static void AreEqual(Module obj1, Module obj2) + { + Assert.IsNotNull(obj1); + Assert.IsNotNull(obj2); + Assert.AreEqual(obj1.Instances.Count, obj2.Instances.Count); + foreach(var pair in obj1.Instances + .Zip(obj2.Instances, (i1, i2) => (i1, i2))) + { + ModelAssert.AreEqual(pair.i1, pair.i2); + } + } + + public static void AreEqual(Instance obj1, Instance obj2) + { + Assert.IsNotNull(obj1); + Assert.IsNotNull(obj2); + Assert.AreEqual(obj1.TypeName, obj2.TypeName); + Assert.AreEqual(obj1.Alias, obj2.Alias); + Assert.AreEqual(obj1.Properties.Count, obj2.Properties.Count); + foreach (var pair in obj1.Properties + .Zip(obj2.Properties, (p1, p2) => (p1, p2))) + { + ModelAssert.AreEqual(pair.p1, pair.p2); + } + } + + public static void AreEqual(Property obj1, Property obj2) + { + Assert.IsNotNull(obj1); + Assert.IsNotNull(obj2); + Assert.AreEqual(obj1.Name, obj2.Name); + Assert.AreEqual(obj1.Value, obj2.Value); + } + + } + +} diff --git a/src/Kingsland.MofParser.UnitTests/Helpers/TestUtils.cs b/src/Kingsland.MofParser.UnitTests/Helpers/TestUtils.cs index ea6ceabe..65185ba2 100644 --- a/src/Kingsland.MofParser.UnitTests/Helpers/TestUtils.cs +++ b/src/Kingsland.MofParser.UnitTests/Helpers/TestUtils.cs @@ -4,8 +4,6 @@ using System.Collections.Generic; using System.IO; using System.Reflection; -using System.Runtime.Serialization; -using System.Xml; namespace Kingsland.MofParser.UnitTests.Helpers { diff --git a/src/Kingsland.MofParser.UnitTests/Kingsland.MofParser.UnitTests.csproj b/src/Kingsland.MofParser.UnitTests/Kingsland.MofParser.UnitTests.csproj index 8658d55b..a5d63b31 100644 --- a/src/Kingsland.MofParser.UnitTests/Kingsland.MofParser.UnitTests.csproj +++ b/src/Kingsland.MofParser.UnitTests/Kingsland.MofParser.UnitTests.csproj @@ -36,6 +36,7 @@ + diff --git a/src/Kingsland.MofParser.UnitTests/Lexer/LexerHelper.cs b/src/Kingsland.MofParser.UnitTests/Lexer/LexerHelper.cs index 1266b1d1..1c85f1d9 100644 --- a/src/Kingsland.MofParser.UnitTests/Lexer/LexerHelper.cs +++ b/src/Kingsland.MofParser.UnitTests/Lexer/LexerHelper.cs @@ -68,127 +68,127 @@ private static void AssertAreEqualInternal(SyntaxToken expectedToken, SyntaxToke LexerHelper.GetAssertErrorMessage($"actual Text does not match expected value", index)); switch (expectedToken) { - case AliasIdentifierToken token: + case AliasIdentifierToken _: Assert.IsTrue( TokenComparer.AreEqual((AliasIdentifierToken)expectedToken, (AliasIdentifierToken)actualToken), LexerHelper.GetAssertErrorMessage($"actual token does not match expected token", index) ); break; - case AttributeCloseToken token: + case AttributeCloseToken _: Assert.IsTrue( TokenComparer.AreEqual((AttributeCloseToken)expectedToken, (AttributeCloseToken)actualToken), LexerHelper.GetAssertErrorMessage($"actual token does not match expected token", index) ); break; - case AttributeOpenToken token: + case AttributeOpenToken _: Assert.IsTrue( TokenComparer.AreEqual((AttributeOpenToken)expectedToken, (AttributeOpenToken)actualToken), LexerHelper.GetAssertErrorMessage($"actual token does not match expected token", index) ); break; - case BlockCloseToken token: + case BlockCloseToken _: Assert.IsTrue( TokenComparer.AreEqual((BlockCloseToken)expectedToken, (BlockCloseToken)actualToken), 
LexerHelper.GetAssertErrorMessage($"actual token does not match expected token", index) ); break; - case BlockOpenToken token: + case BlockOpenToken _: Assert.IsTrue( TokenComparer.AreEqual((BlockOpenToken)expectedToken, (BlockOpenToken)actualToken), LexerHelper.GetAssertErrorMessage($"actual token does not match expected token", index) ); break; - case BooleanLiteralToken token: + case BooleanLiteralToken _: Assert.IsTrue( TokenComparer.AreEqual((BooleanLiteralToken)expectedToken, (BooleanLiteralToken)actualToken), LexerHelper.GetAssertErrorMessage($"actual token does not match expected token", index) ); break; - case ColonToken token: + case ColonToken _: Assert.IsTrue( TokenComparer.AreEqual((ColonToken)expectedToken, (ColonToken)actualToken), LexerHelper.GetAssertErrorMessage($"actual token does not match expected token", index) ); break; - case CommaToken token: + case CommaToken _: Assert.IsTrue( TokenComparer.AreEqual((CommaToken)expectedToken, (CommaToken)actualToken), LexerHelper.GetAssertErrorMessage($"actual token does not match expected token", index) ); break; - case CommentToken token: + case CommentToken _: Assert.IsTrue( TokenComparer.AreEqual((CommentToken)expectedToken, (CommentToken)actualToken), LexerHelper.GetAssertErrorMessage($"actual token does not match expected token", index) ); break; - case DotOperatorToken token: + case DotOperatorToken _: Assert.IsTrue( TokenComparer.AreEqual((DotOperatorToken)expectedToken, (DotOperatorToken)actualToken), LexerHelper.GetAssertErrorMessage($"actual token does not match expected token", index) ); break; - case EqualsOperatorToken token: + case EqualsOperatorToken _: Assert.IsTrue( TokenComparer.AreEqual((EqualsOperatorToken)expectedToken, (EqualsOperatorToken)actualToken), LexerHelper.GetAssertErrorMessage($"actual token does not match expected token", index) ); break; - case IdentifierToken token: + case IdentifierToken _: Assert.IsTrue( TokenComparer.AreEqual((IdentifierToken)expectedToken, (IdentifierToken)actualToken), LexerHelper.GetAssertErrorMessage($"actual token does not match expected token", index) ); break; - case IntegerLiteralToken token: + case IntegerLiteralToken _: Assert.IsTrue( TokenComparer.AreEqual((IntegerLiteralToken)expectedToken, (IntegerLiteralToken)actualToken), LexerHelper.GetAssertErrorMessage($"actual token does not match expected token", index) ); break; - case NullLiteralToken token: + case NullLiteralToken _: Assert.IsTrue( TokenComparer.AreEqual((NullLiteralToken)expectedToken, (NullLiteralToken)actualToken), LexerHelper.GetAssertErrorMessage($"actual token does not match expected token", index) ); break; - case ParenthesisCloseToken token: + case ParenthesisCloseToken _: Assert.IsTrue( TokenComparer.AreEqual((ParenthesisCloseToken)expectedToken, (ParenthesisCloseToken)actualToken), LexerHelper.GetAssertErrorMessage($"actual token does not match expected token", index) ); break; - case ParenthesisOpenToken token: + case ParenthesisOpenToken _: Assert.IsTrue( TokenComparer.AreEqual((ParenthesisOpenToken)expectedToken, (ParenthesisOpenToken)actualToken), LexerHelper.GetAssertErrorMessage($"actual token does not match expected token", index) ); break; - case PragmaToken token: + case PragmaToken _: Assert.IsTrue( TokenComparer.AreEqual((PragmaToken)expectedToken, (PragmaToken)actualToken), LexerHelper.GetAssertErrorMessage($"actual token does not match expected token", index) ); break; - case RealLiteralToken token: + case RealLiteralToken _: Assert.IsTrue( 
TokenComparer.AreEqual((RealLiteralToken)expectedToken, (RealLiteralToken)actualToken), LexerHelper.GetAssertErrorMessage($"actual token does not match expected token", index) ); break; - case StatementEndToken token: + case StatementEndToken _: Assert.IsTrue( TokenComparer.AreEqual((StatementEndToken)expectedToken, (StatementEndToken)actualToken), LexerHelper.GetAssertErrorMessage($"actual token does not match expected token", index) ); break; - case StringLiteralToken token: + case StringLiteralToken _: Assert.IsTrue( TokenComparer.AreEqual((StringLiteralToken)expectedToken, (StringLiteralToken)actualToken), LexerHelper.GetAssertErrorMessage($"actual token does not match expected token", index) ); break; - case WhitespaceToken token: + case WhitespaceToken _: Assert.IsTrue( TokenComparer.AreEqual((WhitespaceToken)expectedToken, (WhitespaceToken)actualToken), LexerHelper.GetAssertErrorMessage($"actual token does not match expected token", index) diff --git a/src/Kingsland.MofParser.UnitTests/Lexer/LexerTests.cs b/src/Kingsland.MofParser.UnitTests/Lexer/LexerTests.cs index 894c7a1f..9daffa3c 100644 --- a/src/Kingsland.MofParser.UnitTests/Lexer/LexerTests.cs +++ b/src/Kingsland.MofParser.UnitTests/Lexer/LexerTests.cs @@ -2063,5 +2063,4 @@ private static void LexMethodTest(string mofFilename) } -} -/// \ No newline at end of file +} \ No newline at end of file diff --git a/src/Kingsland.MofParser.UnitTests/Parsing/ParserTests.cs b/src/Kingsland.MofParser.UnitTests/Parsing/ParserTests.cs index e91731eb..39f25bde 100644 --- a/src/Kingsland.MofParser.UnitTests/Parsing/ParserTests.cs +++ b/src/Kingsland.MofParser.UnitTests/Parsing/ParserTests.cs @@ -1,4 +1,5 @@ using Kingsland.MofParser.Ast; +using Kingsland.MofParser.Model; using Kingsland.MofParser.Parsing; using Kingsland.MofParser.Tokens; using Kingsland.MofParser.UnitTests.Helpers; @@ -110,6 +111,21 @@ public static void ParsePropetyValueWithLiteralString() var actualJson = TestUtils.ConvertToJson(actualAst); var expectedJson = TestUtils.ConvertToJson(expectedAst); Assert.AreEqual(expectedJson, actualJson); + var actualModule = ModelConverter.ConvertMofSpecificationAst(actualAst); + var expectedModule = new Module.Builder + { + Instances = new List { + new Instance.Builder + { + TypeName = "myType", + Alias = "Alias0000006E", + Properties = new List { + new Property("ServerURL", "https://URL") + } + }.Build() + } + }.Build(); + ModelAssert.AreEqual(expectedModule, actualModule); } [Test] @@ -199,6 +215,21 @@ public static void ParsePropetyValueWithAliasIdentifier() var actualJson = TestUtils.ConvertToJson(actualAst); var expectedJson = TestUtils.ConvertToJson(expectedAst); Assert.AreEqual(expectedJson, actualJson); + var actualModule = ModelConverter.ConvertMofSpecificationAst(actualAst); + var expectedModule = new Module.Builder + { + Instances = new List { + new Instance.Builder + { + TypeName = "myType", + Alias = "Alias00000070", + Properties = new List { + new Property("Reference", "Alias0000006E") + } + }.Build() + } + }.Build(); + ModelAssert.AreEqual(expectedModule, actualModule); } [Test] @@ -279,6 +310,21 @@ public static void ParsePropetyValueWithEmptyArray() var actualJson = TestUtils.ConvertToJson(actualAst); var expectedJson = TestUtils.ConvertToJson(expectedAst); Assert.AreEqual(expectedJson, actualJson); + var actualModule = ModelConverter.ConvertMofSpecificationAst(actualAst); + var expectedModule = new Module.Builder + { + Instances = new List { + new Instance.Builder + { + TypeName = "myType", + Alias = "Alias00000070", + 
Properties = new List { + new Property("Reference", new List()) + } + }.Build() + } + }.Build(); + ModelAssert.AreEqual(expectedModule, actualModule); } [Test] @@ -374,6 +420,23 @@ public static void ParsePropetyValueArrayWithAliasIdentifier() var actualJson = TestUtils.ConvertToJson(actualAst); var expectedJson = TestUtils.ConvertToJson(expectedAst); Assert.AreEqual(expectedJson, actualJson); + var actualModule = ModelConverter.ConvertMofSpecificationAst(actualAst); + var expectedModule = new Module.Builder + { + Instances = new List { + new Instance.Builder + { + TypeName = "myType", + Alias = "Alias00000070", + Properties = new List { + new Property("Reference", new List { + "Alias0000006E" + }) + } + }.Build() + } + }.Build(); + ModelAssert.AreEqual(expectedModule, actualModule); } [Test] @@ -485,6 +548,23 @@ public static void ParsePropetyValueArrayWithLiteralStrings() var actualJson = TestUtils.ConvertToJson(actualAst); var expectedJson = TestUtils.ConvertToJson(expectedAst); Assert.AreEqual(expectedJson, actualJson); + var actualModule = ModelConverter.ConvertMofSpecificationAst(actualAst); + var expectedModule = new Module.Builder + { + Instances = new List { + new Instance.Builder + { + TypeName = "myType", + Alias = "Alias00000070", + Properties = new List { + new Property("ServerURLs", new List { + "https://URL1", "https://URL2" + }) + } + }.Build() + } + }.Build(); + ModelAssert.AreEqual(expectedModule, actualModule); } [Test] @@ -618,6 +698,25 @@ public static void ParsePropetyValueArrayWithNumericLiteralValues() var actualJson = TestUtils.ConvertToJson(actualAst); var expectedJson = TestUtils.ConvertToJson(expectedAst); Assert.AreEqual(expectedJson, actualJson); + var actualModule = ModelConverter.ConvertMofSpecificationAst(actualAst); + var expectedModule = new Module.Builder + { + Instances = new List { + new Instance.Builder + { + TypeName = "myType", + Alias = "Alias00000070", + Properties = new List { + new Property("MyBinaryValue", 42), + new Property("MyOctalValue", 149796), + new Property("MyHexValue", 11256099), + new Property("MyDecimalValue", 12345), + new Property("MyRealValue", 123.45) + } + }.Build() + } + }.Build(); + ModelAssert.AreEqual(expectedModule, actualModule); } } diff --git a/src/Kingsland.MofParser/Ast/PropertyValueAst.cs b/src/Kingsland.MofParser/Ast/PropertyValueAst.cs index d16fd1a7..77d69b20 100644 --- a/src/Kingsland.MofParser/Ast/PropertyValueAst.cs +++ b/src/Kingsland.MofParser/Ast/PropertyValueAst.cs @@ -1,5 +1,4 @@ -using Kingsland.MofParser.CodeGen; -using Kingsland.ParseFx.Parsing; +using Kingsland.ParseFx.Parsing; namespace Kingsland.MofParser.Ast { diff --git a/src/Kingsland.MofParser/CodeGen/AstMofGenerator.cs b/src/Kingsland.MofParser/CodeGen/AstMofGenerator.cs index 03977c52..0c78aac3 100644 --- a/src/Kingsland.MofParser/CodeGen/AstMofGenerator.cs +++ b/src/Kingsland.MofParser/CodeGen/AstMofGenerator.cs @@ -118,27 +118,18 @@ public static string ConvertMofSpecificationAst(MofSpecificationAst node, MofQui public static string ConvertMofProductionAst(MofProductionAst node, MofQuirks quirks = MofQuirks.None) { - switch (node) - { - case CompilerDirectiveAst ast: - return AstMofGenerator.ConvertCompilerDirectiveAst(ast, quirks); - case StructureDeclarationAst ast: - return AstMofGenerator.ConvertStructureDeclarationAst(ast, quirks); - case ClassDeclarationAst ast: - return AstMofGenerator.ConvertClassDeclarationAst(ast, quirks); - case AssociationDeclarationAst ast: - return AstMofGenerator.ConvertAssociationDeclarationAst(ast, quirks); - 
case EnumerationDeclarationAst ast: - return AstMofGenerator.ConvertEnumerationDeclarationAst(ast, quirks); - case InstanceValueDeclarationAst ast: - return AstMofGenerator.ConvertInstanceValueDeclarationAst(ast, quirks); - case StructureValueDeclarationAst ast: - return AstMofGenerator.ConvertStructureValueDeclarationAst(ast, quirks); - case QualifierTypeDeclarationAst ast: - return AstMofGenerator.ConvertQualifierTypeDeclarationAst(ast, quirks); - default: - throw new NotImplementedException(); - } + return node switch + { + CompilerDirectiveAst ast => AstMofGenerator.ConvertCompilerDirectiveAst(ast, quirks), + StructureDeclarationAst ast => AstMofGenerator.ConvertStructureDeclarationAst(ast, quirks), + ClassDeclarationAst ast => AstMofGenerator.ConvertClassDeclarationAst(ast, quirks), + AssociationDeclarationAst ast => AstMofGenerator.ConvertAssociationDeclarationAst(ast, quirks), + EnumerationDeclarationAst ast => AstMofGenerator.ConvertEnumerationDeclarationAst(ast, quirks), + InstanceValueDeclarationAst ast => AstMofGenerator.ConvertInstanceValueDeclarationAst(ast, quirks), + StructureValueDeclarationAst ast => AstMofGenerator.ConvertStructureValueDeclarationAst(ast, quirks), + QualifierTypeDeclarationAst ast => AstMofGenerator.ConvertQualifierTypeDeclarationAst(ast, quirks), + _ => throw new NotImplementedException(), + }; } #endregion @@ -319,17 +310,13 @@ public static string ConvertStructureDeclarationAst(StructureDeclarationAst node public static string ConvertStructureFeatureAst(IStructureFeatureAst node, MofQuirks quirks = MofQuirks.None, string indent = "") { - switch (node) + return node switch { - case StructureDeclarationAst ast: - return AstMofGenerator.ConvertStructureDeclarationAst(ast, quirks, indent); - case EnumerationDeclarationAst ast: - return AstMofGenerator.ConvertEnumerationDeclarationAst(ast, quirks, indent); - case PropertyDeclarationAst ast: - return AstMofGenerator.ConvertPropertyDeclarationAst(ast, quirks); - default: - throw new NotImplementedException(); - } + StructureDeclarationAst ast => AstMofGenerator.ConvertStructureDeclarationAst(ast, quirks, indent), + EnumerationDeclarationAst ast => AstMofGenerator.ConvertEnumerationDeclarationAst(ast, quirks, indent), + PropertyDeclarationAst ast => AstMofGenerator.ConvertPropertyDeclarationAst(ast, quirks), + _ => throw new NotImplementedException(), + }; } #endregion @@ -366,15 +353,12 @@ public static string ConvertClassDeclarationAst(ClassDeclarationAst node, MofQui public static string ConvertClassFeatureAst(IClassFeatureAst node, MofQuirks quirks = MofQuirks.None, string indent = "") { - switch (node) + return node switch { - case IStructureFeatureAst ast: - return AstMofGenerator.ConvertStructureFeatureAst(ast, quirks, indent); - case MethodDeclarationAst ast: - return AstMofGenerator.ConvertMethodDeclarationAst(ast, quirks); - default: - throw new NotImplementedException(); - } + IStructureFeatureAst ast => AstMofGenerator.ConvertStructureFeatureAst(ast, quirks, indent), + MethodDeclarationAst ast => AstMofGenerator.ConvertMethodDeclarationAst(ast, quirks), + _ => throw new NotImplementedException(), + }; } #endregion @@ -460,15 +444,12 @@ public static string ConvertEnumElementAst(EnumElementAst node, MofQuirks quirks public static string ConvertIEnumElementValueAst(IEnumElementValueAst node, MofQuirks quirks = MofQuirks.None) { - switch (node) + return node switch { - case IntegerValueAst ast: - return AstMofGenerator.ConvertIntegerValueAst(ast, quirks); - case StringValueAst ast: - return 
AstMofGenerator.ConvertStringValueAst(ast, quirks); - default: - throw new NotImplementedException(); - } + IntegerValueAst ast => AstMofGenerator.ConvertIntegerValueAst(ast, quirks), + StringValueAst ast => AstMofGenerator.ConvertStringValueAst(ast, quirks), + _ => throw new NotImplementedException(), + }; } #endregion @@ -588,15 +569,12 @@ public static string ConvertParameterDeclarationAst(ParameterDeclarationAst node public static string ConvertComplexTypeValueAst(ComplexTypeValueAst node, MofQuirks quirks = MofQuirks.None, string indent = "") { - switch (node) + return node switch { - case ComplexValueArrayAst ast: - return AstMofGenerator.ConvertComplexValueArrayAst(ast, quirks, indent); - case ComplexValueAst ast: - return AstMofGenerator.ConvertComplexValueAst(ast, quirks, indent); - default: - throw new NotImplementedException(); - } + ComplexValueArrayAst ast => AstMofGenerator.ConvertComplexValueArrayAst(ast, quirks, indent), + ComplexValueAst ast => AstMofGenerator.ConvertComplexValueAst(ast, quirks, indent), + _ => throw new NotImplementedException(), + }; } public static string ConvertComplexValueArrayAst(ComplexValueArrayAst node, MofQuirks quirks = MofQuirks.None, string indent = "") @@ -665,18 +643,14 @@ public static string ConvertPropertyValueListAst(PropertyValueListAst node, MofQ public static string ConvertPropertyValueAst(PropertyValueAst node, MofQuirks quirks = MofQuirks.None, string indent = "") { - switch (node) + return node switch { - case PrimitiveTypeValueAst ast: - return AstMofGenerator.ConvertPrimitiveTypeValueAst(ast, quirks, indent); - case ComplexTypeValueAst ast: - return AstMofGenerator.ConvertComplexTypeValueAst(ast, quirks, indent); - //case ReferenceTypeValueAst ast: - case EnumTypeValueAst ast: - return AstMofGenerator.ConvertEnumTypeValueAst(ast, quirks); - default: - throw new NotImplementedException(); - } + PrimitiveTypeValueAst ast => AstMofGenerator.ConvertPrimitiveTypeValueAst(ast, quirks, indent), + ComplexTypeValueAst ast => AstMofGenerator.ConvertComplexTypeValueAst(ast, quirks, indent), + //ReferenceTypeValueAst ast => + EnumTypeValueAst ast => AstMofGenerator.ConvertEnumTypeValueAst(ast, quirks), + _ => throw new NotImplementedException(), + }; } #endregion @@ -685,34 +659,25 @@ public static string ConvertPropertyValueAst(PropertyValueAst node, MofQuirks qu public static string ConvertPrimitiveTypeValueAst(PrimitiveTypeValueAst node, MofQuirks quirks = MofQuirks.None, string indent = "") { - switch (node) + return node switch { - case LiteralValueAst ast: - return AstMofGenerator.ConvertLiteralValueAst(ast, quirks); - case LiteralValueArrayAst ast: - return AstMofGenerator.ConvertLiteralValueArrayAst(ast, quirks); - default: - throw new NotImplementedException(); - } + LiteralValueAst ast => AstMofGenerator.ConvertLiteralValueAst(ast, quirks), + LiteralValueArrayAst ast => AstMofGenerator.ConvertLiteralValueArrayAst(ast, quirks), + _ => throw new NotImplementedException(), + }; } public static string ConvertLiteralValueAst(LiteralValueAst node, MofQuirks quirks = MofQuirks.None) { - switch (node) + return node switch { - case IntegerValueAst ast: - return AstMofGenerator.ConvertIntegerValueAst(ast, quirks); - case RealValueAst ast: - return AstMofGenerator.ConvertRealValueAst(ast, quirks); - case BooleanValueAst ast: - return AstMofGenerator.ConvertBooleanValueAst(ast, quirks); - case NullValueAst ast: - return AstMofGenerator.ConvertNullValueAst(ast, quirks); - case StringValueAst ast: - return 
AstMofGenerator.ConvertStringValueAst(ast, quirks); - default: - throw new NotImplementedException(); - } + IntegerValueAst ast => AstMofGenerator.ConvertIntegerValueAst(ast, quirks), + RealValueAst ast => AstMofGenerator.ConvertRealValueAst(ast, quirks), + BooleanValueAst ast => AstMofGenerator.ConvertBooleanValueAst(ast, quirks), + NullValueAst ast => AstMofGenerator.ConvertNullValueAst(ast, quirks), + StringValueAst ast => AstMofGenerator.ConvertStringValueAst(ast, quirks), + _ => throw new NotImplementedException(), + }; } public static string ConvertLiteralValueArrayAst(LiteralValueArrayAst node, MofQuirks quirks = MofQuirks.None) @@ -882,15 +847,12 @@ public static string ConvertStructureValueDeclarationAst(StructureValueDeclarati public static string ConvertEnumTypeValueAst(EnumTypeValueAst node, MofQuirks quirks = MofQuirks.None) { - switch (node) + return node switch { - case EnumValueAst ast: - return AstMofGenerator.ConvertEnumValueAst(ast, quirks); - case EnumValueArrayAst ast: - return AstMofGenerator.ConvertEnumValueArrayAst(ast, quirks); - default: - throw new NotImplementedException(); - } + EnumValueAst ast => AstMofGenerator.ConvertEnumValueAst(ast, quirks), + EnumValueArrayAst ast => AstMofGenerator.ConvertEnumValueArrayAst(ast, quirks), + _ => throw new NotImplementedException(), + }; } public static string ConvertEnumValueAst(EnumValueAst node, MofQuirks quirks = MofQuirks.None) diff --git a/src/Kingsland.MofParser/Kingsland.MofParser.csproj b/src/Kingsland.MofParser/Kingsland.MofParser.csproj index cfb64ed5..d33baee2 100644 --- a/src/Kingsland.MofParser/Kingsland.MofParser.csproj +++ b/src/Kingsland.MofParser/Kingsland.MofParser.csproj @@ -8,6 +8,7 @@ https://github.com/KingslandConsulting/Kingsland.MofParser PackageReference + 8.0 diff --git a/src/Kingsland.MofParser/Model/Class.cs b/src/Kingsland.MofParser/Model/Class.cs new file mode 100644 index 00000000..d47a3cd8 --- /dev/null +++ b/src/Kingsland.MofParser/Model/Class.cs @@ -0,0 +1,63 @@ +namespace Kingsland.MofParser.Model +{ + + public sealed class Class + { + + #region BUilder + + public sealed class Builder + { + + public string ClassName + { + get; + set; + } + + public string SuperClass + { + get; + set; + } + + public Class Build() + { + return new Class + { + ClassName = this.ClassName, + SuperClass = this.SuperClass + }; + } + + } + + #endregion + + #region Constructors + + private Class() + { + } + + #endregion + + #region Properties + + public string ClassName + { + get; + private set; + } + + public string SuperClass + { + get; + private set; + } + + #endregion + + } + +} diff --git a/src/Kingsland.MofParser/Model/Instance.cs b/src/Kingsland.MofParser/Model/Instance.cs new file mode 100644 index 00000000..68b8a92d --- /dev/null +++ b/src/Kingsland.MofParser/Model/Instance.cs @@ -0,0 +1,125 @@ +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Text; +using Kingsland.MofParser.Parsing; + +namespace Kingsland.MofParser.Model +{ + + public sealed class Instance + { + + #region Builder + + public sealed class Builder + { + + public string TypeName + { + get; + set; + } + + public string Alias + { + get; + set; + } + + public List Properties + { + get; + set; + } + + public Instance Build() + { + return new Instance + { + TypeName = this.TypeName, + Alias = this.Alias, + Properties = new ReadOnlyCollection( + this.Properties ?? 
new List() + ) + }; + } + + } + + #endregion + + #region Constructors + + private Instance() + { + } + + #endregion + + #region Properties + + public string TypeName + { + get; + private set; + } + + public string Alias + { + get; + private set; + } + + public ReadOnlyCollection Properties + { + get; + private set; + } + + #endregion + + #region Methods + + //public T GetValue(string name) + //{ + // return (T)this.Properties.Single(p => p.Name == name).Value; + //} + + //public bool TryGetValue(string name, out T result) + //{ + // var property = this.Properties.SingleOrDefault(p => p.Name == name); + // if (property == null) + // { + // result = default; + // return false; + // } + // var value = property.Value; + // if (value is T typed) + // { + // result = typed; + // return true; + // } + // result = default; + // return false; + //} + + #endregion + + #region Object Interface + + public override string ToString() + { + var result = new StringBuilder(); + result.Append($"{Constants.INSTANCE} {Constants.OF} {this.TypeName}"); + if (!string.IsNullOrEmpty(this.Alias)) + { + result.Append($" {Constants.AS} ${this.Alias}"); + } + return result.ToString(); + } + + #endregion + + } + +} diff --git a/src/Kingsland.MofParser/Model/ModelConverter.cs b/src/Kingsland.MofParser/Model/ModelConverter.cs new file mode 100644 index 00000000..9978644e --- /dev/null +++ b/src/Kingsland.MofParser/Model/ModelConverter.cs @@ -0,0 +1,341 @@ +using Kingsland.MofParser.Ast; +using System; +using System.Collections.ObjectModel; +using System.Linq; + +namespace Kingsland.MofParser.Model +{ + + internal static class ModelConverter + { + + #region 7.2 MOF specification + + public static Module ConvertMofSpecificationAst(MofSpecificationAst node) + { + return new Module.Builder + { + Instances = node.Productions + .OfType() + .Select(ModelConverter.ConvertInstanceValueDeclarationAst) + .ToList() + }.Build(); + } + + #endregion + + #region 7.3 Compiler directives + + public static void ConvertCompilerDirectiveAst(CompilerDirectiveAst node) + { + throw new NotImplementedException(); + } + + #endregion + + #region 7.4 Qualifiers + + public static void ConvertQualifierTypeDeclarationAst(QualifierTypeDeclarationAst node) + { + throw new NotImplementedException(); + } + + #endregion + + #region 7.4.1 QualifierList + + public static void ConvertQualifierListAst(QualifierListAst node) + { + throw new NotImplementedException(); + } + + public static void ConvertQualifierValueAst(QualifierValueAst node) + { + throw new NotImplementedException(); + } + + public static void ConvertIQualifierInitializerAst(IQualifierInitializerAst node) + { + throw new NotImplementedException(); + } + + public static void ConvertQualifierValueInitializerAst(QualifierValueInitializerAst node) + { + throw new NotImplementedException(); + } + + public static void ConvertQualifierValueArrayInitializerAst(QualifierValueArrayInitializerAst node) + { + throw new NotImplementedException(); + } + + #endregion + + #region 7.5.1 Structure declaration + + public static void ConvertStructureDeclarationAst(StructureDeclarationAst node) + { + throw new NotImplementedException(); + } + + public static void ConvertStructureFeatureAst(IStructureFeatureAst node) + { + throw new NotImplementedException(); + } + + #endregion + + #region 7.5.2 Class declaration + + public static void ConvertClassDeclarationAst(ClassDeclarationAst node) + { + // return new Class.Builder + // { + // ClassName = node.ClassName.Name, + // SuperClass = node.SuperClass.Name + // 
}.Build(); + throw new NotImplementedException(); + } + + public static void ConvertClassFeatureAst(IClassFeatureAst node) + { + throw new NotImplementedException(); + } + + #endregion + + #region 7.5.3 Association declaration + + public static void ConvertAssociationDeclarationAst(AssociationDeclarationAst node) + { + throw new NotImplementedException(); + } + + #endregion + + #region 7.5.4 Enumeration declaration + + public static void ConvertEnumerationDeclarationAst(EnumerationDeclarationAst node) + { + throw new NotImplementedException(); + } + + public static void ConvertEnumElementAst(EnumElementAst node) + { + throw new NotImplementedException(); + } + + public static void ConvertIEnumElementValueAst(IEnumElementValueAst node) + { + throw new NotImplementedException(); + } + + #endregion + + #region 7.5.5 Property declaration + + public static void ConvertPropertyDeclarationAst(PropertyDeclarationAst node) + { + throw new NotImplementedException(); + } + + #endregion + + #region 7.5.6 Method declaration + + public static void ConvertMethodDeclarationAst(MethodDeclarationAst node) + { + throw new NotImplementedException(); + } + + #endregion + + #region 7.5.7 Parameter declaration + + public static void ConvertParameterDeclarationAst(ParameterDeclarationAst node) + { + throw new NotImplementedException(); + } + + #endregion + + #region 7.5.9 Complex type value + + public static object ConvertComplexTypeValueAst(ComplexTypeValueAst node) + { + return node switch + { + ComplexValueArrayAst n => ModelConverter.ConvertComplexValueArrayAst(n), + ComplexValueAst n => ModelConverter.ConvertComplexValueAst(n), + _ => throw new NotImplementedException(), + }; + } + + public static ReadOnlyCollection ConvertComplexValueArrayAst(ComplexValueArrayAst node) + { + return node.Values + .Select(ModelConverter.ConvertComplexValueAst) + .ToList() + .AsReadOnly(); + } + + public static object ConvertComplexValueAst(ComplexValueAst node) + { + if (node.IsAlias) + { + return node.Alias.Name; + }; + return node switch + { + _ => throw new NotImplementedException() + }; + } + + public static ReadOnlyCollection ConvertPropertyValueListAst(PropertyValueListAst node) + { + return node.PropertyValues + .Select( + kvp => new Property.Builder + { + Name = kvp.Key, + Value = ModelConverter.ConvertPropertyValueAst(kvp.Value) + }.Build() + ).ToList() + .AsReadOnly(); + } + + public static object ConvertPropertyValueAst(PropertyValueAst node) + { + return node switch + { + PrimitiveTypeValueAst n => ModelConverter.ConvertPrimitiveTypeValueAst(n), + ComplexTypeValueAst n => ModelConverter.ConvertComplexTypeValueAst(n), + //ReferenceTypeValueAst n => ModelConverter.FromReferenceTypeValueAst(n), + //EnumTypeValueAst n => ModelConverter.FromEnumTypeValueAst(n), + _ => throw new NotImplementedException() + }; + } + + #endregion + + #region 7.6.1 Primitive type value + + public static object ConvertPrimitiveTypeValueAst(PrimitiveTypeValueAst node) + { + return node switch + { + LiteralValueAst n => ModelConverter.ConvertLiteralValueAst(n), + LiteralValueArrayAst n => ModelConverter.ConvertLiteralValueArrayAst(n), + _ => throw new NotImplementedException(), + }; + } + + public static object ConvertLiteralValueAst(LiteralValueAst node) + { + return node switch + { + IntegerValueAst n => ModelConverter.ConvertIntegerValueAst(n), + RealValueAst n => ModelConverter.ConvertRealValueAst(n), + //BooleanValueAst n => ModelConverter.ConvertBooleanValueAst(n), + //NullValueAst n => ModelConverter.ConvertNullValueAst(n), + 
StringValueAst n => ModelConverter.ConvertStringValueAst(n), + _ => throw new NotImplementedException(), + }; + } + + public static ReadOnlyCollection ConvertLiteralValueArrayAst(LiteralValueArrayAst node) + { + return node.Values + .Select(ModelConverter.ConvertLiteralValueAst) + .ToList() + .AsReadOnly(); + } + + #endregion + + #region 7.6.1.1 Integer value + + public static long ConvertIntegerValueAst(IntegerValueAst node) + { + return node.Value; + } + + #endregion + + #region 7.6.1.2 Real value + + public static double ConvertRealValueAst(RealValueAst node) + { + return node.Value; + } + + #endregion + + #region 7.6.1.3 String values + + public static string ConvertStringValueAst(StringValueAst node) + { + return node.Value; + } + + #endregion + + #region 7.6.1.5 Boolean value + + public static void ConvertBooleanValueAst(BooleanValueAst node) + { + throw new NotImplementedException(); + } + + #endregion + + #region 7.6.1.6 Null value + + public static void ConvertNullValueAst(NullValueAst node) + { + throw new NotImplementedException(); + } + + #endregion + + #region 7.6.2 Complex type value + + public static Instance ConvertInstanceValueDeclarationAst(InstanceValueDeclarationAst node) + { + return new Instance.Builder + { + TypeName = node.TypeName.Name, + Alias = node.Alias?.Name, + Properties = ModelConverter.ConvertPropertyValueListAst(node.PropertyValues).ToList() + }.Build(); + } + + public static void ConvertStructureValueDeclarationAst(StructureValueDeclarationAst node) + { + throw new NotImplementedException(); + } + + #endregion + + #region 7.6.3 Enum type value + + public static void ConvertEnumTypeValueAst(EnumTypeValueAst node) + { + throw new NotImplementedException(); + } + + public static void ConvertEnumValueAst(EnumValueAst node) + { + throw new NotImplementedException(); + } + + public static void ConvertEnumValueArrayAst(EnumValueArrayAst node) + { + throw new NotImplementedException(); + } + + #endregion + + } + +} diff --git a/src/Kingsland.MofParser/Model/Module.cs b/src/Kingsland.MofParser/Model/Module.cs new file mode 100644 index 00000000..02812386 --- /dev/null +++ b/src/Kingsland.MofParser/Model/Module.cs @@ -0,0 +1,60 @@ +using System.Collections.Generic; +using System.Collections.ObjectModel; + +namespace Kingsland.MofParser.Model +{ + + public sealed class Module + { + + #region Builder + + public sealed class Builder + { + + public Builder() + { + this.Instances = new List(); + } + + public List Instances + { + get; + set; + } + + public Module Build() + { + return new Module + { + Instances = new ReadOnlyCollection( + this.Instances ?? 
new List() + ) + }; + } + + } + + #endregion + + #region Constructors + + private Module() + { + } + + #endregion + + #region Properties + + public ReadOnlyCollection Instances + { + get; + private set; + } + + #endregion + + } + +} diff --git a/src/Kingsland.MofParser/Model/Property.cs b/src/Kingsland.MofParser/Model/Property.cs new file mode 100644 index 00000000..3ecba7f2 --- /dev/null +++ b/src/Kingsland.MofParser/Model/Property.cs @@ -0,0 +1,90 @@ +using Kingsland.MofParser.CodeGen; +using Kingsland.MofParser.Parsing; +using System.Text; + +namespace Kingsland.MofParser.Model +{ + + public sealed class Property + { + + #region Builder + + public sealed class Builder + { + + public string Name + { + get; + set; + } + + public object Value + { + get; + set; + } + + public Property Build() + { + return new Property(this.Name, this.Value); + } + + } + + #endregion + + #region Constructors + + private Property() + { + } + + public Property(string name, object value) + { + this.Name = name; + this.Value = value; + } + + #endregion + + #region Properties + + public string Name + { + get; + private set; + } + + public object Value + { + get; + private set; + } + + #endregion + + #region Object Interface + + public override string ToString() + { + var result = new StringBuilder(); + result.Append($"{this.Name} = "); + result.Append( + this.Value switch + { + null => Constants.NULL, + true => Constants.TRUE, + false => Constants.FALSE, + string s => $"\"{AstMofGenerator.EscapeString(s)}\"", + _ => $"!!!{this.Value.GetType().FullName}!!!" + } + ); + return result.ToString(); + } + + #endregion + + } + +} diff --git a/src/Kingsland.MofParser/Parsing/Parser.cs b/src/Kingsland.MofParser/Parsing/Parser.cs index e5d2c0b5..51ad49b6 100644 --- a/src/Kingsland.MofParser/Parsing/Parser.cs +++ b/src/Kingsland.MofParser/Parsing/Parser.cs @@ -1,9 +1,13 @@ using System.Collections.Generic; +using System.IO; using System.Linq; using Kingsland.MofParser.Ast; +using Kingsland.MofParser.Lexing; +using Kingsland.MofParser.Model; using Kingsland.MofParser.Tokens; using Kingsland.ParseFx.Parsing; using Kingsland.ParseFx.Syntax; +using Kingsland.ParseFx.Text; namespace Kingsland.MofParser.Parsing { @@ -25,6 +29,25 @@ public static MofSpecificationAst Parse(List lexerTokens, ParserQui } + public static Module ParseFile(string filename) + { + return Parser.ParseText(File.ReadAllText(filename)); + } + + public static Module ParseText(string mofText) + { + // turn the text into a stream of characters for lexing + var reader = SourceReader.From(mofText); + // lex the characters into a sequence of tokens + var tokens = Lexer.Lex(reader); + // parse the tokens into an ast tree + var mofSpecificationAst = Parser.Parse(tokens); + // convert the ast into a Module + var module = ModelConverter.ConvertMofSpecificationAst(mofSpecificationAst); + // return the result + return module; + } + } } From 937ecd12d524f7661daaf2dea861c84ea56c8316 Mon Sep 17 00:00:00 2001 From: mikeclayton Date: Thu, 10 Sep 2020 23:29:09 +0100 Subject: [PATCH 02/11] #65 split lexer unit tests into separate partial class files by token type --- .../CodeGen/MofGeneratorTests.cs | 13 +- .../Helpers/ModelAssert.cs | 47 - .../Helpers/TokenComparer.cs | 398 ---- .../Kingsland.MofParser.UnitTests.csproj | 23 +- .../Lexer/LexerTests.cs | 2066 ----------------- .../LexerHelper.cs => Lexing/LexerAssert.cs} | 121 +- .../Lexing/LexerTests.cs | 120 + .../Lexing/LexerTests_AliasIdentifier.cs | 199 ++ .../Lexing/LexerTests_BooleanLiteral.cs | 143 ++ 
.../Lexing/LexerTests_Comments.cs | 371 +++ .../Lexing/LexerTests_Identifier.cs | 64 + .../Lexing/LexerTests_IntegerLiteral.cs | 471 ++++ .../Lexing/LexerTests_NullLiteral.cs | 80 + .../Lexing/LexerTests_Pragma.cs | 80 + .../Lexing/LexerTests_RealLiteral.cs | 182 ++ .../Lexing/LexerTests_StringLiteral.cs | 83 + .../Lexing/LexerTests_Symbols.cs | 320 +++ .../Lexing/LexerTests_Whitespace.cs | 137 ++ .../{Lexer => Lexing}/TestCases/MyServer.json | 0 .../{Lexer => Lexing}/TestCases/MyServer.mof | Bin .../Model/ModelAssert.cs | 47 + .../Parsing/ParserTests.cs | 16 +- .../Tokens/TokenAssert.cs | 398 ++++ 23 files changed, 2789 insertions(+), 2590 deletions(-) delete mode 100644 src/Kingsland.MofParser.UnitTests/Helpers/ModelAssert.cs delete mode 100644 src/Kingsland.MofParser.UnitTests/Helpers/TokenComparer.cs delete mode 100644 src/Kingsland.MofParser.UnitTests/Lexer/LexerTests.cs rename src/Kingsland.MofParser.UnitTests/{Lexer/LexerHelper.cs => Lexing/LexerAssert.cs} (57%) create mode 100644 src/Kingsland.MofParser.UnitTests/Lexing/LexerTests.cs create mode 100644 src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_AliasIdentifier.cs create mode 100644 src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_BooleanLiteral.cs create mode 100644 src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Comments.cs create mode 100644 src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Identifier.cs create mode 100644 src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_IntegerLiteral.cs create mode 100644 src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_NullLiteral.cs create mode 100644 src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Pragma.cs create mode 100644 src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_RealLiteral.cs create mode 100644 src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_StringLiteral.cs create mode 100644 src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Symbols.cs create mode 100644 src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Whitespace.cs rename src/Kingsland.MofParser.UnitTests/{Lexer => Lexing}/TestCases/MyServer.json (100%) rename src/Kingsland.MofParser.UnitTests/{Lexer => Lexing}/TestCases/MyServer.mof (100%) create mode 100644 src/Kingsland.MofParser.UnitTests/Model/ModelAssert.cs create mode 100644 src/Kingsland.MofParser.UnitTests/Tokens/TokenAssert.cs diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/MofGeneratorTests.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/MofGeneratorTests.cs index 4437fee1..fa1582db 100644 --- a/src/Kingsland.MofParser.UnitTests/CodeGen/MofGeneratorTests.cs +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/MofGeneratorTests.cs @@ -1,4 +1,5 @@ using Kingsland.MofParser.CodeGen; +using Kingsland.MofParser.Lexing; using Kingsland.MofParser.Parsing; using Kingsland.ParseFx.Parsing; using Kingsland.ParseFx.Text; @@ -89,7 +90,7 @@ public static void QualifierWithMofV2FlavorsAndQuirksDisabledShouldThrow() "\t[read: ToSubClass, MappingStrings{\"Win32API|AccessControl|Windows NT Privileges\"}: ToSubClass] string PrivilegesNotHeld[];\r\n" + "\t[read: ToSubClass, MappingStrings{\"Win32API|AccessControl|Windows NT Privileges\"}: ToSubClass] string PrivilegesRequired[];\r\n" + "};"; - var tokens = Lexing.Lexer.Lex(SourceReader.From(sourceMof)); + var tokens = Lexer.Lex(SourceReader.From(sourceMof)); var tokensMof = TokenMofGenerator.ConvertToMof(tokens); var ex = Assert.Throws( () => @@ -172,7 +173,7 @@ public static void InvalidStructureFeatureShouldThrow() "{\r\n" + "\t100\r\n" + "};"; - var tokens = 
Lexing.Lexer.Lex(SourceReader.From(sourceMof)); + var tokens = Lexer.Lex(SourceReader.From(sourceMof)); var tokensMof = TokenMofGenerator.ConvertToMof(tokens); Assert.AreEqual(sourceMof, tokensMof); var ex = Assert.Throws( @@ -305,7 +306,7 @@ public static void InvalidClassFeatureShouldThrow() "{\r\n" + "\t100\r\n" + "};"; - var tokens = Lexing.Lexer.Lex(SourceReader.From(sourceMof)); + var tokens = Lexer.Lex(SourceReader.From(sourceMof)); var tokensMof = TokenMofGenerator.ConvertToMof(tokens); var ex = Assert.Throws( () => { @@ -473,7 +474,7 @@ public static void EnumerationDeclarationDeprecatedMof300IntegerBaseAndQuirksEna "{\r\n" + "\tJuly = \"July\"\r\n" + "};"; - var tokens = Lexing.Lexer.Lex(SourceReader.From(sourceMof)); + var tokens = Lexer.Lex(SourceReader.From(sourceMof)); var tokensMof = TokenMofGenerator.ConvertToMof(tokens); var ex = Assert.Throws( () => @@ -1390,7 +1391,7 @@ public static void EnumValueArrayWithQualifiedEnumValuesAndQuirksDisabledShouldT "{\r\n" + "\tMonth = {MonthEnums.July};\r\n" + "};"; - var tokens = Lexing.Lexer.Lex(SourceReader.From(sourceMof)); + var tokens = Lexer.Lex(SourceReader.From(sourceMof)); var tokensMof = TokenMofGenerator.ConvertToMof(tokens); var ex = Assert.Throws( () => @@ -1449,7 +1450,7 @@ public static void EnumValueArrayWithQualifiedEnumValuesAndQuirksDisabledShouldT private static void AssertRoundtrip(string sourceMof, ParserQuirks parserQuirks = ParserQuirks.None) { // check the lexer tokens roundtrips ok - var tokens = Lexing.Lexer.Lex(SourceReader.From(sourceMof)); + var tokens = Lexer.Lex(SourceReader.From(sourceMof)); var tokensMof = TokenMofGenerator.ConvertToMof(tokens); Assert.AreEqual(sourceMof, tokensMof); // check the parser ast roundtrips ok diff --git a/src/Kingsland.MofParser.UnitTests/Helpers/ModelAssert.cs b/src/Kingsland.MofParser.UnitTests/Helpers/ModelAssert.cs deleted file mode 100644 index 7bd6720c..00000000 --- a/src/Kingsland.MofParser.UnitTests/Helpers/ModelAssert.cs +++ /dev/null @@ -1,47 +0,0 @@ -using Kingsland.MofParser.Model; -using NUnit.Framework; -using System.Linq; - -namespace Kingsland.MofParser.UnitTests.Helpers -{ - - internal static class ModelAssert - { - - public static void AreEqual(Module obj1, Module obj2) - { - Assert.IsNotNull(obj1); - Assert.IsNotNull(obj2); - Assert.AreEqual(obj1.Instances.Count, obj2.Instances.Count); - foreach(var pair in obj1.Instances - .Zip(obj2.Instances, (i1, i2) => (i1, i2))) - { - ModelAssert.AreEqual(pair.i1, pair.i2); - } - } - - public static void AreEqual(Instance obj1, Instance obj2) - { - Assert.IsNotNull(obj1); - Assert.IsNotNull(obj2); - Assert.AreEqual(obj1.TypeName, obj2.TypeName); - Assert.AreEqual(obj1.Alias, obj2.Alias); - Assert.AreEqual(obj1.Properties.Count, obj2.Properties.Count); - foreach (var pair in obj1.Properties - .Zip(obj2.Properties, (p1, p2) => (p1, p2))) - { - ModelAssert.AreEqual(pair.p1, pair.p2); - } - } - - public static void AreEqual(Property obj1, Property obj2) - { - Assert.IsNotNull(obj1); - Assert.IsNotNull(obj2); - Assert.AreEqual(obj1.Name, obj2.Name); - Assert.AreEqual(obj1.Value, obj2.Value); - } - - } - -} diff --git a/src/Kingsland.MofParser.UnitTests/Helpers/TokenComparer.cs b/src/Kingsland.MofParser.UnitTests/Helpers/TokenComparer.cs deleted file mode 100644 index 9d6c90be..00000000 --- a/src/Kingsland.MofParser.UnitTests/Helpers/TokenComparer.cs +++ /dev/null @@ -1,398 +0,0 @@ -using Kingsland.MofParser.Tokens; -using Kingsland.ParseFx.Text; - -namespace Kingsland.MofParser.UnitTests.Helpers -{ - - internal static 
class TokenComparer - { - - #region Token Comparison Methods - - public static bool AreEqual(AliasIdentifierToken obj1, AliasIdentifierToken obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return TokenComparer.AreEqual(obj1.Extent, obj2.Extent) && - (obj1.Name == obj2.Name); - } - } - - public static bool AreEqual(AttributeCloseToken obj1, AttributeCloseToken obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return TokenComparer.AreEqual(obj1.Extent, obj2.Extent); - } - } - public static bool AreEqual(AttributeOpenToken obj1, AttributeOpenToken obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return TokenComparer.AreEqual(obj1.Extent, obj2.Extent); - } - } - - public static bool AreEqual(BlockCloseToken obj1, BlockCloseToken obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return TokenComparer.AreEqual(obj1.Extent, obj2.Extent); - } - } - - public static bool AreEqual(BlockOpenToken obj1, BlockOpenToken obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return TokenComparer.AreEqual(obj1.Extent, obj2.Extent); - } - } - - public static bool AreEqual(BooleanLiteralToken obj1, BooleanLiteralToken obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return TokenComparer.AreEqual(obj1.Extent, obj2.Extent) && - (obj1.Value == obj2.Value); - } - } - - public static bool AreEqual(ColonToken obj1, ColonToken obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return TokenComparer.AreEqual(obj1.Extent, obj2.Extent); - } - } - - public static bool AreEqual(CommaToken obj1, CommaToken obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return TokenComparer.AreEqual(obj1.Extent, obj2.Extent); - } - } - - public static bool AreEqual(CommentToken obj1, CommentToken obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return TokenComparer.AreEqual(obj1.Extent, obj2.Extent); - } - } - - public static bool AreEqual(DotOperatorToken obj1, DotOperatorToken obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return TokenComparer.AreEqual(obj1.Extent, obj2.Extent); - } - } - - public static bool AreEqual(EqualsOperatorToken obj1, EqualsOperatorToken obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return TokenComparer.AreEqual(obj1.Extent, obj2.Extent); - } - } - - public static bool AreEqual(IdentifierToken obj1, IdentifierToken obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - 
return TokenComparer.AreEqual(obj1.Extent, obj2.Extent) && - (obj1.Name == obj2.Name); - } - } - - public static bool AreEqual(IntegerLiteralToken obj1, IntegerLiteralToken obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return TokenComparer.AreEqual(obj1.Extent, obj2.Extent) && - (obj1.Kind == obj2.Kind) && - (obj1.Value == obj2.Value); - } - } - - public static bool AreEqual(NullLiteralToken obj1, NullLiteralToken obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return TokenComparer.AreEqual(obj1.Extent, obj2.Extent); - } - } - - public static bool AreEqual(ParenthesisCloseToken obj1, ParenthesisCloseToken obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return TokenComparer.AreEqual(obj1.Extent, obj2.Extent); - } - } - - public static bool AreEqual(ParenthesisOpenToken obj1, ParenthesisOpenToken obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return TokenComparer.AreEqual(obj1.Extent, obj2.Extent); - } - } - - public static bool AreEqual(PragmaToken obj1, PragmaToken obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return TokenComparer.AreEqual(obj1.Extent, obj2.Extent); - } - } - - public static bool AreEqual(RealLiteralToken obj1, RealLiteralToken obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return TokenComparer.AreEqual(obj1.Extent, obj2.Extent) && - (obj1.Value == obj2.Value); - } - } - - public static bool AreEqual(StatementEndToken obj1, StatementEndToken obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return TokenComparer.AreEqual(obj1.Extent, obj2.Extent); - } - } - - public static bool AreEqual(StringLiteralToken obj1, StringLiteralToken obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return TokenComparer.AreEqual(obj1.Extent, obj2.Extent) && - (obj1.Value == obj2.Value); - } - } - - public static bool AreEqual(WhitespaceToken obj1, WhitespaceToken obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return TokenComparer.AreEqual(obj1.Extent, obj2.Extent); - } - } - - #endregion - - #region Helper Methods - - public static bool AreEqual(SourceExtent obj1, SourceExtent obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return TokenComparer.AreEqual(obj1.StartPosition, obj2.StartPosition) && - TokenComparer.AreEqual(obj1.EndPosition, obj2.EndPosition) && - (obj1.Text == obj2.Text); - } - } - - public static bool AreEqual(SourcePosition obj1, SourcePosition obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return (obj1.Position == 
obj2.Position) && - (obj1.LineNumber == obj2.LineNumber) && - (obj1.ColumnNumber == obj2.ColumnNumber); - } - } - - #endregion - - } - -} diff --git a/src/Kingsland.MofParser.UnitTests/Kingsland.MofParser.UnitTests.csproj b/src/Kingsland.MofParser.UnitTests/Kingsland.MofParser.UnitTests.csproj index a5d63b31..3e528016 100644 --- a/src/Kingsland.MofParser.UnitTests/Kingsland.MofParser.UnitTests.csproj +++ b/src/Kingsland.MofParser.UnitTests/Kingsland.MofParser.UnitTests.csproj @@ -36,18 +36,29 @@ - + + + + + + + + + + + + - - - + + + - - + + Always diff --git a/src/Kingsland.MofParser.UnitTests/Lexer/LexerTests.cs b/src/Kingsland.MofParser.UnitTests/Lexer/LexerTests.cs deleted file mode 100644 index 9daffa3c..00000000 --- a/src/Kingsland.MofParser.UnitTests/Lexer/LexerTests.cs +++ /dev/null @@ -1,2066 +0,0 @@ -using Kingsland.MofParser.Tokens; -using Kingsland.MofParser.UnitTests.Helpers; -using Kingsland.ParseFx.Syntax; -using Kingsland.ParseFx.Text; -using NUnit.Framework; -using System.Collections.Generic; -using System.IO; - -namespace Kingsland.MofParser.UnitTests.Lexer -{ - - [TestFixture] - public static class LexerTests - { - - [TestFixture] - public static class EmptyFileTests - { - - [Test] - public static void ShouldReadEmptyFile() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From(string.Empty) - ); - var expectedTokens = new List(); - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class MiscTests - { - - [Test] - public static void MissingWhitespaceTest() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("12345myIdentifier") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(4, 1, 5), - "12345" - ), - IntegerKind.DecimalValue, 12345 - ), - new IdentifierToken( - new SourceExtent - ( - new SourcePosition(5, 1, 6), - new SourcePosition(16, 1, 17), - "myIdentifier" - ), - "myIdentifier" - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - #region Symbols - - [TestFixture] - public static class ReadAttributeCloseTokenMethod - { - - [Test] - public static void ShouldReadAttributeCloseToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("]") - ); - var expectedTokens = new List { - new AttributeCloseToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(0, 1, 1), - "]" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadAttributeOpenTokenMethod - { - - [Test] - public static void ShouldReadAttributeOpenToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("[") - ); - var expectedTokens = new List { - new AttributeOpenToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(0, 1, 1), - "[" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadBlockCloseTokenMethod - { - - [Test] - public static void ShouldReadBlockCloseToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("}") - ); - var expectedTokens = new List { - new BlockCloseToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(0, 1, 1), - "}" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadBlockOpenTokenMethod - { - - [Test] - public static void ShouldReaBlockOpenToken() - { - 
var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("{") - ); - var expectedTokens = new List { - new BlockOpenToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(0, 1, 1), - "{" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadColonTokenMethod - { - - [Test] - public static void ShouldReadColonToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From(":") - ); - var expectedTokens = new List { - new ColonToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(0, 1, 1), - ":" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadCommaTokenMethod - { - - [Test] - public static void ShouldReadCommaToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From(",") - ); - var expectedTokens = new List { - new CommaToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(0, 1, 1), - "," - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadDotOperatorTokenMethod - { - - [Test] - public static void ShouldReadDotOperatorToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From(".") - ); - var expectedTokens = new List { - new DotOperatorToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(0, 1, 1), - "." - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadDotOperatorTokenWithTrailingNonDecimalDigit() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From(".abc") - ); - var expectedTokens = new List { - new DotOperatorToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(0, 1, 1), - "." 
- ) - ), - new IdentifierToken( - new SourceExtent - ( - new SourcePosition(1, 1, 2), - new SourcePosition(3, 1, 4), - "abc" - ), - "abc" - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadEqualsOperatorTokenMethod - { - - [Test] - public static void ShouldReadEqualsOperatorToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("=") - ); - var expectedTokens = new List { - new EqualsOperatorToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(0, 1, 1), - "=" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadParenthesesCloseTokenMethod - { - - [Test] - public static void ShouldReadEqualsOperatorToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From(")") - ); - var expectedTokens = new List { - new ParenthesisCloseToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(0, 1, 1), - ")" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadParenthesesOpenTokenMethod - { - - [Test] - public static void ShouldReadParenthesesOpenToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("(") - ); - var expectedTokens = new List { - new ParenthesisOpenToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(0, 1, 1), - "(" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadStatementEndTokenMethod - { - - [Test] - public static void ShouldReadStatementEndToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From(";") - ); - var expectedTokens = new List { - new StatementEndToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(0, 1, 1), - ";" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - #endregion - - [TestFixture] - public static class ReadWhitespaceTokenMethod - { - - [Test] - public static void ShouldReadSpaceWhitespaceToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From(" ") - ); - var expectedTokens = new List { - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(4, 1, 5), - " " - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadTabWhitespaceToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("\t\t\t\t\t") - ); - var expectedTokens = new List { - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(4, 1, 5), - "\t\t\t\t\t" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadCrWhitespaceToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("\r\r\r\r\r") - ); - var expectedTokens = new List { - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(4, 5, 1), - "\r\r\r\r\r" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadLfWhitespaceToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("\n\n\n\n\n") - ); - var expectedTokens = new List { - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(4, 5, 1), - "\n\n\n\n\n" - ) - ) - }; - 
LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadCrLfWhitespaceToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("\r\n\r\n\r\n\r\n\r\n") - ); - var expectedTokens = new List { - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(9, 5, 2), - "\r\n\r\n\r\n\r\n\r\n" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadMixedWhitespaceToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From(" \t\t\t\t\t\r\r\r\r\r\n\n\n\n\n\r\n\r\n\r\n\r\n\r\n") - ); - var expectedTokens = new List { - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(29, 14, 2), - " \t\t\t\t\t\r\r\r\r\r\n\n\n\n\n\r\n\r\n\r\n\r\n\r\n" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadCommentTokenMethod - { - - [Test] - public static void ShouldReadSingleLineEofCommentToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("// single line comment") - ); - var expectedTokens = new List { - new CommentToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(21, 1, 22), - "// single line comment" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadSingleLineEolCommentToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("// single line comment\r\n") - ); - var expectedTokens = new List { - new CommentToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(21, 1, 22), - "// single line comment" - ) - ), - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(22, 1, 23), - new SourcePosition(23, 1, 24), - "\r\n" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadMultilineEofCommentToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From( - "/*\r\n" + - "@TargetNode='MyServer'\r\n" + - "@GeneratedBy=mike.clayton\r\n" + - "@GenerationDate=07/19/2014 10:37:04\r\n" + - "@GenerationHost=MyDesktop\r\n" + - "*/" - ) - ); - var expectedTokens = new List { - new CommentToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(120, 6, 2), - "/*\r\n" + - "@TargetNode='MyServer'\r\n" + - "@GeneratedBy=mike.clayton\r\n" + - "@GenerationDate=07/19/2014 10:37:04\r\n" + - "@GenerationHost=MyDesktop\r\n" + - "*/" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadMultilineUnclosedCommentToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From( - "/*\r\n" + - "@TargetNode='MyServer'\r\n" + - "@GeneratedBy=mike.clayton\r\n" + - "@GenerationDate=07/19/2014 10:37:04\r\n" + - "@GenerationHost=MyDesktop\r\n" - ) - ); - var expectedTokens = new List { - new CommentToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(118, 5, 27), - "/*\r\n" + - "@TargetNode='MyServer'\r\n" + - "@GeneratedBy=mike.clayton\r\n" + - "@GenerationDate=07/19/2014 10:37:04\r\n" + - "@GenerationHost=MyDesktop\r\n" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadMultilineInlineAsterisks() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From( - "/*************\r\n" + - "@TargetNode='MyServer'\r\n" + - 
"@GeneratedBy=mike.clayton\r\n" + - "@GenerationDate=07/19/2014 10:37:04\r\n" + - "@GenerationHost=MyDesktop\r\n" + - "*************/" - ) - ); - var expectedTokens = new List { - new CommentToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(144, 6, 14), - "/*************\r\n" + - "@TargetNode='MyServer'\r\n" + - "@GeneratedBy=mike.clayton\r\n" + - "@GenerationDate=07/19/2014 10:37:04\r\n" + - "@GenerationHost=MyDesktop\r\n" + - "*************/" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadMultilineMultiple() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From( - "/*\r\n" + - "@TargetNode='MyServer'\r\n" + - "@GeneratedBy=mike.clayton\r\n" + - "@GenerationDate=07/19/2014 10:37:04\r\n" + - "@GenerationHost=MyDesktop\r\n" + - "*//*\r\n" + - "@TargetNode='MyServer2'\r\n" + - "@GeneratedBy=mike.clayton\r\n" + - "@GenerationDate=07/19/2014 10:37:04\r\n" + - "@GenerationHost=MyDesktop\r\n" + - "*/" - ) - ); - var expectedTokens = new List { - new CommentToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(120, 6, 2), - "/*\r\n" + - "@TargetNode='MyServer'\r\n" + - "@GeneratedBy=mike.clayton\r\n" + - "@GenerationDate=07/19/2014 10:37:04\r\n" + - "@GenerationHost=MyDesktop\r\n" + - "*/" - ) - ), - new CommentToken( - new SourceExtent - ( - new SourcePosition(121, 6, 3), - new SourcePosition(242, 11, 2), - "/*\r\n" + - "@TargetNode='MyServer2'\r\n" + - "@GeneratedBy=mike.clayton\r\n" + - "@GenerationDate=07/19/2014 10:37:04\r\n" + - "@GenerationHost=MyDesktop\r\n" + - "*/" - ) - ) - - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadExample1CommentToken() - { - // see DSP0221_3.0.1.pdf "5.4 Comments" - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("Integer MyProperty; // This is an example of a single-line comment") - ); - var expectedTokens = new List { - new IdentifierToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(6, 1, 7), - "Integer" - ), - "Integer" - ), - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(7, 1, 8), - new SourcePosition(7, 1, 8), - " " - ) - ), - new IdentifierToken( - new SourceExtent - ( - new SourcePosition(8, 1, 9), - new SourcePosition(17, 1, 18), - "MyProperty" - ), - "MyProperty" - ), - new StatementEndToken( - new SourceExtent - ( - new SourcePosition(18, 1, 19), - new SourcePosition(18, 1, 19), - ";" - ) - ), - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(19, 1, 20), - new SourcePosition(19, 1, 20), - " " - ) - ), - new CommentToken( - new SourceExtent - ( - new SourcePosition(20, 1, 21), - new SourcePosition(65, 1, 66), - "// This is an example of a single-line comment" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadExample2CommentToken() - { - // see DSP0221_3.0.1.pdf "5.4 Comments" - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From( - "/* example of a comment between property definition tokens and a multi-line comment */\r\n" + - "Integer /* 16-bit integer property */ MyProperty; /* and a multi-line\r\n" + - " comment */" - ) - ); - var expectedTokens = new List { - new CommentToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(85, 1, 86), - "/* example of a comment between property definition tokens and a multi-line comment */" - ) - ), - new WhitespaceToken( - new 
SourceExtent - ( - new SourcePosition(86, 1, 87), - new SourcePosition(87, 1, 88), - "\r\n" - ) - ), - new IdentifierToken( - new SourceExtent - ( - new SourcePosition(88, 2, 1), - new SourcePosition(94, 2, 7), - "Integer" - ), - "Integer" - ), - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(95, 2, 8), - new SourcePosition(95, 2, 8), - " " - ) - ), - new CommentToken( - new SourceExtent - ( - new SourcePosition(96, 2, 9), - new SourcePosition(124, 2, 37), - "/* 16-bit integer property */" - ) - ), - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(125, 2, 38), - new SourcePosition(125, 2, 38), - " " - ) - ), - new IdentifierToken( - new SourceExtent - ( - new SourcePosition(126, 2, 39), - new SourcePosition(135, 2, 48), - "MyProperty" - ), - "MyProperty" - ), - new StatementEndToken( - new SourceExtent - ( - new SourcePosition(136, 2, 49), - new SourcePosition(136, 2, 49), - ";" - ) - ), - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(137, 2, 50), - new SourcePosition(137, 2, 50), - " " - ) - ), - new CommentToken( - new SourceExtent - ( - new SourcePosition(138, 2, 51), - new SourcePosition(192, 3, 34), - "/* and a multi-line\r\n" + - " comment */" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadBooleanLiteralTokenMethod - { - - [Test] - public static void ShouldReadLowerCaseFalseToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("false") - ); - var expectedTokens = new List { - new BooleanLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(4, 1, 5), - "false" - ), - false - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadMixedCaseFalseToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("False") - ); - var expectedTokens = new List { - new BooleanLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(4, 1, 5), - "False" - ), - false - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadUpperCaseFalseToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("FALSE") - ); - var expectedTokens = new List { - new BooleanLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(4, 1, 5), - "FALSE" - ), - false - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadLowerCaseTrueToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("true") - ); - var expectedTokens = new List { - new BooleanLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(3, 1, 4), - "true" - ), - true - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadMixedCaseTrueToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("True") - ); - var expectedTokens = new List { - new BooleanLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(3, 1, 4), - "True" - ), - true - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadUpperCaseTrueToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("TRUE") - ); - var expectedTokens = new List { - new BooleanLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new 
SourcePosition(3, 1, 4), - "TRUE" - ), - true - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadNullLiteralTokenMethod - { - - [Test] - public static void ShouldReadLowerCaseNullToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("null") - ); - var expectedTokens = new List { - new NullLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(3, 1, 4), - "null" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadMixedCaseNullToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("Null") - ); - var expectedTokens = new List { - new NullLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(3, 1, 4), - "Null" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadUpperCaseNullToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("NULL") - ); - var expectedTokens = new List { - new NullLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(3, 1, 4), - "NULL" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadPragmaTokenMethod - { - - [Test] - public static void ShouldReadLowerCasePragmaToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("#pragma") - ); - var expectedTokens = new List { - new PragmaToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(6, 1, 7), - "#pragma" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadMixedCasePragmaToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("#Pragma") - ); - var expectedTokens = new List { - new PragmaToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(6, 1, 7), - "#Pragma" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadUpperCasePragmaToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("#PRAGMA") - ); - var expectedTokens = new List { - new PragmaToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(6, 1, 7), - "#PRAGMA" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadIdentifierTokenMethod - { - - [Test] - public static void ShouldReadIdentifierToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From - ( - "myIdentifier\r\n" + - "myIdentifier2" - ) - ); - var expectedTokens = new List { - new IdentifierToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(11, 1, 12), - "myIdentifier" - ), - "myIdentifier" - ), - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(12, 1, 13), - new SourcePosition(13, 1, 14), - "\r\n" - ) - ), - new IdentifierToken( - new SourceExtent - ( - new SourcePosition(14, 2, 1), - new SourcePosition(26, 2, 13), - "myIdentifier2" - ), - "myIdentifier2" - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadAliasIdentifierTokenMethod - { - - [Test] - public static void ShouldReadAliasIdentifierToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From - ( - "$myAliasIdentifier\r\n" + - "$myAliasIdentifier2" - ) - 
); - var expectedTokens = new List { - new AliasIdentifierToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(17, 1, 18), - "$myAliasIdentifier" - ), - "myAliasIdentifier" - ), - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(18, 1, 19), - new SourcePosition(19, 1, 20), - "\r\n" - ) - ), - new AliasIdentifierToken( - new SourceExtent - ( - new SourcePosition(20, 2, 1), - new SourcePosition(38, 2, 19), - "$myAliasIdentifier2" - ), - "myAliasIdentifier2" - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadInstanceWithAliasIdentifier() - { - // test case for https://github.com/mikeclayton/MofParser/issues/4 - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From - ( - "instance of cTentacleAgent as $cTentacleAgent1ref\r\n" + - "{\r\n" + - "};" - ) - ); - var expectedTokens = new List - { - new IdentifierToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(7, 1, 8), - "instance" - ), - "instance" - ), - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(8, 1, 9), - new SourcePosition(8, 1, 9), - " " - ) - ), - new IdentifierToken( - new SourceExtent - ( - new SourcePosition(9, 1, 10), - new SourcePosition(10, 1, 11), - "of" - ), - "of" - ), - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(11, 1, 12), - new SourcePosition(11, 1, 12), - " " - ) - ), - new IdentifierToken( - new SourceExtent - ( - new SourcePosition(12, 1, 13), - new SourcePosition(25, 1, 26), - "cTentacleAgent" - ), - "cTentacleAgent" - ), - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(26, 1, 27), - new SourcePosition(26, 1, 27), - " " - ) - ), - new IdentifierToken( - new SourceExtent - ( - new SourcePosition(27, 1, 28), - new SourcePosition(28, 1, 29), - "as" - ), - "as" - ), - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(29, 1, 30), - new SourcePosition(29, 1, 30), - " " - ) - ), - new AliasIdentifierToken( - new SourceExtent - ( - new SourcePosition(30, 1, 31), - new SourcePosition(48, 1, 49), - "$cTentacleAgent1ref" - ), - "cTentacleAgent1ref" - ), - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(49, 1, 50), - new SourcePosition(50, 1, 51), - "\r\n" - ) - ), - new BlockOpenToken( - new SourceExtent - ( - new SourcePosition(51, 2, 1), - new SourcePosition(51, 2, 1), - "{" - ) - ), - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(52, 2, 2), - new SourcePosition(53, 2, 3), - "\r\n" - ) - ), - new BlockCloseToken( - new SourceExtent - ( - new SourcePosition(54, 3, 1), - new SourcePosition(54, 3, 1), - "}" - ) - ), - new StatementEndToken( - new SourceExtent - ( - new SourcePosition(55, 3, 2), - new SourcePosition(55, 3, 2), - ";" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadIntegerLiteralTokenMethod - { - - // binaryValue - - [Test] - public static void ShouldReadBinaryValue0b() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("0b") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(1, 1, 2), - "0b" - ), - IntegerKind.BinaryValue, 0 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadBinaryValue1b() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("1b") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new 
SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(1, 1, 2), - "1b" - ), - IntegerKind.BinaryValue, 1 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadBinaryValue00000b() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("00000b") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(5, 1, 6), - "00000b" - ), - IntegerKind.BinaryValue, 0 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadBinaryValue10000b() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("10000b") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(5, 1, 6), - "10000b" - ), - IntegerKind.BinaryValue, 16 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadBinaryValue11111b() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("11111b") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(5, 1, 6), - "11111b" - ), - IntegerKind.BinaryValue, 31 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - // octalValue - - [Test] - public static void ShouldReadOctalValue00() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("00") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(1, 1, 2), - "00" - ), - IntegerKind.OctalValue, 0 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadOctalValue01() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("01") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(1, 1, 2), - "01" - ), - IntegerKind.OctalValue, 1 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadOctalValue00000() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("00000") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(4, 1, 5), - "00000" - ), - IntegerKind.OctalValue, 0 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadOctalValue01000() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("01000") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(4, 1, 5), - "01000" - ), - IntegerKind.OctalValue, 512 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadOctalValue01111() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("01111") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(4, 1, 5), - "01111" - ), - IntegerKind.OctalValue, 585 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadOctalValue04444() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("04444") - ); - var 
expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(4, 1, 5), - "04444" - ), - IntegerKind.OctalValue, 2340 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadOctalValue07777() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("07777") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(4, 1, 5), - "07777" - ), - IntegerKind.OctalValue, 4095 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - // hexValue - - [Test] - public static void ShouldReadHexValue0x0() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("0x0") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(2, 1, 3), - "0x0" - ), - IntegerKind.HexValue, 0 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadHexValue0x0000() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("0x0000") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(5, 1, 6), - "0x0000" - ), - IntegerKind.HexValue, 0 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadHexValue0x8888() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("0x8888") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(5, 1, 6), - "0x8888" - ), - IntegerKind.HexValue, 34952 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadHexValue0xabcd() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("0xabcd") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(5, 1, 6), - "0xabcd" - ), - IntegerKind.HexValue, 43981 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadHexValue0xABCD() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("0xABCD") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(5, 1, 6), - "0xABCD" - ), - IntegerKind.HexValue, 43981 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - // decimalValue - - [Test] - public static void ShouldReadDecimalValue0() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("0") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(0, 1, 1), - "0" - ), - IntegerKind.DecimalValue, 0 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadDecimalValue12345() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("12345") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(4, 1, 5), - "12345" - ), - IntegerKind.DecimalValue, 12345 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadDecimalValuePlus12345() 
- { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("+12345") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(5, 1, 6), - "+12345" - ), - IntegerKind.DecimalValue, 12345 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadDecimalValueMinus12345() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("-12345") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(5, 1, 6), - "-12345" - ), - IntegerKind.DecimalValue, -12345 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadDecimalValue1234567890() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("1234567890") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(9, 1, 10), - "1234567890" - ), - IntegerKind.DecimalValue, 1234567890 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadRealLiteralTokenMethod - { - - [Test] - public static void ShouldReadRealValue0_0() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("0.0") - ); - var expectedTokens = new List { - new RealLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(2, 1, 3), - "0.0" - ), - 0 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadRealValue123_45() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("123.45") - ); - var expectedTokens = new List { - new RealLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(5, 1, 6), - "123.45" - ), - 123.45 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadRealValuePlus123_45() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("+123.45") - ); - var expectedTokens = new List { - new RealLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(6, 1, 7), - "+123.45" - ), - 123.45 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadRealValueMinus123_45() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("-123.45") - ); - var expectedTokens = new List { - new RealLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(6, 1, 7), - "-123.45" - ), - -123.45 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadRealValue1234567890_00() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("1234567890.00") - ); - var expectedTokens = new List { - new RealLiteralToken( - new SourceExtent( - new SourcePosition(0, 1, 1), - new SourcePosition(12, 1, 13), - "1234567890.00" - ), - 1234567890.00 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadRealValue_45() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From(".45") - ); - var expectedTokens = new List { - new RealLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(2, 1, 3), - ".45" - ), - 0.45 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - 
} - - [Test] - public static void ShouldReadRealValuePlus_45() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("+.45") - ); - var expectedTokens = new List { - new RealLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(3, 1, 4), - "+.45" - ), - 0.45 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadRealValueMinus_45() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("-.45") - ); - var expectedTokens = new List { - new RealLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(3, 1, 4), - "-.45" - ), - -0.45 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadStringLiteralTokenMethod - { - - [Test] - public static void ShouldReadEmptyString() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("\"\"") - ); - var expectedTokens = new List { - new StringLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(1, 1, 2), - "\"\"" - ), - string.Empty - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadBasicString() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("\"my string literal\"") - ); - var expectedTokens = new List { - new StringLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(18, 1, 19), - "\"my string literal\"" - ), - "my string literal" - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadEscapedString() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From(@"""my \\ string \"" literal \' with \b lots \t and \n lots \f of \r escapes""") - ); - var expectedTokens = new List { - new StringLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(72, 1, 73), - @"""my \\ string \"" literal \' with \b lots \t and \n lots \f of \r escapes""" - ), - "my \\ string \" literal \' with \b lots \t and \n lots \f of \r escapes" - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class LexMethodTestCases - { - //[Test, TestCaseSource(typeof(LexMethodTestCases), "GetTestCases")] - public static void LexMethodTestsFromDisk(string mofFilename) - { - LexerTests.LexMethodTest(mofFilename); - } - public static IEnumerable GetTestCases - { - get - { - return TestUtils.GetMofTestCase("cim_schema_2.51.0Final-MOFs"); - } - } - } - - [TestFixture] - public static class LexCimSpec - { - //[Test, TestCaseSource(typeof(LexCimSpec), "GetTestCases")] - public static void LexMethodTestsFromDisk(string mofFilename) - { - LexerTests.LexMethodTest(mofFilename); - } - public static IEnumerable GetTestCases - { - get - { - return TestUtils.GetMofTestCase("Lexer\\TestCases"); - } - } - } - - private static void LexMethodTest(string mofFilename) - { - var mofText = File.ReadAllText(mofFilename); - var reader = SourceReader.From(mofText); - var actualTokens = Lexing.Lexer.Lex(reader); - var actualText = TestUtils.ConvertToJson(actualTokens); - var expectedFilename = Path.Combine(Path.GetDirectoryName(mofFilename), - Path.GetFileNameWithoutExtension(mofFilename) + ".json"); - if (!File.Exists(expectedFilename)) - { - File.WriteAllText(expectedFilename, actualText); - } - var expectedText = File.ReadAllText(expectedFilename); - Assert.AreEqual(expectedText, actualText); - } 
- - } - -} \ No newline at end of file diff --git a/src/Kingsland.MofParser.UnitTests/Lexer/LexerHelper.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerAssert.cs similarity index 57% rename from src/Kingsland.MofParser.UnitTests/Lexer/LexerHelper.cs rename to src/Kingsland.MofParser.UnitTests/Lexing/LexerAssert.cs index 1c85f1d9..1424b745 100644 --- a/src/Kingsland.MofParser.UnitTests/Lexer/LexerHelper.cs +++ b/src/Kingsland.MofParser.UnitTests/Lexing/LexerAssert.cs @@ -1,21 +1,22 @@ using Kingsland.MofParser.Tokens; -using Kingsland.MofParser.UnitTests.Helpers; +using Kingsland.MofParser.UnitTests.Tokens; using Kingsland.ParseFx.Syntax; using NUnit.Framework; using System; using System.Collections.Generic; -namespace Kingsland.MofParser.UnitTests.Lexer +namespace Kingsland.MofParser.UnitTests.Lexing { - public sealed class LexerHelper + + public sealed class LexerAssert { - public static void AssertAreEqual(SyntaxToken expectedToken, SyntaxToken actualToken) + public static void AreEqual(SyntaxToken expectedToken, SyntaxToken actualToken) { - LexerHelper.AssertAreEqualInternal(expectedToken, actualToken); + LexerAssert.AreEqualInternal(expectedToken, actualToken); } - public static void AssertAreEqual(List expectedTokens, List actualTokens) + public static void AreEqual(List expectedTokens, List actualTokens) { if ((expectedTokens == null) && (actualTokens == null)) { @@ -31,12 +32,12 @@ public static void AssertAreEqual(List expectedTokens, List(); + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + [TestFixture] + public static class MiscTests + { + + [Test] + public static void MissingWhitespaceTest() + { + var actualTokens = Lexer.Lex( + SourceReader.From("12345myIdentifier") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(4, 1, 5), + "12345" + ), + IntegerKind.DecimalValue, 12345 + ), + new IdentifierToken( + new SourceExtent + ( + new SourcePosition(5, 1, 6), + new SourcePosition(16, 1, 17), + "myIdentifier" + ), + "myIdentifier" + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + [TestFixture] + public static class LexMethodTestCases + { + //[Test, TestCaseSource(typeof(LexMethodTestCases), "GetTestCases")] + public static void LexMethodTestsFromDisk(string mofFilename) + { + LexerTests.LexMethodTest(mofFilename); + } + public static IEnumerable GetTestCases + { + get + { + return TestUtils.GetMofTestCase("cim_schema_2.51.0Final-MOFs"); + } + } + } + + [TestFixture] + public static class LexCimSpec + { + //[Test, TestCaseSource(typeof(LexCimSpec), "GetTestCases")] + public static void LexMethodTestsFromDisk(string mofFilename) + { + LexerTests.LexMethodTest(mofFilename); + } + public static IEnumerable GetTestCases + { + get + { + return TestUtils.GetMofTestCase("Lexer\\TestCases"); + } + } + } + + private static void LexMethodTest(string mofFilename) + { + var mofText = File.ReadAllText(mofFilename); + var reader = SourceReader.From(mofText); + var actualTokens = Lexer.Lex(reader); + var actualText = TestUtils.ConvertToJson(actualTokens); + var expectedFilename = Path.Combine(Path.GetDirectoryName(mofFilename), + Path.GetFileNameWithoutExtension(mofFilename) + ".json"); + if (!File.Exists(expectedFilename)) + { + File.WriteAllText(expectedFilename, actualText); + } + var expectedText = File.ReadAllText(expectedFilename); + Assert.AreEqual(expectedText, actualText); + } + + } + +} \ No newline at end of file diff --git 
a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_AliasIdentifier.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_AliasIdentifier.cs new file mode 100644 index 00000000..7edd8060 --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_AliasIdentifier.cs @@ -0,0 +1,199 @@ +using Kingsland.MofParser.Lexing; +using Kingsland.MofParser.Tokens; +using Kingsland.ParseFx.Syntax; +using Kingsland.ParseFx.Text; +using NUnit.Framework; +using System.Collections.Generic; + +namespace Kingsland.MofParser.UnitTests.Lexing +{ + + [TestFixture] + public static partial class LexerTests + { + + [TestFixture] + public static class ReadAliasIdentifierTokenMethod + { + + [Test] + public static void ShouldReadAliasIdentifierToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From + ( + "$myAliasIdentifier\r\n" + + "$myAliasIdentifier2" + ) + ); + var expectedTokens = new List { + new AliasIdentifierToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(17, 1, 18), + "$myAliasIdentifier" + ), + "myAliasIdentifier" + ), + new WhitespaceToken( + new SourceExtent + ( + new SourcePosition(18, 1, 19), + new SourcePosition(19, 1, 20), + "\r\n" + ) + ), + new AliasIdentifierToken( + new SourceExtent + ( + new SourcePosition(20, 2, 1), + new SourcePosition(38, 2, 19), + "$myAliasIdentifier2" + ), + "myAliasIdentifier2" + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadInstanceWithAliasIdentifier() + { + // test case for https://github.com/mikeclayton/MofParser/issues/4 + var actualTokens = Lexer.Lex( + SourceReader.From + ( + "instance of cTentacleAgent as $cTentacleAgent1ref\r\n" + + "{\r\n" + + "};" + ) + ); + var expectedTokens = new List + { + new IdentifierToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(7, 1, 8), + "instance" + ), + "instance" + ), + new WhitespaceToken( + new SourceExtent + ( + new SourcePosition(8, 1, 9), + new SourcePosition(8, 1, 9), + " " + ) + ), + new IdentifierToken( + new SourceExtent + ( + new SourcePosition(9, 1, 10), + new SourcePosition(10, 1, 11), + "of" + ), + "of" + ), + new WhitespaceToken( + new SourceExtent + ( + new SourcePosition(11, 1, 12), + new SourcePosition(11, 1, 12), + " " + ) + ), + new IdentifierToken( + new SourceExtent + ( + new SourcePosition(12, 1, 13), + new SourcePosition(25, 1, 26), + "cTentacleAgent" + ), + "cTentacleAgent" + ), + new WhitespaceToken( + new SourceExtent + ( + new SourcePosition(26, 1, 27), + new SourcePosition(26, 1, 27), + " " + ) + ), + new IdentifierToken( + new SourceExtent + ( + new SourcePosition(27, 1, 28), + new SourcePosition(28, 1, 29), + "as" + ), + "as" + ), + new WhitespaceToken( + new SourceExtent + ( + new SourcePosition(29, 1, 30), + new SourcePosition(29, 1, 30), + " " + ) + ), + new AliasIdentifierToken( + new SourceExtent + ( + new SourcePosition(30, 1, 31), + new SourcePosition(48, 1, 49), + "$cTentacleAgent1ref" + ), + "cTentacleAgent1ref" + ), + new WhitespaceToken( + new SourceExtent + ( + new SourcePosition(49, 1, 50), + new SourcePosition(50, 1, 51), + "\r\n" + ) + ), + new BlockOpenToken( + new SourceExtent + ( + new SourcePosition(51, 2, 1), + new SourcePosition(51, 2, 1), + "{" + ) + ), + new WhitespaceToken( + new SourceExtent + ( + new SourcePosition(52, 2, 2), + new SourcePosition(53, 2, 3), + "\r\n" + ) + ), + new BlockCloseToken( + new SourceExtent + ( + new SourcePosition(54, 3, 1), + new SourcePosition(54, 3, 1), + "}" + ) + ), + new StatementEndToken( + 
new SourceExtent + ( + new SourcePosition(55, 3, 2), + new SourcePosition(55, 3, 2), + ";" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + } + +} diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_BooleanLiteral.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_BooleanLiteral.cs new file mode 100644 index 00000000..6c911516 --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_BooleanLiteral.cs @@ -0,0 +1,143 @@ +using Kingsland.MofParser.Lexing; +using Kingsland.MofParser.Tokens; +using Kingsland.ParseFx.Syntax; +using Kingsland.ParseFx.Text; +using NUnit.Framework; +using System.Collections.Generic; + +namespace Kingsland.MofParser.UnitTests.Lexing +{ + + [TestFixture] + public static partial class LexerTests + { + + [TestFixture] + public static class ReadBooleanLiteralTokenMethod + { + + [Test] + public static void ShouldReadLowerCaseFalseToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("false") + ); + var expectedTokens = new List { + new BooleanLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(4, 1, 5), + "false" + ), + false + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadMixedCaseFalseToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("False") + ); + var expectedTokens = new List { + new BooleanLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(4, 1, 5), + "False" + ), + false + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadUpperCaseFalseToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("FALSE") + ); + var expectedTokens = new List { + new BooleanLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(4, 1, 5), + "FALSE" + ), + false + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadLowerCaseTrueToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("true") + ); + var expectedTokens = new List { + new BooleanLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(3, 1, 4), + "true" + ), + true + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadMixedCaseTrueToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("True") + ); + var expectedTokens = new List { + new BooleanLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(3, 1, 4), + "True" + ), + true + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadUpperCaseTrueToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("TRUE") + ); + var expectedTokens = new List { + new BooleanLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(3, 1, 4), + "TRUE" + ), + true + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + } + +} diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Comments.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Comments.cs new file mode 100644 index 00000000..1fcc920d --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Comments.cs @@ -0,0 +1,371 @@ +using Kingsland.MofParser.Lexing; +using Kingsland.MofParser.Tokens; +using Kingsland.ParseFx.Syntax; +using Kingsland.ParseFx.Text; +using NUnit.Framework; +using 
System.Collections.Generic; + +namespace Kingsland.MofParser.UnitTests.Lexing +{ + + [TestFixture] + public static partial class LexerTests + { + + [TestFixture] + public static class ReadCommentTokenMethod + { + + [Test] + public static void ShouldReadSingleLineEofCommentToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("// single line comment") + ); + var expectedTokens = new List { + new CommentToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(21, 1, 22), + "// single line comment" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadSingleLineEolCommentToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("// single line comment\r\n") + ); + var expectedTokens = new List { + new CommentToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(21, 1, 22), + "// single line comment" + ) + ), + new WhitespaceToken( + new SourceExtent + ( + new SourcePosition(22, 1, 23), + new SourcePosition(23, 1, 24), + "\r\n" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadMultilineEofCommentToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From( + "/*\r\n" + + "@TargetNode='MyServer'\r\n" + + "@GeneratedBy=mike.clayton\r\n" + + "@GenerationDate=07/19/2014 10:37:04\r\n" + + "@GenerationHost=MyDesktop\r\n" + + "*/" + ) + ); + var expectedTokens = new List { + new CommentToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(120, 6, 2), + "/*\r\n" + + "@TargetNode='MyServer'\r\n" + + "@GeneratedBy=mike.clayton\r\n" + + "@GenerationDate=07/19/2014 10:37:04\r\n" + + "@GenerationHost=MyDesktop\r\n" + + "*/" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadMultilineUnclosedCommentToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From( + "/*\r\n" + + "@TargetNode='MyServer'\r\n" + + "@GeneratedBy=mike.clayton\r\n" + + "@GenerationDate=07/19/2014 10:37:04\r\n" + + "@GenerationHost=MyDesktop\r\n" + ) + ); + var expectedTokens = new List { + new CommentToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(118, 5, 27), + "/*\r\n" + + "@TargetNode='MyServer'\r\n" + + "@GeneratedBy=mike.clayton\r\n" + + "@GenerationDate=07/19/2014 10:37:04\r\n" + + "@GenerationHost=MyDesktop\r\n" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadMultilineInlineAsterisks() + { + var actualTokens = Lexer.Lex( + SourceReader.From( + "/*************\r\n" + + "@TargetNode='MyServer'\r\n" + + "@GeneratedBy=mike.clayton\r\n" + + "@GenerationDate=07/19/2014 10:37:04\r\n" + + "@GenerationHost=MyDesktop\r\n" + + "*************/" + ) + ); + var expectedTokens = new List { + new CommentToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(144, 6, 14), + "/*************\r\n" + + "@TargetNode='MyServer'\r\n" + + "@GeneratedBy=mike.clayton\r\n" + + "@GenerationDate=07/19/2014 10:37:04\r\n" + + "@GenerationHost=MyDesktop\r\n" + + "*************/" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadMultilineMultiple() + { + var actualTokens = Lexer.Lex( + SourceReader.From( + "/*\r\n" + + "@TargetNode='MyServer'\r\n" + + "@GeneratedBy=mike.clayton\r\n" + + "@GenerationDate=07/19/2014 10:37:04\r\n" + + "@GenerationHost=MyDesktop\r\n" + + "*//*\r\n" + + 
"@TargetNode='MyServer2'\r\n" + + "@GeneratedBy=mike.clayton\r\n" + + "@GenerationDate=07/19/2014 10:37:04\r\n" + + "@GenerationHost=MyDesktop\r\n" + + "*/" + ) + ); + var expectedTokens = new List { + new CommentToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(120, 6, 2), + "/*\r\n" + + "@TargetNode='MyServer'\r\n" + + "@GeneratedBy=mike.clayton\r\n" + + "@GenerationDate=07/19/2014 10:37:04\r\n" + + "@GenerationHost=MyDesktop\r\n" + + "*/" + ) + ), + new CommentToken( + new SourceExtent + ( + new SourcePosition(121, 6, 3), + new SourcePosition(242, 11, 2), + "/*\r\n" + + "@TargetNode='MyServer2'\r\n" + + "@GeneratedBy=mike.clayton\r\n" + + "@GenerationDate=07/19/2014 10:37:04\r\n" + + "@GenerationHost=MyDesktop\r\n" + + "*/" + ) + ) + + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadExample1CommentToken() + { + // see DSP0221_3.0.1.pdf "5.4 Comments" + var actualTokens = Lexer.Lex( + SourceReader.From("Integer MyProperty; // This is an example of a single-line comment") + ); + var expectedTokens = new List { + new IdentifierToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(6, 1, 7), + "Integer" + ), + "Integer" + ), + new WhitespaceToken( + new SourceExtent + ( + new SourcePosition(7, 1, 8), + new SourcePosition(7, 1, 8), + " " + ) + ), + new IdentifierToken( + new SourceExtent + ( + new SourcePosition(8, 1, 9), + new SourcePosition(17, 1, 18), + "MyProperty" + ), + "MyProperty" + ), + new StatementEndToken( + new SourceExtent + ( + new SourcePosition(18, 1, 19), + new SourcePosition(18, 1, 19), + ";" + ) + ), + new WhitespaceToken( + new SourceExtent + ( + new SourcePosition(19, 1, 20), + new SourcePosition(19, 1, 20), + " " + ) + ), + new CommentToken( + new SourceExtent + ( + new SourcePosition(20, 1, 21), + new SourcePosition(65, 1, 66), + "// This is an example of a single-line comment" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadExample2CommentToken() + { + // see DSP0221_3.0.1.pdf "5.4 Comments" + var actualTokens = Lexer.Lex( + SourceReader.From( + "/* example of a comment between property definition tokens and a multi-line comment */\r\n" + + "Integer /* 16-bit integer property */ MyProperty; /* and a multi-line\r\n" + + " comment */" + ) + ); + var expectedTokens = new List { + new CommentToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(85, 1, 86), + "/* example of a comment between property definition tokens and a multi-line comment */" + ) + ), + new WhitespaceToken( + new SourceExtent + ( + new SourcePosition(86, 1, 87), + new SourcePosition(87, 1, 88), + "\r\n" + ) + ), + new IdentifierToken( + new SourceExtent + ( + new SourcePosition(88, 2, 1), + new SourcePosition(94, 2, 7), + "Integer" + ), + "Integer" + ), + new WhitespaceToken( + new SourceExtent + ( + new SourcePosition(95, 2, 8), + new SourcePosition(95, 2, 8), + " " + ) + ), + new CommentToken( + new SourceExtent + ( + new SourcePosition(96, 2, 9), + new SourcePosition(124, 2, 37), + "/* 16-bit integer property */" + ) + ), + new WhitespaceToken( + new SourceExtent + ( + new SourcePosition(125, 2, 38), + new SourcePosition(125, 2, 38), + " " + ) + ), + new IdentifierToken( + new SourceExtent + ( + new SourcePosition(126, 2, 39), + new SourcePosition(135, 2, 48), + "MyProperty" + ), + "MyProperty" + ), + new StatementEndToken( + new SourceExtent + ( + new SourcePosition(136, 2, 49), + new 
SourcePosition(136, 2, 49), + ";" + ) + ), + new WhitespaceToken( + new SourceExtent + ( + new SourcePosition(137, 2, 50), + new SourcePosition(137, 2, 50), + " " + ) + ), + new CommentToken( + new SourceExtent + ( + new SourcePosition(138, 2, 51), + new SourcePosition(192, 3, 34), + "/* and a multi-line\r\n" + + " comment */" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + } + +} diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Identifier.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Identifier.cs new file mode 100644 index 00000000..0b69c3f5 --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Identifier.cs @@ -0,0 +1,64 @@ +using Kingsland.MofParser.Lexing; +using Kingsland.MofParser.Tokens; +using Kingsland.ParseFx.Syntax; +using Kingsland.ParseFx.Text; +using NUnit.Framework; +using System.Collections.Generic; + +namespace Kingsland.MofParser.UnitTests.Lexing +{ + + [TestFixture] + public static partial class LexerTests + { + + [TestFixture] + public static class ReadIdentifierTokenMethod + { + + [Test] + public static void ShouldReadIdentifierToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From + ( + "myIdentifier\r\n" + + "myIdentifier2" + ) + ); + var expectedTokens = new List { + new IdentifierToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(11, 1, 12), + "myIdentifier" + ), + "myIdentifier" + ), + new WhitespaceToken( + new SourceExtent + ( + new SourcePosition(12, 1, 13), + new SourcePosition(13, 1, 14), + "\r\n" + ) + ), + new IdentifierToken( + new SourceExtent + ( + new SourcePosition(14, 2, 1), + new SourcePosition(26, 2, 13), + "myIdentifier2" + ), + "myIdentifier2" + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + } + +} diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_IntegerLiteral.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_IntegerLiteral.cs new file mode 100644 index 00000000..e694b0e8 --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_IntegerLiteral.cs @@ -0,0 +1,471 @@ +using Kingsland.MofParser.Lexing; +using Kingsland.MofParser.Tokens; +using Kingsland.ParseFx.Syntax; +using Kingsland.ParseFx.Text; +using NUnit.Framework; +using System.Collections.Generic; + +namespace Kingsland.MofParser.UnitTests.Lexing +{ + + [TestFixture] + public static partial class LexerTests + { + + [TestFixture] + public static class ReadIntegerLiteralTokenMethod + { + + // binaryValue + + [Test] + public static void ShouldReadBinaryValue0b() + { + var actualTokens = Lexer.Lex( + SourceReader.From("0b") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(1, 1, 2), + "0b" + ), + IntegerKind.BinaryValue, 0 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadBinaryValue1b() + { + var actualTokens = Lexer.Lex( + SourceReader.From("1b") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(1, 1, 2), + "1b" + ), + IntegerKind.BinaryValue, 1 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadBinaryValue00000b() + { + var actualTokens = Lexer.Lex( + SourceReader.From("00000b") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new 
SourcePosition(5, 1, 6), + "00000b" + ), + IntegerKind.BinaryValue, 0 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadBinaryValue10000b() + { + var actualTokens = Lexer.Lex( + SourceReader.From("10000b") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(5, 1, 6), + "10000b" + ), + IntegerKind.BinaryValue, 16 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadBinaryValue11111b() + { + var actualTokens = Lexer.Lex( + SourceReader.From("11111b") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(5, 1, 6), + "11111b" + ), + IntegerKind.BinaryValue, 31 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + // octalValue + + [Test] + public static void ShouldReadOctalValue00() + { + var actualTokens = Lexer.Lex( + SourceReader.From("00") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(1, 1, 2), + "00" + ), + IntegerKind.OctalValue, 0 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadOctalValue01() + { + var actualTokens = Lexer.Lex( + SourceReader.From("01") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(1, 1, 2), + "01" + ), + IntegerKind.OctalValue, 1 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadOctalValue00000() + { + var actualTokens = Lexer.Lex( + SourceReader.From("00000") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(4, 1, 5), + "00000" + ), + IntegerKind.OctalValue, 0 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadOctalValue01000() + { + var actualTokens = Lexer.Lex( + SourceReader.From("01000") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(4, 1, 5), + "01000" + ), + IntegerKind.OctalValue, 512 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadOctalValue01111() + { + var actualTokens = Lexer.Lex( + SourceReader.From("01111") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(4, 1, 5), + "01111" + ), + IntegerKind.OctalValue, 585 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadOctalValue04444() + { + var actualTokens = Lexer.Lex( + SourceReader.From("04444") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(4, 1, 5), + "04444" + ), + IntegerKind.OctalValue, 2340 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadOctalValue07777() + { + var actualTokens = Lexer.Lex( + SourceReader.From("07777") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(4, 1, 5), + "07777" + ), + 
IntegerKind.OctalValue, 4095 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + // hexValue + + [Test] + public static void ShouldReadHexValue0x0() + { + var actualTokens = Lexer.Lex( + SourceReader.From("0x0") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(2, 1, 3), + "0x0" + ), + IntegerKind.HexValue, 0 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadHexValue0x0000() + { + var actualTokens = Lexer.Lex( + SourceReader.From("0x0000") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(5, 1, 6), + "0x0000" + ), + IntegerKind.HexValue, 0 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadHexValue0x8888() + { + var actualTokens = Lexer.Lex( + SourceReader.From("0x8888") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(5, 1, 6), + "0x8888" + ), + IntegerKind.HexValue, 34952 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadHexValue0xabcd() + { + var actualTokens = Lexer.Lex( + SourceReader.From("0xabcd") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(5, 1, 6), + "0xabcd" + ), + IntegerKind.HexValue, 43981 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadHexValue0xABCD() + { + var actualTokens = Lexer.Lex( + SourceReader.From("0xABCD") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(5, 1, 6), + "0xABCD" + ), + IntegerKind.HexValue, 43981 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + // decimalValue + + [Test] + public static void ShouldReadDecimalValue0() + { + var actualTokens = Lexer.Lex( + SourceReader.From("0") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(0, 1, 1), + "0" + ), + IntegerKind.DecimalValue, 0 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadDecimalValue12345() + { + var actualTokens = Lexer.Lex( + SourceReader.From("12345") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(4, 1, 5), + "12345" + ), + IntegerKind.DecimalValue, 12345 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadDecimalValuePlus12345() + { + var actualTokens = Lexer.Lex( + SourceReader.From("+12345") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(5, 1, 6), + "+12345" + ), + IntegerKind.DecimalValue, 12345 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadDecimalValueMinus12345() + { + var actualTokens = Lexer.Lex( + SourceReader.From("-12345") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(5, 1, 6), + "-12345" + ), + 
IntegerKind.DecimalValue, -12345 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadDecimalValue1234567890() + { + var actualTokens = Lexer.Lex( + SourceReader.From("1234567890") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(9, 1, 10), + "1234567890" + ), + IntegerKind.DecimalValue, 1234567890 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + } + +} diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_NullLiteral.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_NullLiteral.cs new file mode 100644 index 00000000..139f61ba --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_NullLiteral.cs @@ -0,0 +1,80 @@ +using Kingsland.MofParser.Lexing; +using Kingsland.MofParser.Tokens; +using Kingsland.ParseFx.Syntax; +using Kingsland.ParseFx.Text; +using NUnit.Framework; +using System.Collections.Generic; + +namespace Kingsland.MofParser.UnitTests.Lexing +{ + + [TestFixture] + public static partial class LexerTests + { + + [TestFixture] + public static class ReadNullLiteralTokenMethod + { + + [Test] + public static void ShouldReadLowerCaseNullToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("null") + ); + var expectedTokens = new List { + new NullLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(3, 1, 4), + "null" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadMixedCaseNullToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("Null") + ); + var expectedTokens = new List { + new NullLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(3, 1, 4), + "Null" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadUpperCaseNullToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("NULL") + ); + var expectedTokens = new List { + new NullLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(3, 1, 4), + "NULL" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + } + +} diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Pragma.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Pragma.cs new file mode 100644 index 00000000..35dc87c4 --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Pragma.cs @@ -0,0 +1,80 @@ +using Kingsland.MofParser.Lexing; +using Kingsland.MofParser.Tokens; +using Kingsland.ParseFx.Syntax; +using Kingsland.ParseFx.Text; +using NUnit.Framework; +using System.Collections.Generic; + +namespace Kingsland.MofParser.UnitTests.Lexing +{ + + [TestFixture] + public static partial class LexerTests + { + + [TestFixture] + public static class ReadPragmaTokenMethod + { + + [Test] + public static void ShouldReadLowerCasePragmaToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("#pragma") + ); + var expectedTokens = new List { + new PragmaToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(6, 1, 7), + "#pragma" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadMixedCasePragmaToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("#Pragma") + ); + var expectedTokens = new List { + new PragmaToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), 
+ new SourcePosition(6, 1, 7), + "#Pragma" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadUpperCasePragmaToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("#PRAGMA") + ); + var expectedTokens = new List { + new PragmaToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(6, 1, 7), + "#PRAGMA" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + } + +} diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_RealLiteral.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_RealLiteral.cs new file mode 100644 index 00000000..6ffd7f45 --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_RealLiteral.cs @@ -0,0 +1,182 @@ +using Kingsland.MofParser.Lexing; +using Kingsland.MofParser.Tokens; +using Kingsland.ParseFx.Syntax; +using Kingsland.ParseFx.Text; +using NUnit.Framework; +using System.Collections.Generic; + +namespace Kingsland.MofParser.UnitTests.Lexing +{ + + [TestFixture] + public static partial class LexerTests + { + + [TestFixture] + public static class ReadRealLiteralTokenMethod + { + + [Test] + public static void ShouldReadRealValue0_0() + { + var actualTokens = Lexer.Lex( + SourceReader.From("0.0") + ); + var expectedTokens = new List { + new RealLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(2, 1, 3), + "0.0" + ), + 0 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadRealValue123_45() + { + var actualTokens = Lexer.Lex( + SourceReader.From("123.45") + ); + var expectedTokens = new List { + new RealLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(5, 1, 6), + "123.45" + ), + 123.45 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadRealValuePlus123_45() + { + var actualTokens = Lexer.Lex( + SourceReader.From("+123.45") + ); + var expectedTokens = new List { + new RealLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(6, 1, 7), + "+123.45" + ), + 123.45 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadRealValueMinus123_45() + { + var actualTokens = Lexer.Lex( + SourceReader.From("-123.45") + ); + var expectedTokens = new List { + new RealLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(6, 1, 7), + "-123.45" + ), + -123.45 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadRealValue1234567890_00() + { + var actualTokens = Lexer.Lex( + SourceReader.From("1234567890.00") + ); + var expectedTokens = new List { + new RealLiteralToken( + new SourceExtent( + new SourcePosition(0, 1, 1), + new SourcePosition(12, 1, 13), + "1234567890.00" + ), + 1234567890.00 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadRealValue_45() + { + var actualTokens = Lexer.Lex( + SourceReader.From(".45") + ); + var expectedTokens = new List { + new RealLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(2, 1, 3), + ".45" + ), + 0.45 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadRealValuePlus_45() + { + var actualTokens = Lexer.Lex( + SourceReader.From("+.45") + ); + var expectedTokens = new List { + new 
RealLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(3, 1, 4), + "+.45" + ), + 0.45 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadRealValueMinus_45() + { + var actualTokens = Lexer.Lex( + SourceReader.From("-.45") + ); + var expectedTokens = new List { + new RealLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(3, 1, 4), + "-.45" + ), + -0.45 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + } + +} diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_StringLiteral.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_StringLiteral.cs new file mode 100644 index 00000000..205830cf --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_StringLiteral.cs @@ -0,0 +1,83 @@ +using Kingsland.MofParser.Lexing; +using Kingsland.MofParser.Tokens; +using Kingsland.ParseFx.Syntax; +using Kingsland.ParseFx.Text; +using NUnit.Framework; +using System.Collections.Generic; + +namespace Kingsland.MofParser.UnitTests.Lexing +{ + + [TestFixture] + public static partial class LexerTests + { + + [TestFixture] + public static class ReadStringLiteralTokenMethod + { + + [Test] + public static void ShouldReadEmptyString() + { + var actualTokens = Lexer.Lex( + SourceReader.From("\"\"") + ); + var expectedTokens = new List { + new StringLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(1, 1, 2), + "\"\"" + ), + string.Empty + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadBasicString() + { + var actualTokens = Lexer.Lex( + SourceReader.From("\"my string literal\"") + ); + var expectedTokens = new List { + new StringLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(18, 1, 19), + "\"my string literal\"" + ), + "my string literal" + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadEscapedString() + { + var actualTokens = Lexer.Lex( + SourceReader.From(@"""my \\ string \"" literal \' with \b lots \t and \n lots \f of \r escapes""") + ); + var expectedTokens = new List { + new StringLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(72, 1, 73), + @"""my \\ string \"" literal \' with \b lots \t and \n lots \f of \r escapes""" + ), + "my \\ string \" literal \' with \b lots \t and \n lots \f of \r escapes" + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + } + +} diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Symbols.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Symbols.cs new file mode 100644 index 00000000..fca5ca60 --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Symbols.cs @@ -0,0 +1,320 @@ +using Kingsland.MofParser.Lexing; +using Kingsland.MofParser.Tokens; +using Kingsland.ParseFx.Syntax; +using Kingsland.ParseFx.Text; +using NUnit.Framework; +using System.Collections.Generic; + +namespace Kingsland.MofParser.UnitTests.Lexing +{ + + [TestFixture] + public static partial class LexerTests + { + + [TestFixture] + public static class ReadAttributeCloseTokenMethod + { + + [Test] + public static void ShouldReadAttributeCloseToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("]") + ); + var expectedTokens = new List { + new AttributeCloseToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new 
SourcePosition(0, 1, 1), + "]" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + [TestFixture] + public static class ReadAttributeOpenTokenMethod + { + + [Test] + public static void ShouldReadAttributeOpenToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("[") + ); + var expectedTokens = new List { + new AttributeOpenToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(0, 1, 1), + "[" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + [TestFixture] + public static class ReadBlockCloseTokenMethod + { + + [Test] + public static void ShouldReadBlockCloseToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("}") + ); + var expectedTokens = new List { + new BlockCloseToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(0, 1, 1), + "}" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + [TestFixture] + public static class ReadBlockOpenTokenMethod + { + + [Test] + public static void ShouldReaBlockOpenToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("{") + ); + var expectedTokens = new List { + new BlockOpenToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(0, 1, 1), + "{" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + [TestFixture] + public static class ReadColonTokenMethod + { + + [Test] + public static void ShouldReadColonToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From(":") + ); + var expectedTokens = new List { + new ColonToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(0, 1, 1), + ":" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + [TestFixture] + public static class ReadCommaTokenMethod + { + + [Test] + public static void ShouldReadCommaToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From(",") + ); + var expectedTokens = new List { + new CommaToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(0, 1, 1), + "," + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + [TestFixture] + public static class ReadDotOperatorTokenMethod + { + + [Test] + public static void ShouldReadDotOperatorToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From(".") + ); + var expectedTokens = new List { + new DotOperatorToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(0, 1, 1), + "." + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadDotOperatorTokenWithTrailingNonDecimalDigit() + { + var actualTokens = Lexer.Lex( + SourceReader.From(".abc") + ); + var expectedTokens = new List { + new DotOperatorToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(0, 1, 1), + "." 
+ ) + ), + new IdentifierToken( + new SourceExtent + ( + new SourcePosition(1, 1, 2), + new SourcePosition(3, 1, 4), + "abc" + ), + "abc" + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + [TestFixture] + public static class ReadEqualsOperatorTokenMethod + { + + [Test] + public static void ShouldReadEqualsOperatorToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("=") + ); + var expectedTokens = new List { + new EqualsOperatorToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(0, 1, 1), + "=" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + [TestFixture] + public static class ReadParenthesesCloseTokenMethod + { + + [Test] + public static void ShouldReadEqualsOperatorToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From(")") + ); + var expectedTokens = new List { + new ParenthesisCloseToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(0, 1, 1), + ")" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + [TestFixture] + public static class ReadParenthesesOpenTokenMethod + { + + [Test] + public static void ShouldReadParenthesesOpenToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("(") + ); + var expectedTokens = new List { + new ParenthesisOpenToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(0, 1, 1), + "(" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + [TestFixture] + public static class ReadStatementEndTokenMethod + { + + [Test] + public static void ShouldReadStatementEndToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From(";") + ); + var expectedTokens = new List { + new StatementEndToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(0, 1, 1), + ";" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + } + +} diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Whitespace.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Whitespace.cs new file mode 100644 index 00000000..0493c6d2 --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Whitespace.cs @@ -0,0 +1,137 @@ +using Kingsland.MofParser.Lexing; +using Kingsland.MofParser.Tokens; +using Kingsland.ParseFx.Syntax; +using Kingsland.ParseFx.Text; +using NUnit.Framework; +using System.Collections.Generic; + +namespace Kingsland.MofParser.UnitTests.Lexing +{ + + [TestFixture] + public static partial class LexerTests + { + + [TestFixture] + public static class ReadWhitespaceTokenMethod + { + + [Test] + public static void ShouldReadSpaceWhitespaceToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From(" ") + ); + var expectedTokens = new List { + new WhitespaceToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(4, 1, 5), + " " + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadTabWhitespaceToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("\t\t\t\t\t") + ); + var expectedTokens = new List { + new WhitespaceToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(4, 1, 5), + "\t\t\t\t\t" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadCrWhitespaceToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("\r\r\r\r\r") + ); + var expectedTokens = new List { + new WhitespaceToken( + new 
SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(4, 5, 1), + "\r\r\r\r\r" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadLfWhitespaceToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("\n\n\n\n\n") + ); + var expectedTokens = new List { + new WhitespaceToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(4, 5, 1), + "\n\n\n\n\n" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadCrLfWhitespaceToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("\r\n\r\n\r\n\r\n\r\n") + ); + var expectedTokens = new List { + new WhitespaceToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(9, 5, 2), + "\r\n\r\n\r\n\r\n\r\n" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadMixedWhitespaceToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From(" \t\t\t\t\t\r\r\r\r\r\n\n\n\n\n\r\n\r\n\r\n\r\n\r\n") + ); + var expectedTokens = new List { + new WhitespaceToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(29, 14, 2), + " \t\t\t\t\t\r\r\r\r\r\n\n\n\n\n\r\n\r\n\r\n\r\n\r\n" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + } + +} diff --git a/src/Kingsland.MofParser.UnitTests/Lexer/TestCases/MyServer.json b/src/Kingsland.MofParser.UnitTests/Lexing/TestCases/MyServer.json similarity index 100% rename from src/Kingsland.MofParser.UnitTests/Lexer/TestCases/MyServer.json rename to src/Kingsland.MofParser.UnitTests/Lexing/TestCases/MyServer.json diff --git a/src/Kingsland.MofParser.UnitTests/Lexer/TestCases/MyServer.mof b/src/Kingsland.MofParser.UnitTests/Lexing/TestCases/MyServer.mof similarity index 100% rename from src/Kingsland.MofParser.UnitTests/Lexer/TestCases/MyServer.mof rename to src/Kingsland.MofParser.UnitTests/Lexing/TestCases/MyServer.mof diff --git a/src/Kingsland.MofParser.UnitTests/Model/ModelAssert.cs b/src/Kingsland.MofParser.UnitTests/Model/ModelAssert.cs new file mode 100644 index 00000000..ba1be88a --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/Model/ModelAssert.cs @@ -0,0 +1,47 @@ +using Kingsland.MofParser.Model; +using NUnit.Framework; +using System.Linq; + +namespace Kingsland.MofParser.UnitTests.Model +{ + + internal static class ModelAssert + { + + public static void AreEqual(Module expected, Module actual) + { + Assert.IsNotNull(expected); + Assert.IsNotNull(actual); + Assert.AreEqual(expected.Instances.Count, actual.Instances.Count); + foreach (var pair in expected.Instances + .Zip(actual.Instances, (exp, act) => (exp, act))) + { + ModelAssert.AreEqual(pair.exp, pair.act); + } + } + + public static void AreEqual(Instance expected, Instance actual) + { + Assert.IsNotNull(expected); + Assert.IsNotNull(actual); + Assert.AreEqual(expected.TypeName, actual.TypeName); + Assert.AreEqual(expected.Alias, actual.Alias); + Assert.AreEqual(expected.Properties.Count, actual.Properties.Count); + foreach (var pair in expected.Properties + .Zip(actual.Properties, (exp, act) => (exp, act))) + { + ModelAssert.AreEqual(pair.exp, pair.act); + } + } + + public static void AreEqual(Property expected, Property actual) + { + Assert.IsNotNull(expected); + Assert.IsNotNull(actual); + Assert.AreEqual(expected.Name, actual.Name); + Assert.AreEqual(expected.Value, actual.Value); + } + + } + +} diff --git 
a/src/Kingsland.MofParser.UnitTests/Parsing/ParserTests.cs b/src/Kingsland.MofParser.UnitTests/Parsing/ParserTests.cs index 39f25bde..602c1e6f 100644 --- a/src/Kingsland.MofParser.UnitTests/Parsing/ParserTests.cs +++ b/src/Kingsland.MofParser.UnitTests/Parsing/ParserTests.cs @@ -1,8 +1,10 @@ using Kingsland.MofParser.Ast; +using Kingsland.MofParser.Lexing; using Kingsland.MofParser.Model; using Kingsland.MofParser.Parsing; using Kingsland.MofParser.Tokens; using Kingsland.MofParser.UnitTests.Helpers; +using Kingsland.MofParser.UnitTests.Model; using Kingsland.ParseFx.Text; using NUnit.Framework; using System; @@ -24,7 +26,7 @@ public static class PropertyValueTests [Test] public static void ParsePropetyValueWithLiteralString() { - var tokens = Lexing.Lexer.Lex( + var tokens = Lexer.Lex( SourceReader.From( "instance of myType as $Alias0000006E\r\n" + "{\r\n" + @@ -131,7 +133,7 @@ public static void ParsePropetyValueWithLiteralString() [Test] public static void ParsePropetyValueWithAliasIdentifier() { - var tokens = Lexing.Lexer.Lex( + var tokens = Lexer.Lex( SourceReader.From( "instance of myType as $Alias00000070\r\n" + "{\r\n" + @@ -235,7 +237,7 @@ public static void ParsePropetyValueWithAliasIdentifier() [Test] public static void ParsePropetyValueWithEmptyArray() { - var tokens = Lexing.Lexer.Lex( + var tokens = Lexer.Lex( SourceReader.From( "instance of myType as $Alias00000070\r\n" + "{\r\n" + @@ -330,7 +332,7 @@ public static void ParsePropetyValueWithEmptyArray() [Test] public static void ParsePropetyValueArrayWithAliasIdentifier() { - var tokens = Lexing.Lexer.Lex( + var tokens = Lexer.Lex( SourceReader.From( "instance of myType as $Alias00000070\r\n" + "{\r\n" + @@ -442,7 +444,7 @@ public static void ParsePropetyValueArrayWithAliasIdentifier() [Test] public static void ParsePropetyValueArrayWithLiteralStrings() { - var tokens = Lexing.Lexer.Lex( + var tokens = Lexer.Lex( SourceReader.From( "instance of myType as $Alias00000070\r\n" + "{\r\n" + @@ -570,7 +572,7 @@ public static void ParsePropetyValueArrayWithLiteralStrings() [Test] public static void ParsePropetyValueArrayWithNumericLiteralValues() { - var tokens = Lexing.Lexer.Lex( + var tokens = Lexer.Lex( SourceReader.From( "instance of myType as $Alias00000070\r\n" + "{\r\n" + @@ -773,7 +775,7 @@ private static void ParseMethodTest(string mofFilename) { var mofText = File.ReadAllText(mofFilename); var reader = SourceReader.From(mofText); - var tokens = Lexing.Lexer.Lex(reader); + var tokens = Lexer.Lex(reader); var ast = Parser.Parse( tokens, ParserQuirks.AllowMofV2Qualifiers | diff --git a/src/Kingsland.MofParser.UnitTests/Tokens/TokenAssert.cs b/src/Kingsland.MofParser.UnitTests/Tokens/TokenAssert.cs new file mode 100644 index 00000000..e623fa40 --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/Tokens/TokenAssert.cs @@ -0,0 +1,398 @@ +using Kingsland.MofParser.Tokens; +using Kingsland.ParseFx.Text; + +namespace Kingsland.MofParser.UnitTests.Tokens +{ + + internal static class TokenAssert + { + + #region Token Comparison Methods + + public static bool AreEqual(AliasIdentifierToken expected, AliasIdentifierToken actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.Extent, actual.Extent) && + (expected.Name == actual.Name); + } + } + + public static bool AreEqual(AttributeCloseToken expected, AttributeCloseToken actual) + { + if ((expected == null) && (actual == null)) + { + 
return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.Extent, actual.Extent); + } + } + public static bool AreEqual(AttributeOpenToken expected, AttributeOpenToken actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.Extent, actual.Extent); + } + } + + public static bool AreEqual(BlockCloseToken expected, BlockCloseToken actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.Extent, actual.Extent); + } + } + + public static bool AreEqual(BlockOpenToken expected, BlockOpenToken actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.Extent, actual.Extent); + } + } + + public static bool AreEqual(BooleanLiteralToken expected, BooleanLiteralToken actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.Extent, actual.Extent) && + (expected.Value == actual.Value); + } + } + + public static bool AreEqual(ColonToken expected, ColonToken actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.Extent, actual.Extent); + } + } + + public static bool AreEqual(CommaToken expected, CommaToken actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.Extent, actual.Extent); + } + } + + public static bool AreEqual(CommentToken expected, CommentToken actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.Extent, actual.Extent); + } + } + + public static bool AreEqual(DotOperatorToken expected, DotOperatorToken actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.Extent, actual.Extent); + } + } + + public static bool AreEqual(EqualsOperatorToken expected, EqualsOperatorToken actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.Extent, actual.Extent); + } + } + + public static bool AreEqual(IdentifierToken expected, IdentifierToken actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.Extent, actual.Extent) && + (expected.Name == actual.Name); + } + } + + public static bool AreEqual(IntegerLiteralToken expected, IntegerLiteralToken actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) 
|| (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.Extent, actual.Extent) && + (expected.Kind == actual.Kind) && + (expected.Value == actual.Value); + } + } + + public static bool AreEqual(NullLiteralToken expected, NullLiteralToken actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.Extent, actual.Extent); + } + } + + public static bool AreEqual(ParenthesisCloseToken expected, ParenthesisCloseToken actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.Extent, actual.Extent); + } + } + + public static bool AreEqual(ParenthesisOpenToken expected, ParenthesisOpenToken actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.Extent, actual.Extent); + } + } + + public static bool AreEqual(PragmaToken expected, PragmaToken actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.Extent, actual.Extent); + } + } + + public static bool AreEqual(RealLiteralToken expected, RealLiteralToken actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.Extent, actual.Extent) && + (expected.Value == actual.Value); + } + } + + public static bool AreEqual(StatementEndToken expected, StatementEndToken actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.Extent, actual.Extent); + } + } + + public static bool AreEqual(StringLiteralToken expected, StringLiteralToken actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.Extent, actual.Extent) && + (expected.Value == actual.Value); + } + } + + public static bool AreEqual(WhitespaceToken expected, WhitespaceToken actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.Extent, actual.Extent); + } + } + + #endregion + + #region Helper Methods + + public static bool AreEqual(SourceExtent expected, SourceExtent actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.StartPosition, actual.StartPosition) && + TokenAssert.AreEqual(expected.EndPosition, actual.EndPosition) && + (expected.Text == actual.Text); + } + } + + public static bool AreEqual(SourcePosition expected, SourcePosition actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return (expected.Position == actual.Position) 
&& + (expected.LineNumber == actual.LineNumber) && + (expected.ColumnNumber == actual.ColumnNumber); + } + } + + #endregion + + } + +} From 625ab21c437fdd943403dca5f4cd83a651925c5b Mon Sep 17 00:00:00 2001 From: mikeclayton Date: Thu, 10 Sep 2020 23:29:09 +0100 Subject: [PATCH 03/11] #65 split lexer unit tests into separate partial class files by token type --- .../CodeGen/MofGeneratorTests.cs | 13 +- .../Helpers/ModelAssert.cs | 47 - .../Helpers/TokenComparer.cs | 398 ---- .../Kingsland.MofParser.UnitTests.csproj | 23 +- .../Lexer/LexerTests.cs | 2066 ----------------- .../LexerHelper.cs => Lexing/LexerAssert.cs} | 121 +- .../Lexing/LexerTests.cs | 120 + .../Lexing/LexerTests_AliasIdentifier.cs | 199 ++ .../Lexing/LexerTests_BooleanLiteral.cs | 143 ++ .../Lexing/LexerTests_Comments.cs | 371 +++ .../Lexing/LexerTests_Identifier.cs | 64 + .../Lexing/LexerTests_IntegerLiteral.cs | 471 ++++ .../Lexing/LexerTests_NullLiteral.cs | 80 + .../Lexing/LexerTests_Pragma.cs | 80 + .../Lexing/LexerTests_RealLiteral.cs | 182 ++ .../Lexing/LexerTests_StringLiteral.cs | 83 + .../Lexing/LexerTests_Symbols.cs | 320 +++ .../Lexing/LexerTests_Whitespace.cs | 137 ++ .../{Lexer => Lexing}/TestCases/MyServer.json | 0 .../{Lexer => Lexing}/TestCases/MyServer.mof | Bin .../Model/ModelAssert.cs | 47 + .../Parsing/ParserTests.cs | 16 +- .../Tokens/TokenAssert.cs | 398 ++++ 23 files changed, 2789 insertions(+), 2590 deletions(-) delete mode 100644 src/Kingsland.MofParser.UnitTests/Helpers/ModelAssert.cs delete mode 100644 src/Kingsland.MofParser.UnitTests/Helpers/TokenComparer.cs delete mode 100644 src/Kingsland.MofParser.UnitTests/Lexer/LexerTests.cs rename src/Kingsland.MofParser.UnitTests/{Lexer/LexerHelper.cs => Lexing/LexerAssert.cs} (57%) create mode 100644 src/Kingsland.MofParser.UnitTests/Lexing/LexerTests.cs create mode 100644 src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_AliasIdentifier.cs create mode 100644 src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_BooleanLiteral.cs create mode 100644 src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Comments.cs create mode 100644 src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Identifier.cs create mode 100644 src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_IntegerLiteral.cs create mode 100644 src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_NullLiteral.cs create mode 100644 src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Pragma.cs create mode 100644 src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_RealLiteral.cs create mode 100644 src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_StringLiteral.cs create mode 100644 src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Symbols.cs create mode 100644 src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Whitespace.cs rename src/Kingsland.MofParser.UnitTests/{Lexer => Lexing}/TestCases/MyServer.json (100%) rename src/Kingsland.MofParser.UnitTests/{Lexer => Lexing}/TestCases/MyServer.mof (100%) create mode 100644 src/Kingsland.MofParser.UnitTests/Model/ModelAssert.cs create mode 100644 src/Kingsland.MofParser.UnitTests/Tokens/TokenAssert.cs diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/MofGeneratorTests.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/MofGeneratorTests.cs index 4437fee1..fa1582db 100644 --- a/src/Kingsland.MofParser.UnitTests/CodeGen/MofGeneratorTests.cs +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/MofGeneratorTests.cs @@ -1,4 +1,5 @@ using Kingsland.MofParser.CodeGen; +using Kingsland.MofParser.Lexing; using Kingsland.MofParser.Parsing; using 
Kingsland.ParseFx.Parsing; using Kingsland.ParseFx.Text; @@ -89,7 +90,7 @@ public static void QualifierWithMofV2FlavorsAndQuirksDisabledShouldThrow() "\t[read: ToSubClass, MappingStrings{\"Win32API|AccessControl|Windows NT Privileges\"}: ToSubClass] string PrivilegesNotHeld[];\r\n" + "\t[read: ToSubClass, MappingStrings{\"Win32API|AccessControl|Windows NT Privileges\"}: ToSubClass] string PrivilegesRequired[];\r\n" + "};"; - var tokens = Lexing.Lexer.Lex(SourceReader.From(sourceMof)); + var tokens = Lexer.Lex(SourceReader.From(sourceMof)); var tokensMof = TokenMofGenerator.ConvertToMof(tokens); var ex = Assert.Throws( () => @@ -172,7 +173,7 @@ public static void InvalidStructureFeatureShouldThrow() "{\r\n" + "\t100\r\n" + "};"; - var tokens = Lexing.Lexer.Lex(SourceReader.From(sourceMof)); + var tokens = Lexer.Lex(SourceReader.From(sourceMof)); var tokensMof = TokenMofGenerator.ConvertToMof(tokens); Assert.AreEqual(sourceMof, tokensMof); var ex = Assert.Throws( @@ -305,7 +306,7 @@ public static void InvalidClassFeatureShouldThrow() "{\r\n" + "\t100\r\n" + "};"; - var tokens = Lexing.Lexer.Lex(SourceReader.From(sourceMof)); + var tokens = Lexer.Lex(SourceReader.From(sourceMof)); var tokensMof = TokenMofGenerator.ConvertToMof(tokens); var ex = Assert.Throws( () => { @@ -473,7 +474,7 @@ public static void EnumerationDeclarationDeprecatedMof300IntegerBaseAndQuirksEna "{\r\n" + "\tJuly = \"July\"\r\n" + "};"; - var tokens = Lexing.Lexer.Lex(SourceReader.From(sourceMof)); + var tokens = Lexer.Lex(SourceReader.From(sourceMof)); var tokensMof = TokenMofGenerator.ConvertToMof(tokens); var ex = Assert.Throws( () => @@ -1390,7 +1391,7 @@ public static void EnumValueArrayWithQualifiedEnumValuesAndQuirksDisabledShouldT "{\r\n" + "\tMonth = {MonthEnums.July};\r\n" + "};"; - var tokens = Lexing.Lexer.Lex(SourceReader.From(sourceMof)); + var tokens = Lexer.Lex(SourceReader.From(sourceMof)); var tokensMof = TokenMofGenerator.ConvertToMof(tokens); var ex = Assert.Throws( () => @@ -1449,7 +1450,7 @@ public static void EnumValueArrayWithQualifiedEnumValuesAndQuirksDisabledShouldT private static void AssertRoundtrip(string sourceMof, ParserQuirks parserQuirks = ParserQuirks.None) { // check the lexer tokens roundtrips ok - var tokens = Lexing.Lexer.Lex(SourceReader.From(sourceMof)); + var tokens = Lexer.Lex(SourceReader.From(sourceMof)); var tokensMof = TokenMofGenerator.ConvertToMof(tokens); Assert.AreEqual(sourceMof, tokensMof); // check the parser ast roundtrips ok diff --git a/src/Kingsland.MofParser.UnitTests/Helpers/ModelAssert.cs b/src/Kingsland.MofParser.UnitTests/Helpers/ModelAssert.cs deleted file mode 100644 index 7bd6720c..00000000 --- a/src/Kingsland.MofParser.UnitTests/Helpers/ModelAssert.cs +++ /dev/null @@ -1,47 +0,0 @@ -using Kingsland.MofParser.Model; -using NUnit.Framework; -using System.Linq; - -namespace Kingsland.MofParser.UnitTests.Helpers -{ - - internal static class ModelAssert - { - - public static void AreEqual(Module obj1, Module obj2) - { - Assert.IsNotNull(obj1); - Assert.IsNotNull(obj2); - Assert.AreEqual(obj1.Instances.Count, obj2.Instances.Count); - foreach(var pair in obj1.Instances - .Zip(obj2.Instances, (i1, i2) => (i1, i2))) - { - ModelAssert.AreEqual(pair.i1, pair.i2); - } - } - - public static void AreEqual(Instance obj1, Instance obj2) - { - Assert.IsNotNull(obj1); - Assert.IsNotNull(obj2); - Assert.AreEqual(obj1.TypeName, obj2.TypeName); - Assert.AreEqual(obj1.Alias, obj2.Alias); - Assert.AreEqual(obj1.Properties.Count, obj2.Properties.Count); - foreach (var pair in 
obj1.Properties - .Zip(obj2.Properties, (p1, p2) => (p1, p2))) - { - ModelAssert.AreEqual(pair.p1, pair.p2); - } - } - - public static void AreEqual(Property obj1, Property obj2) - { - Assert.IsNotNull(obj1); - Assert.IsNotNull(obj2); - Assert.AreEqual(obj1.Name, obj2.Name); - Assert.AreEqual(obj1.Value, obj2.Value); - } - - } - -} diff --git a/src/Kingsland.MofParser.UnitTests/Helpers/TokenComparer.cs b/src/Kingsland.MofParser.UnitTests/Helpers/TokenComparer.cs deleted file mode 100644 index 9d6c90be..00000000 --- a/src/Kingsland.MofParser.UnitTests/Helpers/TokenComparer.cs +++ /dev/null @@ -1,398 +0,0 @@ -using Kingsland.MofParser.Tokens; -using Kingsland.ParseFx.Text; - -namespace Kingsland.MofParser.UnitTests.Helpers -{ - - internal static class TokenComparer - { - - #region Token Comparison Methods - - public static bool AreEqual(AliasIdentifierToken obj1, AliasIdentifierToken obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return TokenComparer.AreEqual(obj1.Extent, obj2.Extent) && - (obj1.Name == obj2.Name); - } - } - - public static bool AreEqual(AttributeCloseToken obj1, AttributeCloseToken obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return TokenComparer.AreEqual(obj1.Extent, obj2.Extent); - } - } - public static bool AreEqual(AttributeOpenToken obj1, AttributeOpenToken obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return TokenComparer.AreEqual(obj1.Extent, obj2.Extent); - } - } - - public static bool AreEqual(BlockCloseToken obj1, BlockCloseToken obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return TokenComparer.AreEqual(obj1.Extent, obj2.Extent); - } - } - - public static bool AreEqual(BlockOpenToken obj1, BlockOpenToken obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return TokenComparer.AreEqual(obj1.Extent, obj2.Extent); - } - } - - public static bool AreEqual(BooleanLiteralToken obj1, BooleanLiteralToken obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return TokenComparer.AreEqual(obj1.Extent, obj2.Extent) && - (obj1.Value == obj2.Value); - } - } - - public static bool AreEqual(ColonToken obj1, ColonToken obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return TokenComparer.AreEqual(obj1.Extent, obj2.Extent); - } - } - - public static bool AreEqual(CommaToken obj1, CommaToken obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return TokenComparer.AreEqual(obj1.Extent, obj2.Extent); - } - } - - public static bool AreEqual(CommentToken obj1, CommentToken obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return TokenComparer.AreEqual(obj1.Extent, obj2.Extent); - } - } - - public static bool AreEqual(DotOperatorToken obj1, 
DotOperatorToken obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return TokenComparer.AreEqual(obj1.Extent, obj2.Extent); - } - } - - public static bool AreEqual(EqualsOperatorToken obj1, EqualsOperatorToken obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return TokenComparer.AreEqual(obj1.Extent, obj2.Extent); - } - } - - public static bool AreEqual(IdentifierToken obj1, IdentifierToken obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return TokenComparer.AreEqual(obj1.Extent, obj2.Extent) && - (obj1.Name == obj2.Name); - } - } - - public static bool AreEqual(IntegerLiteralToken obj1, IntegerLiteralToken obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return TokenComparer.AreEqual(obj1.Extent, obj2.Extent) && - (obj1.Kind == obj2.Kind) && - (obj1.Value == obj2.Value); - } - } - - public static bool AreEqual(NullLiteralToken obj1, NullLiteralToken obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return TokenComparer.AreEqual(obj1.Extent, obj2.Extent); - } - } - - public static bool AreEqual(ParenthesisCloseToken obj1, ParenthesisCloseToken obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return TokenComparer.AreEqual(obj1.Extent, obj2.Extent); - } - } - - public static bool AreEqual(ParenthesisOpenToken obj1, ParenthesisOpenToken obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return TokenComparer.AreEqual(obj1.Extent, obj2.Extent); - } - } - - public static bool AreEqual(PragmaToken obj1, PragmaToken obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return TokenComparer.AreEqual(obj1.Extent, obj2.Extent); - } - } - - public static bool AreEqual(RealLiteralToken obj1, RealLiteralToken obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return TokenComparer.AreEqual(obj1.Extent, obj2.Extent) && - (obj1.Value == obj2.Value); - } - } - - public static bool AreEqual(StatementEndToken obj1, StatementEndToken obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return TokenComparer.AreEqual(obj1.Extent, obj2.Extent); - } - } - - public static bool AreEqual(StringLiteralToken obj1, StringLiteralToken obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return TokenComparer.AreEqual(obj1.Extent, obj2.Extent) && - (obj1.Value == obj2.Value); - } - } - - public static bool AreEqual(WhitespaceToken obj1, WhitespaceToken obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - 
else - { - return TokenComparer.AreEqual(obj1.Extent, obj2.Extent); - } - } - - #endregion - - #region Helper Methods - - public static bool AreEqual(SourceExtent obj1, SourceExtent obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return TokenComparer.AreEqual(obj1.StartPosition, obj2.StartPosition) && - TokenComparer.AreEqual(obj1.EndPosition, obj2.EndPosition) && - (obj1.Text == obj2.Text); - } - } - - public static bool AreEqual(SourcePosition obj1, SourcePosition obj2) - { - if ((obj1 == null) && (obj2 == null)) - { - return true; - } - else if ((obj1 == null) || (obj2 == null)) - { - return false; - } - else - { - return (obj1.Position == obj2.Position) && - (obj1.LineNumber == obj2.LineNumber) && - (obj1.ColumnNumber == obj2.ColumnNumber); - } - } - - #endregion - - } - -} diff --git a/src/Kingsland.MofParser.UnitTests/Kingsland.MofParser.UnitTests.csproj b/src/Kingsland.MofParser.UnitTests/Kingsland.MofParser.UnitTests.csproj index a5d63b31..3e528016 100644 --- a/src/Kingsland.MofParser.UnitTests/Kingsland.MofParser.UnitTests.csproj +++ b/src/Kingsland.MofParser.UnitTests/Kingsland.MofParser.UnitTests.csproj @@ -36,18 +36,29 @@ - + + + + + + + + + + + + - - - + + + - - + + Always diff --git a/src/Kingsland.MofParser.UnitTests/Lexer/LexerTests.cs b/src/Kingsland.MofParser.UnitTests/Lexer/LexerTests.cs deleted file mode 100644 index 9daffa3c..00000000 --- a/src/Kingsland.MofParser.UnitTests/Lexer/LexerTests.cs +++ /dev/null @@ -1,2066 +0,0 @@ -using Kingsland.MofParser.Tokens; -using Kingsland.MofParser.UnitTests.Helpers; -using Kingsland.ParseFx.Syntax; -using Kingsland.ParseFx.Text; -using NUnit.Framework; -using System.Collections.Generic; -using System.IO; - -namespace Kingsland.MofParser.UnitTests.Lexer -{ - - [TestFixture] - public static class LexerTests - { - - [TestFixture] - public static class EmptyFileTests - { - - [Test] - public static void ShouldReadEmptyFile() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From(string.Empty) - ); - var expectedTokens = new List(); - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class MiscTests - { - - [Test] - public static void MissingWhitespaceTest() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("12345myIdentifier") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(4, 1, 5), - "12345" - ), - IntegerKind.DecimalValue, 12345 - ), - new IdentifierToken( - new SourceExtent - ( - new SourcePosition(5, 1, 6), - new SourcePosition(16, 1, 17), - "myIdentifier" - ), - "myIdentifier" - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - #region Symbols - - [TestFixture] - public static class ReadAttributeCloseTokenMethod - { - - [Test] - public static void ShouldReadAttributeCloseToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("]") - ); - var expectedTokens = new List { - new AttributeCloseToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(0, 1, 1), - "]" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadAttributeOpenTokenMethod - { - - [Test] - public static void ShouldReadAttributeOpenToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("[") - ); - var expectedTokens = new List { - new 
AttributeOpenToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(0, 1, 1), - "[" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadBlockCloseTokenMethod - { - - [Test] - public static void ShouldReadBlockCloseToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("}") - ); - var expectedTokens = new List { - new BlockCloseToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(0, 1, 1), - "}" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadBlockOpenTokenMethod - { - - [Test] - public static void ShouldReaBlockOpenToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("{") - ); - var expectedTokens = new List { - new BlockOpenToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(0, 1, 1), - "{" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadColonTokenMethod - { - - [Test] - public static void ShouldReadColonToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From(":") - ); - var expectedTokens = new List { - new ColonToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(0, 1, 1), - ":" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadCommaTokenMethod - { - - [Test] - public static void ShouldReadCommaToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From(",") - ); - var expectedTokens = new List { - new CommaToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(0, 1, 1), - "," - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadDotOperatorTokenMethod - { - - [Test] - public static void ShouldReadDotOperatorToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From(".") - ); - var expectedTokens = new List { - new DotOperatorToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(0, 1, 1), - "." - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadDotOperatorTokenWithTrailingNonDecimalDigit() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From(".abc") - ); - var expectedTokens = new List { - new DotOperatorToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(0, 1, 1), - "." 
- ) - ), - new IdentifierToken( - new SourceExtent - ( - new SourcePosition(1, 1, 2), - new SourcePosition(3, 1, 4), - "abc" - ), - "abc" - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadEqualsOperatorTokenMethod - { - - [Test] - public static void ShouldReadEqualsOperatorToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("=") - ); - var expectedTokens = new List { - new EqualsOperatorToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(0, 1, 1), - "=" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadParenthesesCloseTokenMethod - { - - [Test] - public static void ShouldReadEqualsOperatorToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From(")") - ); - var expectedTokens = new List { - new ParenthesisCloseToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(0, 1, 1), - ")" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadParenthesesOpenTokenMethod - { - - [Test] - public static void ShouldReadParenthesesOpenToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("(") - ); - var expectedTokens = new List { - new ParenthesisOpenToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(0, 1, 1), - "(" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadStatementEndTokenMethod - { - - [Test] - public static void ShouldReadStatementEndToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From(";") - ); - var expectedTokens = new List { - new StatementEndToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(0, 1, 1), - ";" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - #endregion - - [TestFixture] - public static class ReadWhitespaceTokenMethod - { - - [Test] - public static void ShouldReadSpaceWhitespaceToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From(" ") - ); - var expectedTokens = new List { - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(4, 1, 5), - " " - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadTabWhitespaceToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("\t\t\t\t\t") - ); - var expectedTokens = new List { - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(4, 1, 5), - "\t\t\t\t\t" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadCrWhitespaceToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("\r\r\r\r\r") - ); - var expectedTokens = new List { - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(4, 5, 1), - "\r\r\r\r\r" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadLfWhitespaceToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("\n\n\n\n\n") - ); - var expectedTokens = new List { - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(4, 5, 1), - "\n\n\n\n\n" - ) - ) - }; - 
LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadCrLfWhitespaceToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("\r\n\r\n\r\n\r\n\r\n") - ); - var expectedTokens = new List { - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(9, 5, 2), - "\r\n\r\n\r\n\r\n\r\n" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadMixedWhitespaceToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From(" \t\t\t\t\t\r\r\r\r\r\n\n\n\n\n\r\n\r\n\r\n\r\n\r\n") - ); - var expectedTokens = new List { - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(29, 14, 2), - " \t\t\t\t\t\r\r\r\r\r\n\n\n\n\n\r\n\r\n\r\n\r\n\r\n" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadCommentTokenMethod - { - - [Test] - public static void ShouldReadSingleLineEofCommentToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("// single line comment") - ); - var expectedTokens = new List { - new CommentToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(21, 1, 22), - "// single line comment" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadSingleLineEolCommentToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("// single line comment\r\n") - ); - var expectedTokens = new List { - new CommentToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(21, 1, 22), - "// single line comment" - ) - ), - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(22, 1, 23), - new SourcePosition(23, 1, 24), - "\r\n" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadMultilineEofCommentToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From( - "/*\r\n" + - "@TargetNode='MyServer'\r\n" + - "@GeneratedBy=mike.clayton\r\n" + - "@GenerationDate=07/19/2014 10:37:04\r\n" + - "@GenerationHost=MyDesktop\r\n" + - "*/" - ) - ); - var expectedTokens = new List { - new CommentToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(120, 6, 2), - "/*\r\n" + - "@TargetNode='MyServer'\r\n" + - "@GeneratedBy=mike.clayton\r\n" + - "@GenerationDate=07/19/2014 10:37:04\r\n" + - "@GenerationHost=MyDesktop\r\n" + - "*/" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadMultilineUnclosedCommentToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From( - "/*\r\n" + - "@TargetNode='MyServer'\r\n" + - "@GeneratedBy=mike.clayton\r\n" + - "@GenerationDate=07/19/2014 10:37:04\r\n" + - "@GenerationHost=MyDesktop\r\n" - ) - ); - var expectedTokens = new List { - new CommentToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(118, 5, 27), - "/*\r\n" + - "@TargetNode='MyServer'\r\n" + - "@GeneratedBy=mike.clayton\r\n" + - "@GenerationDate=07/19/2014 10:37:04\r\n" + - "@GenerationHost=MyDesktop\r\n" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadMultilineInlineAsterisks() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From( - "/*************\r\n" + - "@TargetNode='MyServer'\r\n" + - 
"@GeneratedBy=mike.clayton\r\n" + - "@GenerationDate=07/19/2014 10:37:04\r\n" + - "@GenerationHost=MyDesktop\r\n" + - "*************/" - ) - ); - var expectedTokens = new List { - new CommentToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(144, 6, 14), - "/*************\r\n" + - "@TargetNode='MyServer'\r\n" + - "@GeneratedBy=mike.clayton\r\n" + - "@GenerationDate=07/19/2014 10:37:04\r\n" + - "@GenerationHost=MyDesktop\r\n" + - "*************/" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadMultilineMultiple() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From( - "/*\r\n" + - "@TargetNode='MyServer'\r\n" + - "@GeneratedBy=mike.clayton\r\n" + - "@GenerationDate=07/19/2014 10:37:04\r\n" + - "@GenerationHost=MyDesktop\r\n" + - "*//*\r\n" + - "@TargetNode='MyServer2'\r\n" + - "@GeneratedBy=mike.clayton\r\n" + - "@GenerationDate=07/19/2014 10:37:04\r\n" + - "@GenerationHost=MyDesktop\r\n" + - "*/" - ) - ); - var expectedTokens = new List { - new CommentToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(120, 6, 2), - "/*\r\n" + - "@TargetNode='MyServer'\r\n" + - "@GeneratedBy=mike.clayton\r\n" + - "@GenerationDate=07/19/2014 10:37:04\r\n" + - "@GenerationHost=MyDesktop\r\n" + - "*/" - ) - ), - new CommentToken( - new SourceExtent - ( - new SourcePosition(121, 6, 3), - new SourcePosition(242, 11, 2), - "/*\r\n" + - "@TargetNode='MyServer2'\r\n" + - "@GeneratedBy=mike.clayton\r\n" + - "@GenerationDate=07/19/2014 10:37:04\r\n" + - "@GenerationHost=MyDesktop\r\n" + - "*/" - ) - ) - - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadExample1CommentToken() - { - // see DSP0221_3.0.1.pdf "5.4 Comments" - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("Integer MyProperty; // This is an example of a single-line comment") - ); - var expectedTokens = new List { - new IdentifierToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(6, 1, 7), - "Integer" - ), - "Integer" - ), - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(7, 1, 8), - new SourcePosition(7, 1, 8), - " " - ) - ), - new IdentifierToken( - new SourceExtent - ( - new SourcePosition(8, 1, 9), - new SourcePosition(17, 1, 18), - "MyProperty" - ), - "MyProperty" - ), - new StatementEndToken( - new SourceExtent - ( - new SourcePosition(18, 1, 19), - new SourcePosition(18, 1, 19), - ";" - ) - ), - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(19, 1, 20), - new SourcePosition(19, 1, 20), - " " - ) - ), - new CommentToken( - new SourceExtent - ( - new SourcePosition(20, 1, 21), - new SourcePosition(65, 1, 66), - "// This is an example of a single-line comment" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadExample2CommentToken() - { - // see DSP0221_3.0.1.pdf "5.4 Comments" - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From( - "/* example of a comment between property definition tokens and a multi-line comment */\r\n" + - "Integer /* 16-bit integer property */ MyProperty; /* and a multi-line\r\n" + - " comment */" - ) - ); - var expectedTokens = new List { - new CommentToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(85, 1, 86), - "/* example of a comment between property definition tokens and a multi-line comment */" - ) - ), - new WhitespaceToken( - new 
SourceExtent - ( - new SourcePosition(86, 1, 87), - new SourcePosition(87, 1, 88), - "\r\n" - ) - ), - new IdentifierToken( - new SourceExtent - ( - new SourcePosition(88, 2, 1), - new SourcePosition(94, 2, 7), - "Integer" - ), - "Integer" - ), - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(95, 2, 8), - new SourcePosition(95, 2, 8), - " " - ) - ), - new CommentToken( - new SourceExtent - ( - new SourcePosition(96, 2, 9), - new SourcePosition(124, 2, 37), - "/* 16-bit integer property */" - ) - ), - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(125, 2, 38), - new SourcePosition(125, 2, 38), - " " - ) - ), - new IdentifierToken( - new SourceExtent - ( - new SourcePosition(126, 2, 39), - new SourcePosition(135, 2, 48), - "MyProperty" - ), - "MyProperty" - ), - new StatementEndToken( - new SourceExtent - ( - new SourcePosition(136, 2, 49), - new SourcePosition(136, 2, 49), - ";" - ) - ), - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(137, 2, 50), - new SourcePosition(137, 2, 50), - " " - ) - ), - new CommentToken( - new SourceExtent - ( - new SourcePosition(138, 2, 51), - new SourcePosition(192, 3, 34), - "/* and a multi-line\r\n" + - " comment */" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadBooleanLiteralTokenMethod - { - - [Test] - public static void ShouldReadLowerCaseFalseToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("false") - ); - var expectedTokens = new List { - new BooleanLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(4, 1, 5), - "false" - ), - false - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadMixedCaseFalseToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("False") - ); - var expectedTokens = new List { - new BooleanLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(4, 1, 5), - "False" - ), - false - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadUpperCaseFalseToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("FALSE") - ); - var expectedTokens = new List { - new BooleanLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(4, 1, 5), - "FALSE" - ), - false - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadLowerCaseTrueToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("true") - ); - var expectedTokens = new List { - new BooleanLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(3, 1, 4), - "true" - ), - true - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadMixedCaseTrueToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("True") - ); - var expectedTokens = new List { - new BooleanLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(3, 1, 4), - "True" - ), - true - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadUpperCaseTrueToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("TRUE") - ); - var expectedTokens = new List { - new BooleanLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new 
SourcePosition(3, 1, 4), - "TRUE" - ), - true - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadNullLiteralTokenMethod - { - - [Test] - public static void ShouldReadLowerCaseNullToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("null") - ); - var expectedTokens = new List { - new NullLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(3, 1, 4), - "null" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadMixedCaseNullToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("Null") - ); - var expectedTokens = new List { - new NullLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(3, 1, 4), - "Null" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadUpperCaseNullToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("NULL") - ); - var expectedTokens = new List { - new NullLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(3, 1, 4), - "NULL" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadPragmaTokenMethod - { - - [Test] - public static void ShouldReadLowerCasePragmaToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("#pragma") - ); - var expectedTokens = new List { - new PragmaToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(6, 1, 7), - "#pragma" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadMixedCasePragmaToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("#Pragma") - ); - var expectedTokens = new List { - new PragmaToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(6, 1, 7), - "#Pragma" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadUpperCasePragmaToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("#PRAGMA") - ); - var expectedTokens = new List { - new PragmaToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(6, 1, 7), - "#PRAGMA" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadIdentifierTokenMethod - { - - [Test] - public static void ShouldReadIdentifierToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From - ( - "myIdentifier\r\n" + - "myIdentifier2" - ) - ); - var expectedTokens = new List { - new IdentifierToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(11, 1, 12), - "myIdentifier" - ), - "myIdentifier" - ), - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(12, 1, 13), - new SourcePosition(13, 1, 14), - "\r\n" - ) - ), - new IdentifierToken( - new SourceExtent - ( - new SourcePosition(14, 2, 1), - new SourcePosition(26, 2, 13), - "myIdentifier2" - ), - "myIdentifier2" - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadAliasIdentifierTokenMethod - { - - [Test] - public static void ShouldReadAliasIdentifierToken() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From - ( - "$myAliasIdentifier\r\n" + - "$myAliasIdentifier2" - ) - 
); - var expectedTokens = new List { - new AliasIdentifierToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(17, 1, 18), - "$myAliasIdentifier" - ), - "myAliasIdentifier" - ), - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(18, 1, 19), - new SourcePosition(19, 1, 20), - "\r\n" - ) - ), - new AliasIdentifierToken( - new SourceExtent - ( - new SourcePosition(20, 2, 1), - new SourcePosition(38, 2, 19), - "$myAliasIdentifier2" - ), - "myAliasIdentifier2" - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadInstanceWithAliasIdentifier() - { - // test case for https://github.com/mikeclayton/MofParser/issues/4 - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From - ( - "instance of cTentacleAgent as $cTentacleAgent1ref\r\n" + - "{\r\n" + - "};" - ) - ); - var expectedTokens = new List - { - new IdentifierToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(7, 1, 8), - "instance" - ), - "instance" - ), - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(8, 1, 9), - new SourcePosition(8, 1, 9), - " " - ) - ), - new IdentifierToken( - new SourceExtent - ( - new SourcePosition(9, 1, 10), - new SourcePosition(10, 1, 11), - "of" - ), - "of" - ), - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(11, 1, 12), - new SourcePosition(11, 1, 12), - " " - ) - ), - new IdentifierToken( - new SourceExtent - ( - new SourcePosition(12, 1, 13), - new SourcePosition(25, 1, 26), - "cTentacleAgent" - ), - "cTentacleAgent" - ), - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(26, 1, 27), - new SourcePosition(26, 1, 27), - " " - ) - ), - new IdentifierToken( - new SourceExtent - ( - new SourcePosition(27, 1, 28), - new SourcePosition(28, 1, 29), - "as" - ), - "as" - ), - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(29, 1, 30), - new SourcePosition(29, 1, 30), - " " - ) - ), - new AliasIdentifierToken( - new SourceExtent - ( - new SourcePosition(30, 1, 31), - new SourcePosition(48, 1, 49), - "$cTentacleAgent1ref" - ), - "cTentacleAgent1ref" - ), - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(49, 1, 50), - new SourcePosition(50, 1, 51), - "\r\n" - ) - ), - new BlockOpenToken( - new SourceExtent - ( - new SourcePosition(51, 2, 1), - new SourcePosition(51, 2, 1), - "{" - ) - ), - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(52, 2, 2), - new SourcePosition(53, 2, 3), - "\r\n" - ) - ), - new BlockCloseToken( - new SourceExtent - ( - new SourcePosition(54, 3, 1), - new SourcePosition(54, 3, 1), - "}" - ) - ), - new StatementEndToken( - new SourceExtent - ( - new SourcePosition(55, 3, 2), - new SourcePosition(55, 3, 2), - ";" - ) - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadIntegerLiteralTokenMethod - { - - // binaryValue - - [Test] - public static void ShouldReadBinaryValue0b() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("0b") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(1, 1, 2), - "0b" - ), - IntegerKind.BinaryValue, 0 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadBinaryValue1b() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("1b") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new 
SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(1, 1, 2), - "1b" - ), - IntegerKind.BinaryValue, 1 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadBinaryValue00000b() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("00000b") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(5, 1, 6), - "00000b" - ), - IntegerKind.BinaryValue, 0 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadBinaryValue10000b() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("10000b") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(5, 1, 6), - "10000b" - ), - IntegerKind.BinaryValue, 16 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadBinaryValue11111b() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("11111b") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(5, 1, 6), - "11111b" - ), - IntegerKind.BinaryValue, 31 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - // octalValue - - [Test] - public static void ShouldReadOctalValue00() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("00") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(1, 1, 2), - "00" - ), - IntegerKind.OctalValue, 0 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadOctalValue01() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("01") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(1, 1, 2), - "01" - ), - IntegerKind.OctalValue, 1 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadOctalValue00000() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("00000") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(4, 1, 5), - "00000" - ), - IntegerKind.OctalValue, 0 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadOctalValue01000() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("01000") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(4, 1, 5), - "01000" - ), - IntegerKind.OctalValue, 512 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadOctalValue01111() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("01111") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(4, 1, 5), - "01111" - ), - IntegerKind.OctalValue, 585 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadOctalValue04444() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("04444") - ); - var 
expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(4, 1, 5), - "04444" - ), - IntegerKind.OctalValue, 2340 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadOctalValue07777() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("07777") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(4, 1, 5), - "07777" - ), - IntegerKind.OctalValue, 4095 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - // hexValue - - [Test] - public static void ShouldReadHexValue0x0() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("0x0") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(2, 1, 3), - "0x0" - ), - IntegerKind.HexValue, 0 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadHexValue0x0000() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("0x0000") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(5, 1, 6), - "0x0000" - ), - IntegerKind.HexValue, 0 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadHexValue0x8888() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("0x8888") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(5, 1, 6), - "0x8888" - ), - IntegerKind.HexValue, 34952 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadHexValue0xabcd() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("0xabcd") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(5, 1, 6), - "0xabcd" - ), - IntegerKind.HexValue, 43981 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadHexValue0xABCD() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("0xABCD") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(5, 1, 6), - "0xABCD" - ), - IntegerKind.HexValue, 43981 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - // decimalValue - - [Test] - public static void ShouldReadDecimalValue0() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("0") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(0, 1, 1), - "0" - ), - IntegerKind.DecimalValue, 0 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadDecimalValue12345() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("12345") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(4, 1, 5), - "12345" - ), - IntegerKind.DecimalValue, 12345 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadDecimalValuePlus12345() 
- { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("+12345") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(5, 1, 6), - "+12345" - ), - IntegerKind.DecimalValue, 12345 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadDecimalValueMinus12345() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("-12345") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(5, 1, 6), - "-12345" - ), - IntegerKind.DecimalValue, -12345 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadDecimalValue1234567890() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("1234567890") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(9, 1, 10), - "1234567890" - ), - IntegerKind.DecimalValue, 1234567890 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadRealLiteralTokenMethod - { - - [Test] - public static void ShouldReadRealValue0_0() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("0.0") - ); - var expectedTokens = new List { - new RealLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(2, 1, 3), - "0.0" - ), - 0 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadRealValue123_45() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("123.45") - ); - var expectedTokens = new List { - new RealLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(5, 1, 6), - "123.45" - ), - 123.45 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadRealValuePlus123_45() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("+123.45") - ); - var expectedTokens = new List { - new RealLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(6, 1, 7), - "+123.45" - ), - 123.45 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadRealValueMinus123_45() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("-123.45") - ); - var expectedTokens = new List { - new RealLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(6, 1, 7), - "-123.45" - ), - -123.45 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadRealValue1234567890_00() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("1234567890.00") - ); - var expectedTokens = new List { - new RealLiteralToken( - new SourceExtent( - new SourcePosition(0, 1, 1), - new SourcePosition(12, 1, 13), - "1234567890.00" - ), - 1234567890.00 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadRealValue_45() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From(".45") - ); - var expectedTokens = new List { - new RealLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(2, 1, 3), - ".45" - ), - 0.45 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - 
} - - [Test] - public static void ShouldReadRealValuePlus_45() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("+.45") - ); - var expectedTokens = new List { - new RealLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(3, 1, 4), - "+.45" - ), - 0.45 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadRealValueMinus_45() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("-.45") - ); - var expectedTokens = new List { - new RealLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(3, 1, 4), - "-.45" - ), - -0.45 - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadStringLiteralTokenMethod - { - - [Test] - public static void ShouldReadEmptyString() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("\"\"") - ); - var expectedTokens = new List { - new StringLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(1, 1, 2), - "\"\"" - ), - string.Empty - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadBasicString() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From("\"my string literal\"") - ); - var expectedTokens = new List { - new StringLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(18, 1, 19), - "\"my string literal\"" - ), - "my string literal" - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadEscapedString() - { - var actualTokens = Lexing.Lexer.Lex( - SourceReader.From(@"""my \\ string \"" literal \' with \b lots \t and \n lots \f of \r escapes""") - ); - var expectedTokens = new List { - new StringLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(72, 1, 73), - @"""my \\ string \"" literal \' with \b lots \t and \n lots \f of \r escapes""" - ), - "my \\ string \" literal \' with \b lots \t and \n lots \f of \r escapes" - ) - }; - LexerHelper.AssertAreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class LexMethodTestCases - { - //[Test, TestCaseSource(typeof(LexMethodTestCases), "GetTestCases")] - public static void LexMethodTestsFromDisk(string mofFilename) - { - LexerTests.LexMethodTest(mofFilename); - } - public static IEnumerable GetTestCases - { - get - { - return TestUtils.GetMofTestCase("cim_schema_2.51.0Final-MOFs"); - } - } - } - - [TestFixture] - public static class LexCimSpec - { - //[Test, TestCaseSource(typeof(LexCimSpec), "GetTestCases")] - public static void LexMethodTestsFromDisk(string mofFilename) - { - LexerTests.LexMethodTest(mofFilename); - } - public static IEnumerable GetTestCases - { - get - { - return TestUtils.GetMofTestCase("Lexer\\TestCases"); - } - } - } - - private static void LexMethodTest(string mofFilename) - { - var mofText = File.ReadAllText(mofFilename); - var reader = SourceReader.From(mofText); - var actualTokens = Lexing.Lexer.Lex(reader); - var actualText = TestUtils.ConvertToJson(actualTokens); - var expectedFilename = Path.Combine(Path.GetDirectoryName(mofFilename), - Path.GetFileNameWithoutExtension(mofFilename) + ".json"); - if (!File.Exists(expectedFilename)) - { - File.WriteAllText(expectedFilename, actualText); - } - var expectedText = File.ReadAllText(expectedFilename); - Assert.AreEqual(expectedText, actualText); - } 
- - } - -} \ No newline at end of file diff --git a/src/Kingsland.MofParser.UnitTests/Lexer/LexerHelper.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerAssert.cs similarity index 57% rename from src/Kingsland.MofParser.UnitTests/Lexer/LexerHelper.cs rename to src/Kingsland.MofParser.UnitTests/Lexing/LexerAssert.cs index 1c85f1d9..1424b745 100644 --- a/src/Kingsland.MofParser.UnitTests/Lexer/LexerHelper.cs +++ b/src/Kingsland.MofParser.UnitTests/Lexing/LexerAssert.cs @@ -1,21 +1,22 @@ using Kingsland.MofParser.Tokens; -using Kingsland.MofParser.UnitTests.Helpers; +using Kingsland.MofParser.UnitTests.Tokens; using Kingsland.ParseFx.Syntax; using NUnit.Framework; using System; using System.Collections.Generic; -namespace Kingsland.MofParser.UnitTests.Lexer +namespace Kingsland.MofParser.UnitTests.Lexing { - public sealed class LexerHelper + + public sealed class LexerAssert { - public static void AssertAreEqual(SyntaxToken expectedToken, SyntaxToken actualToken) + public static void AreEqual(SyntaxToken expectedToken, SyntaxToken actualToken) { - LexerHelper.AssertAreEqualInternal(expectedToken, actualToken); + LexerAssert.AreEqualInternal(expectedToken, actualToken); } - public static void AssertAreEqual(List expectedTokens, List actualTokens) + public static void AreEqual(List expectedTokens, List actualTokens) { if ((expectedTokens == null) && (actualTokens == null)) { @@ -31,12 +32,12 @@ public static void AssertAreEqual(List expectedTokens, List(); + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + [TestFixture] + public static class MiscTests + { + + [Test] + public static void MissingWhitespaceTest() + { + var actualTokens = Lexer.Lex( + SourceReader.From("12345myIdentifier") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(4, 1, 5), + "12345" + ), + IntegerKind.DecimalValue, 12345 + ), + new IdentifierToken( + new SourceExtent + ( + new SourcePosition(5, 1, 6), + new SourcePosition(16, 1, 17), + "myIdentifier" + ), + "myIdentifier" + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + [TestFixture] + public static class LexMethodTestCases + { + //[Test, TestCaseSource(typeof(LexMethodTestCases), "GetTestCases")] + public static void LexMethodTestsFromDisk(string mofFilename) + { + LexerTests.LexMethodTest(mofFilename); + } + public static IEnumerable GetTestCases + { + get + { + return TestUtils.GetMofTestCase("cim_schema_2.51.0Final-MOFs"); + } + } + } + + [TestFixture] + public static class LexCimSpec + { + //[Test, TestCaseSource(typeof(LexCimSpec), "GetTestCases")] + public static void LexMethodTestsFromDisk(string mofFilename) + { + LexerTests.LexMethodTest(mofFilename); + } + public static IEnumerable GetTestCases + { + get + { + return TestUtils.GetMofTestCase("Lexing\\TestCases"); + } + } + } + + private static void LexMethodTest(string mofFilename) + { + var mofText = File.ReadAllText(mofFilename); + var reader = SourceReader.From(mofText); + var actualTokens = Lexer.Lex(reader); + var actualText = TestUtils.ConvertToJson(actualTokens); + var expectedFilename = Path.Combine(Path.GetDirectoryName(mofFilename), + Path.GetFileNameWithoutExtension(mofFilename) + ".json"); + if (!File.Exists(expectedFilename)) + { + File.WriteAllText(expectedFilename, actualText); + } + var expectedText = File.ReadAllText(expectedFilename); + Assert.AreEqual(expectedText, actualText); + } + + } + +} \ No newline at end of file diff --git 
a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_AliasIdentifier.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_AliasIdentifier.cs new file mode 100644 index 00000000..7edd8060 --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_AliasIdentifier.cs @@ -0,0 +1,199 @@ +using Kingsland.MofParser.Lexing; +using Kingsland.MofParser.Tokens; +using Kingsland.ParseFx.Syntax; +using Kingsland.ParseFx.Text; +using NUnit.Framework; +using System.Collections.Generic; + +namespace Kingsland.MofParser.UnitTests.Lexing +{ + + [TestFixture] + public static partial class LexerTests + { + + [TestFixture] + public static class ReadAliasIdentifierTokenMethod + { + + [Test] + public static void ShouldReadAliasIdentifierToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From + ( + "$myAliasIdentifier\r\n" + + "$myAliasIdentifier2" + ) + ); + var expectedTokens = new List { + new AliasIdentifierToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(17, 1, 18), + "$myAliasIdentifier" + ), + "myAliasIdentifier" + ), + new WhitespaceToken( + new SourceExtent + ( + new SourcePosition(18, 1, 19), + new SourcePosition(19, 1, 20), + "\r\n" + ) + ), + new AliasIdentifierToken( + new SourceExtent + ( + new SourcePosition(20, 2, 1), + new SourcePosition(38, 2, 19), + "$myAliasIdentifier2" + ), + "myAliasIdentifier2" + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadInstanceWithAliasIdentifier() + { + // test case for https://github.com/mikeclayton/MofParser/issues/4 + var actualTokens = Lexer.Lex( + SourceReader.From + ( + "instance of cTentacleAgent as $cTentacleAgent1ref\r\n" + + "{\r\n" + + "};" + ) + ); + var expectedTokens = new List + { + new IdentifierToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(7, 1, 8), + "instance" + ), + "instance" + ), + new WhitespaceToken( + new SourceExtent + ( + new SourcePosition(8, 1, 9), + new SourcePosition(8, 1, 9), + " " + ) + ), + new IdentifierToken( + new SourceExtent + ( + new SourcePosition(9, 1, 10), + new SourcePosition(10, 1, 11), + "of" + ), + "of" + ), + new WhitespaceToken( + new SourceExtent + ( + new SourcePosition(11, 1, 12), + new SourcePosition(11, 1, 12), + " " + ) + ), + new IdentifierToken( + new SourceExtent + ( + new SourcePosition(12, 1, 13), + new SourcePosition(25, 1, 26), + "cTentacleAgent" + ), + "cTentacleAgent" + ), + new WhitespaceToken( + new SourceExtent + ( + new SourcePosition(26, 1, 27), + new SourcePosition(26, 1, 27), + " " + ) + ), + new IdentifierToken( + new SourceExtent + ( + new SourcePosition(27, 1, 28), + new SourcePosition(28, 1, 29), + "as" + ), + "as" + ), + new WhitespaceToken( + new SourceExtent + ( + new SourcePosition(29, 1, 30), + new SourcePosition(29, 1, 30), + " " + ) + ), + new AliasIdentifierToken( + new SourceExtent + ( + new SourcePosition(30, 1, 31), + new SourcePosition(48, 1, 49), + "$cTentacleAgent1ref" + ), + "cTentacleAgent1ref" + ), + new WhitespaceToken( + new SourceExtent + ( + new SourcePosition(49, 1, 50), + new SourcePosition(50, 1, 51), + "\r\n" + ) + ), + new BlockOpenToken( + new SourceExtent + ( + new SourcePosition(51, 2, 1), + new SourcePosition(51, 2, 1), + "{" + ) + ), + new WhitespaceToken( + new SourceExtent + ( + new SourcePosition(52, 2, 2), + new SourcePosition(53, 2, 3), + "\r\n" + ) + ), + new BlockCloseToken( + new SourceExtent + ( + new SourcePosition(54, 3, 1), + new SourcePosition(54, 3, 1), + "}" + ) + ), + new StatementEndToken( + 
new SourceExtent + ( + new SourcePosition(55, 3, 2), + new SourcePosition(55, 3, 2), + ";" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + } + +} diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_BooleanLiteral.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_BooleanLiteral.cs new file mode 100644 index 00000000..6c911516 --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_BooleanLiteral.cs @@ -0,0 +1,143 @@ +using Kingsland.MofParser.Lexing; +using Kingsland.MofParser.Tokens; +using Kingsland.ParseFx.Syntax; +using Kingsland.ParseFx.Text; +using NUnit.Framework; +using System.Collections.Generic; + +namespace Kingsland.MofParser.UnitTests.Lexing +{ + + [TestFixture] + public static partial class LexerTests + { + + [TestFixture] + public static class ReadBooleanLiteralTokenMethod + { + + [Test] + public static void ShouldReadLowerCaseFalseToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("false") + ); + var expectedTokens = new List { + new BooleanLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(4, 1, 5), + "false" + ), + false + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadMixedCaseFalseToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("False") + ); + var expectedTokens = new List { + new BooleanLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(4, 1, 5), + "False" + ), + false + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadUpperCaseFalseToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("FALSE") + ); + var expectedTokens = new List { + new BooleanLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(4, 1, 5), + "FALSE" + ), + false + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadLowerCaseTrueToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("true") + ); + var expectedTokens = new List { + new BooleanLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(3, 1, 4), + "true" + ), + true + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadMixedCaseTrueToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("True") + ); + var expectedTokens = new List { + new BooleanLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(3, 1, 4), + "True" + ), + true + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadUpperCaseTrueToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("TRUE") + ); + var expectedTokens = new List { + new BooleanLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(3, 1, 4), + "TRUE" + ), + true + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + } + +} diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Comments.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Comments.cs new file mode 100644 index 00000000..1fcc920d --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Comments.cs @@ -0,0 +1,371 @@ +using Kingsland.MofParser.Lexing; +using Kingsland.MofParser.Tokens; +using Kingsland.ParseFx.Syntax; +using Kingsland.ParseFx.Text; +using NUnit.Framework; +using 
System.Collections.Generic; + +namespace Kingsland.MofParser.UnitTests.Lexing +{ + + [TestFixture] + public static partial class LexerTests + { + + [TestFixture] + public static class ReadCommentTokenMethod + { + + [Test] + public static void ShouldReadSingleLineEofCommentToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("// single line comment") + ); + var expectedTokens = new List { + new CommentToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(21, 1, 22), + "// single line comment" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadSingleLineEolCommentToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("// single line comment\r\n") + ); + var expectedTokens = new List { + new CommentToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(21, 1, 22), + "// single line comment" + ) + ), + new WhitespaceToken( + new SourceExtent + ( + new SourcePosition(22, 1, 23), + new SourcePosition(23, 1, 24), + "\r\n" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadMultilineEofCommentToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From( + "/*\r\n" + + "@TargetNode='MyServer'\r\n" + + "@GeneratedBy=mike.clayton\r\n" + + "@GenerationDate=07/19/2014 10:37:04\r\n" + + "@GenerationHost=MyDesktop\r\n" + + "*/" + ) + ); + var expectedTokens = new List { + new CommentToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(120, 6, 2), + "/*\r\n" + + "@TargetNode='MyServer'\r\n" + + "@GeneratedBy=mike.clayton\r\n" + + "@GenerationDate=07/19/2014 10:37:04\r\n" + + "@GenerationHost=MyDesktop\r\n" + + "*/" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadMultilineUnclosedCommentToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From( + "/*\r\n" + + "@TargetNode='MyServer'\r\n" + + "@GeneratedBy=mike.clayton\r\n" + + "@GenerationDate=07/19/2014 10:37:04\r\n" + + "@GenerationHost=MyDesktop\r\n" + ) + ); + var expectedTokens = new List { + new CommentToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(118, 5, 27), + "/*\r\n" + + "@TargetNode='MyServer'\r\n" + + "@GeneratedBy=mike.clayton\r\n" + + "@GenerationDate=07/19/2014 10:37:04\r\n" + + "@GenerationHost=MyDesktop\r\n" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadMultilineInlineAsterisks() + { + var actualTokens = Lexer.Lex( + SourceReader.From( + "/*************\r\n" + + "@TargetNode='MyServer'\r\n" + + "@GeneratedBy=mike.clayton\r\n" + + "@GenerationDate=07/19/2014 10:37:04\r\n" + + "@GenerationHost=MyDesktop\r\n" + + "*************/" + ) + ); + var expectedTokens = new List { + new CommentToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(144, 6, 14), + "/*************\r\n" + + "@TargetNode='MyServer'\r\n" + + "@GeneratedBy=mike.clayton\r\n" + + "@GenerationDate=07/19/2014 10:37:04\r\n" + + "@GenerationHost=MyDesktop\r\n" + + "*************/" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadMultilineMultiple() + { + var actualTokens = Lexer.Lex( + SourceReader.From( + "/*\r\n" + + "@TargetNode='MyServer'\r\n" + + "@GeneratedBy=mike.clayton\r\n" + + "@GenerationDate=07/19/2014 10:37:04\r\n" + + "@GenerationHost=MyDesktop\r\n" + + "*//*\r\n" + + 
"@TargetNode='MyServer2'\r\n" + + "@GeneratedBy=mike.clayton\r\n" + + "@GenerationDate=07/19/2014 10:37:04\r\n" + + "@GenerationHost=MyDesktop\r\n" + + "*/" + ) + ); + var expectedTokens = new List { + new CommentToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(120, 6, 2), + "/*\r\n" + + "@TargetNode='MyServer'\r\n" + + "@GeneratedBy=mike.clayton\r\n" + + "@GenerationDate=07/19/2014 10:37:04\r\n" + + "@GenerationHost=MyDesktop\r\n" + + "*/" + ) + ), + new CommentToken( + new SourceExtent + ( + new SourcePosition(121, 6, 3), + new SourcePosition(242, 11, 2), + "/*\r\n" + + "@TargetNode='MyServer2'\r\n" + + "@GeneratedBy=mike.clayton\r\n" + + "@GenerationDate=07/19/2014 10:37:04\r\n" + + "@GenerationHost=MyDesktop\r\n" + + "*/" + ) + ) + + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadExample1CommentToken() + { + // see DSP0221_3.0.1.pdf "5.4 Comments" + var actualTokens = Lexer.Lex( + SourceReader.From("Integer MyProperty; // This is an example of a single-line comment") + ); + var expectedTokens = new List { + new IdentifierToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(6, 1, 7), + "Integer" + ), + "Integer" + ), + new WhitespaceToken( + new SourceExtent + ( + new SourcePosition(7, 1, 8), + new SourcePosition(7, 1, 8), + " " + ) + ), + new IdentifierToken( + new SourceExtent + ( + new SourcePosition(8, 1, 9), + new SourcePosition(17, 1, 18), + "MyProperty" + ), + "MyProperty" + ), + new StatementEndToken( + new SourceExtent + ( + new SourcePosition(18, 1, 19), + new SourcePosition(18, 1, 19), + ";" + ) + ), + new WhitespaceToken( + new SourceExtent + ( + new SourcePosition(19, 1, 20), + new SourcePosition(19, 1, 20), + " " + ) + ), + new CommentToken( + new SourceExtent + ( + new SourcePosition(20, 1, 21), + new SourcePosition(65, 1, 66), + "// This is an example of a single-line comment" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadExample2CommentToken() + { + // see DSP0221_3.0.1.pdf "5.4 Comments" + var actualTokens = Lexer.Lex( + SourceReader.From( + "/* example of a comment between property definition tokens and a multi-line comment */\r\n" + + "Integer /* 16-bit integer property */ MyProperty; /* and a multi-line\r\n" + + " comment */" + ) + ); + var expectedTokens = new List { + new CommentToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(85, 1, 86), + "/* example of a comment between property definition tokens and a multi-line comment */" + ) + ), + new WhitespaceToken( + new SourceExtent + ( + new SourcePosition(86, 1, 87), + new SourcePosition(87, 1, 88), + "\r\n" + ) + ), + new IdentifierToken( + new SourceExtent + ( + new SourcePosition(88, 2, 1), + new SourcePosition(94, 2, 7), + "Integer" + ), + "Integer" + ), + new WhitespaceToken( + new SourceExtent + ( + new SourcePosition(95, 2, 8), + new SourcePosition(95, 2, 8), + " " + ) + ), + new CommentToken( + new SourceExtent + ( + new SourcePosition(96, 2, 9), + new SourcePosition(124, 2, 37), + "/* 16-bit integer property */" + ) + ), + new WhitespaceToken( + new SourceExtent + ( + new SourcePosition(125, 2, 38), + new SourcePosition(125, 2, 38), + " " + ) + ), + new IdentifierToken( + new SourceExtent + ( + new SourcePosition(126, 2, 39), + new SourcePosition(135, 2, 48), + "MyProperty" + ), + "MyProperty" + ), + new StatementEndToken( + new SourceExtent + ( + new SourcePosition(136, 2, 49), + new 
SourcePosition(136, 2, 49), + ";" + ) + ), + new WhitespaceToken( + new SourceExtent + ( + new SourcePosition(137, 2, 50), + new SourcePosition(137, 2, 50), + " " + ) + ), + new CommentToken( + new SourceExtent + ( + new SourcePosition(138, 2, 51), + new SourcePosition(192, 3, 34), + "/* and a multi-line\r\n" + + " comment */" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + } + +} diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Identifier.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Identifier.cs new file mode 100644 index 00000000..0b69c3f5 --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Identifier.cs @@ -0,0 +1,64 @@ +using Kingsland.MofParser.Lexing; +using Kingsland.MofParser.Tokens; +using Kingsland.ParseFx.Syntax; +using Kingsland.ParseFx.Text; +using NUnit.Framework; +using System.Collections.Generic; + +namespace Kingsland.MofParser.UnitTests.Lexing +{ + + [TestFixture] + public static partial class LexerTests + { + + [TestFixture] + public static class ReadIdentifierTokenMethod + { + + [Test] + public static void ShouldReadIdentifierToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From + ( + "myIdentifier\r\n" + + "myIdentifier2" + ) + ); + var expectedTokens = new List { + new IdentifierToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(11, 1, 12), + "myIdentifier" + ), + "myIdentifier" + ), + new WhitespaceToken( + new SourceExtent + ( + new SourcePosition(12, 1, 13), + new SourcePosition(13, 1, 14), + "\r\n" + ) + ), + new IdentifierToken( + new SourceExtent + ( + new SourcePosition(14, 2, 1), + new SourcePosition(26, 2, 13), + "myIdentifier2" + ), + "myIdentifier2" + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + } + +} diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_IntegerLiteral.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_IntegerLiteral.cs new file mode 100644 index 00000000..e694b0e8 --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_IntegerLiteral.cs @@ -0,0 +1,471 @@ +using Kingsland.MofParser.Lexing; +using Kingsland.MofParser.Tokens; +using Kingsland.ParseFx.Syntax; +using Kingsland.ParseFx.Text; +using NUnit.Framework; +using System.Collections.Generic; + +namespace Kingsland.MofParser.UnitTests.Lexing +{ + + [TestFixture] + public static partial class LexerTests + { + + [TestFixture] + public static class ReadIntegerLiteralTokenMethod + { + + // binaryValue + + [Test] + public static void ShouldReadBinaryValue0b() + { + var actualTokens = Lexer.Lex( + SourceReader.From("0b") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(1, 1, 2), + "0b" + ), + IntegerKind.BinaryValue, 0 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadBinaryValue1b() + { + var actualTokens = Lexer.Lex( + SourceReader.From("1b") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(1, 1, 2), + "1b" + ), + IntegerKind.BinaryValue, 1 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadBinaryValue00000b() + { + var actualTokens = Lexer.Lex( + SourceReader.From("00000b") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new 
SourcePosition(5, 1, 6), + "00000b" + ), + IntegerKind.BinaryValue, 0 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadBinaryValue10000b() + { + var actualTokens = Lexer.Lex( + SourceReader.From("10000b") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(5, 1, 6), + "10000b" + ), + IntegerKind.BinaryValue, 16 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadBinaryValue11111b() + { + var actualTokens = Lexer.Lex( + SourceReader.From("11111b") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(5, 1, 6), + "11111b" + ), + IntegerKind.BinaryValue, 31 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + // octalValue + + [Test] + public static void ShouldReadOctalValue00() + { + var actualTokens = Lexer.Lex( + SourceReader.From("00") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(1, 1, 2), + "00" + ), + IntegerKind.OctalValue, 0 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadOctalValue01() + { + var actualTokens = Lexer.Lex( + SourceReader.From("01") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(1, 1, 2), + "01" + ), + IntegerKind.OctalValue, 1 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadOctalValue00000() + { + var actualTokens = Lexer.Lex( + SourceReader.From("00000") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(4, 1, 5), + "00000" + ), + IntegerKind.OctalValue, 0 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadOctalValue01000() + { + var actualTokens = Lexer.Lex( + SourceReader.From("01000") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(4, 1, 5), + "01000" + ), + IntegerKind.OctalValue, 512 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadOctalValue01111() + { + var actualTokens = Lexer.Lex( + SourceReader.From("01111") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(4, 1, 5), + "01111" + ), + IntegerKind.OctalValue, 585 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadOctalValue04444() + { + var actualTokens = Lexer.Lex( + SourceReader.From("04444") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(4, 1, 5), + "04444" + ), + IntegerKind.OctalValue, 2340 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadOctalValue07777() + { + var actualTokens = Lexer.Lex( + SourceReader.From("07777") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(4, 1, 5), + "07777" + ), + 
IntegerKind.OctalValue, 4095 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + // hexValue + + [Test] + public static void ShouldReadHexValue0x0() + { + var actualTokens = Lexer.Lex( + SourceReader.From("0x0") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(2, 1, 3), + "0x0" + ), + IntegerKind.HexValue, 0 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadHexValue0x0000() + { + var actualTokens = Lexer.Lex( + SourceReader.From("0x0000") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(5, 1, 6), + "0x0000" + ), + IntegerKind.HexValue, 0 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadHexValue0x8888() + { + var actualTokens = Lexer.Lex( + SourceReader.From("0x8888") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(5, 1, 6), + "0x8888" + ), + IntegerKind.HexValue, 34952 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadHexValue0xabcd() + { + var actualTokens = Lexer.Lex( + SourceReader.From("0xabcd") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(5, 1, 6), + "0xabcd" + ), + IntegerKind.HexValue, 43981 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadHexValue0xABCD() + { + var actualTokens = Lexer.Lex( + SourceReader.From("0xABCD") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(5, 1, 6), + "0xABCD" + ), + IntegerKind.HexValue, 43981 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + // decimalValue + + [Test] + public static void ShouldReadDecimalValue0() + { + var actualTokens = Lexer.Lex( + SourceReader.From("0") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(0, 1, 1), + "0" + ), + IntegerKind.DecimalValue, 0 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadDecimalValue12345() + { + var actualTokens = Lexer.Lex( + SourceReader.From("12345") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(4, 1, 5), + "12345" + ), + IntegerKind.DecimalValue, 12345 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadDecimalValuePlus12345() + { + var actualTokens = Lexer.Lex( + SourceReader.From("+12345") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(5, 1, 6), + "+12345" + ), + IntegerKind.DecimalValue, 12345 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadDecimalValueMinus12345() + { + var actualTokens = Lexer.Lex( + SourceReader.From("-12345") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(5, 1, 6), + "-12345" + ), + 
IntegerKind.DecimalValue, -12345 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadDecimalValue1234567890() + { + var actualTokens = Lexer.Lex( + SourceReader.From("1234567890") + ); + var expectedTokens = new List { + new IntegerLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(9, 1, 10), + "1234567890" + ), + IntegerKind.DecimalValue, 1234567890 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + } + +} diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_NullLiteral.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_NullLiteral.cs new file mode 100644 index 00000000..139f61ba --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_NullLiteral.cs @@ -0,0 +1,80 @@ +using Kingsland.MofParser.Lexing; +using Kingsland.MofParser.Tokens; +using Kingsland.ParseFx.Syntax; +using Kingsland.ParseFx.Text; +using NUnit.Framework; +using System.Collections.Generic; + +namespace Kingsland.MofParser.UnitTests.Lexing +{ + + [TestFixture] + public static partial class LexerTests + { + + [TestFixture] + public static class ReadNullLiteralTokenMethod + { + + [Test] + public static void ShouldReadLowerCaseNullToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("null") + ); + var expectedTokens = new List { + new NullLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(3, 1, 4), + "null" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadMixedCaseNullToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("Null") + ); + var expectedTokens = new List { + new NullLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(3, 1, 4), + "Null" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadUpperCaseNullToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("NULL") + ); + var expectedTokens = new List { + new NullLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(3, 1, 4), + "NULL" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + } + +} diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Pragma.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Pragma.cs new file mode 100644 index 00000000..35dc87c4 --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Pragma.cs @@ -0,0 +1,80 @@ +using Kingsland.MofParser.Lexing; +using Kingsland.MofParser.Tokens; +using Kingsland.ParseFx.Syntax; +using Kingsland.ParseFx.Text; +using NUnit.Framework; +using System.Collections.Generic; + +namespace Kingsland.MofParser.UnitTests.Lexing +{ + + [TestFixture] + public static partial class LexerTests + { + + [TestFixture] + public static class ReadPragmaTokenMethod + { + + [Test] + public static void ShouldReadLowerCasePragmaToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("#pragma") + ); + var expectedTokens = new List { + new PragmaToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(6, 1, 7), + "#pragma" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadMixedCasePragmaToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("#Pragma") + ); + var expectedTokens = new List { + new PragmaToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), 
+ new SourcePosition(6, 1, 7), + "#Pragma" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadUpperCasePragmaToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("#PRAGMA") + ); + var expectedTokens = new List { + new PragmaToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(6, 1, 7), + "#PRAGMA" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + } + +} diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_RealLiteral.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_RealLiteral.cs new file mode 100644 index 00000000..6ffd7f45 --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_RealLiteral.cs @@ -0,0 +1,182 @@ +using Kingsland.MofParser.Lexing; +using Kingsland.MofParser.Tokens; +using Kingsland.ParseFx.Syntax; +using Kingsland.ParseFx.Text; +using NUnit.Framework; +using System.Collections.Generic; + +namespace Kingsland.MofParser.UnitTests.Lexing +{ + + [TestFixture] + public static partial class LexerTests + { + + [TestFixture] + public static class ReadRealLiteralTokenMethod + { + + [Test] + public static void ShouldReadRealValue0_0() + { + var actualTokens = Lexer.Lex( + SourceReader.From("0.0") + ); + var expectedTokens = new List { + new RealLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(2, 1, 3), + "0.0" + ), + 0 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadRealValue123_45() + { + var actualTokens = Lexer.Lex( + SourceReader.From("123.45") + ); + var expectedTokens = new List { + new RealLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(5, 1, 6), + "123.45" + ), + 123.45 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadRealValuePlus123_45() + { + var actualTokens = Lexer.Lex( + SourceReader.From("+123.45") + ); + var expectedTokens = new List { + new RealLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(6, 1, 7), + "+123.45" + ), + 123.45 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadRealValueMinus123_45() + { + var actualTokens = Lexer.Lex( + SourceReader.From("-123.45") + ); + var expectedTokens = new List { + new RealLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(6, 1, 7), + "-123.45" + ), + -123.45 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadRealValue1234567890_00() + { + var actualTokens = Lexer.Lex( + SourceReader.From("1234567890.00") + ); + var expectedTokens = new List { + new RealLiteralToken( + new SourceExtent( + new SourcePosition(0, 1, 1), + new SourcePosition(12, 1, 13), + "1234567890.00" + ), + 1234567890.00 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadRealValue_45() + { + var actualTokens = Lexer.Lex( + SourceReader.From(".45") + ); + var expectedTokens = new List { + new RealLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(2, 1, 3), + ".45" + ), + 0.45 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadRealValuePlus_45() + { + var actualTokens = Lexer.Lex( + SourceReader.From("+.45") + ); + var expectedTokens = new List { + new 
RealLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(3, 1, 4), + "+.45" + ), + 0.45 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadRealValueMinus_45() + { + var actualTokens = Lexer.Lex( + SourceReader.From("-.45") + ); + var expectedTokens = new List { + new RealLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(3, 1, 4), + "-.45" + ), + -0.45 + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + } + +} diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_StringLiteral.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_StringLiteral.cs new file mode 100644 index 00000000..205830cf --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_StringLiteral.cs @@ -0,0 +1,83 @@ +using Kingsland.MofParser.Lexing; +using Kingsland.MofParser.Tokens; +using Kingsland.ParseFx.Syntax; +using Kingsland.ParseFx.Text; +using NUnit.Framework; +using System.Collections.Generic; + +namespace Kingsland.MofParser.UnitTests.Lexing +{ + + [TestFixture] + public static partial class LexerTests + { + + [TestFixture] + public static class ReadStringLiteralTokenMethod + { + + [Test] + public static void ShouldReadEmptyString() + { + var actualTokens = Lexer.Lex( + SourceReader.From("\"\"") + ); + var expectedTokens = new List { + new StringLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(1, 1, 2), + "\"\"" + ), + string.Empty + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadBasicString() + { + var actualTokens = Lexer.Lex( + SourceReader.From("\"my string literal\"") + ); + var expectedTokens = new List { + new StringLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(18, 1, 19), + "\"my string literal\"" + ), + "my string literal" + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadEscapedString() + { + var actualTokens = Lexer.Lex( + SourceReader.From(@"""my \\ string \"" literal \' with \b lots \t and \n lots \f of \r escapes""") + ); + var expectedTokens = new List { + new StringLiteralToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(72, 1, 73), + @"""my \\ string \"" literal \' with \b lots \t and \n lots \f of \r escapes""" + ), + "my \\ string \" literal \' with \b lots \t and \n lots \f of \r escapes" + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + } + +} diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Symbols.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Symbols.cs new file mode 100644 index 00000000..fca5ca60 --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Symbols.cs @@ -0,0 +1,320 @@ +using Kingsland.MofParser.Lexing; +using Kingsland.MofParser.Tokens; +using Kingsland.ParseFx.Syntax; +using Kingsland.ParseFx.Text; +using NUnit.Framework; +using System.Collections.Generic; + +namespace Kingsland.MofParser.UnitTests.Lexing +{ + + [TestFixture] + public static partial class LexerTests + { + + [TestFixture] + public static class ReadAttributeCloseTokenMethod + { + + [Test] + public static void ShouldReadAttributeCloseToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("]") + ); + var expectedTokens = new List { + new AttributeCloseToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new 
SourcePosition(0, 1, 1), + "]" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + [TestFixture] + public static class ReadAttributeOpenTokenMethod + { + + [Test] + public static void ShouldReadAttributeOpenToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("[") + ); + var expectedTokens = new List { + new AttributeOpenToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(0, 1, 1), + "[" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + [TestFixture] + public static class ReadBlockCloseTokenMethod + { + + [Test] + public static void ShouldReadBlockCloseToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("}") + ); + var expectedTokens = new List { + new BlockCloseToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(0, 1, 1), + "}" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + [TestFixture] + public static class ReadBlockOpenTokenMethod + { + + [Test] + public static void ShouldReaBlockOpenToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("{") + ); + var expectedTokens = new List { + new BlockOpenToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(0, 1, 1), + "{" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + [TestFixture] + public static class ReadColonTokenMethod + { + + [Test] + public static void ShouldReadColonToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From(":") + ); + var expectedTokens = new List { + new ColonToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(0, 1, 1), + ":" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + [TestFixture] + public static class ReadCommaTokenMethod + { + + [Test] + public static void ShouldReadCommaToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From(",") + ); + var expectedTokens = new List { + new CommaToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(0, 1, 1), + "," + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + [TestFixture] + public static class ReadDotOperatorTokenMethod + { + + [Test] + public static void ShouldReadDotOperatorToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From(".") + ); + var expectedTokens = new List { + new DotOperatorToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(0, 1, 1), + "." + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadDotOperatorTokenWithTrailingNonDecimalDigit() + { + var actualTokens = Lexer.Lex( + SourceReader.From(".abc") + ); + var expectedTokens = new List { + new DotOperatorToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(0, 1, 1), + "." 
+ ) + ), + new IdentifierToken( + new SourceExtent + ( + new SourcePosition(1, 1, 2), + new SourcePosition(3, 1, 4), + "abc" + ), + "abc" + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + [TestFixture] + public static class ReadEqualsOperatorTokenMethod + { + + [Test] + public static void ShouldReadEqualsOperatorToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("=") + ); + var expectedTokens = new List { + new EqualsOperatorToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(0, 1, 1), + "=" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + [TestFixture] + public static class ReadParenthesesCloseTokenMethod + { + + [Test] + public static void ShouldReadEqualsOperatorToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From(")") + ); + var expectedTokens = new List { + new ParenthesisCloseToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(0, 1, 1), + ")" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + [TestFixture] + public static class ReadParenthesesOpenTokenMethod + { + + [Test] + public static void ShouldReadParenthesesOpenToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("(") + ); + var expectedTokens = new List { + new ParenthesisOpenToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(0, 1, 1), + "(" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + [TestFixture] + public static class ReadStatementEndTokenMethod + { + + [Test] + public static void ShouldReadStatementEndToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From(";") + ); + var expectedTokens = new List { + new StatementEndToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(0, 1, 1), + ";" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + } + +} diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Whitespace.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Whitespace.cs new file mode 100644 index 00000000..0493c6d2 --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Whitespace.cs @@ -0,0 +1,137 @@ +using Kingsland.MofParser.Lexing; +using Kingsland.MofParser.Tokens; +using Kingsland.ParseFx.Syntax; +using Kingsland.ParseFx.Text; +using NUnit.Framework; +using System.Collections.Generic; + +namespace Kingsland.MofParser.UnitTests.Lexing +{ + + [TestFixture] + public static partial class LexerTests + { + + [TestFixture] + public static class ReadWhitespaceTokenMethod + { + + [Test] + public static void ShouldReadSpaceWhitespaceToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From(" ") + ); + var expectedTokens = new List { + new WhitespaceToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(4, 1, 5), + " " + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadTabWhitespaceToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("\t\t\t\t\t") + ); + var expectedTokens = new List { + new WhitespaceToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(4, 1, 5), + "\t\t\t\t\t" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadCrWhitespaceToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("\r\r\r\r\r") + ); + var expectedTokens = new List { + new WhitespaceToken( + new 
SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(4, 5, 1), + "\r\r\r\r\r" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadLfWhitespaceToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("\n\n\n\n\n") + ); + var expectedTokens = new List { + new WhitespaceToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(4, 5, 1), + "\n\n\n\n\n" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadCrLfWhitespaceToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From("\r\n\r\n\r\n\r\n\r\n") + ); + var expectedTokens = new List { + new WhitespaceToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(9, 5, 2), + "\r\n\r\n\r\n\r\n\r\n" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + [Test] + public static void ShouldReadMixedWhitespaceToken() + { + var actualTokens = Lexer.Lex( + SourceReader.From(" \t\t\t\t\t\r\r\r\r\r\n\n\n\n\n\r\n\r\n\r\n\r\n\r\n") + ); + var expectedTokens = new List { + new WhitespaceToken( + new SourceExtent + ( + new SourcePosition(0, 1, 1), + new SourcePosition(29, 14, 2), + " \t\t\t\t\t\r\r\r\r\r\n\n\n\n\n\r\n\r\n\r\n\r\n\r\n" + ) + ) + }; + LexerAssert.AreEqual(expectedTokens, actualTokens); + } + + } + + } + +} diff --git a/src/Kingsland.MofParser.UnitTests/Lexer/TestCases/MyServer.json b/src/Kingsland.MofParser.UnitTests/Lexing/TestCases/MyServer.json similarity index 100% rename from src/Kingsland.MofParser.UnitTests/Lexer/TestCases/MyServer.json rename to src/Kingsland.MofParser.UnitTests/Lexing/TestCases/MyServer.json diff --git a/src/Kingsland.MofParser.UnitTests/Lexer/TestCases/MyServer.mof b/src/Kingsland.MofParser.UnitTests/Lexing/TestCases/MyServer.mof similarity index 100% rename from src/Kingsland.MofParser.UnitTests/Lexer/TestCases/MyServer.mof rename to src/Kingsland.MofParser.UnitTests/Lexing/TestCases/MyServer.mof diff --git a/src/Kingsland.MofParser.UnitTests/Model/ModelAssert.cs b/src/Kingsland.MofParser.UnitTests/Model/ModelAssert.cs new file mode 100644 index 00000000..ba1be88a --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/Model/ModelAssert.cs @@ -0,0 +1,47 @@ +using Kingsland.MofParser.Model; +using NUnit.Framework; +using System.Linq; + +namespace Kingsland.MofParser.UnitTests.Model +{ + + internal static class ModelAssert + { + + public static void AreEqual(Module expected, Module actual) + { + Assert.IsNotNull(expected); + Assert.IsNotNull(actual); + Assert.AreEqual(expected.Instances.Count, actual.Instances.Count); + foreach (var pair in expected.Instances + .Zip(actual.Instances, (exp, act) => (exp, act))) + { + ModelAssert.AreEqual(pair.exp, pair.act); + } + } + + public static void AreEqual(Instance expected, Instance actual) + { + Assert.IsNotNull(expected); + Assert.IsNotNull(actual); + Assert.AreEqual(expected.TypeName, actual.TypeName); + Assert.AreEqual(expected.Alias, actual.Alias); + Assert.AreEqual(expected.Properties.Count, actual.Properties.Count); + foreach (var pair in expected.Properties + .Zip(actual.Properties, (exp, act) => (exp, act))) + { + ModelAssert.AreEqual(pair.exp, pair.act); + } + } + + public static void AreEqual(Property expected, Property actual) + { + Assert.IsNotNull(expected); + Assert.IsNotNull(actual); + Assert.AreEqual(expected.Name, actual.Name); + Assert.AreEqual(expected.Value, actual.Value); + } + + } + +} diff --git 
a/src/Kingsland.MofParser.UnitTests/Parsing/ParserTests.cs b/src/Kingsland.MofParser.UnitTests/Parsing/ParserTests.cs index 39f25bde..602c1e6f 100644 --- a/src/Kingsland.MofParser.UnitTests/Parsing/ParserTests.cs +++ b/src/Kingsland.MofParser.UnitTests/Parsing/ParserTests.cs @@ -1,8 +1,10 @@ using Kingsland.MofParser.Ast; +using Kingsland.MofParser.Lexing; using Kingsland.MofParser.Model; using Kingsland.MofParser.Parsing; using Kingsland.MofParser.Tokens; using Kingsland.MofParser.UnitTests.Helpers; +using Kingsland.MofParser.UnitTests.Model; using Kingsland.ParseFx.Text; using NUnit.Framework; using System; @@ -24,7 +26,7 @@ public static class PropertyValueTests [Test] public static void ParsePropetyValueWithLiteralString() { - var tokens = Lexing.Lexer.Lex( + var tokens = Lexer.Lex( SourceReader.From( "instance of myType as $Alias0000006E\r\n" + "{\r\n" + @@ -131,7 +133,7 @@ public static void ParsePropetyValueWithLiteralString() [Test] public static void ParsePropetyValueWithAliasIdentifier() { - var tokens = Lexing.Lexer.Lex( + var tokens = Lexer.Lex( SourceReader.From( "instance of myType as $Alias00000070\r\n" + "{\r\n" + @@ -235,7 +237,7 @@ public static void ParsePropetyValueWithAliasIdentifier() [Test] public static void ParsePropetyValueWithEmptyArray() { - var tokens = Lexing.Lexer.Lex( + var tokens = Lexer.Lex( SourceReader.From( "instance of myType as $Alias00000070\r\n" + "{\r\n" + @@ -330,7 +332,7 @@ public static void ParsePropetyValueWithEmptyArray() [Test] public static void ParsePropetyValueArrayWithAliasIdentifier() { - var tokens = Lexing.Lexer.Lex( + var tokens = Lexer.Lex( SourceReader.From( "instance of myType as $Alias00000070\r\n" + "{\r\n" + @@ -442,7 +444,7 @@ public static void ParsePropetyValueArrayWithAliasIdentifier() [Test] public static void ParsePropetyValueArrayWithLiteralStrings() { - var tokens = Lexing.Lexer.Lex( + var tokens = Lexer.Lex( SourceReader.From( "instance of myType as $Alias00000070\r\n" + "{\r\n" + @@ -570,7 +572,7 @@ public static void ParsePropetyValueArrayWithLiteralStrings() [Test] public static void ParsePropetyValueArrayWithNumericLiteralValues() { - var tokens = Lexing.Lexer.Lex( + var tokens = Lexer.Lex( SourceReader.From( "instance of myType as $Alias00000070\r\n" + "{\r\n" + @@ -773,7 +775,7 @@ private static void ParseMethodTest(string mofFilename) { var mofText = File.ReadAllText(mofFilename); var reader = SourceReader.From(mofText); - var tokens = Lexing.Lexer.Lex(reader); + var tokens = Lexer.Lex(reader); var ast = Parser.Parse( tokens, ParserQuirks.AllowMofV2Qualifiers | diff --git a/src/Kingsland.MofParser.UnitTests/Tokens/TokenAssert.cs b/src/Kingsland.MofParser.UnitTests/Tokens/TokenAssert.cs new file mode 100644 index 00000000..e623fa40 --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/Tokens/TokenAssert.cs @@ -0,0 +1,398 @@ +using Kingsland.MofParser.Tokens; +using Kingsland.ParseFx.Text; + +namespace Kingsland.MofParser.UnitTests.Tokens +{ + + internal static class TokenAssert + { + + #region Token Comparison Methods + + public static bool AreEqual(AliasIdentifierToken expected, AliasIdentifierToken actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.Extent, actual.Extent) && + (expected.Name == actual.Name); + } + } + + public static bool AreEqual(AttributeCloseToken expected, AttributeCloseToken actual) + { + if ((expected == null) && (actual == null)) + { + 
return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.Extent, actual.Extent); + } + } + public static bool AreEqual(AttributeOpenToken expected, AttributeOpenToken actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.Extent, actual.Extent); + } + } + + public static bool AreEqual(BlockCloseToken expected, BlockCloseToken actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.Extent, actual.Extent); + } + } + + public static bool AreEqual(BlockOpenToken expected, BlockOpenToken actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.Extent, actual.Extent); + } + } + + public static bool AreEqual(BooleanLiteralToken expected, BooleanLiteralToken actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.Extent, actual.Extent) && + (expected.Value == actual.Value); + } + } + + public static bool AreEqual(ColonToken expected, ColonToken actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.Extent, actual.Extent); + } + } + + public static bool AreEqual(CommaToken expected, CommaToken actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.Extent, actual.Extent); + } + } + + public static bool AreEqual(CommentToken expected, CommentToken actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.Extent, actual.Extent); + } + } + + public static bool AreEqual(DotOperatorToken expected, DotOperatorToken actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.Extent, actual.Extent); + } + } + + public static bool AreEqual(EqualsOperatorToken expected, EqualsOperatorToken actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.Extent, actual.Extent); + } + } + + public static bool AreEqual(IdentifierToken expected, IdentifierToken actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.Extent, actual.Extent) && + (expected.Name == actual.Name); + } + } + + public static bool AreEqual(IntegerLiteralToken expected, IntegerLiteralToken actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) 
|| (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.Extent, actual.Extent) && + (expected.Kind == actual.Kind) && + (expected.Value == actual.Value); + } + } + + public static bool AreEqual(NullLiteralToken expected, NullLiteralToken actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.Extent, actual.Extent); + } + } + + public static bool AreEqual(ParenthesisCloseToken expected, ParenthesisCloseToken actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.Extent, actual.Extent); + } + } + + public static bool AreEqual(ParenthesisOpenToken expected, ParenthesisOpenToken actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.Extent, actual.Extent); + } + } + + public static bool AreEqual(PragmaToken expected, PragmaToken actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.Extent, actual.Extent); + } + } + + public static bool AreEqual(RealLiteralToken expected, RealLiteralToken actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.Extent, actual.Extent) && + (expected.Value == actual.Value); + } + } + + public static bool AreEqual(StatementEndToken expected, StatementEndToken actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.Extent, actual.Extent); + } + } + + public static bool AreEqual(StringLiteralToken expected, StringLiteralToken actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.Extent, actual.Extent) && + (expected.Value == actual.Value); + } + } + + public static bool AreEqual(WhitespaceToken expected, WhitespaceToken actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.Extent, actual.Extent); + } + } + + #endregion + + #region Helper Methods + + public static bool AreEqual(SourceExtent expected, SourceExtent actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return TokenAssert.AreEqual(expected.StartPosition, actual.StartPosition) && + TokenAssert.AreEqual(expected.EndPosition, actual.EndPosition) && + (expected.Text == actual.Text); + } + } + + public static bool AreEqual(SourcePosition expected, SourcePosition actual) + { + if ((expected == null) && (actual == null)) + { + return true; + } + else if ((expected == null) || (actual == null)) + { + return false; + } + else + { + return (expected.Position == actual.Position) 
&& + (expected.LineNumber == actual.LineNumber) && + (expected.ColumnNumber == actual.ColumnNumber); + } + } + + #endregion + + } + +} From 5518e6192526914a52dfae4e8239e2d507e30ef0 Mon Sep 17 00:00:00 2001 From: mikeclayton Date: Fri, 11 Sep 2020 23:25:10 +0100 Subject: [PATCH 04/11] Extracted common test logic from Lexer test cases into separate helper function --- .../Kingsland.MofParser.UnitTests.csproj | 22 +-- .../Lexing/LexerTests.cs | 52 +------ ....cs => LexerTests_AliasIdentifierToken.cs} | 29 ++-- ...l.cs => LexerTests_BooleanLiteralToken.cs} | 39 ++--- ...Comments.cs => LexerTests_CommentToken.cs} | 119 +++++++-------- ...ifier.cs => LexerTests_IdentifierToken.cs} | 15 +- ...l.cs => LexerTests_IntegerLiteralToken.cs} | 135 ++++++------------ ...eral.cs => LexerTests_NullLiteralToken.cs} | 21 +-- ...ts_Pragma.cs => LexerTests_PragmaToken.cs} | 21 +-- ...eral.cs => LexerTests_RealLiteralToken.cs} | 48 +++---- ...al.cs => LexerTests_StringLiteralToken.cs} | 18 +-- ...s_Symbols.cs => LexerTests_SymbolToken.cs} | 72 ++++------ ...space.cs => LexerTests_WhitespaceToken.cs} | 37 ++--- 13 files changed, 209 insertions(+), 419 deletions(-) rename src/Kingsland.MofParser.UnitTests/Lexing/{LexerTests_AliasIdentifier.cs => LexerTests_AliasIdentifierToken.cs} (89%) rename src/Kingsland.MofParser.UnitTests/Lexing/{LexerTests_BooleanLiteral.cs => LexerTests_BooleanLiteralToken.cs} (75%) rename src/Kingsland.MofParser.UnitTests/Lexing/{LexerTests_Comments.cs => LexerTests_CommentToken.cs} (76%) rename src/Kingsland.MofParser.UnitTests/Lexing/{LexerTests_Identifier.cs => LexerTests_IdentifierToken.cs} (80%) rename src/Kingsland.MofParser.UnitTests/Lexing/{LexerTests_IntegerLiteral.cs => LexerTests_IntegerLiteralToken.cs} (75%) rename src/Kingsland.MofParser.UnitTests/Lexing/{LexerTests_NullLiteral.cs => LexerTests_NullLiteralToken.cs} (75%) rename src/Kingsland.MofParser.UnitTests/Lexing/{LexerTests_Pragma.cs => LexerTests_PragmaToken.cs} (74%) rename src/Kingsland.MofParser.UnitTests/Lexing/{LexerTests_RealLiteral.cs => LexerTests_RealLiteralToken.cs} (76%) rename src/Kingsland.MofParser.UnitTests/Lexing/{LexerTests_StringLiteral.cs => LexerTests_StringLiteralToken.cs} (77%) rename src/Kingsland.MofParser.UnitTests/Lexing/{LexerTests_Symbols.cs => LexerTests_SymbolToken.cs} (78%) rename src/Kingsland.MofParser.UnitTests/Lexing/{LexerTests_Whitespace.cs => LexerTests_WhitespaceToken.cs} (74%) diff --git a/src/Kingsland.MofParser.UnitTests/Kingsland.MofParser.UnitTests.csproj b/src/Kingsland.MofParser.UnitTests/Kingsland.MofParser.UnitTests.csproj index 3e528016..41e005d3 100644 --- a/src/Kingsland.MofParser.UnitTests/Kingsland.MofParser.UnitTests.csproj +++ b/src/Kingsland.MofParser.UnitTests/Kingsland.MofParser.UnitTests.csproj @@ -36,17 +36,17 @@ - - - - - - - - - - - + + + + + + + + + + + diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests.cs index 082e4b39..541413f0 100644 --- a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests.cs +++ b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests.cs @@ -1,11 +1,9 @@ using Kingsland.MofParser.Lexing; using Kingsland.MofParser.Tokens; -using Kingsland.MofParser.UnitTests.Helpers; using Kingsland.ParseFx.Syntax; using Kingsland.ParseFx.Text; using NUnit.Framework; using System.Collections.Generic; -using System.IO; namespace Kingsland.MofParser.UnitTests.Lexing { @@ -65,54 +63,10 @@ public static void MissingWhitespaceTest() } - [TestFixture] - public static class LexMethodTestCases 
- { - //[Test, TestCaseSource(typeof(LexMethodTestCases), "GetTestCases")] - public static void LexMethodTestsFromDisk(string mofFilename) - { - LexerTests.LexMethodTest(mofFilename); - } - public static IEnumerable GetTestCases - { - get - { - return TestUtils.GetMofTestCase("cim_schema_2.51.0Final-MOFs"); - } - } - } - - [TestFixture] - public static class LexCimSpec + private static void AssertLexerTest(string sourceText, List expectedTokens) { - //[Test, TestCaseSource(typeof(LexCimSpec), "GetTestCases")] - public static void LexMethodTestsFromDisk(string mofFilename) - { - LexerTests.LexMethodTest(mofFilename); - } - public static IEnumerable GetTestCases - { - get - { - return TestUtils.GetMofTestCase("Lexing\\TestCases"); - } - } - } - - private static void LexMethodTest(string mofFilename) - { - var mofText = File.ReadAllText(mofFilename); - var reader = SourceReader.From(mofText); - var actualTokens = Lexer.Lex(reader); - var actualText = TestUtils.ConvertToJson(actualTokens); - var expectedFilename = Path.Combine(Path.GetDirectoryName(mofFilename), - Path.GetFileNameWithoutExtension(mofFilename) + ".json"); - if (!File.Exists(expectedFilename)) - { - File.WriteAllText(expectedFilename, actualText); - } - var expectedText = File.ReadAllText(expectedFilename); - Assert.AreEqual(expectedText, actualText); + var actualTokens = Lexer.Lex(SourceReader.From(sourceText)); + LexerAssert.AreEqual(expectedTokens, actualTokens); } } diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_AliasIdentifier.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_AliasIdentifierToken.cs similarity index 89% rename from src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_AliasIdentifier.cs rename to src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_AliasIdentifierToken.cs index 7edd8060..2dc7c644 100644 --- a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_AliasIdentifier.cs +++ b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_AliasIdentifierToken.cs @@ -1,5 +1,4 @@ -using Kingsland.MofParser.Lexing; -using Kingsland.MofParser.Tokens; +using Kingsland.MofParser.Tokens; using Kingsland.ParseFx.Syntax; using Kingsland.ParseFx.Text; using NUnit.Framework; @@ -19,13 +18,9 @@ public static class ReadAliasIdentifierTokenMethod [Test] public static void ShouldReadAliasIdentifierToken() { - var actualTokens = Lexer.Lex( - SourceReader.From - ( - "$myAliasIdentifier\r\n" + - "$myAliasIdentifier2" - ) - ); + var sourceText = + "$myAliasIdentifier\r\n" + + "$myAliasIdentifier2"; var expectedTokens = new List { new AliasIdentifierToken( new SourceExtent @@ -54,21 +49,17 @@ public static void ShouldReadAliasIdentifierToken() "myAliasIdentifier2" ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadInstanceWithAliasIdentifier() { // test case for https://github.com/mikeclayton/MofParser/issues/4 - var actualTokens = Lexer.Lex( - SourceReader.From - ( - "instance of cTentacleAgent as $cTentacleAgent1ref\r\n" + - "{\r\n" + - "};" - ) - ); + var sourceText = + "instance of cTentacleAgent as $cTentacleAgent1ref\r\n" + + "{\r\n" + + "};"; var expectedTokens = new List { new IdentifierToken( @@ -189,7 +180,7 @@ public static void ShouldReadInstanceWithAliasIdentifier() ) ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } } diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_BooleanLiteral.cs 
b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_BooleanLiteralToken.cs similarity index 75% rename from src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_BooleanLiteral.cs rename to src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_BooleanLiteralToken.cs index 6c911516..fecc6472 100644 --- a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_BooleanLiteral.cs +++ b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_BooleanLiteralToken.cs @@ -1,5 +1,4 @@ -using Kingsland.MofParser.Lexing; -using Kingsland.MofParser.Tokens; +using Kingsland.MofParser.Tokens; using Kingsland.ParseFx.Syntax; using Kingsland.ParseFx.Text; using NUnit.Framework; @@ -19,9 +18,7 @@ public static class ReadBooleanLiteralTokenMethod [Test] public static void ShouldReadLowerCaseFalseToken() { - var actualTokens = Lexer.Lex( - SourceReader.From("false") - ); + var sourceText = "false"; var expectedTokens = new List { new BooleanLiteralToken( new SourceExtent @@ -33,15 +30,13 @@ public static void ShouldReadLowerCaseFalseToken() false ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadMixedCaseFalseToken() { - var actualTokens = Lexer.Lex( - SourceReader.From("False") - ); + var sourceText = "False"; var expectedTokens = new List { new BooleanLiteralToken( new SourceExtent @@ -53,15 +48,13 @@ public static void ShouldReadMixedCaseFalseToken() false ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadUpperCaseFalseToken() { - var actualTokens = Lexer.Lex( - SourceReader.From("FALSE") - ); + var sourceText = "FALSE"; var expectedTokens = new List { new BooleanLiteralToken( new SourceExtent @@ -73,15 +66,13 @@ public static void ShouldReadUpperCaseFalseToken() false ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadLowerCaseTrueToken() { - var actualTokens = Lexer.Lex( - SourceReader.From("true") - ); + var sourceText = "true"; var expectedTokens = new List { new BooleanLiteralToken( new SourceExtent @@ -93,15 +84,13 @@ public static void ShouldReadLowerCaseTrueToken() true ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadMixedCaseTrueToken() { - var actualTokens = Lexer.Lex( - SourceReader.From("True") - ); + var sourceText = "True"; var expectedTokens = new List { new BooleanLiteralToken( new SourceExtent @@ -113,15 +102,13 @@ public static void ShouldReadMixedCaseTrueToken() true ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadUpperCaseTrueToken() { - var actualTokens = Lexer.Lex( - SourceReader.From("TRUE") - ); + var sourceText = "TRUE"; var expectedTokens = new List { new BooleanLiteralToken( new SourceExtent @@ -133,7 +120,7 @@ public static void ShouldReadUpperCaseTrueToken() true ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } } diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Comments.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_CommentToken.cs similarity index 76% rename from src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Comments.cs rename to 
src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_CommentToken.cs index 1fcc920d..a5974f1d 100644 --- a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Comments.cs +++ b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_CommentToken.cs @@ -1,5 +1,4 @@ -using Kingsland.MofParser.Lexing; -using Kingsland.MofParser.Tokens; +using Kingsland.MofParser.Tokens; using Kingsland.ParseFx.Syntax; using Kingsland.ParseFx.Text; using NUnit.Framework; @@ -19,9 +18,7 @@ public static class ReadCommentTokenMethod [Test] public static void ShouldReadSingleLineEofCommentToken() { - var actualTokens = Lexer.Lex( - SourceReader.From("// single line comment") - ); + var sourceText = "// single line comment"; var expectedTokens = new List { new CommentToken( new SourceExtent @@ -32,15 +29,13 @@ public static void ShouldReadSingleLineEofCommentToken() ) ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadSingleLineEolCommentToken() { - var actualTokens = Lexer.Lex( - SourceReader.From("// single line comment\r\n") - ); + var sourceText = "// single line comment\r\n"; var expectedTokens = new List { new CommentToken( new SourceExtent @@ -59,22 +54,19 @@ public static void ShouldReadSingleLineEolCommentToken() ) ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadMultilineEofCommentToken() { - var actualTokens = Lexer.Lex( - SourceReader.From( - "/*\r\n" + - "@TargetNode='MyServer'\r\n" + - "@GeneratedBy=mike.clayton\r\n" + - "@GenerationDate=07/19/2014 10:37:04\r\n" + - "@GenerationHost=MyDesktop\r\n" + - "*/" - ) - ); + var sourceText = + "/*\r\n" + + "@TargetNode='MyServer'\r\n" + + "@GeneratedBy=mike.clayton\r\n" + + "@GenerationDate=07/19/2014 10:37:04\r\n" + + "@GenerationHost=MyDesktop\r\n" + + "*/"; var expectedTokens = new List { new CommentToken( new SourceExtent @@ -90,21 +82,18 @@ public static void ShouldReadMultilineEofCommentToken() ) ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadMultilineUnclosedCommentToken() { - var actualTokens = Lexer.Lex( - SourceReader.From( - "/*\r\n" + - "@TargetNode='MyServer'\r\n" + - "@GeneratedBy=mike.clayton\r\n" + - "@GenerationDate=07/19/2014 10:37:04\r\n" + - "@GenerationHost=MyDesktop\r\n" - ) - ); + var sourceText = + "/*\r\n" + + "@TargetNode='MyServer'\r\n" + + "@GeneratedBy=mike.clayton\r\n" + + "@GenerationDate=07/19/2014 10:37:04\r\n" + + "@GenerationHost=MyDesktop\r\n"; var expectedTokens = new List { new CommentToken( new SourceExtent @@ -119,22 +108,19 @@ public static void ShouldReadMultilineUnclosedCommentToken() ) ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadMultilineInlineAsterisks() { - var actualTokens = Lexer.Lex( - SourceReader.From( - "/*************\r\n" + - "@TargetNode='MyServer'\r\n" + - "@GeneratedBy=mike.clayton\r\n" + - "@GenerationDate=07/19/2014 10:37:04\r\n" + - "@GenerationHost=MyDesktop\r\n" + - "*************/" - ) - ); + var sourceText = + "/*************\r\n" + + "@TargetNode='MyServer'\r\n" + + "@GeneratedBy=mike.clayton\r\n" + + "@GenerationDate=07/19/2014 10:37:04\r\n" + + "@GenerationHost=MyDesktop\r\n" + + "*************/"; var expectedTokens = new List { new CommentToken( new SourceExtent @@ -150,27 
+136,24 @@ public static void ShouldReadMultilineInlineAsterisks() ) ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadMultilineMultiple() { - var actualTokens = Lexer.Lex( - SourceReader.From( - "/*\r\n" + - "@TargetNode='MyServer'\r\n" + - "@GeneratedBy=mike.clayton\r\n" + - "@GenerationDate=07/19/2014 10:37:04\r\n" + - "@GenerationHost=MyDesktop\r\n" + - "*//*\r\n" + - "@TargetNode='MyServer2'\r\n" + - "@GeneratedBy=mike.clayton\r\n" + - "@GenerationDate=07/19/2014 10:37:04\r\n" + - "@GenerationHost=MyDesktop\r\n" + - "*/" - ) - ); + var sourceText = + "/*\r\n" + + "@TargetNode='MyServer'\r\n" + + "@GeneratedBy=mike.clayton\r\n" + + "@GenerationDate=07/19/2014 10:37:04\r\n" + + "@GenerationHost=MyDesktop\r\n" + + "*//*\r\n" + + "@TargetNode='MyServer2'\r\n" + + "@GeneratedBy=mike.clayton\r\n" + + "@GenerationDate=07/19/2014 10:37:04\r\n" + + "@GenerationHost=MyDesktop\r\n" + + "*/"; var expectedTokens = new List { new CommentToken( new SourceExtent @@ -200,16 +183,15 @@ public static void ShouldReadMultilineMultiple() ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadExample1CommentToken() { // see DSP0221_3.0.1.pdf "5.4 Comments" - var actualTokens = Lexer.Lex( - SourceReader.From("Integer MyProperty; // This is an example of a single-line comment") - ); + var sourceText = + "Integer MyProperty; // This is an example of a single-line comment"; var expectedTokens = new List { new IdentifierToken( new SourceExtent @@ -262,20 +244,17 @@ public static void ShouldReadExample1CommentToken() ) ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadExample2CommentToken() { // see DSP0221_3.0.1.pdf "5.4 Comments" - var actualTokens = Lexer.Lex( - SourceReader.From( - "/* example of a comment between property definition tokens and a multi-line comment */\r\n" + - "Integer /* 16-bit integer property */ MyProperty; /* and a multi-line\r\n" + - " comment */" - ) - ); + var sourceText = + "/* example of a comment between property definition tokens and a multi-line comment */\r\n" + + "Integer /* 16-bit integer property */ MyProperty; /* and a multi-line\r\n" + + " comment */"; var expectedTokens = new List { new CommentToken( new SourceExtent @@ -361,7 +340,7 @@ public static void ShouldReadExample2CommentToken() ) ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } } diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Identifier.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_IdentifierToken.cs similarity index 80% rename from src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Identifier.cs rename to src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_IdentifierToken.cs index 0b69c3f5..f10cf33c 100644 --- a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Identifier.cs +++ b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_IdentifierToken.cs @@ -1,5 +1,4 @@ -using Kingsland.MofParser.Lexing; -using Kingsland.MofParser.Tokens; +using Kingsland.MofParser.Tokens; using Kingsland.ParseFx.Syntax; using Kingsland.ParseFx.Text; using NUnit.Framework; @@ -19,13 +18,9 @@ public static class ReadIdentifierTokenMethod [Test] public static void ShouldReadIdentifierToken() { - var actualTokens = Lexer.Lex( - 
SourceReader.From - ( - "myIdentifier\r\n" + - "myIdentifier2" - ) - ); + var sourceText = + "myIdentifier\r\n" + + "myIdentifier2"; var expectedTokens = new List { new IdentifierToken( new SourceExtent @@ -54,7 +49,7 @@ public static void ShouldReadIdentifierToken() "myIdentifier2" ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } } diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_IntegerLiteral.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_IntegerLiteralToken.cs similarity index 75% rename from src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_IntegerLiteral.cs rename to src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_IntegerLiteralToken.cs index e694b0e8..ebe56cce 100644 --- a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_IntegerLiteral.cs +++ b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_IntegerLiteralToken.cs @@ -1,5 +1,4 @@ -using Kingsland.MofParser.Lexing; -using Kingsland.MofParser.Tokens; +using Kingsland.MofParser.Tokens; using Kingsland.ParseFx.Syntax; using Kingsland.ParseFx.Text; using NUnit.Framework; @@ -21,9 +20,7 @@ public static class ReadIntegerLiteralTokenMethod [Test] public static void ShouldReadBinaryValue0b() { - var actualTokens = Lexer.Lex( - SourceReader.From("0b") - ); + var sourceText = "0b"; var expectedTokens = new List { new IntegerLiteralToken( new SourceExtent @@ -35,15 +32,13 @@ public static void ShouldReadBinaryValue0b() IntegerKind.BinaryValue, 0 ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadBinaryValue1b() { - var actualTokens = Lexer.Lex( - SourceReader.From("1b") - ); + var sourceText = "1b"; var expectedTokens = new List { new IntegerLiteralToken( new SourceExtent @@ -55,15 +50,13 @@ public static void ShouldReadBinaryValue1b() IntegerKind.BinaryValue, 1 ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadBinaryValue00000b() { - var actualTokens = Lexer.Lex( - SourceReader.From("00000b") - ); + var sourceText = "00000b"; var expectedTokens = new List { new IntegerLiteralToken( new SourceExtent @@ -75,15 +68,13 @@ public static void ShouldReadBinaryValue00000b() IntegerKind.BinaryValue, 0 ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadBinaryValue10000b() { - var actualTokens = Lexer.Lex( - SourceReader.From("10000b") - ); + var sourceText = "10000b"; var expectedTokens = new List { new IntegerLiteralToken( new SourceExtent @@ -95,15 +86,13 @@ public static void ShouldReadBinaryValue10000b() IntegerKind.BinaryValue, 16 ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadBinaryValue11111b() { - var actualTokens = Lexer.Lex( - SourceReader.From("11111b") - ); + var sourceText = "11111b"; var expectedTokens = new List { new IntegerLiteralToken( new SourceExtent @@ -115,7 +104,7 @@ public static void ShouldReadBinaryValue11111b() IntegerKind.BinaryValue, 31 ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } // octalValue @@ -123,9 +112,7 @@ public static void ShouldReadBinaryValue11111b() [Test] public static void ShouldReadOctalValue00() { - var 
actualTokens = Lexer.Lex( - SourceReader.From("00") - ); + var sourceText = "00"; var expectedTokens = new List { new IntegerLiteralToken( new SourceExtent @@ -137,15 +124,13 @@ public static void ShouldReadOctalValue00() IntegerKind.OctalValue, 0 ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadOctalValue01() { - var actualTokens = Lexer.Lex( - SourceReader.From("01") - ); + var sourceText = "01"; var expectedTokens = new List { new IntegerLiteralToken( new SourceExtent @@ -157,15 +142,13 @@ public static void ShouldReadOctalValue01() IntegerKind.OctalValue, 1 ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadOctalValue00000() { - var actualTokens = Lexer.Lex( - SourceReader.From("00000") - ); + var sourceText = "00000"; var expectedTokens = new List { new IntegerLiteralToken( new SourceExtent @@ -177,15 +160,13 @@ public static void ShouldReadOctalValue00000() IntegerKind.OctalValue, 0 ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadOctalValue01000() { - var actualTokens = Lexer.Lex( - SourceReader.From("01000") - ); + var sourceText = "01000"; var expectedTokens = new List { new IntegerLiteralToken( new SourceExtent @@ -197,15 +178,13 @@ public static void ShouldReadOctalValue01000() IntegerKind.OctalValue, 512 ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadOctalValue01111() { - var actualTokens = Lexer.Lex( - SourceReader.From("01111") - ); + var sourceText = "01111"; var expectedTokens = new List { new IntegerLiteralToken( new SourceExtent @@ -217,15 +196,13 @@ public static void ShouldReadOctalValue01111() IntegerKind.OctalValue, 585 ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadOctalValue04444() { - var actualTokens = Lexer.Lex( - SourceReader.From("04444") - ); + var sourceText = "04444"; var expectedTokens = new List { new IntegerLiteralToken( new SourceExtent @@ -237,15 +214,13 @@ public static void ShouldReadOctalValue04444() IntegerKind.OctalValue, 2340 ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadOctalValue07777() { - var actualTokens = Lexer.Lex( - SourceReader.From("07777") - ); + var sourceText = "07777"; var expectedTokens = new List { new IntegerLiteralToken( new SourceExtent @@ -257,7 +232,7 @@ public static void ShouldReadOctalValue07777() IntegerKind.OctalValue, 4095 ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } // hexValue @@ -265,9 +240,7 @@ public static void ShouldReadOctalValue07777() [Test] public static void ShouldReadHexValue0x0() { - var actualTokens = Lexer.Lex( - SourceReader.From("0x0") - ); + var sourceText = "0x0"; var expectedTokens = new List { new IntegerLiteralToken( new SourceExtent @@ -279,15 +252,13 @@ public static void ShouldReadHexValue0x0() IntegerKind.HexValue, 0 ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadHexValue0x0000() { - var 
actualTokens = Lexer.Lex( - SourceReader.From("0x0000") - ); + var sourceText = "0x0000"; var expectedTokens = new List { new IntegerLiteralToken( new SourceExtent @@ -299,15 +270,13 @@ public static void ShouldReadHexValue0x0000() IntegerKind.HexValue, 0 ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadHexValue0x8888() { - var actualTokens = Lexer.Lex( - SourceReader.From("0x8888") - ); + var sourceText = "0x8888"; var expectedTokens = new List { new IntegerLiteralToken( new SourceExtent @@ -319,15 +288,13 @@ public static void ShouldReadHexValue0x8888() IntegerKind.HexValue, 34952 ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadHexValue0xabcd() { - var actualTokens = Lexer.Lex( - SourceReader.From("0xabcd") - ); + var sourceText = "0xabcd"; var expectedTokens = new List { new IntegerLiteralToken( new SourceExtent @@ -339,15 +306,13 @@ public static void ShouldReadHexValue0xabcd() IntegerKind.HexValue, 43981 ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadHexValue0xABCD() { - var actualTokens = Lexer.Lex( - SourceReader.From("0xABCD") - ); + var sourceText = "0xABCD"; var expectedTokens = new List { new IntegerLiteralToken( new SourceExtent @@ -359,7 +324,7 @@ public static void ShouldReadHexValue0xABCD() IntegerKind.HexValue, 43981 ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } // decimalValue @@ -367,9 +332,7 @@ public static void ShouldReadHexValue0xABCD() [Test] public static void ShouldReadDecimalValue0() { - var actualTokens = Lexer.Lex( - SourceReader.From("0") - ); + var sourceText = "0"; var expectedTokens = new List { new IntegerLiteralToken( new SourceExtent @@ -381,15 +344,13 @@ public static void ShouldReadDecimalValue0() IntegerKind.DecimalValue, 0 ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadDecimalValue12345() { - var actualTokens = Lexer.Lex( - SourceReader.From("12345") - ); + var sourceText = "12345"; var expectedTokens = new List { new IntegerLiteralToken( new SourceExtent @@ -401,15 +362,13 @@ public static void ShouldReadDecimalValue12345() IntegerKind.DecimalValue, 12345 ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadDecimalValuePlus12345() { - var actualTokens = Lexer.Lex( - SourceReader.From("+12345") - ); + var sourceText = "+12345"; var expectedTokens = new List { new IntegerLiteralToken( new SourceExtent @@ -421,15 +380,13 @@ public static void ShouldReadDecimalValuePlus12345() IntegerKind.DecimalValue, 12345 ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadDecimalValueMinus12345() { - var actualTokens = Lexer.Lex( - SourceReader.From("-12345") - ); + var sourceText = "-12345"; var expectedTokens = new List { new IntegerLiteralToken( new SourceExtent @@ -441,15 +398,13 @@ public static void ShouldReadDecimalValueMinus12345() IntegerKind.DecimalValue, -12345 ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, 
expectedTokens); } [Test] public static void ShouldReadDecimalValue1234567890() { - var actualTokens = Lexer.Lex( - SourceReader.From("1234567890") - ); + var sourceText = "1234567890"; var expectedTokens = new List { new IntegerLiteralToken( new SourceExtent @@ -461,7 +416,7 @@ public static void ShouldReadDecimalValue1234567890() IntegerKind.DecimalValue, 1234567890 ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } } diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_NullLiteral.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_NullLiteralToken.cs similarity index 75% rename from src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_NullLiteral.cs rename to src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_NullLiteralToken.cs index 139f61ba..3c002ca6 100644 --- a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_NullLiteral.cs +++ b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_NullLiteralToken.cs @@ -1,5 +1,4 @@ -using Kingsland.MofParser.Lexing; -using Kingsland.MofParser.Tokens; +using Kingsland.MofParser.Tokens; using Kingsland.ParseFx.Syntax; using Kingsland.ParseFx.Text; using NUnit.Framework; @@ -19,9 +18,7 @@ public static class ReadNullLiteralTokenMethod [Test] public static void ShouldReadLowerCaseNullToken() { - var actualTokens = Lexer.Lex( - SourceReader.From("null") - ); + var sourceText = "null"; var expectedTokens = new List { new NullLiteralToken( new SourceExtent @@ -32,15 +29,13 @@ public static void ShouldReadLowerCaseNullToken() ) ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadMixedCaseNullToken() { - var actualTokens = Lexer.Lex( - SourceReader.From("Null") - ); + var sourceText = "Null"; var expectedTokens = new List { new NullLiteralToken( new SourceExtent @@ -51,15 +46,13 @@ public static void ShouldReadMixedCaseNullToken() ) ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadUpperCaseNullToken() { - var actualTokens = Lexer.Lex( - SourceReader.From("NULL") - ); + var sourceText = "NULL"; var expectedTokens = new List { new NullLiteralToken( new SourceExtent @@ -70,7 +63,7 @@ public static void ShouldReadUpperCaseNullToken() ) ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } } diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Pragma.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_PragmaToken.cs similarity index 74% rename from src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Pragma.cs rename to src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_PragmaToken.cs index 35dc87c4..6c4c045c 100644 --- a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Pragma.cs +++ b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_PragmaToken.cs @@ -1,5 +1,4 @@ -using Kingsland.MofParser.Lexing; -using Kingsland.MofParser.Tokens; +using Kingsland.MofParser.Tokens; using Kingsland.ParseFx.Syntax; using Kingsland.ParseFx.Text; using NUnit.Framework; @@ -19,9 +18,7 @@ public static class ReadPragmaTokenMethod [Test] public static void ShouldReadLowerCasePragmaToken() { - var actualTokens = Lexer.Lex( - SourceReader.From("#pragma") - ); + var sourceText = "#pragma"; var expectedTokens = new List { new PragmaToken( new SourceExtent @@ -32,15 +29,13 @@ public static void 
ShouldReadLowerCasePragmaToken() ) ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadMixedCasePragmaToken() { - var actualTokens = Lexer.Lex( - SourceReader.From("#Pragma") - ); + var sourceText = "#Pragma"; var expectedTokens = new List { new PragmaToken( new SourceExtent @@ -51,15 +46,13 @@ public static void ShouldReadMixedCasePragmaToken() ) ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadUpperCasePragmaToken() { - var actualTokens = Lexer.Lex( - SourceReader.From("#PRAGMA") - ); + var sourceText = "#PRAGMA"; var expectedTokens = new List { new PragmaToken( new SourceExtent @@ -70,7 +63,7 @@ public static void ShouldReadUpperCasePragmaToken() ) ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } } diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_RealLiteral.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_RealLiteralToken.cs similarity index 76% rename from src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_RealLiteral.cs rename to src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_RealLiteralToken.cs index 6ffd7f45..9662e678 100644 --- a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_RealLiteral.cs +++ b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_RealLiteralToken.cs @@ -19,9 +19,7 @@ public static class ReadRealLiteralTokenMethod [Test] public static void ShouldReadRealValue0_0() { - var actualTokens = Lexer.Lex( - SourceReader.From("0.0") - ); + var sourceText = "0.0"; var expectedTokens = new List { new RealLiteralToken( new SourceExtent @@ -33,15 +31,13 @@ public static void ShouldReadRealValue0_0() 0 ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadRealValue123_45() { - var actualTokens = Lexer.Lex( - SourceReader.From("123.45") - ); + var sourceText = "123.45"; var expectedTokens = new List { new RealLiteralToken( new SourceExtent @@ -53,15 +49,13 @@ public static void ShouldReadRealValue123_45() 123.45 ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadRealValuePlus123_45() { - var actualTokens = Lexer.Lex( - SourceReader.From("+123.45") - ); + var sourceText = "+123.45"; var expectedTokens = new List { new RealLiteralToken( new SourceExtent @@ -73,15 +67,13 @@ public static void ShouldReadRealValuePlus123_45() 123.45 ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadRealValueMinus123_45() { - var actualTokens = Lexer.Lex( - SourceReader.From("-123.45") - ); + var sourceText = "-123.45"; var expectedTokens = new List { new RealLiteralToken( new SourceExtent @@ -93,15 +85,13 @@ public static void ShouldReadRealValueMinus123_45() -123.45 ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadRealValue1234567890_00() { - var actualTokens = Lexer.Lex( - SourceReader.From("1234567890.00") - ); + var sourceText = "1234567890.00"; var expectedTokens = new List { new RealLiteralToken( new SourceExtent( @@ -112,15 +102,13 @@ public static void 
ShouldReadRealValue1234567890_00() 1234567890.00 ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadRealValue_45() { - var actualTokens = Lexer.Lex( - SourceReader.From(".45") - ); + var sourceText = ".45"; var expectedTokens = new List { new RealLiteralToken( new SourceExtent @@ -132,15 +120,13 @@ public static void ShouldReadRealValue_45() 0.45 ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadRealValuePlus_45() { - var actualTokens = Lexer.Lex( - SourceReader.From("+.45") - ); + var sourceText = "+.45"; var expectedTokens = new List { new RealLiteralToken( new SourceExtent @@ -152,15 +138,13 @@ public static void ShouldReadRealValuePlus_45() 0.45 ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadRealValueMinus_45() { - var actualTokens = Lexer.Lex( - SourceReader.From("-.45") - ); + var sourceText = "-.45"; var expectedTokens = new List { new RealLiteralToken( new SourceExtent @@ -172,7 +156,7 @@ public static void ShouldReadRealValueMinus_45() -0.45 ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } } diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_StringLiteral.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_StringLiteralToken.cs similarity index 77% rename from src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_StringLiteral.cs rename to src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_StringLiteralToken.cs index 205830cf..f51eccaf 100644 --- a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_StringLiteral.cs +++ b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_StringLiteralToken.cs @@ -19,9 +19,7 @@ public static class ReadStringLiteralTokenMethod [Test] public static void ShouldReadEmptyString() { - var actualTokens = Lexer.Lex( - SourceReader.From("\"\"") - ); + var sourceText = "\"\""; var expectedTokens = new List { new StringLiteralToken( new SourceExtent @@ -33,15 +31,13 @@ public static void ShouldReadEmptyString() string.Empty ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadBasicString() { - var actualTokens = Lexer.Lex( - SourceReader.From("\"my string literal\"") - ); + var sourceText = "\"my string literal\""; var expectedTokens = new List { new StringLiteralToken( new SourceExtent @@ -53,15 +49,13 @@ public static void ShouldReadBasicString() "my string literal" ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadEscapedString() { - var actualTokens = Lexer.Lex( - SourceReader.From(@"""my \\ string \"" literal \' with \b lots \t and \n lots \f of \r escapes""") - ); + var sourceText = @"""my \\ string \"" literal \' with \b lots \t and \n lots \f of \r escapes"""; var expectedTokens = new List { new StringLiteralToken( new SourceExtent @@ -73,7 +67,7 @@ public static void ShouldReadEscapedString() "my \\ string \" literal \' with \b lots \t and \n lots \f of \r escapes" ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } } diff --git 
a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Symbols.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_SymbolToken.cs similarity index 78% rename from src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Symbols.cs rename to src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_SymbolToken.cs index fca5ca60..39005b9b 100644 --- a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Symbols.cs +++ b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_SymbolToken.cs @@ -19,9 +19,7 @@ public static class ReadAttributeCloseTokenMethod [Test] public static void ShouldReadAttributeCloseToken() { - var actualTokens = Lexer.Lex( - SourceReader.From("]") - ); + var sourceText = "]"; var expectedTokens = new List { new AttributeCloseToken( new SourceExtent @@ -32,7 +30,7 @@ public static void ShouldReadAttributeCloseToken() ) ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } } @@ -44,9 +42,7 @@ public static class ReadAttributeOpenTokenMethod [Test] public static void ShouldReadAttributeOpenToken() { - var actualTokens = Lexer.Lex( - SourceReader.From("[") - ); + var sourceText = "["; var expectedTokens = new List { new AttributeOpenToken( new SourceExtent @@ -57,7 +53,7 @@ public static void ShouldReadAttributeOpenToken() ) ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } } @@ -69,9 +65,7 @@ public static class ReadBlockCloseTokenMethod [Test] public static void ShouldReadBlockCloseToken() { - var actualTokens = Lexer.Lex( - SourceReader.From("}") - ); + var sourceText = "}"; var expectedTokens = new List { new BlockCloseToken( new SourceExtent @@ -82,7 +76,7 @@ public static void ShouldReadBlockCloseToken() ) ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } } @@ -94,9 +88,7 @@ public static class ReadBlockOpenTokenMethod [Test] public static void ShouldReaBlockOpenToken() { - var actualTokens = Lexer.Lex( - SourceReader.From("{") - ); + var sourceText = "{"; var expectedTokens = new List { new BlockOpenToken( new SourceExtent @@ -107,7 +99,7 @@ public static void ShouldReaBlockOpenToken() ) ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } } @@ -119,9 +111,7 @@ public static class ReadColonTokenMethod [Test] public static void ShouldReadColonToken() { - var actualTokens = Lexer.Lex( - SourceReader.From(":") - ); + var sourceText = ":"; var expectedTokens = new List { new ColonToken( new SourceExtent @@ -132,7 +122,7 @@ public static void ShouldReadColonToken() ) ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } } @@ -144,9 +134,7 @@ public static class ReadCommaTokenMethod [Test] public static void ShouldReadCommaToken() { - var actualTokens = Lexer.Lex( - SourceReader.From(",") - ); + var sourceText = ","; var expectedTokens = new List { new CommaToken( new SourceExtent @@ -157,7 +145,7 @@ public static void ShouldReadCommaToken() ) ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } } @@ -169,9 +157,7 @@ public static class ReadDotOperatorTokenMethod [Test] public static void ShouldReadDotOperatorToken() { - var actualTokens = Lexer.Lex( - SourceReader.From(".") - ); + var sourceText = "."; var expectedTokens = new List { new DotOperatorToken( new SourceExtent @@ -182,15 
+168,13 @@ public static void ShouldReadDotOperatorToken() ) ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadDotOperatorTokenWithTrailingNonDecimalDigit() { - var actualTokens = Lexer.Lex( - SourceReader.From(".abc") - ); + var sourceText = ".abc"; var expectedTokens = new List { new DotOperatorToken( new SourceExtent @@ -210,7 +194,7 @@ public static void ShouldReadDotOperatorTokenWithTrailingNonDecimalDigit() "abc" ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } } @@ -222,9 +206,7 @@ public static class ReadEqualsOperatorTokenMethod [Test] public static void ShouldReadEqualsOperatorToken() { - var actualTokens = Lexer.Lex( - SourceReader.From("=") - ); + var sourceText = "="; var expectedTokens = new List { new EqualsOperatorToken( new SourceExtent @@ -235,7 +217,7 @@ public static void ShouldReadEqualsOperatorToken() ) ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } } @@ -247,9 +229,7 @@ public static class ReadParenthesesCloseTokenMethod [Test] public static void ShouldReadEqualsOperatorToken() { - var actualTokens = Lexer.Lex( - SourceReader.From(")") - ); + var sourceText = ")"; var expectedTokens = new List { new ParenthesisCloseToken( new SourceExtent @@ -260,7 +240,7 @@ public static void ShouldReadEqualsOperatorToken() ) ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } } @@ -272,9 +252,7 @@ public static class ReadParenthesesOpenTokenMethod [Test] public static void ShouldReadParenthesesOpenToken() { - var actualTokens = Lexer.Lex( - SourceReader.From("(") - ); + var sourceText = "("; var expectedTokens = new List { new ParenthesisOpenToken( new SourceExtent @@ -285,7 +263,7 @@ public static void ShouldReadParenthesesOpenToken() ) ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } } @@ -297,9 +275,7 @@ public static class ReadStatementEndTokenMethod [Test] public static void ShouldReadStatementEndToken() { - var actualTokens = Lexer.Lex( - SourceReader.From(";") - ); + var sourceText = ";"; var expectedTokens = new List { new StatementEndToken( new SourceExtent @@ -310,7 +286,7 @@ public static void ShouldReadStatementEndToken() ) ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } } diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Whitespace.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_WhitespaceToken.cs similarity index 74% rename from src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Whitespace.cs rename to src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_WhitespaceToken.cs index 0493c6d2..2ce37603 100644 --- a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Whitespace.cs +++ b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_WhitespaceToken.cs @@ -19,9 +19,7 @@ public static class ReadWhitespaceTokenMethod [Test] public static void ShouldReadSpaceWhitespaceToken() { - var actualTokens = Lexer.Lex( - SourceReader.From(" ") - ); + var sourceText = " "; var expectedTokens = new List { new WhitespaceToken( new SourceExtent @@ -32,15 +30,13 @@ public static void ShouldReadSpaceWhitespaceToken() ) ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, 
expectedTokens); } [Test] public static void ShouldReadTabWhitespaceToken() { - var actualTokens = Lexer.Lex( - SourceReader.From("\t\t\t\t\t") - ); + var sourceText = "\t\t\t\t\t"; var expectedTokens = new List { new WhitespaceToken( new SourceExtent @@ -51,15 +47,13 @@ public static void ShouldReadTabWhitespaceToken() ) ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadCrWhitespaceToken() { - var actualTokens = Lexer.Lex( - SourceReader.From("\r\r\r\r\r") - ); + var sourceText = "\r\r\r\r\r"; var expectedTokens = new List { new WhitespaceToken( new SourceExtent @@ -70,15 +64,13 @@ public static void ShouldReadCrWhitespaceToken() ) ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadLfWhitespaceToken() { - var actualTokens = Lexer.Lex( - SourceReader.From("\n\n\n\n\n") - ); + var sourceText = "\n\n\n\n\n"; var expectedTokens = new List { new WhitespaceToken( new SourceExtent @@ -89,15 +81,13 @@ public static void ShouldReadLfWhitespaceToken() ) ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadCrLfWhitespaceToken() { - var actualTokens = Lexer.Lex( - SourceReader.From("\r\n\r\n\r\n\r\n\r\n") - ); + var sourceText = "\r\n\r\n\r\n\r\n\r\n"; var expectedTokens = new List { new WhitespaceToken( new SourceExtent @@ -108,15 +98,14 @@ public static void ShouldReadCrLfWhitespaceToken() ) ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } [Test] public static void ShouldReadMixedWhitespaceToken() { - var actualTokens = Lexer.Lex( - SourceReader.From(" \t\t\t\t\t\r\r\r\r\r\n\n\n\n\n\r\n\r\n\r\n\r\n\r\n") - ); + var sourceText = + " \t\t\t\t\t\r\r\r\r\r\n\n\n\n\n\r\n\r\n\r\n\r\n\r\n"; var expectedTokens = new List { new WhitespaceToken( new SourceExtent @@ -127,7 +116,7 @@ public static void ShouldReadMixedWhitespaceToken() ) ) }; - LexerAssert.AreEqual(expectedTokens, actualTokens); + LexerTests.AssertLexerTest(sourceText, expectedTokens); } } From 307ad0a7f6ecee216bd9c37c263936416fd6a1c1 Mon Sep 17 00:00:00 2001 From: mikeclayton Date: Sat, 12 Sep 2020 22:06:05 +0100 Subject: [PATCH 05/11] Split MofGeneratorTests and ParserTests into multiple partial class files --- .../CodeGen/MofGeneratorTests.cs | 1466 ----------------- .../CodeGen/RoundtripTests.cs | 65 + .../RoundtripTests_AssociationDeclaration.cs | 61 + .../CodeGen/RoundtripTests_BooleanValue.cs | 31 + .../RoundtripTests_ClassDeclaration.cs | 148 ++ .../RoundtripTests_CompilerDirective.cs | 36 + .../RoundtripTests_ComplexTypeValue.cs | 99 ++ .../RoundtripTests_ComplexTypeValue2.cs | 158 ++ .../CodeGen/RoundtripTests_EnumTypeValue.cs | 159 ++ .../RoundtripTests_EnumerationDeclaration.cs | 170 ++ .../CodeGen/RoundtripTests_IntegerValues.cs | 78 + .../RoundtripTests_MethodDeclaration.cs | 133 ++ .../RoundtripTests_PrimitiveTypeValue.cs | 102 ++ .../RoundtripTests_PropertyDeclaration.cs | 71 + .../CodeGen/RoundtripTests_Qualifier.cs | 31 + .../CodeGen/RoundtripTests_QualifierList.cs | 70 + .../CodeGen/RoundtripTests_RealValue.cs | 86 + .../CodeGen/RoundtripTests_StringValue.cs | 58 + .../RoundtripTests_StructureDeclaration.cs | 136 ++ .../Kingsland.MofParser.UnitTests.csproj | 20 +- .../Lexing/LexerTests_RealLiteralToken.cs | 3 +- 
.../Lexing/LexerTests_StringLiteralToken.cs | 3 +- .../Lexing/LexerTests_SymbolToken.cs | 3 +- .../Lexing/LexerTests_WhitespaceToken.cs | 3 +- .../Parsing/ParserTests.cs | 717 +------- .../Parsing/ParserTests_ComplexTypeValue.cs | 729 ++++++++ 26 files changed, 2446 insertions(+), 2190 deletions(-) delete mode 100644 src/Kingsland.MofParser.UnitTests/CodeGen/MofGeneratorTests.cs create mode 100644 src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests.cs create mode 100644 src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_AssociationDeclaration.cs create mode 100644 src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_BooleanValue.cs create mode 100644 src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_ClassDeclaration.cs create mode 100644 src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_CompilerDirective.cs create mode 100644 src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_ComplexTypeValue.cs create mode 100644 src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_ComplexTypeValue2.cs create mode 100644 src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_EnumTypeValue.cs create mode 100644 src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_EnumerationDeclaration.cs create mode 100644 src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_IntegerValues.cs create mode 100644 src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_MethodDeclaration.cs create mode 100644 src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_PrimitiveTypeValue.cs create mode 100644 src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_PropertyDeclaration.cs create mode 100644 src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_Qualifier.cs create mode 100644 src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_QualifierList.cs create mode 100644 src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_RealValue.cs create mode 100644 src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_StringValue.cs create mode 100644 src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_StructureDeclaration.cs create mode 100644 src/Kingsland.MofParser.UnitTests/Parsing/ParserTests_ComplexTypeValue.cs diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/MofGeneratorTests.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/MofGeneratorTests.cs deleted file mode 100644 index fa1582db..00000000 --- a/src/Kingsland.MofParser.UnitTests/CodeGen/MofGeneratorTests.cs +++ /dev/null @@ -1,1466 +0,0 @@ -using Kingsland.MofParser.CodeGen; -using Kingsland.MofParser.Lexing; -using Kingsland.MofParser.Parsing; -using Kingsland.ParseFx.Parsing; -using Kingsland.ParseFx.Text; -using NUnit.Framework; - -namespace Kingsland.MofParser.UnitTests.CodeGen -{ - - public static class RoundtripTests - { - - #region 7.3 Compiler directives - - public static class CompilerDirectiveTests - { - - [Test] - public static void CompilerDirectiveShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "#pragma include (\"GlobalStructs/GOLF_Address.mof\")" - ); - } - - [Test] - public static void CompilerDirectiveWithMultipleSingleStringsShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "#pragma include (\"GlobalStructs\" \"/\" \"GOLF_Address.mof\")" - ); - } - - } - - #endregion - - #region 7.4 Qualifiers - - public static class QualifierTests - { - - [Test] - public static void QualifierShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "[Description(\"Instances of this class represent golf clubs. 
A golf club is \" \"an organization that provides member services to golf players \" \"both amateur and professional.\")]\r\n" + - "class GOLF_Club : GOLF_Base\r\n" + - "{\r\n" + - "};" - ); - } - - } - - #endregion - - #region 7.4.1 QualifierList - - public static class QualifierListTests - { - - } - - public static class QualifierValueTests - { - - [Test] - public static void QualifierWithMofV2FlavorsAndQuirksEnabledShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "[Locale(1033): ToInstance, UUID(\"{BE46D060-7A7C-11d2-BC85-00104B2CF71C}\"): ToInstance]\r\n" + - "class Win32_PrivilegesStatus : __ExtendedStatus\r\n" + - "{\r\n" + - "\t[read: ToSubClass, MappingStrings{\"Win32API|AccessControl|Windows NT Privileges\"}: ToSubClass] string PrivilegesNotHeld[];\r\n" + - "\t[read: ToSubClass, MappingStrings{\"Win32API|AccessControl|Windows NT Privileges\"}: ToSubClass] string PrivilegesRequired[];\r\n" + - "};", - ParserQuirks.AllowMofV2Qualifiers - ); - } - - [Test] - public static void QualifierWithMofV2FlavorsAndQuirksDisabledShouldThrow() - { - var sourceMof = - "[Locale(1033): ToInstance, UUID(\"{BE46D060-7A7C-11d2-BC85-00104B2CF71C}\"): ToInstance]\r\n" + - "class Win32_PrivilegesStatus : __ExtendedStatus\r\n" + - "{\r\n" + - "\t[read: ToSubClass, MappingStrings{\"Win32API|AccessControl|Windows NT Privileges\"}: ToSubClass] string PrivilegesNotHeld[];\r\n" + - "\t[read: ToSubClass, MappingStrings{\"Win32API|AccessControl|Windows NT Privileges\"}: ToSubClass] string PrivilegesRequired[];\r\n" + - "};"; - var tokens = Lexer.Lex(SourceReader.From(sourceMof)); - var tokensMof = TokenMofGenerator.ConvertToMof(tokens); - var ex = Assert.Throws( - () => - { - var astNodes = Parser.Parse(tokens); - } - ); - Assert.AreEqual( - "Unexpected token found at Position 13, Line Number 1, Column Number 14.\r\n" + - "Token Type: 'ColonToken'\r\n" + - "Token Text: ':'", - ex.Message - ); - } - - } - - #endregion - - #region 7.5.1 Structure declaration - - public static class StructureDeclarationTests - { - - [Test] - public static void EmptyStructureDeclarationShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "structure Sponsor\r\n" + - "{\r\n" + - "};" - ); - } - - [Test] - public static void StructureDeclarationWithSuperstructureShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "structure Sponsor : GOLF_MySupestructure\r\n" + - "{\r\n" + - "};" - ); - } - - [Test] - public static void StructureDeclarationWithStructureFeaturesShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "structure Sponsor\r\n" + - "{\r\n" + - "\tstring Name;\r\n" + - "\tGOLF_Date ContractSignedDate;\r\n" + - "\treal32 ContractAmount;\r\n" + - "};" - ); - } - - } - - public static class StructureFeatureTests - { - - [Test] - public static void StructureFeatureWithQualifierShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "structure Sponsor\r\n" + - "{\r\n" + - "\t[Description(\"Monthly salary in $US\")] string Name;\r\n" + - "};" - ); - } - - [Test] - public static void InvalidStructureFeatureShouldThrow() - { - - var sourceMof = - "structure Sponsor\r\n" + - "{\r\n" + - "\t100\r\n" + - "};"; - var tokens = Lexer.Lex(SourceReader.From(sourceMof)); - var tokensMof = TokenMofGenerator.ConvertToMof(tokens); - Assert.AreEqual(sourceMof, tokensMof); - var ex = Assert.Throws( - () => { - var astNodes = Parser.Parse(tokens); - } - ); - Assert.AreEqual( - "Unexpected token found at Position 23, Line Number 3, Column Number 2.\r\n" + - "Token Type: 'IntegerLiteralToken'\r\n" + - "Token Text: '100'", - 
ex.Message - ); - } - - [Test] - public static void StructureFeatureWithStructureDeclarationShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "structure Sponsor\r\n" + - "{\r\n" + - "\tstructure Nested\r\n" + - "\t{\r\n" + - "\t};\r\n" + - "};" - ); - } - - [Test] - public static void StructureFeatureWithEnumerationDeclarationShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "structure Sponsor\r\n" + - "{\r\n" + - "\tenumeration MonthsEnum : Integer\r\n" + - "\t{\r\n" + - "\t};\r\n" + - "};" - ); - } - - [Test] - public static void StructureFeatureWithPropertyDeclarationShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "structure Sponsor\r\n" + - "{\r\n" + - "\tstring Name;\r\n" + - "};" - ); - } - - } - - #endregion - - #region 7.5.2 Class declaration - - public static class ClassDeclarationTests - { - - [Test] - public static void EmptyClassDeclarationShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "class GOLF_Base\r\n" + - "{\r\n" + - "};" - ); - } - - [Test] - public static void ClassDeclarationWithSuperclassShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "class GOLF_Base : GOLF_Superclass\r\n" + - "{\r\n" + - "\tstring InstanceID;\r\n" + - "\tstring Caption = Null;\r\n" + - "};" - ); - } - - [Test] - public static void ClassDeclarationWithClassFeaturesShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "class GOLF_Base\r\n" + - "{\r\n" + - "\tstring InstanceID;\r\n" + - "\tstring Caption = Null;\r\n" + - "};" - ); - } - - [Test] - public static void ClassDeclarationsWithQualifierListShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "[Abstract, OCL{\"-- the key property cannot be NULL\", \"inv: InstanceId.size() = 10\"}]\r\n" + - "class GOLF_Base\r\n" + - "{\r\n" + - "\t[Description(\"an instance of a class that derives from the GOLF_Base class. 
\"), Key] string InstanceID;\r\n" + - "\t[Description(\"A short textual description (one- line string) of the\"), MaxLen(64)] string Caption = Null;\r\n" + - "};" - ); - } - - } - - public static class ClassFeatureTests - { - - [Test] - public static void ClassFeatureWithQualifiersShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "class Sponsor\r\n" + - "{\r\n" + - "\t[Description(\"Monthly salary in $US\")] string Name;\r\n" + - "};" - ); - } - - [Test] - public static void InvalidClassFeatureShouldThrow() - { - var sourceMof = - "class Sponsor\r\n" + - "{\r\n" + - "\t100\r\n" + - "};"; - var tokens = Lexer.Lex(SourceReader.From(sourceMof)); - var tokensMof = TokenMofGenerator.ConvertToMof(tokens); - var ex = Assert.Throws( - () => { - var astNodes = Parser.Parse(tokens); - } - ); - Assert.AreEqual( - "Unexpected token found at Position 19, Line Number 3, Column Number 2.\r\n" + - "Token Type: 'IntegerLiteralToken'\r\n" + - "Token Text: '100'", - ex.Message - ); - } - - [Test] - public static void ClassFeatureWithStructureDeclarationShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "class Sponsor\r\n" + - "{\r\n" + - "\tstructure Nested\r\n" + - "\t{\r\n" + - "\t};\r\n" + - "};" - ); - } - - [Test] - public static void ClassFeatureWithEnumerationDeclarationShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "class Sponsor\r\n" + - "{\r\n" + - "\tenumeration MonthsEnum : Integer\r\n" + - "\t{\r\n" + - "\t};\r\n" + - "};" - ); - } - - [Test] - public static void ClassFeatureWithPropertyDeclarationShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "class Sponsor\r\n" + - "{\r\n" + - "\tstring Name;\r\n" + - "};" - ); - } - - } - - #endregion - - #region 7.5.3 Association declaration - - public static class AssociationDeclarationTests - { - - [Test] - public static void EmptyAssociationDeclarationShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "association GOLF_MemberLocker\r\n" + - "{\r\n" + - "};" - ); - } - - [Test] - public static void AssociationDeclarationWithSuperAssociationShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "association GOLF_MemberLocker : GOLF_Base\r\n" + - "{\r\n" + - "\tGOLF_ClubMember REF Member;\r\n" + - "\tGOLF_Locker REF Locker;\r\n" + - "\tGOLF_Date AssignedOnDate;\r\n" + - "};" - ); - } - - [Test] - public static void AssociationDeclarationWithClassFeaturesShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "association GOLF_MemberLocker\r\n" + - "{\r\n" + - "\tGOLF_ClubMember REF Member;\r\n" + - "\tGOLF_Locker REF Locker;\r\n" + - "\tGOLF_Date AssignedOnDate;\r\n" + - "};" - ); - } - - } - - #endregion - - #region 7.5.4 Enumeration declaration - - public static class EnumerationDeclarationTests - { - - [Test] - public static void EmptyIntegerEnumerationDeclarationShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "enumeration MonthsEnum : Integer\r\n" + - "{\r\n" + - "};" - ); - } - - [Test] - public static void EmptyStringEnumerationDeclarationShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "enumeration MonthsEnum : String\r\n" + - "{\r\n" + - "};" - ); - } - - [Test] - public static void EmptyInheritedEnumerationDeclarationShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "enumeration MonthsEnum : GOLF_MyEnum\r\n" + - "{\r\n" + - "};" - ); - } - - [Test] - public static void EnumerationDeclarationWithoutValuesShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "enumeration MonthsEnum : String\r\n" + - "{\r\n" + - "\tJanuary,\r\n" + - "\tFebruary,\r\n" + - "\tMarch,\r\n" + - 
"\tApril,\r\n" + - "\tMay,\r\n" + - "\tJune,\r\n" + - "\tJuly,\r\n" + - "\tAugust,\r\n" + - "\tSeptember,\r\n" + - "\tOctober,\r\n" + - "\tNovember,\r\n" + - "\tDecember\r\n" + - "};" - ); - } - - [Test(Description = "https://github.com/mikeclayton/MofParser/issues/52")] - public static void EnumerationDeclarationDeprecatedMof300IntegerBaseAndQuirksEnabledShouldThrow() - { - // this should throw because "uint32" is recognized as an integer type. - // as a result, "July" (a string) is not a valid value for an integer enumElement value - var sourceMof = - "enumeration MonthsEnum : uint32\r\n" + - "{\r\n" + - "\tJuly = \"July\"\r\n" + - "};"; - var tokens = Lexer.Lex(SourceReader.From(sourceMof)); - var tokensMof = TokenMofGenerator.ConvertToMof(tokens); - var ex = Assert.Throws( - () => - { - var astNodes = Parser.Parse( - tokens, - ParserQuirks.AllowDeprecatedMof300IntegerTypesAsEnumerationDeclarationsBase - ); - } - ); - Assert.AreEqual( - "Unexpected token found at Position 44, Line Number 3, Column Number 9.\r\n" + - "Token Type: 'StringLiteralToken'\r\n" + - "Token Text: '\"July\"'", - ex.Message - ); - } - - [Test(Description = "https://github.com/mikeclayton/MofParser/issues/52")] - public static void EnumerationDeclarationDeprecatedMof300IntegerBaseAndQuirksDisabledShouldRoundtrip() - { - // this should roundtrip because "uint32" is not recognized as an integer type, and - // so it's assumed to be a separate base enum like "enumeration uint32 { ... };". - // as a result, there's no validation done on the datattype of the enum element and - // it will accept "July" as a valid value - RoundtripTests.AssertRoundtrip( - "enumeration MonthsEnum : uint32\r\n" + - "{\r\n" + - "\tJuly = \"July\"\r\n" + - "};" - ); - } - - } - - public static class EnumElementTests - { - - [Test] - public static void EnumElementWithQualifiersShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "enumeration MonthsEnum : integer\r\n" + - "{\r\n" + - "\t[Description(\"myDescription\")] January = 1\r\n" + - "};" - ); - } - - [Test(Description = "https://github.com/mikeclayton/MofParser/issues/41")] - public static void IntegerEnumElementShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "enumeration MonthsEnum : integer\r\n" + - "{\r\n" + - "\tJanuary = 1\r\n" + - "};" - ); - } - - [Test] - public static void StringEnumElementWithoutValueShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "enumeration GOLF_StatesEnum : string\r\n" + - "{\r\n" + - "\tAL\r\n" + - "};" - ); - } - - [Test] - public static void StringEnumElementWithValueShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "enumeration GOLF_StatesEnum : string\r\n" + - "{\r\n" + - "\tAL = \"Alabama\"\r\n" + - "};" - ); - } - - } - - #endregion - - #region 7.5.5 Property declaration - - public static class PropertyDeclarationTests - { - - [Test] - public static void PropertyDeclarationShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "class GOLF_Base\r\n" + - "{\r\n" + - "\tInteger Severity;\r\n" + - "};" - ); - } - - [Test] - public static void PropertyDeclarationWithArrayTypeShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "class GOLF_Base\r\n" + - "{\r\n" + - "\tInteger Severity[];\r\n" + - "};" - ); - } - - [Test] - public static void PropertyDeclarationWithDefaultValueShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "class GOLF_Base\r\n" + - "{\r\n" + - "\tInteger Severity = 0;\r\n" + - "};" - ); - } - - [Test(Description = "https://github.com/mikeclayton/MofParser/issues/28")] - public static void 
PropertyDeclarationWithDeprecatedMof300IntegerReturnTypesAndQuirksDisabledShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "class GOLF_Base\r\n" + - "{\r\n" + - "\tuint8 SeverityUint8;\r\n" + - "\tuint16 SeverityUint16;\r\n" + - "\tuint32 SeverityUint32;\r\n" + - "\tuint64 SeverityUint64;\r\n" + - "\tsint8 SeveritySint8;\r\n" + - "\tsint16 SeveritySint16;\r\n" + - "\tsint32 SeveritySint32;\r\n" + - "\tsint64 SeveritySint64;\r\n" + - "};" - ); - } - - } - - #endregion - - #region 7.5.6 Method declaration - - public static class MethodDeclarationTests - { - - [Test] - public static void MethodDeclarationWithNoParametersShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "class GOLF_Club\r\n" + - "{\r\n" + - "\tInteger GetMembersWithOutstandingFees();\r\n" + - "};" - ); - } - - [Test] - public static void MethodDeclarationWithParameterShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "class GOLF_Club\r\n" + - "{\r\n" + - "\tInteger GetMembersWithOutstandingFees(GOLF_ClubMember lateMembers);\r\n" + - "};" - ); - } - - [Test] - public static void MethodDeclarationWithArrayParameterShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "class GOLF_Club\r\n" + - "{\r\n" + - "\tInteger GetMembersWithOutstandingFees(GOLF_ClubMember lateMembers[]);\r\n" + - "};" - ); - } - - [Test] - public static void MethodDeclarationsWithRefParameterShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "class GOLF_Club\r\n" + - "{\r\n" + - "\tInteger GetMembersWithOutstandingFees(GOLF_ClubMember REF lateMembers);\r\n" + - "};" - ); - } - - [Test(Description = "https://github.com/mikeclayton/MofParser/issues/27")] - public static void ClassDeclarationsWithMethodDeclarationWithEnumParameterShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "class GOLF_Professional : GOLF_ClubMember\r\n" + - "{\r\n" + - "\tGOLF_ResultCodeEnum GetNumberOfProfessionals(Integer NoOfPros, GOLF_Club Club, ProfessionalStatusEnum Status = Professional);\r\n" + - "};" - ); - } - - [Test(Description = "https://github.com/mikeclayton/MofParser/issues/37")] - public static void MethodDeclarationsWithArrayReturnTypeShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "class GOLF_Club\r\n" + - "{\r\n" + - "\tInteger[] GetMembersWithOutstandingFees(GOLF_ClubMember lateMembers);\r\n" + - "};" - ); - } - - [Test(Description = "https://github.com/mikeclayton/MofParser/issues/38")] - public static void MethodDeclarationWithMultipleParametersShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "class GOLF_Professional : GOLF_ClubMember\r\n" + - "{\r\n" + - "\tGOLF_ResultCodeEnum GetNumberOfProfessionals(Integer NoOfPros, GOLF_Club Club, ProfessionalStatusEnum Status = Professional);\r\n" + - "};" - ); - } - - [Test(Description = "https://github.com/mikeclayton/MofParser/issues/28")] - public static void MethodDeclarationWithDeprecatedMof300IntegerReturnTypesAndQuirksDisabledShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "class Win32_SoftwareFeature : CIM_SoftwareFeature\r\n" + - "{\r\n" + - "\tuint8 ReinstallUint8(integer ReinstallMode = 1);\r\n" + - "\tuint16 ReinstallUint16(integer ReinstallMode = 1);\r\n" + - "\tuint32 ReinstallUint32(integer ReinstallMode = 1);\r\n" + - "\tuint64 ReinstallUint64(integer ReinstallMode = 1);\r\n" + - "\tsint8 ReinstallUint8(integer ReinstallMode = 1);\r\n" + - "\tsint16 ReinstallUint16(integer ReinstallMode = 1);\r\n" + - "\tsint32 ReinstallUint32(integer ReinstallMode = 1);\r\n" + - "\tsint64 ReinstallUint64(integer ReinstallMode = 1);\r\n" + - "};" - ); - } 
- - [Test(Description = "https://github.com/mikeclayton/MofParser/issues/28")] - public static void MethodDeclarationWithDeprecatedMof300IntegerParameterTypesShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "class Win32_SoftwareFeature : CIM_SoftwareFeature\r\n" + - "{\r\n" + - "\tinteger ReinstallUint8(uint8 ReinstallMode = 1);\r\n" + - "\tinteger ReinstallUint16(uint16 ReinstallMode = 1);\r\n" + - "\tinteger ReinstallUint32(uint32 ReinstallMode = 1);\r\n" + - "\tinteger ReinstallUint64(uint64 ReinstallMode = 1);\r\n" + - "\tinteger ReinstallUint8(sint8 ReinstallMode = 1);\r\n" + - "\tinteger ReinstallUint16(sint16 ReinstallMode = 1);\r\n" + - "\tinteger ReinstallUint32(sint32 ReinstallMode = 1);\r\n" + - "\tinteger ReinstallUint64(sint64 ReinstallMode = 1);\r\n" + - "};" - ); - } - - } - - #endregion - - #region 7.5.9 Complex type value - - public static class ComplexTypeValueTests - { - - [Test] - public static void ComplexTypeValueWithComplexValuePropertyShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_ClubMember\r\n" + - "{\r\n" + - "\tLastPaymentDate = $MyAliasIdentifier;\r\n" + - "};" - ); - } - - [Test] - public static void ComplexTypeValueWithComplexValueArrayPropertyShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_ClubMember\r\n" + - "{\r\n" + - "\tLastPaymentDate = {$MyAliasIdentifier};\r\n" + - "};" - ); - } - - } - - public static class ComplexValueTests - { - - [Test] - public static void ComplexValuePropertyShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_ClubMember\r\n" + - "{\r\n" + - "\tLastPaymentDate = $MyAliasIdentifier;\r\n" + - "};" - ); - } - - [Test] - public static void ComplexValuePropertyWithValueOfShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_ClubMember\r\n" + - "{\r\n" + - "\tLastPaymentDate = value of GOLF_Date\r\n" + - "\t{\r\n" + - "\t\tMonth = July;\r\n" + - "\t};\r\n" + - "};" - ); - } - - } - - public static class ComplexValueArrayTests - { - - [Test] - public static void ComplexValueArrayWithOneItemShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_ClubMember\r\n" + - "{\r\n" + - "\tLastPaymentDate = {$MyAliasIdentifier};\r\n" + - "};" - ); - } - - [Test] - public static void ComplexValueArrayWithMultipleItemsShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_ClubMember\r\n" + - "{\r\n" + - "\tLastPaymentDate = {$MyAliasIdentifier, $MyOtherAliasIdentifier};\r\n" + - "};" - ); - } - - } - - #endregion - - #region 7.6.1 Primitive type value - - public static class LiteralValueArrayTests - { - - [Test] - public static void LiteralValueArrayWithOneItemShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_ClubMember\r\n" + - "{\r\n" + - "\tLastPaymentDate = {1};\r\n" + - "};" - ); - } - - [Test] - public static void LiteralValueArrayWithMultipleItemsShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_ClubMember\r\n" + - "{\r\n" + - "\tLastPaymentDate = {1, 2};\r\n" + - "};" - ); - } - - } - - public static class LiteralValueTests - { - - [Test] - public static void IntegerLiteralValueShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_ClubMember\r\n" + - "{\r\n" + - "\tLastPaymentDate = 1;\r\n" + - "};" - ); - } - - [Test] - public static void RealLiteralValueShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_ClubMember\r\n" + - "{\r\n" + - "\tLastPaymentDate = 0.5;\r\n" + - "};" - ); - } - - 
[Test] - public static void BooleanLiteralValueShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_ClubMember\r\n" + - "{\r\n" + - "\tLastPaymentDate = true;\r\n" + - "};" - ); - } - - [Test] - public static void NullLiteralValueShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_ClubMember\r\n" + - "{\r\n" + - "\tLastPaymentDate = null;\r\n" + - "};" - ); - } - - [Test] - public static void StringLiteralValueShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_ClubMember\r\n" + - "{\r\n" + - "\tLastPaymentDate = \"aaa\";\r\n" + - "};" - ); - } - - } - - #endregion - - #region 7.6.1.1 Integer values - - public static class IntegerValueTests - { - - [Test] - public static void IntegerValueShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_ClubMember\r\n" + - "{\r\n" + - "\tCaption = 100;\r\n" + - "};" - ); - } - - [Test] - public static void PositiveIntegerValueShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_ClubMember\r\n" + - "{\r\n" + - "\tCaption = +100;\r\n" + - "};" - ); - } - - [Test] - public static void NegativeIntegerValueShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_ClubMember\r\n" + - "{\r\n" + - "\tCaption = -100;\r\n" + - "};" - ); - } - - [Test(Description = "https://github.com/mikeclayton/MofParser/issues/xx")] - public static void IntegerValuePropertiesInOtherBasesShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of myType as $Alias00000070\r\n" + - "{\r\n" + - "\tMyBinaryValue1 = 0101010b;\r\n" + - "\tMyBinaryValue2 = +0101010b;\r\n" + - "\tMyBinaryValue3 = -0101010b;\r\n" + - "\tMyOctalValue1 = 000444444;\r\n" + - "\tMyOctalValue2 = +000444444;\r\n" + - "\tMyOctalValue3 = -000444444;\r\n" + - "\tMyHexValue1 = 0x00ABC123;\r\n" + - "\tMyHexValue2 = +0x00ABC123;\r\n" + - "\tMyHexValue3 = -0x00ABC123;\r\n" + - "\tMyDecimalValue1 = 12345;\r\n" + - "\tMyDecimalValue2 = +12345;\r\n" + - "\tMyDecimalValue3 = -12345;\r\n" + - "\tMyRealValue1 = 00123.45;\r\n" + - "\tMyRealValue2 = +00123.45;\r\n" + - "\tMyRealValue3 = -123.45;\r\n" + - "};" - ); - } - - } - - #endregion - - #region 7.6.1.1 Real values - - public static class RealValueTests - { - - [Test] - public static void RealValueShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_ClubMember\r\n" + - "{\r\n" + - "\tCaption = 0.5;\r\n" + - "};" - ); - } - - [Test(Description = "https://github.com/mikeclayton/MofParser/issues/xx")] - public static void PositiveRealValueShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_ClubMember\r\n" + - "{\r\n" + - "\tCaption = +0.5;\r\n" + - "};" - ); - } - - [Test] - public static void NegativeRealValueShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_ClubMember\r\n" + - "{\r\n" + - "\tCaption = -0.5;\r\n" + - "};" - ); - } - - [Test(Description = "https://github.com/mikeclayton/MofParser/issues/xx")] - public static void RealValueWithNoFractionShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_ClubMember\r\n" + - "{\r\n" + - "\tCaption = 5.0;\r\n" + - "};" - ); - } - - [Test(Description = "https://github.com/mikeclayton/MofParser/issues/xx")] - public static void RealValueWithTrailingZerosShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_ClubMember\r\n" + - "{\r\n" + - "\tCaption = 0.50;\r\n" + - "};" - ); - } - - [Test(Description = "https://github.com/mikeclayton/MofParser/issues/xx")] - public 
static void RealValueWithNoIntegerPartShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_ClubMember\r\n" + - "{\r\n" + - "\tCaption = .5;\r\n" + - "};" - ); - } - - } - - #endregion - - #region 7.6.1.3 String values - - public static class StringValueTests - { - - [Test] - public static void SingleStringValueShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_ClubMember\r\n" + - "{\r\n" + - "\tCaption = \"Instance of John Doe\";\r\n" + - "};" - ); - } - - [Test] - public static void MultistringValueShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_ClubMember\r\n" + - "{\r\n" + - "\tCaption = \"Instance\" \"of\" \"John\" \"Doe\";\r\n" + - "};" - ); - } - - [Test(Description = "https://github.com/mikeclayton/MofParser/issues/20")] - public static void StringValueWithSingleQuoteShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_ClubMember\r\n" + - "{\r\n" + - "\tCaption = \"Instance of John Doe\\\'s GOLF_ClubMember object\";\r\n" + - "};" - ); - } - - } - - #endregion - - #region 7.6.1.5 Boolean value - - public static class BooleanValueTests - { - - [Test] - public static void BooleanValueAstShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of myType as $Alias00000070\r\n" + - "{\r\n" + - "\tReference = TRUE;\r\n" + - "};" - ); - } - - } - - #endregion - - #region 7.6.2 Complex type value - - public static class InstanceValueDeclarationTests - { - - [Test] - public static void InstanceValueDeclarationWithNoPropertiesShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_ClubMember\r\n" + - "{\r\n" + - "};" - ); - } - - [Test] - public static void InstanceValueDeclarationWithChildPropertiesShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_ClubMember\r\n" + - "{\r\n" + - "\tFirstName = \"John\";\r\n" + - "\tLastName = \"Doe\";\r\n" + - "};" - ); - } - - [Test] - public static void InstanceValueDeclarationWithAliasShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_ClubMember as $MyAliasIdentifier\r\n" + - "{\r\n" + - "};" - ); - } - - } - - public static class StructureValueDeclarationTests - { - - [Test] - public static void StructureValueDeclarationWithNoPropertiesShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "value of GOLF_ClubMember as $MyAliasIdentifier\r\n" + - "{\r\n" + - "};" - ); - } - - [Test] - public static void StructureValueDeclarationWithChildPropertiesShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "value of GOLF_ClubMember as $MyAliasIdentifier\r\n" + - "{\r\n" + - "\tFirstName = \"John\";\r\n" + - "\tLastName = \"Doe\";\r\n" + - "};" - ); - } - - } - - //[Test] - //public static void InstanceValueDeclarationShouldRoundtrip() - //{ - // RoundtripTests.AssertRoundtrip( - // "instance of GOLF_ClubMember\r\n" + - // "{\r\n" + - // "\tCaption = \"Instance of John Doe\\\'s GOLF_ClubMember object\";\r\n" + - // "};" - // ); - //} - - //[Test] - //public static void ClassDeclarationsAstWithNumericPropertiesShouldRoundtrip() - //{ - // RoundtripTests.AssertRoundtrip( - // "instance of myType as $Alias00000070\r\n" + - // "{\r\n" + - // "\tMyBinaryValue = 0101010b;\r\n" + - // "\tMyOctalValue = 0444444;\r\n" + - // "\tMyHexValue = 0xABC123;\r\n" + - // "\tMyDecimalValue = 12345;\r\n" + - // "\tMyRealValue = 123.45;\r\n" + - // "};" - // ); - //} - - //[Test(Description = "https://github.com/mikeclayton/MofParser/issues/26"), - // 
Ignore("https://github.com/mikeclayton/MofParser/issues/26")] - //public static void InstanceValueDeclarationsWithInstanceValuePropertyShouldRoundtrip() - //{ - // RoundtripTests.AssertRoundtrip( - // "instance of GOLF_ClubMember\r\n" + - // "{\r\n" + - // "\tLastPaymentDate = instance of GOLF_Date\r\n" + - // "\t{\r\n" + - // "\tYear = 2011;\r\n" + - // "\tMonth = July;\r\n" + - // "\tDay = 31;\r\n" + - // "\t};\r\n" + - // "}"; - // ); - //} - - //[Test] - //public static void InstanceValueDeclarationWithStructureValueDeclarationPropertyShouldRoundtrip() - //{ - // RoundtripTests.AssertRoundtrip( - // "instance of GOLF_ClubMember\r\n" + - // "{\r\n" + - // "\tCaption = \"Instance of John Doe\\\'s GOLF_ClubMember object\";\r\n" + - // "\tMemberAddress = value of GOLF_Address\r\n" + - // "\t{\r\n" + - // "\t\tState = \"IL\";\r\n" + - // "\t\tCity = \"Oak Park\";\r\n" + - // "\t\tStreet = \"Oak Park Av.\";\r\n" + - // "\t\tStreetNo = \"1177\";\r\n" + - // "\t\tApartmentNo = \"3B\";\r\n" + - // "\t};\r\n" + - // "};"; - // ); - //} - - //[Test] - //public static void StructureValueDeclarationShouldRoundtrip() - //{ - // RoundtripTests.AssertRoundtrip( - // "value of GOLF_PhoneNumber as $JohnDoesPhoneNo\r\n" + - // "{\r\n" + - // "\tAreaCode = {\"9\", \"0\", \"7\"};\r\n" + - // "\tNumber = {\"7\", \"4\", \"7\", \"4\", \"8\", \"8\", \"4\"};\r\n" + - // "};"; - // ); - //} - - #endregion - - #region 7.6.3 Enum type value - - public static class EnumTypeValueTests - { - - [Test] - public static void EnumTypeValueWithEnumValueShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_Date\r\n" + - "{\r\n" + - "\tYear = 2011;\r\n" + - "\tMonth = July;\r\n" + - "\tDay = 31;\r\n" + - "};" - ); - } - - [Test] - public static void EnumTypeValueWithEnumValueArrayShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_Date\r\n" + - "{\r\n" + - "\tYear = 2011;\r\n" + - "\tMonth = {June};\r\n" + - "\tDay = 31;\r\n" + - "};" - ); - } - - } - - public static class EnumValueTests - { - - [Test] - public static void UnqalifiedEnumValueShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_Date\r\n" + - "{\r\n" + - "\tYear = 2011;\r\n" + - "\tMonth = July;\r\n" + - "\tDay = 31;\r\n" + - "};" - ); - } - - [Test] - public static void QualifiedEnumValueShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_Date\r\n" + - "{\r\n" + - "\tYear = 2011;\r\n" + - "\tMonth = MonthEnums.July;\r\n" + - "\tDay = 31;\r\n" + - "};" - ); - } - - } - - public static class EnumValueArrayTests - { - - [Test] - public static void EmptyEnumValueArrayShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_Date\r\n" + - "{\r\n" + - "\tYear = 2011;\r\n" + - "\tMonth = {June};\r\n" + - "\tDay = 31;\r\n" + - "};" - ); - } - - [Test] - public static void EnumValueArrayWithSingleEnumValueShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_Date\r\n" + - "{\r\n" + - "\tYear = 2011;\r\n" + - "\tMonth = {June};\r\n" + - "\tDay = 31;\r\n" + - "};" - ); - } - - public static void EnumValueArrayWithMultipleEnumValuesShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_Date\r\n" + - "{\r\n" + - "\tMonth = {January, February};\r\n" + - "};" - ); - } - - [Test(Description = "https://github.com/mikeclayton/MofParser/issues/25")] - public static void EnumValueArrayWithQualifiedEnumValuesAndQuirksEnabledShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_Date\r\n" + - "{\r\n" + - 
"\tMonth = {MonthEnums.July};\r\n" + - "};", - ParserQuirks.EnumValueArrayContainsEnumValuesNotEnumNames - ); - } - - [Test(Description = "https://github.com/mikeclayton/MofParser/issues/25")] - public static void EnumValueArrayWithQualifiedEnumValuesAndQuirksDisabledShouldThrow() - { - var sourceMof = - "instance of GOLF_Date\r\n" + - "{\r\n" + - "\tMonth = {MonthEnums.July};\r\n" + - "};"; - var tokens = Lexer.Lex(SourceReader.From(sourceMof)); - var tokensMof = TokenMofGenerator.ConvertToMof(tokens); - var ex = Assert.Throws( - () => - { - var astNodes = Parser.Parse(tokens); - } - ); - Assert.AreEqual( - "Unexpected token found at Position 46, Line Number 3, Column Number 21.\r\n" + - "Token Type: 'DotOperatorToken'\r\n" + - "Token Text: '.'", - ex.Message - ); - } - - } - - #endregion - - #region Roundtrip Test Cases - - //[TestFixture] - //public static class ConvertToMofMethodTestCasesWmiWinXp - //{ - // [Test, TestCaseSource(typeof(ConvertToMofMethodTestCasesWmiWinXp), "GetTestCases")] - // public static void ConvertToMofMethodTestsFromDisk(string mofFilename) - // { - // ConvertToMofTests.MofGeneratorRoundtripTest(mofFilename); - // } - // public static IEnumerable GetTestCases - // { - // get - // { - // return TestUtils.GetMofTestCase("Parsing\\WMI\\WinXp"); - // } - // } - //} - - //[TestFixture] - //public static class ConvertToMofMethodGolfExamples - //{ - // //[Test, TestCaseSource(typeof(ConvertToMofMethodGolfExamples), "GetTestCases")] - // public static void ConvertToMofMethodTestsFromDisk(string mofFilename) - // { - // ConvertToMofTests.MofGeneratorRoundtripTest(mofFilename); - // } - // public static IEnumerable GetTestCases - // { - // get - // { - // return TestUtils.GetMofTestCase("Parsing\\DSP0221_3.0.1"); - // } - // } - //} - - private static void AssertRoundtrip(string sourceMof, ParserQuirks parserQuirks = ParserQuirks.None) - { - // check the lexer tokens roundtrips ok - var tokens = Lexer.Lex(SourceReader.From(sourceMof)); - var tokensMof = TokenMofGenerator.ConvertToMof(tokens); - Assert.AreEqual(sourceMof, tokensMof); - // check the parser ast roundtrips ok - var astNodes = Parser.Parse(tokens, parserQuirks); - var astMof = AstMofGenerator.ConvertToMof(astNodes); - Assert.AreEqual(sourceMof, astMof); - } - - #endregion - - } - -} \ No newline at end of file diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests.cs new file mode 100644 index 00000000..09cda382 --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests.cs @@ -0,0 +1,65 @@ +using Kingsland.MofParser.CodeGen; +using Kingsland.MofParser.Lexing; +using Kingsland.MofParser.Parsing; +using Kingsland.ParseFx.Text; +using NUnit.Framework; + +namespace Kingsland.MofParser.UnitTests.CodeGen +{ + + public static partial class RoundtripTests + { + + #region Roundtrip Test Cases + + //[TestFixture] + //public static class ConvertToMofMethodTestCasesWmiWinXp + //{ + // [Test, TestCaseSource(typeof(ConvertToMofMethodTestCasesWmiWinXp), "GetTestCases")] + // public static void ConvertToMofMethodTestsFromDisk(string mofFilename) + // { + // ConvertToMofTests.MofGeneratorRoundtripTest(mofFilename); + // } + // public static IEnumerable GetTestCases + // { + // get + // { + // return TestUtils.GetMofTestCase("Parsing\\WMI\\WinXp"); + // } + // } + //} + + //[TestFixture] + //public static class ConvertToMofMethodGolfExamples + //{ + // //[Test, TestCaseSource(typeof(ConvertToMofMethodGolfExamples), "GetTestCases")] + 
// public static void ConvertToMofMethodTestsFromDisk(string mofFilename) + // { + // ConvertToMofTests.MofGeneratorRoundtripTest(mofFilename); + // } + // public static IEnumerable GetTestCases + // { + // get + // { + // return TestUtils.GetMofTestCase("Parsing\\DSP0221_3.0.1"); + // } + // } + //} + + private static void AssertRoundtrip(string sourceMof, ParserQuirks parserQuirks = ParserQuirks.None) + { + // check the lexer tokens roundtrips ok + var tokens = Lexer.Lex(SourceReader.From(sourceMof)); + var tokensMof = TokenMofGenerator.ConvertToMof(tokens); + Assert.AreEqual(sourceMof, tokensMof); + // check the parser ast roundtrips ok + var astNodes = Parser.Parse(tokens, parserQuirks); + var astMof = AstMofGenerator.ConvertToMof(astNodes); + Assert.AreEqual(sourceMof, astMof); + } + + #endregion + + } + +} \ No newline at end of file diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_AssociationDeclaration.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_AssociationDeclaration.cs new file mode 100644 index 00000000..6082b1f1 --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_AssociationDeclaration.cs @@ -0,0 +1,61 @@ +using Kingsland.MofParser.CodeGen; +using Kingsland.MofParser.Lexing; +using Kingsland.MofParser.Parsing; +using Kingsland.ParseFx.Parsing; +using Kingsland.ParseFx.Text; +using NUnit.Framework; + +namespace Kingsland.MofParser.UnitTests.CodeGen +{ + + public static partial class RoundtripTests + { + + #region 7.5.3 Association declaration + + public static class AssociationDeclarationTests + { + + [Test] + public static void EmptyAssociationDeclarationShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "association GOLF_MemberLocker\r\n" + + "{\r\n" + + "};" + ); + } + + [Test] + public static void AssociationDeclarationWithSuperAssociationShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "association GOLF_MemberLocker : GOLF_Base\r\n" + + "{\r\n" + + "\tGOLF_ClubMember REF Member;\r\n" + + "\tGOLF_Locker REF Locker;\r\n" + + "\tGOLF_Date AssignedOnDate;\r\n" + + "};" + ); + } + + [Test] + public static void AssociationDeclarationWithClassFeaturesShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "association GOLF_MemberLocker\r\n" + + "{\r\n" + + "\tGOLF_ClubMember REF Member;\r\n" + + "\tGOLF_Locker REF Locker;\r\n" + + "\tGOLF_Date AssignedOnDate;\r\n" + + "};" + ); + } + + } + + #endregion + + } + +} \ No newline at end of file diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_BooleanValue.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_BooleanValue.cs new file mode 100644 index 00000000..55f32a3f --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_BooleanValue.cs @@ -0,0 +1,31 @@ +using NUnit.Framework; + +namespace Kingsland.MofParser.UnitTests.CodeGen +{ + + public static partial class RoundtripTests + { + + #region 7.6.1.5 Boolean value + + public static class BooleanValueTests + { + + [Test] + public static void BooleanValueAstShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "instance of myType as $Alias00000070\r\n" + + "{\r\n" + + "\tReference = TRUE;\r\n" + + "};" + ); + } + + } + + #endregion + + } + +} \ No newline at end of file diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_ClassDeclaration.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_ClassDeclaration.cs new file mode 100644 index 00000000..f7c35e97 --- /dev/null +++ 
b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_ClassDeclaration.cs @@ -0,0 +1,148 @@ +using Kingsland.MofParser.CodeGen; +using Kingsland.MofParser.Lexing; +using Kingsland.MofParser.Parsing; +using Kingsland.ParseFx.Parsing; +using Kingsland.ParseFx.Text; +using NUnit.Framework; + +namespace Kingsland.MofParser.UnitTests.CodeGen +{ + + public static partial class RoundtripTests + { + + #region 7.5.2 Class declaration + + public static class ClassDeclarationTests + { + + [Test] + public static void EmptyClassDeclarationShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "class GOLF_Base\r\n" + + "{\r\n" + + "};" + ); + } + + [Test] + public static void ClassDeclarationWithSuperclassShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "class GOLF_Base : GOLF_Superclass\r\n" + + "{\r\n" + + "\tstring InstanceID;\r\n" + + "\tstring Caption = Null;\r\n" + + "};" + ); + } + + [Test] + public static void ClassDeclarationWithClassFeaturesShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "class GOLF_Base\r\n" + + "{\r\n" + + "\tstring InstanceID;\r\n" + + "\tstring Caption = Null;\r\n" + + "};" + ); + } + + [Test] + public static void ClassDeclarationsWithQualifierListShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "[Abstract, OCL{\"-- the key property cannot be NULL\", \"inv: InstanceId.size() = 10\"}]\r\n" + + "class GOLF_Base\r\n" + + "{\r\n" + + "\t[Description(\"an instance of a class that derives from the GOLF_Base class. \"), Key] string InstanceID;\r\n" + + "\t[Description(\"A short textual description (one- line string) of the\"), MaxLen(64)] string Caption = Null;\r\n" + + "};" + ); + } + + } + + public static class ClassFeatureTests + { + + [Test] + public static void ClassFeatureWithQualifiersShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "class Sponsor\r\n" + + "{\r\n" + + "\t[Description(\"Monthly salary in $US\")] string Name;\r\n" + + "};" + ); + } + + [Test] + public static void InvalidClassFeatureShouldThrow() + { + var sourceMof = + "class Sponsor\r\n" + + "{\r\n" + + "\t100\r\n" + + "};"; + var tokens = Lexer.Lex(SourceReader.From(sourceMof)); + var tokensMof = TokenMofGenerator.ConvertToMof(tokens); + var ex = Assert.Throws( + () => { + var astNodes = Parser.Parse(tokens); + } + ); + Assert.AreEqual( + "Unexpected token found at Position 19, Line Number 3, Column Number 2.\r\n" + + "Token Type: 'IntegerLiteralToken'\r\n" + + "Token Text: '100'", + ex.Message + ); + } + + [Test] + public static void ClassFeatureWithStructureDeclarationShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "class Sponsor\r\n" + + "{\r\n" + + "\tstructure Nested\r\n" + + "\t{\r\n" + + "\t};\r\n" + + "};" + ); + } + + [Test] + public static void ClassFeatureWithEnumerationDeclarationShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "class Sponsor\r\n" + + "{\r\n" + + "\tenumeration MonthsEnum : Integer\r\n" + + "\t{\r\n" + + "\t};\r\n" + + "};" + ); + } + + [Test] + public static void ClassFeatureWithPropertyDeclarationShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "class Sponsor\r\n" + + "{\r\n" + + "\tstring Name;\r\n" + + "};" + ); + } + + } + + #endregion + + } + +} \ No newline at end of file diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_CompilerDirective.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_CompilerDirective.cs new file mode 100644 index 00000000..508d8a2d --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_CompilerDirective.cs @@ -0,0 +1,36 @@ 
+using NUnit.Framework; + +namespace Kingsland.MofParser.UnitTests.CodeGen +{ + + public static partial class RoundtripTests + { + + #region 7.3 Compiler directives + + public static class CompilerDirectiveTests + { + + [Test] + public static void CompilerDirectiveShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "#pragma include (\"GlobalStructs/GOLF_Address.mof\")" + ); + } + + [Test] + public static void CompilerDirectiveWithMultipleSingleStringsShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "#pragma include (\"GlobalStructs\" \"/\" \"GOLF_Address.mof\")" + ); + } + + } + + #endregion + + } + +} \ No newline at end of file diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_ComplexTypeValue.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_ComplexTypeValue.cs new file mode 100644 index 00000000..aea19a3a --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_ComplexTypeValue.cs @@ -0,0 +1,99 @@ +using NUnit.Framework; + +namespace Kingsland.MofParser.UnitTests.CodeGen +{ + + public static partial class RoundtripTests + { + + #region 7.5.9 Complex type value + + public static class ComplexTypeValueTests + { + + [Test] + public static void ComplexTypeValueWithComplexValuePropertyShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "instance of GOLF_ClubMember\r\n" + + "{\r\n" + + "\tLastPaymentDate = $MyAliasIdentifier;\r\n" + + "};" + ); + } + + [Test] + public static void ComplexTypeValueWithComplexValueArrayPropertyShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "instance of GOLF_ClubMember\r\n" + + "{\r\n" + + "\tLastPaymentDate = {$MyAliasIdentifier};\r\n" + + "};" + ); + } + + } + + public static class ComplexValueTests + { + + [Test] + public static void ComplexValuePropertyShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "instance of GOLF_ClubMember\r\n" + + "{\r\n" + + "\tLastPaymentDate = $MyAliasIdentifier;\r\n" + + "};" + ); + } + + [Test] + public static void ComplexValuePropertyWithValueOfShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "instance of GOLF_ClubMember\r\n" + + "{\r\n" + + "\tLastPaymentDate = value of GOLF_Date\r\n" + + "\t{\r\n" + + "\t\tMonth = July;\r\n" + + "\t};\r\n" + + "};" + ); + } + + } + + public static class ComplexValueArrayTests + { + + [Test] + public static void ComplexValueArrayWithOneItemShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "instance of GOLF_ClubMember\r\n" + + "{\r\n" + + "\tLastPaymentDate = {$MyAliasIdentifier};\r\n" + + "};" + ); + } + + [Test] + public static void ComplexValueArrayWithMultipleItemsShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "instance of GOLF_ClubMember\r\n" + + "{\r\n" + + "\tLastPaymentDate = {$MyAliasIdentifier, $MyOtherAliasIdentifier};\r\n" + + "};" + ); + } + + } + + #endregion + + } + +} \ No newline at end of file diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_ComplexTypeValue2.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_ComplexTypeValue2.cs new file mode 100644 index 00000000..82f432a7 --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_ComplexTypeValue2.cs @@ -0,0 +1,158 @@ +using Kingsland.MofParser.CodeGen; +using Kingsland.MofParser.Lexing; +using Kingsland.MofParser.Parsing; +using Kingsland.ParseFx.Parsing; +using Kingsland.ParseFx.Text; +using NUnit.Framework; + +namespace Kingsland.MofParser.UnitTests.CodeGen +{ + + public static partial class RoundtripTests + { + + #region 7.6.2 Complex type value + + 
public static class InstanceValueDeclarationTests + { + + [Test] + public static void InstanceValueDeclarationWithNoPropertiesShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "instance of GOLF_ClubMember\r\n" + + "{\r\n" + + "};" + ); + } + + [Test] + public static void InstanceValueDeclarationWithChildPropertiesShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "instance of GOLF_ClubMember\r\n" + + "{\r\n" + + "\tFirstName = \"John\";\r\n" + + "\tLastName = \"Doe\";\r\n" + + "};" + ); + } + + [Test] + public static void InstanceValueDeclarationWithAliasShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "instance of GOLF_ClubMember as $MyAliasIdentifier\r\n" + + "{\r\n" + + "};" + ); + } + + } + + public static class StructureValueDeclarationTests + { + + [Test] + public static void StructureValueDeclarationWithNoPropertiesShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "value of GOLF_ClubMember as $MyAliasIdentifier\r\n" + + "{\r\n" + + "};" + ); + } + + [Test] + public static void StructureValueDeclarationWithChildPropertiesShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "value of GOLF_ClubMember as $MyAliasIdentifier\r\n" + + "{\r\n" + + "\tFirstName = \"John\";\r\n" + + "\tLastName = \"Doe\";\r\n" + + "};" + ); + } + + } + + //[Test] + //public static void InstanceValueDeclarationShouldRoundtrip() + //{ + // RoundtripTests.AssertRoundtrip( + // "instance of GOLF_ClubMember\r\n" + + // "{\r\n" + + // "\tCaption = \"Instance of John Doe\\\'s GOLF_ClubMember object\";\r\n" + + // "};" + // ); + //} + + //[Test] + //public static void ClassDeclarationsAstWithNumericPropertiesShouldRoundtrip() + //{ + // RoundtripTests.AssertRoundtrip( + // "instance of myType as $Alias00000070\r\n" + + // "{\r\n" + + // "\tMyBinaryValue = 0101010b;\r\n" + + // "\tMyOctalValue = 0444444;\r\n" + + // "\tMyHexValue = 0xABC123;\r\n" + + // "\tMyDecimalValue = 12345;\r\n" + + // "\tMyRealValue = 123.45;\r\n" + + // "};" + // ); + //} + + //[Test(Description = "https://github.com/mikeclayton/MofParser/issues/26"), + // Ignore("https://github.com/mikeclayton/MofParser/issues/26")] + //public static void InstanceValueDeclarationsWithInstanceValuePropertyShouldRoundtrip() + //{ + // RoundtripTests.AssertRoundtrip( + // "instance of GOLF_ClubMember\r\n" + + // "{\r\n" + + // "\tLastPaymentDate = instance of GOLF_Date\r\n" + + // "\t{\r\n" + + // "\tYear = 2011;\r\n" + + // "\tMonth = July;\r\n" + + // "\tDay = 31;\r\n" + + // "\t};\r\n" + + // "}"; + // ); + //} + + //[Test] + //public static void InstanceValueDeclarationWithStructureValueDeclarationPropertyShouldRoundtrip() + //{ + // RoundtripTests.AssertRoundtrip( + // "instance of GOLF_ClubMember\r\n" + + // "{\r\n" + + // "\tCaption = \"Instance of John Doe\\\'s GOLF_ClubMember object\";\r\n" + + // "\tMemberAddress = value of GOLF_Address\r\n" + + // "\t{\r\n" + + // "\t\tState = \"IL\";\r\n" + + // "\t\tCity = \"Oak Park\";\r\n" + + // "\t\tStreet = \"Oak Park Av.\";\r\n" + + // "\t\tStreetNo = \"1177\";\r\n" + + // "\t\tApartmentNo = \"3B\";\r\n" + + // "\t};\r\n" + + // "};"; + // ); + //} + + //[Test] + //public static void StructureValueDeclarationShouldRoundtrip() + //{ + // RoundtripTests.AssertRoundtrip( + // "value of GOLF_PhoneNumber as $JohnDoesPhoneNo\r\n" + + // "{\r\n" + + // "\tAreaCode = {\"9\", \"0\", \"7\"};\r\n" + + // "\tNumber = {\"7\", \"4\", \"7\", \"4\", \"8\", \"8\", \"4\"};\r\n" + + // "};"; + // ); + //} + + #endregion + + } + +} \ No newline at end of file diff --git 
a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_EnumTypeValue.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_EnumTypeValue.cs
new file mode 100644
index 00000000..c5e82ca9
--- /dev/null
+++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_EnumTypeValue.cs
@@ -0,0 +1,160 @@
+using Kingsland.MofParser.CodeGen;
+using Kingsland.MofParser.Lexing;
+using Kingsland.MofParser.Parsing;
+using Kingsland.ParseFx.Parsing;
+using Kingsland.ParseFx.Text;
+using NUnit.Framework;
+
+namespace Kingsland.MofParser.UnitTests.CodeGen
+{
+
+    public static partial class RoundtripTests
+    {
+
+        #region 7.6.3 Enum type value
+
+        public static class EnumTypeValueTests
+        {
+
+            [Test]
+            public static void EnumTypeValueWithEnumValueShouldRoundtrip()
+            {
+                RoundtripTests.AssertRoundtrip(
+                    "instance of GOLF_Date\r\n" +
+                    "{\r\n" +
+                    "\tYear = 2011;\r\n" +
+                    "\tMonth = July;\r\n" +
+                    "\tDay = 31;\r\n" +
+                    "};"
+                );
+            }
+
+            [Test]
+            public static void EnumTypeValueWithEnumValueArrayShouldRoundtrip()
+            {
+                RoundtripTests.AssertRoundtrip(
+                    "instance of GOLF_Date\r\n" +
+                    "{\r\n" +
+                    "\tYear = 2011;\r\n" +
+                    "\tMonth = {June};\r\n" +
+                    "\tDay = 31;\r\n" +
+                    "};"
+                );
+            }
+
+        }
+
+        public static class EnumValueTests
+        {
+
+            [Test]
+            public static void UnqualifiedEnumValueShouldRoundtrip()
+            {
+                RoundtripTests.AssertRoundtrip(
+                    "instance of GOLF_Date\r\n" +
+                    "{\r\n" +
+                    "\tYear = 2011;\r\n" +
+                    "\tMonth = July;\r\n" +
+                    "\tDay = 31;\r\n" +
+                    "};"
+                );
+            }
+
+            [Test]
+            public static void QualifiedEnumValueShouldRoundtrip()
+            {
+                RoundtripTests.AssertRoundtrip(
+                    "instance of GOLF_Date\r\n" +
+                    "{\r\n" +
+                    "\tYear = 2011;\r\n" +
+                    "\tMonth = MonthEnums.July;\r\n" +
+                    "\tDay = 31;\r\n" +
+                    "};"
+                );
+            }
+
+        }
+
+        public static class EnumValueArrayTests
+        {
+
+            [Test]
+            public static void EmptyEnumValueArrayShouldRoundtrip()
+            {
+                RoundtripTests.AssertRoundtrip(
+                    "instance of GOLF_Date\r\n" +
+                    "{\r\n" +
+                    "\tYear = 2011;\r\n" +
+                    "\tMonth = {June};\r\n" +
+                    "\tDay = 31;\r\n" +
+                    "};"
+                );
+            }
+
+            [Test]
+            public static void EnumValueArrayWithSingleEnumValueShouldRoundtrip()
+            {
+                RoundtripTests.AssertRoundtrip(
+                    "instance of GOLF_Date\r\n" +
+                    "{\r\n" +
+                    "\tYear = 2011;\r\n" +
+                    "\tMonth = {June};\r\n" +
+                    "\tDay = 31;\r\n" +
+                    "};"
+                );
+            }
+
+            [Test]
+            public static void EnumValueArrayWithMultipleEnumValuesShouldRoundtrip()
+            {
+                RoundtripTests.AssertRoundtrip(
+                    "instance of GOLF_Date\r\n" +
+                    "{\r\n" +
+                    "\tMonth = {January, February};\r\n" +
+                    "};"
+                );
+            }
+
+            [Test(Description = "https://github.com/mikeclayton/MofParser/issues/25")]
+            public static void EnumValueArrayWithQualifiedEnumValuesAndQuirksEnabledShouldRoundtrip()
+            {
+                RoundtripTests.AssertRoundtrip(
+                    "instance of GOLF_Date\r\n" +
+                    "{\r\n" +
+                    "\tMonth = {MonthEnums.July};\r\n" +
+                    "};",
+                    ParserQuirks.EnumValueArrayContainsEnumValuesNotEnumNames
+                );
+            }
+
+            [Test(Description = "https://github.com/mikeclayton/MofParser/issues/25")]
+            public static void EnumValueArrayWithQualifiedEnumValuesAndQuirksDisabledShouldThrow()
+            {
+                var sourceMof =
+                    "instance of GOLF_Date\r\n" +
+                    "{\r\n" +
+                    "\tMonth = {MonthEnums.July};\r\n" +
+                    "};";
+                var tokens = Lexer.Lex(SourceReader.From(sourceMof));
+                var tokensMof = TokenMofGenerator.ConvertToMof(tokens);
+                var ex = Assert.Throws(
+                    () =>
+                    {
+                        var astNodes = Parser.Parse(tokens);
+                    }
+                );
+                Assert.AreEqual(
+                    "Unexpected token found at Position 46, Line Number 3, Column Number 21.\r\n" +
+                    "Token Type: 'DotOperatorToken'\r\n" +
+                    "Token Text: '.'",
+                    ex.Message
+                );
+            }
+
+        }
+
+
#endregion + + } + +} \ No newline at end of file diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_EnumerationDeclaration.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_EnumerationDeclaration.cs new file mode 100644 index 00000000..2419f7bd --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_EnumerationDeclaration.cs @@ -0,0 +1,170 @@ +using Kingsland.MofParser.CodeGen; +using Kingsland.MofParser.Lexing; +using Kingsland.MofParser.Parsing; +using Kingsland.ParseFx.Parsing; +using Kingsland.ParseFx.Text; +using NUnit.Framework; + +namespace Kingsland.MofParser.UnitTests.CodeGen +{ + + public static partial class RoundtripTests + { + + #region 7.5.4 Enumeration declaration + + public static class EnumerationDeclarationTests + { + + [Test] + public static void EmptyIntegerEnumerationDeclarationShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "enumeration MonthsEnum : Integer\r\n" + + "{\r\n" + + "};" + ); + } + + [Test] + public static void EmptyStringEnumerationDeclarationShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "enumeration MonthsEnum : String\r\n" + + "{\r\n" + + "};" + ); + } + + [Test] + public static void EmptyInheritedEnumerationDeclarationShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "enumeration MonthsEnum : GOLF_MyEnum\r\n" + + "{\r\n" + + "};" + ); + } + + [Test] + public static void EnumerationDeclarationWithoutValuesShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "enumeration MonthsEnum : String\r\n" + + "{\r\n" + + "\tJanuary,\r\n" + + "\tFebruary,\r\n" + + "\tMarch,\r\n" + + "\tApril,\r\n" + + "\tMay,\r\n" + + "\tJune,\r\n" + + "\tJuly,\r\n" + + "\tAugust,\r\n" + + "\tSeptember,\r\n" + + "\tOctober,\r\n" + + "\tNovember,\r\n" + + "\tDecember\r\n" + + "};" + ); + } + + [Test(Description = "https://github.com/mikeclayton/MofParser/issues/52")] + public static void EnumerationDeclarationDeprecatedMof300IntegerBaseAndQuirksEnabledShouldThrow() + { + // this should throw because "uint32" is recognized as an integer type. + // as a result, "July" (a string) is not a valid value for an integer enumElement value + var sourceMof = + "enumeration MonthsEnum : uint32\r\n" + + "{\r\n" + + "\tJuly = \"July\"\r\n" + + "};"; + var tokens = Lexer.Lex(SourceReader.From(sourceMof)); + var tokensMof = TokenMofGenerator.ConvertToMof(tokens); + var ex = Assert.Throws( + () => + { + var astNodes = Parser.Parse( + tokens, + ParserQuirks.AllowDeprecatedMof300IntegerTypesAsEnumerationDeclarationsBase + ); + } + ); + Assert.AreEqual( + "Unexpected token found at Position 44, Line Number 3, Column Number 9.\r\n" + + "Token Type: 'StringLiteralToken'\r\n" + + "Token Text: '\"July\"'", + ex.Message + ); + } + + [Test(Description = "https://github.com/mikeclayton/MofParser/issues/52")] + public static void EnumerationDeclarationDeprecatedMof300IntegerBaseAndQuirksDisabledShouldRoundtrip() + { + // this should roundtrip because "uint32" is not recognized as an integer type, and + // so it's assumed to be a separate base enum like "enumeration uint32 { ... };". 
+ // as a result, there's no validation done on the datattype of the enum element and + // it will accept "July" as a valid value + RoundtripTests.AssertRoundtrip( + "enumeration MonthsEnum : uint32\r\n" + + "{\r\n" + + "\tJuly = \"July\"\r\n" + + "};" + ); + } + + } + + public static class EnumElementTests + { + + [Test] + public static void EnumElementWithQualifiersShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "enumeration MonthsEnum : integer\r\n" + + "{\r\n" + + "\t[Description(\"myDescription\")] January = 1\r\n" + + "};" + ); + } + + [Test(Description = "https://github.com/mikeclayton/MofParser/issues/41")] + public static void IntegerEnumElementShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "enumeration MonthsEnum : integer\r\n" + + "{\r\n" + + "\tJanuary = 1\r\n" + + "};" + ); + } + + [Test] + public static void StringEnumElementWithoutValueShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "enumeration GOLF_StatesEnum : string\r\n" + + "{\r\n" + + "\tAL\r\n" + + "};" + ); + } + + [Test] + public static void StringEnumElementWithValueShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "enumeration GOLF_StatesEnum : string\r\n" + + "{\r\n" + + "\tAL = \"Alabama\"\r\n" + + "};" + ); + } + + } + + #endregion + + } + +} \ No newline at end of file diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_IntegerValues.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_IntegerValues.cs new file mode 100644 index 00000000..24783019 --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_IntegerValues.cs @@ -0,0 +1,78 @@ +using NUnit.Framework; + +namespace Kingsland.MofParser.UnitTests.CodeGen +{ + + public static partial class RoundtripTests + { + + #region 7.6.1.1 Integer values + + public static class IntegerValueTests + { + + [Test] + public static void IntegerValueShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "instance of GOLF_ClubMember\r\n" + + "{\r\n" + + "\tCaption = 100;\r\n" + + "};" + ); + } + + [Test] + public static void PositiveIntegerValueShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "instance of GOLF_ClubMember\r\n" + + "{\r\n" + + "\tCaption = +100;\r\n" + + "};" + ); + } + + [Test] + public static void NegativeIntegerValueShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "instance of GOLF_ClubMember\r\n" + + "{\r\n" + + "\tCaption = -100;\r\n" + + "};" + ); + } + + [Test(Description = "https://github.com/mikeclayton/MofParser/issues/xx")] + public static void IntegerValuePropertiesInOtherBasesShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "instance of myType as $Alias00000070\r\n" + + "{\r\n" + + "\tMyBinaryValue1 = 0101010b;\r\n" + + "\tMyBinaryValue2 = +0101010b;\r\n" + + "\tMyBinaryValue3 = -0101010b;\r\n" + + "\tMyOctalValue1 = 000444444;\r\n" + + "\tMyOctalValue2 = +000444444;\r\n" + + "\tMyOctalValue3 = -000444444;\r\n" + + "\tMyHexValue1 = 0x00ABC123;\r\n" + + "\tMyHexValue2 = +0x00ABC123;\r\n" + + "\tMyHexValue3 = -0x00ABC123;\r\n" + + "\tMyDecimalValue1 = 12345;\r\n" + + "\tMyDecimalValue2 = +12345;\r\n" + + "\tMyDecimalValue3 = -12345;\r\n" + + "\tMyRealValue1 = 00123.45;\r\n" + + "\tMyRealValue2 = +00123.45;\r\n" + + "\tMyRealValue3 = -123.45;\r\n" + + "};" + ); + } + + } + + #endregion + + } + +} \ No newline at end of file diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_MethodDeclaration.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_MethodDeclaration.cs new file mode 100644 index 00000000..7d9f1346 
--- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_MethodDeclaration.cs @@ -0,0 +1,133 @@ +using NUnit.Framework; + +namespace Kingsland.MofParser.UnitTests.CodeGen +{ + + public static partial class RoundtripTests + { + + #region 7.5.6 Method declaration + + public static class MethodDeclarationTests + { + + [Test] + public static void MethodDeclarationWithNoParametersShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "class GOLF_Club\r\n" + + "{\r\n" + + "\tInteger GetMembersWithOutstandingFees();\r\n" + + "};" + ); + } + + [Test] + public static void MethodDeclarationWithParameterShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "class GOLF_Club\r\n" + + "{\r\n" + + "\tInteger GetMembersWithOutstandingFees(GOLF_ClubMember lateMembers);\r\n" + + "};" + ); + } + + [Test] + public static void MethodDeclarationWithArrayParameterShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "class GOLF_Club\r\n" + + "{\r\n" + + "\tInteger GetMembersWithOutstandingFees(GOLF_ClubMember lateMembers[]);\r\n" + + "};" + ); + } + + [Test] + public static void MethodDeclarationsWithRefParameterShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "class GOLF_Club\r\n" + + "{\r\n" + + "\tInteger GetMembersWithOutstandingFees(GOLF_ClubMember REF lateMembers);\r\n" + + "};" + ); + } + + [Test(Description = "https://github.com/mikeclayton/MofParser/issues/27")] + public static void ClassDeclarationsWithMethodDeclarationWithEnumParameterShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "class GOLF_Professional : GOLF_ClubMember\r\n" + + "{\r\n" + + "\tGOLF_ResultCodeEnum GetNumberOfProfessionals(Integer NoOfPros, GOLF_Club Club, ProfessionalStatusEnum Status = Professional);\r\n" + + "};" + ); + } + + [Test(Description = "https://github.com/mikeclayton/MofParser/issues/37")] + public static void MethodDeclarationsWithArrayReturnTypeShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "class GOLF_Club\r\n" + + "{\r\n" + + "\tInteger[] GetMembersWithOutstandingFees(GOLF_ClubMember lateMembers);\r\n" + + "};" + ); + } + + [Test(Description = "https://github.com/mikeclayton/MofParser/issues/38")] + public static void MethodDeclarationWithMultipleParametersShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "class GOLF_Professional : GOLF_ClubMember\r\n" + + "{\r\n" + + "\tGOLF_ResultCodeEnum GetNumberOfProfessionals(Integer NoOfPros, GOLF_Club Club, ProfessionalStatusEnum Status = Professional);\r\n" + + "};" + ); + } + + [Test(Description = "https://github.com/mikeclayton/MofParser/issues/28")] + public static void MethodDeclarationWithDeprecatedMof300IntegerReturnTypesAndQuirksDisabledShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "class Win32_SoftwareFeature : CIM_SoftwareFeature\r\n" + + "{\r\n" + + "\tuint8 ReinstallUint8(integer ReinstallMode = 1);\r\n" + + "\tuint16 ReinstallUint16(integer ReinstallMode = 1);\r\n" + + "\tuint32 ReinstallUint32(integer ReinstallMode = 1);\r\n" + + "\tuint64 ReinstallUint64(integer ReinstallMode = 1);\r\n" + + "\tsint8 ReinstallUint8(integer ReinstallMode = 1);\r\n" + + "\tsint16 ReinstallUint16(integer ReinstallMode = 1);\r\n" + + "\tsint32 ReinstallUint32(integer ReinstallMode = 1);\r\n" + + "\tsint64 ReinstallUint64(integer ReinstallMode = 1);\r\n" + + "};" + ); + } + + [Test(Description = "https://github.com/mikeclayton/MofParser/issues/28")] + public static void MethodDeclarationWithDeprecatedMof300IntegerParameterTypesShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "class 
Win32_SoftwareFeature : CIM_SoftwareFeature\r\n" + + "{\r\n" + + "\tinteger ReinstallUint8(uint8 ReinstallMode = 1);\r\n" + + "\tinteger ReinstallUint16(uint16 ReinstallMode = 1);\r\n" + + "\tinteger ReinstallUint32(uint32 ReinstallMode = 1);\r\n" + + "\tinteger ReinstallUint64(uint64 ReinstallMode = 1);\r\n" + + "\tinteger ReinstallUint8(sint8 ReinstallMode = 1);\r\n" + + "\tinteger ReinstallUint16(sint16 ReinstallMode = 1);\r\n" + + "\tinteger ReinstallUint32(sint32 ReinstallMode = 1);\r\n" + + "\tinteger ReinstallUint64(sint64 ReinstallMode = 1);\r\n" + + "};" + ); + } + + } + + #endregion + + } + +} \ No newline at end of file diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_PrimitiveTypeValue.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_PrimitiveTypeValue.cs new file mode 100644 index 00000000..b1a4573b --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_PrimitiveTypeValue.cs @@ -0,0 +1,102 @@ +using NUnit.Framework; + +namespace Kingsland.MofParser.UnitTests.CodeGen +{ + + public static partial class RoundtripTests + { + + #region 7.6.1 Primitive type value + + public static class LiteralValueArrayTests + { + + [Test] + public static void LiteralValueArrayWithOneItemShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "instance of GOLF_ClubMember\r\n" + + "{\r\n" + + "\tLastPaymentDate = {1};\r\n" + + "};" + ); + } + + [Test] + public static void LiteralValueArrayWithMultipleItemsShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "instance of GOLF_ClubMember\r\n" + + "{\r\n" + + "\tLastPaymentDate = {1, 2};\r\n" + + "};" + ); + } + + } + + public static class LiteralValueTests + { + + [Test] + public static void IntegerLiteralValueShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "instance of GOLF_ClubMember\r\n" + + "{\r\n" + + "\tLastPaymentDate = 1;\r\n" + + "};" + ); + } + + [Test] + public static void RealLiteralValueShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "instance of GOLF_ClubMember\r\n" + + "{\r\n" + + "\tLastPaymentDate = 0.5;\r\n" + + "};" + ); + } + + [Test] + public static void BooleanLiteralValueShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "instance of GOLF_ClubMember\r\n" + + "{\r\n" + + "\tLastPaymentDate = true;\r\n" + + "};" + ); + } + + [Test] + public static void NullLiteralValueShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "instance of GOLF_ClubMember\r\n" + + "{\r\n" + + "\tLastPaymentDate = null;\r\n" + + "};" + ); + } + + [Test] + public static void StringLiteralValueShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "instance of GOLF_ClubMember\r\n" + + "{\r\n" + + "\tLastPaymentDate = \"aaa\";\r\n" + + "};" + ); + } + + } + + #endregion + + } + +} \ No newline at end of file diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_PropertyDeclaration.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_PropertyDeclaration.cs new file mode 100644 index 00000000..9f6a79bc --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_PropertyDeclaration.cs @@ -0,0 +1,71 @@ +using NUnit.Framework; + +namespace Kingsland.MofParser.UnitTests.CodeGen +{ + + public static partial class RoundtripTests + { + + #region 7.5.5 Property declaration + + public static class PropertyDeclarationTests + { + + [Test] + public static void PropertyDeclarationShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "class GOLF_Base\r\n" + + "{\r\n" + + "\tInteger Severity;\r\n" + + "};" + ); + } + + [Test] + 
public static void PropertyDeclarationWithArrayTypeShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "class GOLF_Base\r\n" + + "{\r\n" + + "\tInteger Severity[];\r\n" + + "};" + ); + } + + [Test] + public static void PropertyDeclarationWithDefaultValueShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "class GOLF_Base\r\n" + + "{\r\n" + + "\tInteger Severity = 0;\r\n" + + "};" + ); + } + + [Test(Description = "https://github.com/mikeclayton/MofParser/issues/28")] + public static void PropertyDeclarationWithDeprecatedMof300IntegerReturnTypesAndQuirksDisabledShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "class GOLF_Base\r\n" + + "{\r\n" + + "\tuint8 SeverityUint8;\r\n" + + "\tuint16 SeverityUint16;\r\n" + + "\tuint32 SeverityUint32;\r\n" + + "\tuint64 SeverityUint64;\r\n" + + "\tsint8 SeveritySint8;\r\n" + + "\tsint16 SeveritySint16;\r\n" + + "\tsint32 SeveritySint32;\r\n" + + "\tsint64 SeveritySint64;\r\n" + + "};" + ); + } + + } + + #endregion + + } + +} \ No newline at end of file diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_Qualifier.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_Qualifier.cs new file mode 100644 index 00000000..e2f84c23 --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_Qualifier.cs @@ -0,0 +1,31 @@ +using NUnit.Framework; + +namespace Kingsland.MofParser.UnitTests.CodeGen +{ + + public static partial class RoundtripTests + { + + #region 7.4 Qualifiers + + public static class QualifierTests + { + + [Test] + public static void QualifierShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "[Description(\"Instances of this class represent golf clubs. A golf club is \" \"an organization that provides member services to golf players \" \"both amateur and professional.\")]\r\n" + + "class GOLF_Club : GOLF_Base\r\n" + + "{\r\n" + + "};" + ); + } + + } + + #endregion + + } + +} \ No newline at end of file diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_QualifierList.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_QualifierList.cs new file mode 100644 index 00000000..5598216b --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_QualifierList.cs @@ -0,0 +1,70 @@ +using Kingsland.MofParser.CodeGen; +using Kingsland.MofParser.Lexing; +using Kingsland.MofParser.Parsing; +using Kingsland.ParseFx.Parsing; +using Kingsland.ParseFx.Text; +using NUnit.Framework; + +namespace Kingsland.MofParser.UnitTests.CodeGen +{ + + public static partial class RoundtripTests + { + + #region 7.4.1 QualifierList + + public static class QualifierListTests + { + + } + + public static class QualifierValueTests + { + + [Test] + public static void QualifierWithMofV2FlavorsAndQuirksEnabledShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "[Locale(1033): ToInstance, UUID(\"{BE46D060-7A7C-11d2-BC85-00104B2CF71C}\"): ToInstance]\r\n" + + "class Win32_PrivilegesStatus : __ExtendedStatus\r\n" + + "{\r\n" + + "\t[read: ToSubClass, MappingStrings{\"Win32API|AccessControl|Windows NT Privileges\"}: ToSubClass] string PrivilegesNotHeld[];\r\n" + + "\t[read: ToSubClass, MappingStrings{\"Win32API|AccessControl|Windows NT Privileges\"}: ToSubClass] string PrivilegesRequired[];\r\n" + + "};", + ParserQuirks.AllowMofV2Qualifiers + ); + } + + [Test] + public static void QualifierWithMofV2FlavorsAndQuirksDisabledShouldThrow() + { + var sourceMof = + "[Locale(1033): ToInstance, UUID(\"{BE46D060-7A7C-11d2-BC85-00104B2CF71C}\"): ToInstance]\r\n" + + "class 
Win32_PrivilegesStatus : __ExtendedStatus\r\n" + + "{\r\n" + + "\t[read: ToSubClass, MappingStrings{\"Win32API|AccessControl|Windows NT Privileges\"}: ToSubClass] string PrivilegesNotHeld[];\r\n" + + "\t[read: ToSubClass, MappingStrings{\"Win32API|AccessControl|Windows NT Privileges\"}: ToSubClass] string PrivilegesRequired[];\r\n" + + "};"; + var tokens = Lexer.Lex(SourceReader.From(sourceMof)); + var tokensMof = TokenMofGenerator.ConvertToMof(tokens); + var ex = Assert.Throws( + () => + { + var astNodes = Parser.Parse(tokens); + } + ); + Assert.AreEqual( + "Unexpected token found at Position 13, Line Number 1, Column Number 14.\r\n" + + "Token Type: 'ColonToken'\r\n" + + "Token Text: ':'", + ex.Message + ); + } + + } + + #endregion + + } + +} \ No newline at end of file diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_RealValue.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_RealValue.cs new file mode 100644 index 00000000..badb02e0 --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_RealValue.cs @@ -0,0 +1,86 @@ +using NUnit.Framework; + +namespace Kingsland.MofParser.UnitTests.CodeGen +{ + + public static partial class RoundtripTests + { + + #region 7.6.1.1 Real values + + public static class RealValueTests + { + + [Test] + public static void RealValueShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "instance of GOLF_ClubMember\r\n" + + "{\r\n" + + "\tCaption = 0.5;\r\n" + + "};" + ); + } + + [Test(Description = "https://github.com/mikeclayton/MofParser/issues/xx")] + public static void PositiveRealValueShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "instance of GOLF_ClubMember\r\n" + + "{\r\n" + + "\tCaption = +0.5;\r\n" + + "};" + ); + } + + [Test] + public static void NegativeRealValueShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "instance of GOLF_ClubMember\r\n" + + "{\r\n" + + "\tCaption = -0.5;\r\n" + + "};" + ); + } + + [Test(Description = "https://github.com/mikeclayton/MofParser/issues/xx")] + public static void RealValueWithNoFractionShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "instance of GOLF_ClubMember\r\n" + + "{\r\n" + + "\tCaption = 5.0;\r\n" + + "};" + ); + } + + [Test(Description = "https://github.com/mikeclayton/MofParser/issues/xx")] + public static void RealValueWithTrailingZerosShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "instance of GOLF_ClubMember\r\n" + + "{\r\n" + + "\tCaption = 0.50;\r\n" + + "};" + ); + } + + [Test(Description = "https://github.com/mikeclayton/MofParser/issues/xx")] + public static void RealValueWithNoIntegerPartShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "instance of GOLF_ClubMember\r\n" + + "{\r\n" + + "\tCaption = .5;\r\n" + + "};" + ); + } + + } + + #endregion + + } + +} \ No newline at end of file diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_StringValue.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_StringValue.cs new file mode 100644 index 00000000..0be64ba2 --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_StringValue.cs @@ -0,0 +1,58 @@ +using Kingsland.MofParser.CodeGen; +using Kingsland.MofParser.Lexing; +using Kingsland.MofParser.Parsing; +using Kingsland.ParseFx.Parsing; +using Kingsland.ParseFx.Text; +using NUnit.Framework; + +namespace Kingsland.MofParser.UnitTests.CodeGen +{ + + public static partial class RoundtripTests + { + + #region 7.6.1.3 String values + + public static class StringValueTests + { + + [Test] + public static 
void SingleStringValueShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "instance of GOLF_ClubMember\r\n" + + "{\r\n" + + "\tCaption = \"Instance of John Doe\";\r\n" + + "};" + ); + } + + [Test] + public static void MultistringValueShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "instance of GOLF_ClubMember\r\n" + + "{\r\n" + + "\tCaption = \"Instance\" \"of\" \"John\" \"Doe\";\r\n" + + "};" + ); + } + + [Test(Description = "https://github.com/mikeclayton/MofParser/issues/20")] + public static void StringValueWithSingleQuoteShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "instance of GOLF_ClubMember\r\n" + + "{\r\n" + + "\tCaption = \"Instance of John Doe\\\'s GOLF_ClubMember object\";\r\n" + + "};" + ); + } + + } + + #endregion + + } + +} \ No newline at end of file diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_StructureDeclaration.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_StructureDeclaration.cs new file mode 100644 index 00000000..64e75153 --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_StructureDeclaration.cs @@ -0,0 +1,136 @@ +using Kingsland.MofParser.CodeGen; +using Kingsland.MofParser.Lexing; +using Kingsland.MofParser.Parsing; +using Kingsland.ParseFx.Parsing; +using Kingsland.ParseFx.Text; +using NUnit.Framework; + +namespace Kingsland.MofParser.UnitTests.CodeGen +{ + + public static partial class RoundtripTests + { + + #region 7.5.1 Structure declaration + + public static class StructureDeclarationTests + { + + [Test] + public static void EmptyStructureDeclarationShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "structure Sponsor\r\n" + + "{\r\n" + + "};" + ); + } + + [Test] + public static void StructureDeclarationWithSuperstructureShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "structure Sponsor : GOLF_MySupestructure\r\n" + + "{\r\n" + + "};" + ); + } + + [Test] + public static void StructureDeclarationWithStructureFeaturesShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "structure Sponsor\r\n" + + "{\r\n" + + "\tstring Name;\r\n" + + "\tGOLF_Date ContractSignedDate;\r\n" + + "\treal32 ContractAmount;\r\n" + + "};" + ); + } + + } + + public static class StructureFeatureTests + { + + [Test] + public static void StructureFeatureWithQualifierShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "structure Sponsor\r\n" + + "{\r\n" + + "\t[Description(\"Monthly salary in $US\")] string Name;\r\n" + + "};" + ); + } + + [Test] + public static void InvalidStructureFeatureShouldThrow() + { + + var sourceMof = + "structure Sponsor\r\n" + + "{\r\n" + + "\t100\r\n" + + "};"; + var tokens = Lexer.Lex(SourceReader.From(sourceMof)); + var tokensMof = TokenMofGenerator.ConvertToMof(tokens); + Assert.AreEqual(sourceMof, tokensMof); + var ex = Assert.Throws( + () => { + var astNodes = Parser.Parse(tokens); + } + ); + Assert.AreEqual( + "Unexpected token found at Position 23, Line Number 3, Column Number 2.\r\n" + + "Token Type: 'IntegerLiteralToken'\r\n" + + "Token Text: '100'", + ex.Message + ); + } + + [Test] + public static void StructureFeatureWithStructureDeclarationShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "structure Sponsor\r\n" + + "{\r\n" + + "\tstructure Nested\r\n" + + "\t{\r\n" + + "\t};\r\n" + + "};" + ); + } + + [Test] + public static void StructureFeatureWithEnumerationDeclarationShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "structure Sponsor\r\n" + + "{\r\n" + + "\tenumeration MonthsEnum : Integer\r\n" + + "\t{\r\n" + 
+ "\t};\r\n" + + "};" + ); + } + + [Test] + public static void StructureFeatureWithPropertyDeclarationShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "structure Sponsor\r\n" + + "{\r\n" + + "\tstring Name;\r\n" + + "};" + ); + } + + } + + #endregion + + } + +} \ No newline at end of file diff --git a/src/Kingsland.MofParser.UnitTests/Kingsland.MofParser.UnitTests.csproj b/src/Kingsland.MofParser.UnitTests/Kingsland.MofParser.UnitTests.csproj index 41e005d3..e26f70b4 100644 --- a/src/Kingsland.MofParser.UnitTests/Kingsland.MofParser.UnitTests.csproj +++ b/src/Kingsland.MofParser.UnitTests/Kingsland.MofParser.UnitTests.csproj @@ -35,7 +35,24 @@ - + + + + + + + + + + + + + + + + + + @@ -49,6 +66,7 @@ + diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_RealLiteralToken.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_RealLiteralToken.cs index 9662e678..2c3f835d 100644 --- a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_RealLiteralToken.cs +++ b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_RealLiteralToken.cs @@ -1,5 +1,4 @@ -using Kingsland.MofParser.Lexing; -using Kingsland.MofParser.Tokens; +using Kingsland.MofParser.Tokens; using Kingsland.ParseFx.Syntax; using Kingsland.ParseFx.Text; using NUnit.Framework; diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_StringLiteralToken.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_StringLiteralToken.cs index f51eccaf..e4a0a55d 100644 --- a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_StringLiteralToken.cs +++ b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_StringLiteralToken.cs @@ -1,5 +1,4 @@ -using Kingsland.MofParser.Lexing; -using Kingsland.MofParser.Tokens; +using Kingsland.MofParser.Tokens; using Kingsland.ParseFx.Syntax; using Kingsland.ParseFx.Text; using NUnit.Framework; diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_SymbolToken.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_SymbolToken.cs index 39005b9b..908c22dd 100644 --- a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_SymbolToken.cs +++ b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_SymbolToken.cs @@ -1,5 +1,4 @@ -using Kingsland.MofParser.Lexing; -using Kingsland.MofParser.Tokens; +using Kingsland.MofParser.Tokens; using Kingsland.ParseFx.Syntax; using Kingsland.ParseFx.Text; using NUnit.Framework; diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_WhitespaceToken.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_WhitespaceToken.cs index 2ce37603..a843e08c 100644 --- a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_WhitespaceToken.cs +++ b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_WhitespaceToken.cs @@ -1,5 +1,4 @@ -using Kingsland.MofParser.Lexing; -using Kingsland.MofParser.Tokens; +using Kingsland.MofParser.Tokens; using Kingsland.ParseFx.Syntax; using Kingsland.ParseFx.Text; using NUnit.Framework; diff --git a/src/Kingsland.MofParser.UnitTests/Parsing/ParserTests.cs b/src/Kingsland.MofParser.UnitTests/Parsing/ParserTests.cs index 602c1e6f..7a275251 100644 --- a/src/Kingsland.MofParser.UnitTests/Parsing/ParserTests.cs +++ b/src/Kingsland.MofParser.UnitTests/Parsing/ParserTests.cs @@ -1,730 +1,17 @@ -using Kingsland.MofParser.Ast; -using Kingsland.MofParser.Lexing; -using Kingsland.MofParser.Model; +using Kingsland.MofParser.Lexing; using Kingsland.MofParser.Parsing; -using Kingsland.MofParser.Tokens; using Kingsland.MofParser.UnitTests.Helpers; -using Kingsland.MofParser.UnitTests.Model; using Kingsland.ParseFx.Text; using 
NUnit.Framework; -using System; using System.Collections.Generic; -using System.Collections.ObjectModel; using System.IO; namespace Kingsland.MofParser.UnitTests.Parsing { - public static class ParserTests + public static partial class ParserTests { - #region 7.5.9 Complex type value - - public static class PropertyValueTests - { - - [Test] - public static void ParsePropetyValueWithLiteralString() - { - var tokens = Lexer.Lex( - SourceReader.From( - "instance of myType as $Alias0000006E\r\n" + - "{\r\n" + - " ServerURL = \"https://URL\";\r\n" + - "};" - ) - ); - var actualAst = Parser.Parse(tokens); - var expectedAst = new MofSpecificationAst( - new ReadOnlyCollection( - new List - { - new InstanceValueDeclarationAst( - new IdentifierToken( - new SourceExtent( - new SourcePosition(0, 1, 1), - new SourcePosition(7, 1, 8), - "instance" - ), - "instance" - ), - new IdentifierToken( - new SourceExtent( - new SourcePosition(9, 1, 10), - new SourcePosition(10, 1, 11), - "of" - ), - "of" - ), - new IdentifierToken( - new SourceExtent( - new SourcePosition(12, 1, 13), - new SourcePosition(17, 1, 18), - "myType" - ), - "myType" - ), - new IdentifierToken( - new SourceExtent( - new SourcePosition(19, 1, 20), - new SourcePosition(20, 1, 21), - "as" - ), - "as" - ), - new AliasIdentifierToken( - new SourceExtent( - new SourcePosition(22, 1, 23), - new SourcePosition(35, 1, 36), - "$Alias0000006E" - ), - "Alias0000006E" - ), - new PropertyValueListAst( - new ReadOnlyDictionary( - new Dictionary { - { "ServerURL", new StringValueAst.Builder { - StringLiteralValues = new List { - new StringLiteralToken( - new SourceExtent( - new SourcePosition(57, 3, 17), - new SourcePosition(69, 3, 29), - "\"https://URL\"" - ), - "https://URL" - ) - }, - Value = "https://URL" - }.Build() } - } - ) - ), - new StatementEndToken( - new SourceExtent( - new SourcePosition(74, 4, 2), - new SourcePosition(74, 4, 2), - ";" - ) - ) - ) - } - ) - ); - var actualJson = TestUtils.ConvertToJson(actualAst); - var expectedJson = TestUtils.ConvertToJson(expectedAst); - Assert.AreEqual(expectedJson, actualJson); - var actualModule = ModelConverter.ConvertMofSpecificationAst(actualAst); - var expectedModule = new Module.Builder - { - Instances = new List { - new Instance.Builder - { - TypeName = "myType", - Alias = "Alias0000006E", - Properties = new List { - new Property("ServerURL", "https://URL") - } - }.Build() - } - }.Build(); - ModelAssert.AreEqual(expectedModule, actualModule); - } - - [Test] - public static void ParsePropetyValueWithAliasIdentifier() - { - var tokens = Lexer.Lex( - SourceReader.From( - "instance of myType as $Alias00000070\r\n" + - "{\r\n" + - " Reference = $Alias0000006E;\r\n" + - "};" - ) - ); - var actualAst = Parser.Parse(tokens); - var expectedAst = new MofSpecificationAst( - new ReadOnlyCollection( - new List - { - new InstanceValueDeclarationAst( - new IdentifierToken( - new SourceExtent( - new SourcePosition(0, 1, 1), - new SourcePosition(7, 1, 8), - "instance" - ), - "instance" - ), - new IdentifierToken( - new SourceExtent( - new SourcePosition(9, 1, 10), - new SourcePosition(10, 1, 11), - "of" - ), - "of" - ), - new IdentifierToken( - new SourceExtent( - new SourcePosition(12, 1, 13), - new SourcePosition(17, 1, 18), - "myType" - ), - "myType" - ), - new IdentifierToken( - new SourceExtent( - new SourcePosition(19, 1, 20), - new SourcePosition(20, 1, 21), - "as" - ), - "as" - ), - new AliasIdentifierToken( - new SourceExtent( - new SourcePosition(22, 1, 23), - new SourcePosition(35, 1, 36), - 
"$Alias00000070" - ), - "Alias00000070" - ), - new PropertyValueListAst( - new ReadOnlyDictionary( - new Dictionary { - { "Reference", new ComplexValueAst( - new AliasIdentifierToken( - new SourceExtent( - new SourcePosition(57, 3, 17), - new SourcePosition(70, 3, 30), - "$Alias0000006E" - ), - "Alias0000006E" - ) - )} - } - ) - ), - new StatementEndToken( - new SourceExtent( - new SourcePosition(75, 4, 2), - new SourcePosition(75, 4, 2), - ";" - ) - ) - ) - } - ) - ); - var actualJson = TestUtils.ConvertToJson(actualAst); - var expectedJson = TestUtils.ConvertToJson(expectedAst); - Assert.AreEqual(expectedJson, actualJson); - var actualModule = ModelConverter.ConvertMofSpecificationAst(actualAst); - var expectedModule = new Module.Builder - { - Instances = new List { - new Instance.Builder - { - TypeName = "myType", - Alias = "Alias00000070", - Properties = new List { - new Property("Reference", "Alias0000006E") - } - }.Build() - } - }.Build(); - ModelAssert.AreEqual(expectedModule, actualModule); - } - - [Test] - public static void ParsePropetyValueWithEmptyArray() - { - var tokens = Lexer.Lex( - SourceReader.From( - "instance of myType as $Alias00000070\r\n" + - "{\r\n" + - " Reference = {};\r\n" + - "};" - ) - ); - var actualAst = Parser.Parse(tokens); - var expectedAst = new MofSpecificationAst( - new ReadOnlyCollection( - new List - { - new InstanceValueDeclarationAst( - new IdentifierToken( - new SourceExtent( - new SourcePosition(0, 1, 1), - new SourcePosition(7, 1, 8), - "instance" - ), - "instance" - ), - new IdentifierToken( - new SourceExtent( - new SourcePosition(9, 1, 10), - new SourcePosition(10, 1, 11), - "of" - ), - "of" - ), - new IdentifierToken( - new SourceExtent( - new SourcePosition(12, 1, 13), - new SourcePosition(17, 1, 18), - "myType" - ), - "myType" - ), - new IdentifierToken( - new SourceExtent( - new SourcePosition(19, 1, 20), - new SourcePosition(20, 1, 21), - "as" - ), - "as" - ), - new AliasIdentifierToken( - new SourceExtent( - new SourcePosition(22, 1, 23), - new SourcePosition(35, 1, 36), - "$Alias00000070" - ), - "Alias00000070" - ), - new PropertyValueListAst( - new ReadOnlyDictionary( - new Dictionary { - { "Reference", new LiteralValueArrayAst(null) } - } - ) - ), - new StatementEndToken( - new SourceExtent( - new SourcePosition(63, 4, 2), - new SourcePosition(63, 4, 2), - ";" - ) - ) - ) - } - ) - ); - var actualJson = TestUtils.ConvertToJson(actualAst); - var expectedJson = TestUtils.ConvertToJson(expectedAst); - Assert.AreEqual(expectedJson, actualJson); - var actualModule = ModelConverter.ConvertMofSpecificationAst(actualAst); - var expectedModule = new Module.Builder - { - Instances = new List { - new Instance.Builder - { - TypeName = "myType", - Alias = "Alias00000070", - Properties = new List { - new Property("Reference", new List()) - } - }.Build() - } - }.Build(); - ModelAssert.AreEqual(expectedModule, actualModule); - } - - [Test] - public static void ParsePropetyValueArrayWithAliasIdentifier() - { - var tokens = Lexer.Lex( - SourceReader.From( - "instance of myType as $Alias00000070\r\n" + - "{\r\n" + - " Reference = {$Alias0000006E};\r\n" + - "};" - ) - ); - var actualAst = Parser.Parse(tokens); - var expectedAst = new MofSpecificationAst( - new ReadOnlyCollection( - new List - { - new InstanceValueDeclarationAst( - new IdentifierToken( - new SourceExtent( - new SourcePosition(0, 1, 1), - new SourcePosition(7, 1, 8), - "instance" - ), - "instance" - ), - new IdentifierToken( - new SourceExtent( - new SourcePosition(9, 1, 10), - new 
SourcePosition(10, 1, 11), - "of" - ), - "of" - ), - new IdentifierToken( - new SourceExtent( - new SourcePosition(12, 1, 13), - new SourcePosition(17, 1, 18), - "myType" - ), - "myType" - ), - new IdentifierToken( - new SourceExtent( - new SourcePosition(19, 1, 20), - new SourcePosition(20, 1, 21), - "as" - ), - "as" - ), - new AliasIdentifierToken( - new SourceExtent( - new SourcePosition(22, 1, 23), - new SourcePosition(35, 1, 36), - "$Alias00000070" - ), - "Alias00000070" - ), - new PropertyValueListAst( - new ReadOnlyDictionary( - new Dictionary { - { "Reference", new ComplexValueArrayAst( - new ReadOnlyCollection( - new List { - new ComplexValueAst( - new AliasIdentifierToken( - new SourceExtent( - new SourcePosition(58, 3, 18), - new SourcePosition(71, 3, 31), - "$Alias0000006E" - ), - "Alias0000006E" - ) - ) - } - ) - )} - } - ) - ), - new StatementEndToken( - new SourceExtent( - new SourcePosition(77, 4, 2), - new SourcePosition(77, 4, 2), - ";" - ) - ) - ) - } - ) - ); - var actualJson = TestUtils.ConvertToJson(actualAst); - var expectedJson = TestUtils.ConvertToJson(expectedAst); - Assert.AreEqual(expectedJson, actualJson); - var actualModule = ModelConverter.ConvertMofSpecificationAst(actualAst); - var expectedModule = new Module.Builder - { - Instances = new List { - new Instance.Builder - { - TypeName = "myType", - Alias = "Alias00000070", - Properties = new List { - new Property("Reference", new List { - "Alias0000006E" - }) - } - }.Build() - } - }.Build(); - ModelAssert.AreEqual(expectedModule, actualModule); - } - - [Test] - public static void ParsePropetyValueArrayWithLiteralStrings() - { - var tokens = Lexer.Lex( - SourceReader.From( - "instance of myType as $Alias00000070\r\n" + - "{\r\n" + - " ServerURLs = { \"https://URL1\", \"https://URL2\" };\r\n" + - "};" - ) - ); - var actualAst = Parser.Parse(tokens); - var expectedAst = new MofSpecificationAst( - new ReadOnlyCollection( - new List - { - new InstanceValueDeclarationAst( - new IdentifierToken( - new SourceExtent( - new SourcePosition(0, 1, 1), - new SourcePosition(7, 1, 8), - "instance" - ), - "instance" - ), - new IdentifierToken( - new SourceExtent( - new SourcePosition(9, 1, 10), - new SourcePosition(10, 1, 11), - "of" - ), - "of" - ), - new IdentifierToken( - new SourceExtent( - new SourcePosition(12, 1, 13), - new SourcePosition(17, 1, 18), - "myType" - ), - "myType" - ), - new IdentifierToken( - new SourceExtent( - new SourcePosition(19, 1, 20), - new SourcePosition(20, 1, 21), - "as" - ), - "as" - ), - new AliasIdentifierToken( - new SourceExtent( - new SourcePosition(22, 1, 23), - new SourcePosition(35, 1, 36), - "$Alias00000070" - ), - "Alias00000070" - ), - new PropertyValueListAst( - new ReadOnlyDictionary( - new Dictionary { - { "ServerURLs", new LiteralValueArrayAst( - new ReadOnlyCollection( - new List { - new StringValueAst.Builder { - StringLiteralValues = new List { - new StringLiteralToken( - new SourceExtent( - new SourcePosition(60, 3, 20), - new SourcePosition(73, 3, 33), - "\"https://URL1\"" - ), - "https://URL1" - ) - }, - Value = "https://URL1" - }.Build(), - new StringValueAst.Builder { - StringLiteralValues = new List { - new StringLiteralToken( - new SourceExtent( - new SourcePosition(76, 3, 36), - new SourcePosition(89, 3, 49), - "\"https://URL2\"" - ), - "https://URL2" - ) - }, - Value = "https://URL2" - }.Build() - } - ) - )} - } - ) - ), - new StatementEndToken( - new SourceExtent( - new SourcePosition(96, 4, 2), - new SourcePosition(96, 4, 2), - ";" - ) - ) - ) - } - ) - ); - var 
actualJson = TestUtils.ConvertToJson(actualAst); - var expectedJson = TestUtils.ConvertToJson(expectedAst); - Assert.AreEqual(expectedJson, actualJson); - var actualModule = ModelConverter.ConvertMofSpecificationAst(actualAst); - var expectedModule = new Module.Builder - { - Instances = new List { - new Instance.Builder - { - TypeName = "myType", - Alias = "Alias00000070", - Properties = new List { - new Property("ServerURLs", new List { - "https://URL1", "https://URL2" - }) - } - }.Build() - } - }.Build(); - ModelAssert.AreEqual(expectedModule, actualModule); - } - - [Test] - public static void ParsePropetyValueArrayWithNumericLiteralValues() - { - var tokens = Lexer.Lex( - SourceReader.From( - "instance of myType as $Alias00000070\r\n" + - "{\r\n" + - " MyBinaryValue = 0101010b;\r\n" + - " MyOctalValue = 0444444;\r\n" + - " MyHexValue = 0xABC123;\r\n" + - " MyDecimalValue = 12345;\r\n" + - " MyRealValue = 123.45;\r\n" + - "};" - ) - ); - var actualAst = Parser.Parse(tokens); - var expectedAst = new MofSpecificationAst( - new ReadOnlyCollection( - new List - { - new InstanceValueDeclarationAst( - new IdentifierToken( - new SourceExtent( - new SourcePosition(0, 1, 1), - new SourcePosition(7, 1, 8), - "instance" - ), - "instance" - ), - new IdentifierToken( - new SourceExtent( - new SourcePosition(9, 1, 10), - new SourcePosition(10, 1, 11), - "of" - ), - "of" - ), - new IdentifierToken( - new SourceExtent( - new SourcePosition(12, 1, 13), - new SourcePosition(17, 1, 18), - "myType" - ), - "myType" - ), - new IdentifierToken( - new SourceExtent( - new SourcePosition(19, 1, 20), - new SourcePosition(20, 1, 21), - "as" - ), - "as" - ), - new AliasIdentifierToken( - new SourceExtent( - new SourcePosition(22, 1, 23), - new SourcePosition(35, 1, 36), - "$Alias00000070" - ), - "Alias00000070" - ), - new PropertyValueListAst( - new ReadOnlyDictionary( - new Dictionary { - { "MyBinaryValue", new IntegerValueAst( - new IntegerLiteralToken( - new SourceExtent( - new SourcePosition(61, 3, 21), - new SourcePosition(68, 3, 28), - "0101010b" - ), - IntegerKind.BinaryValue, 0b101010 - ) - )}, - { "MyOctalValue", new IntegerValueAst( - new IntegerLiteralToken( - new SourceExtent( - new SourcePosition(91, 4, 20), - new SourcePosition(97, 4, 26), - "0444444" - ), - IntegerKind.OctalValue, Convert.ToInt32("444444", 8) - ) - )}, - { "MyHexValue", new IntegerValueAst( - new IntegerLiteralToken( - new SourceExtent( - new SourcePosition(118, 5, 18), - new SourcePosition(125, 5, 25), - "0xABC123" - ), - IntegerKind.HexValue, 0xABC123 - ) - )}, - { "MyDecimalValue", new IntegerValueAst( - new IntegerLiteralToken( - new SourceExtent( - new SourcePosition(150, 6, 22), - new SourcePosition(154, 6, 26), - "12345" - ), - IntegerKind.DecimalValue, 12345 - ) - )}, - { "MyRealValue", new RealValueAst( - new RealLiteralToken( - new SourceExtent( - new SourcePosition(176, 7, 19), - new SourcePosition(181, 7, 24), - "123.45" - ), - 123.45 - ) - )} - } - ) - ), - new StatementEndToken( - new SourceExtent( - new SourcePosition(186, 8, 2), - new SourcePosition(186, 8, 2), - ";" - ) - ) - ) - } - ) - ); - var actualJson = TestUtils.ConvertToJson(actualAst); - var expectedJson = TestUtils.ConvertToJson(expectedAst); - Assert.AreEqual(expectedJson, actualJson); - var actualModule = ModelConverter.ConvertMofSpecificationAst(actualAst); - var expectedModule = new Module.Builder - { - Instances = new List { - new Instance.Builder - { - TypeName = "myType", - Alias = "Alias00000070", - Properties = new List { - new 
Property("MyBinaryValue", 42), - new Property("MyOctalValue", 149796), - new Property("MyHexValue", 11256099), - new Property("MyDecimalValue", 12345), - new Property("MyRealValue", 123.45) - } - }.Build() - } - }.Build(); - ModelAssert.AreEqual(expectedModule, actualModule); - } - - } - - #endregion - //[TestFixture] //private static class ParseMethodTestCasesWmiWin81 //{ diff --git a/src/Kingsland.MofParser.UnitTests/Parsing/ParserTests_ComplexTypeValue.cs b/src/Kingsland.MofParser.UnitTests/Parsing/ParserTests_ComplexTypeValue.cs new file mode 100644 index 00000000..fed67bee --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/Parsing/ParserTests_ComplexTypeValue.cs @@ -0,0 +1,729 @@ +using Kingsland.MofParser.Ast; +using Kingsland.MofParser.Lexing; +using Kingsland.MofParser.Model; +using Kingsland.MofParser.Parsing; +using Kingsland.MofParser.Tokens; +using Kingsland.MofParser.UnitTests.Helpers; +using Kingsland.MofParser.UnitTests.Model; +using Kingsland.ParseFx.Text; +using NUnit.Framework; +using System; +using System.Collections.Generic; +using System.Collections.ObjectModel; + +namespace Kingsland.MofParser.UnitTests.Parsing +{ + + public static partial class ParserTests + { + + #region 7.5.9 Complex type value + + public static class PropertyValueTests + { + + [Test] + public static void ParsePropetyValueWithLiteralString() + { + var tokens = Lexer.Lex( + SourceReader.From( + "instance of myType as $Alias0000006E\r\n" + + "{\r\n" + + " ServerURL = \"https://URL\";\r\n" + + "};" + ) + ); + var actualAst = Parser.Parse(tokens); + var expectedAst = new MofSpecificationAst( + new ReadOnlyCollection( + new List + { + new InstanceValueDeclarationAst( + new IdentifierToken( + new SourceExtent( + new SourcePosition(0, 1, 1), + new SourcePosition(7, 1, 8), + "instance" + ), + "instance" + ), + new IdentifierToken( + new SourceExtent( + new SourcePosition(9, 1, 10), + new SourcePosition(10, 1, 11), + "of" + ), + "of" + ), + new IdentifierToken( + new SourceExtent( + new SourcePosition(12, 1, 13), + new SourcePosition(17, 1, 18), + "myType" + ), + "myType" + ), + new IdentifierToken( + new SourceExtent( + new SourcePosition(19, 1, 20), + new SourcePosition(20, 1, 21), + "as" + ), + "as" + ), + new AliasIdentifierToken( + new SourceExtent( + new SourcePosition(22, 1, 23), + new SourcePosition(35, 1, 36), + "$Alias0000006E" + ), + "Alias0000006E" + ), + new PropertyValueListAst( + new ReadOnlyDictionary( + new Dictionary { + { "ServerURL", new StringValueAst.Builder { + StringLiteralValues = new List { + new StringLiteralToken( + new SourceExtent( + new SourcePosition(57, 3, 17), + new SourcePosition(69, 3, 29), + "\"https://URL\"" + ), + "https://URL" + ) + }, + Value = "https://URL" + }.Build() } + } + ) + ), + new StatementEndToken( + new SourceExtent( + new SourcePosition(74, 4, 2), + new SourcePosition(74, 4, 2), + ";" + ) + ) + ) + } + ) + ); + var actualJson = TestUtils.ConvertToJson(actualAst); + var expectedJson = TestUtils.ConvertToJson(expectedAst); + Assert.AreEqual(expectedJson, actualJson); + var actualModule = ModelConverter.ConvertMofSpecificationAst(actualAst); + var expectedModule = new Module.Builder + { + Instances = new List { + new Instance.Builder + { + TypeName = "myType", + Alias = "Alias0000006E", + Properties = new List { + new Property("ServerURL", "https://URL") + } + }.Build() + } + }.Build(); + ModelAssert.AreEqual(expectedModule, actualModule); + } + + [Test] + public static void ParsePropetyValueWithAliasIdentifier() + { + var tokens = Lexer.Lex( + 
SourceReader.From( + "instance of myType as $Alias00000070\r\n" + + "{\r\n" + + " Reference = $Alias0000006E;\r\n" + + "};" + ) + ); + var actualAst = Parser.Parse(tokens); + var expectedAst = new MofSpecificationAst( + new ReadOnlyCollection( + new List + { + new InstanceValueDeclarationAst( + new IdentifierToken( + new SourceExtent( + new SourcePosition(0, 1, 1), + new SourcePosition(7, 1, 8), + "instance" + ), + "instance" + ), + new IdentifierToken( + new SourceExtent( + new SourcePosition(9, 1, 10), + new SourcePosition(10, 1, 11), + "of" + ), + "of" + ), + new IdentifierToken( + new SourceExtent( + new SourcePosition(12, 1, 13), + new SourcePosition(17, 1, 18), + "myType" + ), + "myType" + ), + new IdentifierToken( + new SourceExtent( + new SourcePosition(19, 1, 20), + new SourcePosition(20, 1, 21), + "as" + ), + "as" + ), + new AliasIdentifierToken( + new SourceExtent( + new SourcePosition(22, 1, 23), + new SourcePosition(35, 1, 36), + "$Alias00000070" + ), + "Alias00000070" + ), + new PropertyValueListAst( + new ReadOnlyDictionary( + new Dictionary { + { "Reference", new ComplexValueAst( + new AliasIdentifierToken( + new SourceExtent( + new SourcePosition(57, 3, 17), + new SourcePosition(70, 3, 30), + "$Alias0000006E" + ), + "Alias0000006E" + ) + )} + } + ) + ), + new StatementEndToken( + new SourceExtent( + new SourcePosition(75, 4, 2), + new SourcePosition(75, 4, 2), + ";" + ) + ) + ) + } + ) + ); + var actualJson = TestUtils.ConvertToJson(actualAst); + var expectedJson = TestUtils.ConvertToJson(expectedAst); + Assert.AreEqual(expectedJson, actualJson); + var actualModule = ModelConverter.ConvertMofSpecificationAst(actualAst); + var expectedModule = new Module.Builder + { + Instances = new List { + new Instance.Builder + { + TypeName = "myType", + Alias = "Alias00000070", + Properties = new List { + new Property("Reference", "Alias0000006E") + } + }.Build() + } + }.Build(); + ModelAssert.AreEqual(expectedModule, actualModule); + } + + [Test] + public static void ParsePropetyValueWithEmptyArray() + { + var tokens = Lexer.Lex( + SourceReader.From( + "instance of myType as $Alias00000070\r\n" + + "{\r\n" + + " Reference = {};\r\n" + + "};" + ) + ); + var actualAst = Parser.Parse(tokens); + var expectedAst = new MofSpecificationAst( + new ReadOnlyCollection( + new List + { + new InstanceValueDeclarationAst( + new IdentifierToken( + new SourceExtent( + new SourcePosition(0, 1, 1), + new SourcePosition(7, 1, 8), + "instance" + ), + "instance" + ), + new IdentifierToken( + new SourceExtent( + new SourcePosition(9, 1, 10), + new SourcePosition(10, 1, 11), + "of" + ), + "of" + ), + new IdentifierToken( + new SourceExtent( + new SourcePosition(12, 1, 13), + new SourcePosition(17, 1, 18), + "myType" + ), + "myType" + ), + new IdentifierToken( + new SourceExtent( + new SourcePosition(19, 1, 20), + new SourcePosition(20, 1, 21), + "as" + ), + "as" + ), + new AliasIdentifierToken( + new SourceExtent( + new SourcePosition(22, 1, 23), + new SourcePosition(35, 1, 36), + "$Alias00000070" + ), + "Alias00000070" + ), + new PropertyValueListAst( + new ReadOnlyDictionary( + new Dictionary { + { "Reference", new LiteralValueArrayAst(null) } + } + ) + ), + new StatementEndToken( + new SourceExtent( + new SourcePosition(63, 4, 2), + new SourcePosition(63, 4, 2), + ";" + ) + ) + ) + } + ) + ); + var actualJson = TestUtils.ConvertToJson(actualAst); + var expectedJson = TestUtils.ConvertToJson(expectedAst); + Assert.AreEqual(expectedJson, actualJson); + var actualModule = 
ModelConverter.ConvertMofSpecificationAst(actualAst); + var expectedModule = new Module.Builder + { + Instances = new List { + new Instance.Builder + { + TypeName = "myType", + Alias = "Alias00000070", + Properties = new List { + new Property("Reference", new List()) + } + }.Build() + } + }.Build(); + ModelAssert.AreEqual(expectedModule, actualModule); + } + + [Test] + public static void ParsePropetyValueArrayWithAliasIdentifier() + { + var tokens = Lexer.Lex( + SourceReader.From( + "instance of myType as $Alias00000070\r\n" + + "{\r\n" + + " Reference = {$Alias0000006E};\r\n" + + "};" + ) + ); + var actualAst = Parser.Parse(tokens); + var expectedAst = new MofSpecificationAst( + new ReadOnlyCollection( + new List + { + new InstanceValueDeclarationAst( + new IdentifierToken( + new SourceExtent( + new SourcePosition(0, 1, 1), + new SourcePosition(7, 1, 8), + "instance" + ), + "instance" + ), + new IdentifierToken( + new SourceExtent( + new SourcePosition(9, 1, 10), + new SourcePosition(10, 1, 11), + "of" + ), + "of" + ), + new IdentifierToken( + new SourceExtent( + new SourcePosition(12, 1, 13), + new SourcePosition(17, 1, 18), + "myType" + ), + "myType" + ), + new IdentifierToken( + new SourceExtent( + new SourcePosition(19, 1, 20), + new SourcePosition(20, 1, 21), + "as" + ), + "as" + ), + new AliasIdentifierToken( + new SourceExtent( + new SourcePosition(22, 1, 23), + new SourcePosition(35, 1, 36), + "$Alias00000070" + ), + "Alias00000070" + ), + new PropertyValueListAst( + new ReadOnlyDictionary( + new Dictionary { + { "Reference", new ComplexValueArrayAst( + new ReadOnlyCollection( + new List { + new ComplexValueAst( + new AliasIdentifierToken( + new SourceExtent( + new SourcePosition(58, 3, 18), + new SourcePosition(71, 3, 31), + "$Alias0000006E" + ), + "Alias0000006E" + ) + ) + } + ) + )} + } + ) + ), + new StatementEndToken( + new SourceExtent( + new SourcePosition(77, 4, 2), + new SourcePosition(77, 4, 2), + ";" + ) + ) + ) + } + ) + ); + var actualJson = TestUtils.ConvertToJson(actualAst); + var expectedJson = TestUtils.ConvertToJson(expectedAst); + Assert.AreEqual(expectedJson, actualJson); + var actualModule = ModelConverter.ConvertMofSpecificationAst(actualAst); + var expectedModule = new Module.Builder + { + Instances = new List { + new Instance.Builder + { + TypeName = "myType", + Alias = "Alias00000070", + Properties = new List { + new Property("Reference", new List { + "Alias0000006E" + }) + } + }.Build() + } + }.Build(); + ModelAssert.AreEqual(expectedModule, actualModule); + } + + [Test] + public static void ParsePropetyValueArrayWithLiteralStrings() + { + var tokens = Lexer.Lex( + SourceReader.From( + "instance of myType as $Alias00000070\r\n" + + "{\r\n" + + " ServerURLs = { \"https://URL1\", \"https://URL2\" };\r\n" + + "};" + ) + ); + var actualAst = Parser.Parse(tokens); + var expectedAst = new MofSpecificationAst( + new ReadOnlyCollection( + new List + { + new InstanceValueDeclarationAst( + new IdentifierToken( + new SourceExtent( + new SourcePosition(0, 1, 1), + new SourcePosition(7, 1, 8), + "instance" + ), + "instance" + ), + new IdentifierToken( + new SourceExtent( + new SourcePosition(9, 1, 10), + new SourcePosition(10, 1, 11), + "of" + ), + "of" + ), + new IdentifierToken( + new SourceExtent( + new SourcePosition(12, 1, 13), + new SourcePosition(17, 1, 18), + "myType" + ), + "myType" + ), + new IdentifierToken( + new SourceExtent( + new SourcePosition(19, 1, 20), + new SourcePosition(20, 1, 21), + "as" + ), + "as" + ), + new AliasIdentifierToken( + new 
SourceExtent( + new SourcePosition(22, 1, 23), + new SourcePosition(35, 1, 36), + "$Alias00000070" + ), + "Alias00000070" + ), + new PropertyValueListAst( + new ReadOnlyDictionary( + new Dictionary { + { "ServerURLs", new LiteralValueArrayAst( + new ReadOnlyCollection( + new List { + new StringValueAst.Builder { + StringLiteralValues = new List { + new StringLiteralToken( + new SourceExtent( + new SourcePosition(60, 3, 20), + new SourcePosition(73, 3, 33), + "\"https://URL1\"" + ), + "https://URL1" + ) + }, + Value = "https://URL1" + }.Build(), + new StringValueAst.Builder { + StringLiteralValues = new List { + new StringLiteralToken( + new SourceExtent( + new SourcePosition(76, 3, 36), + new SourcePosition(89, 3, 49), + "\"https://URL2\"" + ), + "https://URL2" + ) + }, + Value = "https://URL2" + }.Build() + } + ) + )} + } + ) + ), + new StatementEndToken( + new SourceExtent( + new SourcePosition(96, 4, 2), + new SourcePosition(96, 4, 2), + ";" + ) + ) + ) + } + ) + ); + var actualJson = TestUtils.ConvertToJson(actualAst); + var expectedJson = TestUtils.ConvertToJson(expectedAst); + Assert.AreEqual(expectedJson, actualJson); + var actualModule = ModelConverter.ConvertMofSpecificationAst(actualAst); + var expectedModule = new Module.Builder + { + Instances = new List { + new Instance.Builder + { + TypeName = "myType", + Alias = "Alias00000070", + Properties = new List { + new Property("ServerURLs", new List { + "https://URL1", "https://URL2" + }) + } + }.Build() + } + }.Build(); + ModelAssert.AreEqual(expectedModule, actualModule); + } + + [Test] + public static void ParsePropetyValueArrayWithNumericLiteralValues() + { + var tokens = Lexer.Lex( + SourceReader.From( + "instance of myType as $Alias00000070\r\n" + + "{\r\n" + + " MyBinaryValue = 0101010b;\r\n" + + " MyOctalValue = 0444444;\r\n" + + " MyHexValue = 0xABC123;\r\n" + + " MyDecimalValue = 12345;\r\n" + + " MyRealValue = 123.45;\r\n" + + "};" + ) + ); + var actualAst = Parser.Parse(tokens); + var expectedAst = new MofSpecificationAst( + new ReadOnlyCollection( + new List + { + new InstanceValueDeclarationAst( + new IdentifierToken( + new SourceExtent( + new SourcePosition(0, 1, 1), + new SourcePosition(7, 1, 8), + "instance" + ), + "instance" + ), + new IdentifierToken( + new SourceExtent( + new SourcePosition(9, 1, 10), + new SourcePosition(10, 1, 11), + "of" + ), + "of" + ), + new IdentifierToken( + new SourceExtent( + new SourcePosition(12, 1, 13), + new SourcePosition(17, 1, 18), + "myType" + ), + "myType" + ), + new IdentifierToken( + new SourceExtent( + new SourcePosition(19, 1, 20), + new SourcePosition(20, 1, 21), + "as" + ), + "as" + ), + new AliasIdentifierToken( + new SourceExtent( + new SourcePosition(22, 1, 23), + new SourcePosition(35, 1, 36), + "$Alias00000070" + ), + "Alias00000070" + ), + new PropertyValueListAst( + new ReadOnlyDictionary( + new Dictionary { + { "MyBinaryValue", new IntegerValueAst( + new IntegerLiteralToken( + new SourceExtent( + new SourcePosition(61, 3, 21), + new SourcePosition(68, 3, 28), + "0101010b" + ), + IntegerKind.BinaryValue, 0b101010 + ) + )}, + { "MyOctalValue", new IntegerValueAst( + new IntegerLiteralToken( + new SourceExtent( + new SourcePosition(91, 4, 20), + new SourcePosition(97, 4, 26), + "0444444" + ), + IntegerKind.OctalValue, Convert.ToInt32("444444", 8) + ) + )}, + { "MyHexValue", new IntegerValueAst( + new IntegerLiteralToken( + new SourceExtent( + new SourcePosition(118, 5, 18), + new SourcePosition(125, 5, 25), + "0xABC123" + ), + IntegerKind.HexValue, 0xABC123 + ) + )}, 
+ { "MyDecimalValue", new IntegerValueAst( + new IntegerLiteralToken( + new SourceExtent( + new SourcePosition(150, 6, 22), + new SourcePosition(154, 6, 26), + "12345" + ), + IntegerKind.DecimalValue, 12345 + ) + )}, + { "MyRealValue", new RealValueAst( + new RealLiteralToken( + new SourceExtent( + new SourcePosition(176, 7, 19), + new SourcePosition(181, 7, 24), + "123.45" + ), + 123.45 + ) + )} + } + ) + ), + new StatementEndToken( + new SourceExtent( + new SourcePosition(186, 8, 2), + new SourcePosition(186, 8, 2), + ";" + ) + ) + ) + } + ) + ); + var actualJson = TestUtils.ConvertToJson(actualAst); + var expectedJson = TestUtils.ConvertToJson(expectedAst); + Assert.AreEqual(expectedJson, actualJson); + var actualModule = ModelConverter.ConvertMofSpecificationAst(actualAst); + var expectedModule = new Module.Builder + { + Instances = new List { + new Instance.Builder + { + TypeName = "myType", + Alias = "Alias00000070", + Properties = new List { + new Property("MyBinaryValue", 42), + new Property("MyOctalValue", 149796), + new Property("MyHexValue", 11256099), + new Property("MyDecimalValue", 12345), + new Property("MyRealValue", 123.45) + } + }.Build() + } + }.Build(); + ModelAssert.AreEqual(expectedModule, actualModule); + } + + } + + #endregion + + } + +} From 46c61dd001c1db0559354bcd6a8e7e6c06e61468 Mon Sep 17 00:00:00 2001 From: mikeclayton Date: Sat, 12 Sep 2020 23:01:32 +0100 Subject: [PATCH 06/11] More splitting of roundtrip tests into partial files --- .../CodeGen/RoundtripTests.cs | 8 +- .../RoundtripTests_AssociationDeclaration.cs | 25 ++- .../CodeGen/RoundtripTests_BooleanValue.cs | 6 +- .../RoundtripTests_ClassDeclaration.cs | 118 +++---------- .../CodeGen/RoundtripTests_ClassFeature.cs | 96 +++++++++++ .../RoundtripTests_CompilerDirective.cs | 12 +- .../RoundtripTests_ComplexTypeValue.cs | 69 +------- .../RoundtripTests_ComplexTypeValue2.cs | 158 ------------------ .../CodeGen/RoundtripTests_ComplexValue.cs | 45 +++++ .../RoundtripTests_ComplexValueArray.cs | 42 +++++ .../CodeGen/RoundtripTests_EnumTypeValue.cs | 52 +++--- .../RoundtripTests_EnumerationDeclaration.cs | 58 +++---- ...RoundtripTests_InstanceValueDeclaration.cs | 99 +++++++++++ .../CodeGen/RoundtripTests_IntegerValues.cs | 24 +-- ...alue.cs => RoundtripTests_LiteralValue.cs} | 57 ++----- .../RoundtripTests_LiteralValueArray.cs | 42 +++++ .../RoundtripTests_MethodDeclaration.cs | 54 +++--- .../RoundtripTests_PropertyDeclaration.cs | 24 +-- .../CodeGen/RoundtripTests_Qualifier.cs | 6 +- .../CodeGen/RoundtripTests_QualifierList.cs | 54 +----- .../CodeGen/RoundtripTests_QualifierValue.cs | 67 ++++++++ .../CodeGen/RoundtripTests_RealValue.cs | 36 ++-- .../CodeGen/RoundtripTests_StringValue.cs | 25 ++- .../RoundtripTests_StructureDeclaration.cs | 101 +---------- .../RoundtripTests_StructureFeature.cs | 98 +++++++++++ ...ripTests_StructureValueDeclarationTests.cs | 54 ++++++ .../Kingsland.MofParser.UnitTests.csproj | 11 +- 27 files changed, 770 insertions(+), 671 deletions(-) create mode 100644 src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_ClassFeature.cs delete mode 100644 src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_ComplexTypeValue2.cs create mode 100644 src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_ComplexValue.cs create mode 100644 src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_ComplexValueArray.cs create mode 100644 src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_InstanceValueDeclaration.cs rename 
src/Kingsland.MofParser.UnitTests/CodeGen/{RoundtripTests_PrimitiveTypeValue.cs => RoundtripTests_LiteralValue.cs} (55%) create mode 100644 src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_LiteralValueArray.cs create mode 100644 src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_QualifierValue.cs create mode 100644 src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_StructureFeature.cs create mode 100644 src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_StructureValueDeclarationTests.cs diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests.cs index 09cda382..4a4a55cf 100644 --- a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests.cs +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests.cs @@ -46,16 +46,16 @@ public static partial class RoundtripTests // } //} - private static void AssertRoundtrip(string sourceMof, ParserQuirks parserQuirks = ParserQuirks.None) + private static void AssertRoundtrip(string sourceText, ParserQuirks parserQuirks = ParserQuirks.None) { // check the lexer tokens roundtrips ok - var tokens = Lexer.Lex(SourceReader.From(sourceMof)); + var tokens = Lexer.Lex(SourceReader.From(sourceText)); var tokensMof = TokenMofGenerator.ConvertToMof(tokens); - Assert.AreEqual(sourceMof, tokensMof); + Assert.AreEqual(sourceText, tokensMof); // check the parser ast roundtrips ok var astNodes = Parser.Parse(tokens, parserQuirks); var astMof = AstMofGenerator.ConvertToMof(astNodes); - Assert.AreEqual(sourceMof, astMof); + Assert.AreEqual(sourceText, astMof); } #endregion diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_AssociationDeclaration.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_AssociationDeclaration.cs index 6082b1f1..bb37bdda 100644 --- a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_AssociationDeclaration.cs +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_AssociationDeclaration.cs @@ -1,9 +1,4 @@ -using Kingsland.MofParser.CodeGen; -using Kingsland.MofParser.Lexing; -using Kingsland.MofParser.Parsing; -using Kingsland.ParseFx.Parsing; -using Kingsland.ParseFx.Text; -using NUnit.Framework; +using NUnit.Framework; namespace Kingsland.MofParser.UnitTests.CodeGen { @@ -19,37 +14,37 @@ public static class AssociationDeclarationTests [Test] public static void EmptyAssociationDeclarationShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "association GOLF_MemberLocker\r\n" + "{\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test] public static void AssociationDeclarationWithSuperAssociationShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "association GOLF_MemberLocker : GOLF_Base\r\n" + "{\r\n" + "\tGOLF_ClubMember REF Member;\r\n" + "\tGOLF_Locker REF Locker;\r\n" + "\tGOLF_Date AssignedOnDate;\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test] public static void AssociationDeclarationWithClassFeaturesShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "association GOLF_MemberLocker\r\n" + "{\r\n" + "\tGOLF_ClubMember REF Member;\r\n" + "\tGOLF_Locker REF Locker;\r\n" + "\tGOLF_Date AssignedOnDate;\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } } diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_BooleanValue.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_BooleanValue.cs index 55f32a3f..293865b7 100644 --- 
a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_BooleanValue.cs +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_BooleanValue.cs @@ -14,12 +14,12 @@ public static class BooleanValueTests [Test] public static void BooleanValueAstShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "instance of myType as $Alias00000070\r\n" + "{\r\n" + "\tReference = TRUE;\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } } diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_ClassDeclaration.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_ClassDeclaration.cs index f7c35e97..e099aa32 100644 --- a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_ClassDeclaration.cs +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_ClassDeclaration.cs @@ -1,9 +1,4 @@ -using Kingsland.MofParser.CodeGen; -using Kingsland.MofParser.Lexing; -using Kingsland.MofParser.Parsing; -using Kingsland.ParseFx.Parsing; -using Kingsland.ParseFx.Text; -using NUnit.Framework; +using NUnit.Framework; namespace Kingsland.MofParser.UnitTests.CodeGen { @@ -19,125 +14,64 @@ public static class ClassDeclarationTests [Test] public static void EmptyClassDeclarationShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "class GOLF_Base\r\n" + "{\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test] public static void ClassDeclarationWithSuperclassShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "class GOLF_Base : GOLF_Superclass\r\n" + "{\r\n" + "\tstring InstanceID;\r\n" + "\tstring Caption = Null;\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test] public static void ClassDeclarationWithClassFeaturesShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "class GOLF_Base\r\n" + "{\r\n" + "\tstring InstanceID;\r\n" + "\tstring Caption = Null;\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test] public static void ClassDeclarationsWithQualifierListShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "[Abstract, OCL{\"-- the key property cannot be NULL\", \"inv: InstanceId.size() = 10\"}]\r\n" + "class GOLF_Base\r\n" + "{\r\n" + "\t[Description(\"an instance of a class that derives from the GOLF_Base class. 
\"), Key] string InstanceID;\r\n" + "\t[Description(\"A short textual description (one- line string) of the\"), MaxLen(64)] string Caption = Null;\r\n" + - "};" - ); - } - - } - - public static class ClassFeatureTests - { - - [Test] - public static void ClassFeatureWithQualifiersShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "class Sponsor\r\n" + - "{\r\n" + - "\t[Description(\"Monthly salary in $US\")] string Name;\r\n" + - "};" - ); - } - - [Test] - public static void InvalidClassFeatureShouldThrow() - { - var sourceMof = - "class Sponsor\r\n" + - "{\r\n" + - "\t100\r\n" + "};"; - var tokens = Lexer.Lex(SourceReader.From(sourceMof)); - var tokensMof = TokenMofGenerator.ConvertToMof(tokens); - var ex = Assert.Throws( - () => { - var astNodes = Parser.Parse(tokens); - } - ); - Assert.AreEqual( - "Unexpected token found at Position 19, Line Number 3, Column Number 2.\r\n" + - "Token Type: 'IntegerLiteralToken'\r\n" + - "Token Text: '100'", - ex.Message - ); - } - - [Test] - public static void ClassFeatureWithStructureDeclarationShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "class Sponsor\r\n" + - "{\r\n" + - "\tstructure Nested\r\n" + - "\t{\r\n" + - "\t};\r\n" + - "};" - ); + RoundtripTests.AssertRoundtrip(sourceText); } - [Test] - public static void ClassFeatureWithEnumerationDeclarationShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "class Sponsor\r\n" + - "{\r\n" + - "\tenumeration MonthsEnum : Integer\r\n" + - "\t{\r\n" + - "\t};\r\n" + - "};" - ); - } - - [Test] - public static void ClassFeatureWithPropertyDeclarationShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "class Sponsor\r\n" + - "{\r\n" + - "\tstring Name;\r\n" + - "};" - ); - } + //[Test] + //public static void ClassDeclarationsAstWithNumericPropertiesShouldRoundtrip() + //{ + // RoundtripTests.AssertRoundtrip( + // "instance of myType as $Alias00000070\r\n" + + // "{\r\n" + + // "\tMyBinaryValue = 0101010b;\r\n" + + // "\tMyOctalValue = 0444444;\r\n" + + // "\tMyHexValue = 0xABC123;\r\n" + + // "\tMyDecimalValue = 12345;\r\n" + + // "\tMyRealValue = 123.45;\r\n" + + // "};" + // ); + //} } diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_ClassFeature.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_ClassFeature.cs new file mode 100644 index 00000000..298ed51a --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_ClassFeature.cs @@ -0,0 +1,96 @@ +using Kingsland.MofParser.CodeGen; +using Kingsland.MofParser.Lexing; +using Kingsland.MofParser.Parsing; +using Kingsland.ParseFx.Parsing; +using Kingsland.ParseFx.Text; +using NUnit.Framework; + +namespace Kingsland.MofParser.UnitTests.CodeGen +{ + + public static partial class RoundtripTests + { + + #region 7.5.2 Class declaration + + public static class ClassFeatureTests + { + + [Test] + public static void ClassFeatureWithQualifiersShouldRoundtrip() + { + var sourceText = + "class Sponsor\r\n" + + "{\r\n" + + "\t[Description(\"Monthly salary in $US\")] string Name;\r\n" + + "};"; + RoundtripTests.AssertRoundtrip(sourceText); + } + + [Test] + public static void InvalidClassFeatureShouldThrow() + { + var sourceText = + "class Sponsor\r\n" + + "{\r\n" + + "\t100\r\n" + + "};"; + var tokens = Lexer.Lex(SourceReader.From(sourceText)); + var tokensMof = TokenMofGenerator.ConvertToMof(tokens); + var ex = Assert.Throws( + () => { + var astNodes = Parser.Parse(tokens); + } + ); + Assert.AreEqual( + "Unexpected token found at Position 19, Line Number 3, Column Number 2.\r\n" + + "Token 
Type: 'IntegerLiteralToken'\r\n" + + "Token Text: '100'", + ex.Message + ); + } + + [Test] + public static void ClassFeatureWithStructureDeclarationShouldRoundtrip() + { + var sourceText = + "class Sponsor\r\n" + + "{\r\n" + + "\tstructure Nested\r\n" + + "\t{\r\n" + + "\t};\r\n" + + "};"; + RoundtripTests.AssertRoundtrip(sourceText); + } + + [Test] + public static void ClassFeatureWithEnumerationDeclarationShouldRoundtrip() + { + var sourceText = + "class Sponsor\r\n" + + "{\r\n" + + "\tenumeration MonthsEnum : Integer\r\n" + + "\t{\r\n" + + "\t};\r\n" + + "};"; + RoundtripTests.AssertRoundtrip(sourceText); + } + + [Test] + public static void ClassFeatureWithPropertyDeclarationShouldRoundtrip() + { + var sourceText = + "class Sponsor\r\n" + + "{\r\n" + + "\tstring Name;\r\n" + + "};"; + RoundtripTests.AssertRoundtrip(sourceText); + } + + } + + #endregion + + } + +} \ No newline at end of file diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_CompilerDirective.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_CompilerDirective.cs index 508d8a2d..1eabeb64 100644 --- a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_CompilerDirective.cs +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_CompilerDirective.cs @@ -14,17 +14,17 @@ public static class CompilerDirectiveTests [Test] public static void CompilerDirectiveShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( - "#pragma include (\"GlobalStructs/GOLF_Address.mof\")" - ); + var sourceText = + "#pragma include (\"GlobalStructs/GOLF_Address.mof\")"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test] public static void CompilerDirectiveWithMultipleSingleStringsShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( - "#pragma include (\"GlobalStructs\" \"/\" \"GOLF_Address.mof\")" - ); + var sourceText = + "#pragma include (\"GlobalStructs\" \"/\" \"GOLF_Address.mof\")"; + RoundtripTests.AssertRoundtrip(sourceText); } } diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_ComplexTypeValue.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_ComplexTypeValue.cs index aea19a3a..57f91fb4 100644 --- a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_ComplexTypeValue.cs +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_ComplexTypeValue.cs @@ -14,80 +14,23 @@ public static class ComplexTypeValueTests [Test] public static void ComplexTypeValueWithComplexValuePropertyShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "instance of GOLF_ClubMember\r\n" + "{\r\n" + "\tLastPaymentDate = $MyAliasIdentifier;\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test] public static void ComplexTypeValueWithComplexValueArrayPropertyShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "instance of GOLF_ClubMember\r\n" + "{\r\n" + "\tLastPaymentDate = {$MyAliasIdentifier};\r\n" + - "};" - ); - } - - } - - public static class ComplexValueTests - { - - [Test] - public static void ComplexValuePropertyShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_ClubMember\r\n" + - "{\r\n" + - "\tLastPaymentDate = $MyAliasIdentifier;\r\n" + - "};" - ); - } - - [Test] - public static void ComplexValuePropertyWithValueOfShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_ClubMember\r\n" + - "{\r\n" + - "\tLastPaymentDate = value of GOLF_Date\r\n" + - "\t{\r\n" + - "\t\tMonth = July;\r\n" + - "\t};\r\n" + - "};" - ); - } - - } - - public static class 
ComplexValueArrayTests - { - - [Test] - public static void ComplexValueArrayWithOneItemShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_ClubMember\r\n" + - "{\r\n" + - "\tLastPaymentDate = {$MyAliasIdentifier};\r\n" + - "};" - ); - } - - [Test] - public static void ComplexValueArrayWithMultipleItemsShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_ClubMember\r\n" + - "{\r\n" + - "\tLastPaymentDate = {$MyAliasIdentifier, $MyOtherAliasIdentifier};\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } } diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_ComplexTypeValue2.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_ComplexTypeValue2.cs deleted file mode 100644 index 82f432a7..00000000 --- a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_ComplexTypeValue2.cs +++ /dev/null @@ -1,158 +0,0 @@ -using Kingsland.MofParser.CodeGen; -using Kingsland.MofParser.Lexing; -using Kingsland.MofParser.Parsing; -using Kingsland.ParseFx.Parsing; -using Kingsland.ParseFx.Text; -using NUnit.Framework; - -namespace Kingsland.MofParser.UnitTests.CodeGen -{ - - public static partial class RoundtripTests - { - - #region 7.6.2 Complex type value - - public static class InstanceValueDeclarationTests - { - - [Test] - public static void InstanceValueDeclarationWithNoPropertiesShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_ClubMember\r\n" + - "{\r\n" + - "};" - ); - } - - [Test] - public static void InstanceValueDeclarationWithChildPropertiesShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_ClubMember\r\n" + - "{\r\n" + - "\tFirstName = \"John\";\r\n" + - "\tLastName = \"Doe\";\r\n" + - "};" - ); - } - - [Test] - public static void InstanceValueDeclarationWithAliasShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_ClubMember as $MyAliasIdentifier\r\n" + - "{\r\n" + - "};" - ); - } - - } - - public static class StructureValueDeclarationTests - { - - [Test] - public static void StructureValueDeclarationWithNoPropertiesShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "value of GOLF_ClubMember as $MyAliasIdentifier\r\n" + - "{\r\n" + - "};" - ); - } - - [Test] - public static void StructureValueDeclarationWithChildPropertiesShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "value of GOLF_ClubMember as $MyAliasIdentifier\r\n" + - "{\r\n" + - "\tFirstName = \"John\";\r\n" + - "\tLastName = \"Doe\";\r\n" + - "};" - ); - } - - } - - //[Test] - //public static void InstanceValueDeclarationShouldRoundtrip() - //{ - // RoundtripTests.AssertRoundtrip( - // "instance of GOLF_ClubMember\r\n" + - // "{\r\n" + - // "\tCaption = \"Instance of John Doe\\\'s GOLF_ClubMember object\";\r\n" + - // "};" - // ); - //} - - //[Test] - //public static void ClassDeclarationsAstWithNumericPropertiesShouldRoundtrip() - //{ - // RoundtripTests.AssertRoundtrip( - // "instance of myType as $Alias00000070\r\n" + - // "{\r\n" + - // "\tMyBinaryValue = 0101010b;\r\n" + - // "\tMyOctalValue = 0444444;\r\n" + - // "\tMyHexValue = 0xABC123;\r\n" + - // "\tMyDecimalValue = 12345;\r\n" + - // "\tMyRealValue = 123.45;\r\n" + - // "};" - // ); - //} - - //[Test(Description = "https://github.com/mikeclayton/MofParser/issues/26"), - // Ignore("https://github.com/mikeclayton/MofParser/issues/26")] - //public static void InstanceValueDeclarationsWithInstanceValuePropertyShouldRoundtrip() - //{ - // RoundtripTests.AssertRoundtrip( - // "instance of 
GOLF_ClubMember\r\n" + - // "{\r\n" + - // "\tLastPaymentDate = instance of GOLF_Date\r\n" + - // "\t{\r\n" + - // "\tYear = 2011;\r\n" + - // "\tMonth = July;\r\n" + - // "\tDay = 31;\r\n" + - // "\t};\r\n" + - // "}"; - // ); - //} - - //[Test] - //public static void InstanceValueDeclarationWithStructureValueDeclarationPropertyShouldRoundtrip() - //{ - // RoundtripTests.AssertRoundtrip( - // "instance of GOLF_ClubMember\r\n" + - // "{\r\n" + - // "\tCaption = \"Instance of John Doe\\\'s GOLF_ClubMember object\";\r\n" + - // "\tMemberAddress = value of GOLF_Address\r\n" + - // "\t{\r\n" + - // "\t\tState = \"IL\";\r\n" + - // "\t\tCity = \"Oak Park\";\r\n" + - // "\t\tStreet = \"Oak Park Av.\";\r\n" + - // "\t\tStreetNo = \"1177\";\r\n" + - // "\t\tApartmentNo = \"3B\";\r\n" + - // "\t};\r\n" + - // "};"; - // ); - //} - - //[Test] - //public static void StructureValueDeclarationShouldRoundtrip() - //{ - // RoundtripTests.AssertRoundtrip( - // "value of GOLF_PhoneNumber as $JohnDoesPhoneNo\r\n" + - // "{\r\n" + - // "\tAreaCode = {\"9\", \"0\", \"7\"};\r\n" + - // "\tNumber = {\"7\", \"4\", \"7\", \"4\", \"8\", \"8\", \"4\"};\r\n" + - // "};"; - // ); - //} - - #endregion - - } - -} \ No newline at end of file diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_ComplexValue.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_ComplexValue.cs new file mode 100644 index 00000000..2fa2f0dd --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_ComplexValue.cs @@ -0,0 +1,45 @@ +using NUnit.Framework; + +namespace Kingsland.MofParser.UnitTests.CodeGen +{ + + public static partial class RoundtripTests + { + + #region 7.5.9 Complex type value + + public static class ComplexValueTests + { + + [Test] + public static void ComplexValuePropertyShouldRoundtrip() + { + var sourceText = + "instance of GOLF_ClubMember\r\n" + + "{\r\n" + + "\tLastPaymentDate = $MyAliasIdentifier;\r\n" + + "};"; + RoundtripTests.AssertRoundtrip(sourceText); + } + + [Test] + public static void ComplexValuePropertyWithValueOfShouldRoundtrip() + { + var sourceText = + "instance of GOLF_ClubMember\r\n" + + "{\r\n" + + "\tLastPaymentDate = value of GOLF_Date\r\n" + + "\t{\r\n" + + "\t\tMonth = July;\r\n" + + "\t};\r\n" + + "};"; + RoundtripTests.AssertRoundtrip(sourceText); + } + + } + + #endregion + + } + +} \ No newline at end of file diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_ComplexValueArray.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_ComplexValueArray.cs new file mode 100644 index 00000000..b0c78aca --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_ComplexValueArray.cs @@ -0,0 +1,42 @@ +using NUnit.Framework; + +namespace Kingsland.MofParser.UnitTests.CodeGen +{ + + public static partial class RoundtripTests + { + + #region 7.5.9 Complex type value + + public static class ComplexValueArrayTests + { + + [Test] + public static void ComplexValueArrayWithOneItemShouldRoundtrip() + { + var sourceText = + "instance of GOLF_ClubMember\r\n" + + "{\r\n" + + "\tLastPaymentDate = {$MyAliasIdentifier};\r\n" + + "};"; + RoundtripTests.AssertRoundtrip(sourceText); + } + + [Test] + public static void ComplexValueArrayWithMultipleItemsShouldRoundtrip() + { + var sourceText = + "instance of GOLF_ClubMember\r\n" + + "{\r\n" + + "\tLastPaymentDate = {$MyAliasIdentifier, $MyOtherAliasIdentifier};\r\n" + + "};"; + RoundtripTests.AssertRoundtrip(sourceText); + } + + } + + #endregion + + } + +} \ No newline at end of file 
diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_EnumTypeValue.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_EnumTypeValue.cs index c5e82ca9..80590634 100644 --- a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_EnumTypeValue.cs +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_EnumTypeValue.cs @@ -19,27 +19,27 @@ public static class EnumTypeValueTests [Test] public static void EnumTypeValueWithEnumValueShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "instance of GOLF_Date\r\n" + "{\r\n" + "\tYear = 2011;\r\n" + "\tMonth = July;\r\n" + "\tDay = 31;\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test] public static void EnumTypeValueWithEnumValueArrayShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "instance of GOLF_Date\r\n" + "{\r\n" + "\tYear = 2011;\r\n" + "\tMonth = {June};\r\n" + "\tDay = 31;\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } } @@ -50,27 +50,27 @@ public static class EnumValueTests [Test] public static void UnqalifiedEnumValueShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "instance of GOLF_Date\r\n" + "{\r\n" + "\tYear = 2011;\r\n" + "\tMonth = July;\r\n" + "\tDay = 31;\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test] public static void QualifiedEnumValueShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "instance of GOLF_Date\r\n" + "{\r\n" + "\tYear = 2011;\r\n" + "\tMonth = MonthEnums.July;\r\n" + "\tDay = 31;\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } } @@ -81,47 +81,49 @@ public static class EnumValueArrayTests [Test] public static void EmptyEnumValueArrayShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "instance of GOLF_Date\r\n" + "{\r\n" + "\tYear = 2011;\r\n" + "\tMonth = {June};\r\n" + "\tDay = 31;\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test] public static void EnumValueArrayWithSingleEnumValueShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "instance of GOLF_Date\r\n" + "{\r\n" + "\tYear = 2011;\r\n" + "\tMonth = {June};\r\n" + "\tDay = 31;\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } public static void EnumValueArrayWithMultipleEnumValuesShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "instance of GOLF_Date\r\n" + "{\r\n" + "\tMonth = {January, February};\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test(Description = "https://github.com/mikeclayton/MofParser/issues/25")] public static void EnumValueArrayWithQualifiedEnumValuesAndQuirksEnabledShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "instance of GOLF_Date\r\n" + "{\r\n" + "\tMonth = {MonthEnums.July};\r\n" + - "};", + "};"; + RoundtripTests.AssertRoundtrip( + sourceText, ParserQuirks.EnumValueArrayContainsEnumValuesNotEnumNames ); } @@ -129,12 +131,12 @@ public static void EnumValueArrayWithQualifiedEnumValuesAndQuirksEnabledShouldRo [Test(Description = "https://github.com/mikeclayton/MofParser/issues/25")] public static void EnumValueArrayWithQualifiedEnumValuesAndQuirksDisabledShouldThrow() { - var sourceMof = + var sourceText = "instance of GOLF_Date\r\n" + "{\r\n" + "\tMonth = {MonthEnums.July};\r\n" + "};"; - var tokens = Lexer.Lex(SourceReader.From(sourceMof)); + var tokens = Lexer.Lex(SourceReader.From(sourceText)); var tokensMof = 
TokenMofGenerator.ConvertToMof(tokens); var ex = Assert.Throws( () => diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_EnumerationDeclaration.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_EnumerationDeclaration.cs index 2419f7bd..cc08c5f9 100644 --- a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_EnumerationDeclaration.cs +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_EnumerationDeclaration.cs @@ -19,37 +19,37 @@ public static class EnumerationDeclarationTests [Test] public static void EmptyIntegerEnumerationDeclarationShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "enumeration MonthsEnum : Integer\r\n" + "{\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test] public static void EmptyStringEnumerationDeclarationShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "enumeration MonthsEnum : String\r\n" + "{\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test] public static void EmptyInheritedEnumerationDeclarationShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "enumeration MonthsEnum : GOLF_MyEnum\r\n" + "{\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test] public static void EnumerationDeclarationWithoutValuesShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "enumeration MonthsEnum : String\r\n" + "{\r\n" + "\tJanuary,\r\n" + @@ -64,8 +64,8 @@ public static void EnumerationDeclarationWithoutValuesShouldRoundtrip() "\tOctober,\r\n" + "\tNovember,\r\n" + "\tDecember\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test(Description = "https://github.com/mikeclayton/MofParser/issues/52")] @@ -73,12 +73,12 @@ public static void EnumerationDeclarationDeprecatedMof300IntegerBaseAndQuirksEna { // this should throw because "uint32" is recognized as an integer type. // as a result, "July" (a string) is not a valid value for an integer enumElement value - var sourceMof = + var sourceText = "enumeration MonthsEnum : uint32\r\n" + "{\r\n" + "\tJuly = \"July\"\r\n" + "};"; - var tokens = Lexer.Lex(SourceReader.From(sourceMof)); + var tokens = Lexer.Lex(SourceReader.From(sourceText)); var tokensMof = TokenMofGenerator.ConvertToMof(tokens); var ex = Assert.Throws( () => @@ -104,12 +104,12 @@ public static void EnumerationDeclarationDeprecatedMof300IntegerBaseAndQuirksDis // so it's assumed to be a separate base enum like "enumeration uint32 { ... };". 
// as a result, there's no validation done on the datattype of the enum element and // it will accept "July" as a valid value - RoundtripTests.AssertRoundtrip( + var sourceText = "enumeration MonthsEnum : uint32\r\n" + "{\r\n" + "\tJuly = \"July\"\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } } @@ -120,45 +120,45 @@ public static class EnumElementTests [Test] public static void EnumElementWithQualifiersShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "enumeration MonthsEnum : integer\r\n" + "{\r\n" + "\t[Description(\"myDescription\")] January = 1\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test(Description = "https://github.com/mikeclayton/MofParser/issues/41")] public static void IntegerEnumElementShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "enumeration MonthsEnum : integer\r\n" + "{\r\n" + "\tJanuary = 1\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test] public static void StringEnumElementWithoutValueShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "enumeration GOLF_StatesEnum : string\r\n" + "{\r\n" + "\tAL\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test] public static void StringEnumElementWithValueShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "enumeration GOLF_StatesEnum : string\r\n" + "{\r\n" + "\tAL = \"Alabama\"\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } } diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_InstanceValueDeclaration.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_InstanceValueDeclaration.cs new file mode 100644 index 00000000..c4d9ee31 --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_InstanceValueDeclaration.cs @@ -0,0 +1,99 @@ +using NUnit.Framework; + +namespace Kingsland.MofParser.UnitTests.CodeGen +{ + + public static partial class RoundtripTests + { + + #region 7.6.2 Complex type value + + public static class InstanceValueDeclarationTests + { + + [Test] + public static void InstanceValueDeclarationWithNoPropertiesShouldRoundtrip() + { + var sourceText = + "instance of GOLF_ClubMember\r\n" + + "{\r\n" + + "};"; + RoundtripTests.AssertRoundtrip(sourceText); + } + + [Test] + public static void InstanceValueDeclarationWithChildPropertiesShouldRoundtrip() + { + var sourceText = + "instance of GOLF_ClubMember\r\n" + + "{\r\n" + + "\tFirstName = \"John\";\r\n" + + "\tLastName = \"Doe\";\r\n" + + "};"; + RoundtripTests.AssertRoundtrip(sourceText); + } + + [Test] + public static void InstanceValueDeclarationWithAliasShouldRoundtrip() + { + var sourceText = + "instance of GOLF_ClubMember as $MyAliasIdentifier\r\n" + + "{\r\n" + + "};"; + RoundtripTests.AssertRoundtrip(sourceText); + } + + //[Test] + //public static void InstanceValueDeclarationShouldRoundtrip() + //{ + // RoundtripTests.AssertRoundtrip( + // "instance of GOLF_ClubMember\r\n" + + // "{\r\n" + + // "\tCaption = \"Instance of John Doe\\\'s GOLF_ClubMember object\";\r\n" + + // "};" + // ); + //} + + //[Test(Description = "https://github.com/mikeclayton/MofParser/issues/26"), + // Ignore("https://github.com/mikeclayton/MofParser/issues/26")] + //public static void InstanceValueDeclarationsWithInstanceValuePropertyShouldRoundtrip() + //{ + // RoundtripTests.AssertRoundtrip( + // "instance of GOLF_ClubMember\r\n" + + // "{\r\n" + + // "\tLastPaymentDate = instance of GOLF_Date\r\n" + + // 
"\t{\r\n" + + // "\tYear = 2011;\r\n" + + // "\tMonth = July;\r\n" + + // "\tDay = 31;\r\n" + + // "\t};\r\n" + + // "}"; + // ); + //} + + //[Test] + //public static void InstanceValueDeclarationWithStructureValueDeclarationPropertyShouldRoundtrip() + //{ + // RoundtripTests.AssertRoundtrip( + // "instance of GOLF_ClubMember\r\n" + + // "{\r\n" + + // "\tCaption = \"Instance of John Doe\\\'s GOLF_ClubMember object\";\r\n" + + // "\tMemberAddress = value of GOLF_Address\r\n" + + // "\t{\r\n" + + // "\t\tState = \"IL\";\r\n" + + // "\t\tCity = \"Oak Park\";\r\n" + + // "\t\tStreet = \"Oak Park Av.\";\r\n" + + // "\t\tStreetNo = \"1177\";\r\n" + + // "\t\tApartmentNo = \"3B\";\r\n" + + // "\t};\r\n" + + // "};"; + // ); + //} + + } + + #endregion + + } + +} \ No newline at end of file diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_IntegerValues.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_IntegerValues.cs index 24783019..4d26f2e8 100644 --- a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_IntegerValues.cs +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_IntegerValues.cs @@ -14,40 +14,40 @@ public static class IntegerValueTests [Test] public static void IntegerValueShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "instance of GOLF_ClubMember\r\n" + "{\r\n" + "\tCaption = 100;\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test] public static void PositiveIntegerValueShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "instance of GOLF_ClubMember\r\n" + "{\r\n" + "\tCaption = +100;\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test] public static void NegativeIntegerValueShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "instance of GOLF_ClubMember\r\n" + "{\r\n" + "\tCaption = -100;\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test(Description = "https://github.com/mikeclayton/MofParser/issues/xx")] public static void IntegerValuePropertiesInOtherBasesShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "instance of myType as $Alias00000070\r\n" + "{\r\n" + "\tMyBinaryValue1 = 0101010b;\r\n" + @@ -65,8 +65,8 @@ public static void IntegerValuePropertiesInOtherBasesShouldRoundtrip() "\tMyRealValue1 = 00123.45;\r\n" + "\tMyRealValue2 = +00123.45;\r\n" + "\tMyRealValue3 = -123.45;\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } } diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_PrimitiveTypeValue.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_LiteralValue.cs similarity index 55% rename from src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_PrimitiveTypeValue.cs rename to src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_LiteralValue.cs index b1a4573b..6288f921 100644 --- a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_PrimitiveTypeValue.cs +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_LiteralValue.cs @@ -8,89 +8,62 @@ public static partial class RoundtripTests #region 7.6.1 Primitive type value - public static class LiteralValueArrayTests - { - - [Test] - public static void LiteralValueArrayWithOneItemShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "instance of GOLF_ClubMember\r\n" + - "{\r\n" + - "\tLastPaymentDate = {1};\r\n" + - "};" - ); - } - - [Test] - public static void LiteralValueArrayWithMultipleItemsShouldRoundtrip() - { - 
RoundtripTests.AssertRoundtrip( - "instance of GOLF_ClubMember\r\n" + - "{\r\n" + - "\tLastPaymentDate = {1, 2};\r\n" + - "};" - ); - } - - } - public static class LiteralValueTests { [Test] public static void IntegerLiteralValueShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "instance of GOLF_ClubMember\r\n" + "{\r\n" + "\tLastPaymentDate = 1;\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test] public static void RealLiteralValueShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "instance of GOLF_ClubMember\r\n" + "{\r\n" + "\tLastPaymentDate = 0.5;\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test] public static void BooleanLiteralValueShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "instance of GOLF_ClubMember\r\n" + "{\r\n" + "\tLastPaymentDate = true;\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test] public static void NullLiteralValueShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "instance of GOLF_ClubMember\r\n" + "{\r\n" + "\tLastPaymentDate = null;\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test] public static void StringLiteralValueShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "instance of GOLF_ClubMember\r\n" + "{\r\n" + "\tLastPaymentDate = \"aaa\";\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } } diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_LiteralValueArray.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_LiteralValueArray.cs new file mode 100644 index 00000000..31a13d25 --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_LiteralValueArray.cs @@ -0,0 +1,42 @@ +using NUnit.Framework; + +namespace Kingsland.MofParser.UnitTests.CodeGen +{ + + public static partial class RoundtripTests + { + + #region 7.6.1 Primitive type value + + public static class LiteralValueArrayTests + { + + [Test] + public static void LiteralValueArrayWithOneItemShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "instance of GOLF_ClubMember\r\n" + + "{\r\n" + + "\tLastPaymentDate = {1};\r\n" + + "};" + ); + } + + [Test] + public static void LiteralValueArrayWithMultipleItemsShouldRoundtrip() + { + RoundtripTests.AssertRoundtrip( + "instance of GOLF_ClubMember\r\n" + + "{\r\n" + + "\tLastPaymentDate = {1, 2};\r\n" + + "};" + ); + } + + } + + #endregion + + } + +} \ No newline at end of file diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_MethodDeclaration.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_MethodDeclaration.cs index 7d9f1346..d153b53f 100644 --- a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_MethodDeclaration.cs +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_MethodDeclaration.cs @@ -14,84 +14,84 @@ public static class MethodDeclarationTests [Test] public static void MethodDeclarationWithNoParametersShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "class GOLF_Club\r\n" + "{\r\n" + "\tInteger GetMembersWithOutstandingFees();\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test] public static void MethodDeclarationWithParameterShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "class GOLF_Club\r\n" + "{\r\n" + "\tInteger GetMembersWithOutstandingFees(GOLF_ClubMember lateMembers);\r\n" + - "};" - ); + "};"; + 
RoundtripTests.AssertRoundtrip(sourceText); } [Test] public static void MethodDeclarationWithArrayParameterShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "class GOLF_Club\r\n" + "{\r\n" + "\tInteger GetMembersWithOutstandingFees(GOLF_ClubMember lateMembers[]);\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test] public static void MethodDeclarationsWithRefParameterShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "class GOLF_Club\r\n" + "{\r\n" + "\tInteger GetMembersWithOutstandingFees(GOLF_ClubMember REF lateMembers);\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test(Description = "https://github.com/mikeclayton/MofParser/issues/27")] public static void ClassDeclarationsWithMethodDeclarationWithEnumParameterShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "class GOLF_Professional : GOLF_ClubMember\r\n" + "{\r\n" + "\tGOLF_ResultCodeEnum GetNumberOfProfessionals(Integer NoOfPros, GOLF_Club Club, ProfessionalStatusEnum Status = Professional);\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test(Description = "https://github.com/mikeclayton/MofParser/issues/37")] public static void MethodDeclarationsWithArrayReturnTypeShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "class GOLF_Club\r\n" + "{\r\n" + "\tInteger[] GetMembersWithOutstandingFees(GOLF_ClubMember lateMembers);\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test(Description = "https://github.com/mikeclayton/MofParser/issues/38")] public static void MethodDeclarationWithMultipleParametersShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "class GOLF_Professional : GOLF_ClubMember\r\n" + "{\r\n" + "\tGOLF_ResultCodeEnum GetNumberOfProfessionals(Integer NoOfPros, GOLF_Club Club, ProfessionalStatusEnum Status = Professional);\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test(Description = "https://github.com/mikeclayton/MofParser/issues/28")] public static void MethodDeclarationWithDeprecatedMof300IntegerReturnTypesAndQuirksDisabledShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "class Win32_SoftwareFeature : CIM_SoftwareFeature\r\n" + "{\r\n" + "\tuint8 ReinstallUint8(integer ReinstallMode = 1);\r\n" + @@ -102,14 +102,14 @@ public static void MethodDeclarationWithDeprecatedMof300IntegerReturnTypesAndQui "\tsint16 ReinstallUint16(integer ReinstallMode = 1);\r\n" + "\tsint32 ReinstallUint32(integer ReinstallMode = 1);\r\n" + "\tsint64 ReinstallUint64(integer ReinstallMode = 1);\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test(Description = "https://github.com/mikeclayton/MofParser/issues/28")] public static void MethodDeclarationWithDeprecatedMof300IntegerParameterTypesShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "class Win32_SoftwareFeature : CIM_SoftwareFeature\r\n" + "{\r\n" + "\tinteger ReinstallUint8(uint8 ReinstallMode = 1);\r\n" + @@ -120,8 +120,8 @@ public static void MethodDeclarationWithDeprecatedMof300IntegerParameterTypesSho "\tinteger ReinstallUint16(sint16 ReinstallMode = 1);\r\n" + "\tinteger ReinstallUint32(sint32 ReinstallMode = 1);\r\n" + "\tinteger ReinstallUint64(sint64 ReinstallMode = 1);\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } } diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_PropertyDeclaration.cs 
b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_PropertyDeclaration.cs index 9f6a79bc..892e5741 100644 --- a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_PropertyDeclaration.cs +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_PropertyDeclaration.cs @@ -14,40 +14,40 @@ public static class PropertyDeclarationTests [Test] public static void PropertyDeclarationShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "class GOLF_Base\r\n" + "{\r\n" + "\tInteger Severity;\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test] public static void PropertyDeclarationWithArrayTypeShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "class GOLF_Base\r\n" + "{\r\n" + "\tInteger Severity[];\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test] public static void PropertyDeclarationWithDefaultValueShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "class GOLF_Base\r\n" + "{\r\n" + "\tInteger Severity = 0;\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test(Description = "https://github.com/mikeclayton/MofParser/issues/28")] public static void PropertyDeclarationWithDeprecatedMof300IntegerReturnTypesAndQuirksDisabledShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "class GOLF_Base\r\n" + "{\r\n" + "\tuint8 SeverityUint8;\r\n" + @@ -58,8 +58,8 @@ public static void PropertyDeclarationWithDeprecatedMof300IntegerReturnTypesAndQ "\tsint16 SeveritySint16;\r\n" + "\tsint32 SeveritySint32;\r\n" + "\tsint64 SeveritySint64;\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } } diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_Qualifier.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_Qualifier.cs index e2f84c23..a12b35df 100644 --- a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_Qualifier.cs +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_Qualifier.cs @@ -14,12 +14,12 @@ public static class QualifierTests [Test] public static void QualifierShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "[Description(\"Instances of this class represent golf clubs. 
A golf club is \" \"an organization that provides member services to golf players \" \"both amateur and professional.\")]\r\n" + "class GOLF_Club : GOLF_Base\r\n" + "{\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } } diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_QualifierList.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_QualifierList.cs index 5598216b..bc23b7f8 100644 --- a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_QualifierList.cs +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_QualifierList.cs @@ -1,11 +1,4 @@ -using Kingsland.MofParser.CodeGen; -using Kingsland.MofParser.Lexing; -using Kingsland.MofParser.Parsing; -using Kingsland.ParseFx.Parsing; -using Kingsland.ParseFx.Text; -using NUnit.Framework; - -namespace Kingsland.MofParser.UnitTests.CodeGen +namespace Kingsland.MofParser.UnitTests.CodeGen { public static partial class RoundtripTests @@ -18,51 +11,6 @@ public static class QualifierListTests } - public static class QualifierValueTests - { - - [Test] - public static void QualifierWithMofV2FlavorsAndQuirksEnabledShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "[Locale(1033): ToInstance, UUID(\"{BE46D060-7A7C-11d2-BC85-00104B2CF71C}\"): ToInstance]\r\n" + - "class Win32_PrivilegesStatus : __ExtendedStatus\r\n" + - "{\r\n" + - "\t[read: ToSubClass, MappingStrings{\"Win32API|AccessControl|Windows NT Privileges\"}: ToSubClass] string PrivilegesNotHeld[];\r\n" + - "\t[read: ToSubClass, MappingStrings{\"Win32API|AccessControl|Windows NT Privileges\"}: ToSubClass] string PrivilegesRequired[];\r\n" + - "};", - ParserQuirks.AllowMofV2Qualifiers - ); - } - - [Test] - public static void QualifierWithMofV2FlavorsAndQuirksDisabledShouldThrow() - { - var sourceMof = - "[Locale(1033): ToInstance, UUID(\"{BE46D060-7A7C-11d2-BC85-00104B2CF71C}\"): ToInstance]\r\n" + - "class Win32_PrivilegesStatus : __ExtendedStatus\r\n" + - "{\r\n" + - "\t[read: ToSubClass, MappingStrings{\"Win32API|AccessControl|Windows NT Privileges\"}: ToSubClass] string PrivilegesNotHeld[];\r\n" + - "\t[read: ToSubClass, MappingStrings{\"Win32API|AccessControl|Windows NT Privileges\"}: ToSubClass] string PrivilegesRequired[];\r\n" + - "};"; - var tokens = Lexer.Lex(SourceReader.From(sourceMof)); - var tokensMof = TokenMofGenerator.ConvertToMof(tokens); - var ex = Assert.Throws( - () => - { - var astNodes = Parser.Parse(tokens); - } - ); - Assert.AreEqual( - "Unexpected token found at Position 13, Line Number 1, Column Number 14.\r\n" + - "Token Type: 'ColonToken'\r\n" + - "Token Text: ':'", - ex.Message - ); - } - - } - #endregion } diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_QualifierValue.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_QualifierValue.cs new file mode 100644 index 00000000..83bd58c7 --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_QualifierValue.cs @@ -0,0 +1,67 @@ +using Kingsland.MofParser.CodeGen; +using Kingsland.MofParser.Lexing; +using Kingsland.MofParser.Parsing; +using Kingsland.ParseFx.Parsing; +using Kingsland.ParseFx.Text; +using NUnit.Framework; + +namespace Kingsland.MofParser.UnitTests.CodeGen +{ + + public static partial class RoundtripTests + { + + #region 7.4.1 QualifierList + + public static class QualifierValueTests + { + + [Test] + public static void QualifierWithMofV2FlavorsAndQuirksEnabledShouldRoundtrip() + { + var sourceText = + "[Locale(1033): ToInstance, UUID(\"{BE46D060-7A7C-11d2-BC85-00104B2CF71C}\"): 
ToInstance]\r\n" + + "class Win32_PrivilegesStatus : __ExtendedStatus\r\n" + + "{\r\n" + + "\t[read: ToSubClass, MappingStrings{\"Win32API|AccessControl|Windows NT Privileges\"}: ToSubClass] string PrivilegesNotHeld[];\r\n" + + "\t[read: ToSubClass, MappingStrings{\"Win32API|AccessControl|Windows NT Privileges\"}: ToSubClass] string PrivilegesRequired[];\r\n" + + "};"; + RoundtripTests.AssertRoundtrip( + sourceText, + ParserQuirks.AllowMofV2Qualifiers + ); + } + + [Test] + public static void QualifierWithMofV2FlavorsAndQuirksDisabledShouldThrow() + { + var sourceText = + "[Locale(1033): ToInstance, UUID(\"{BE46D060-7A7C-11d2-BC85-00104B2CF71C}\"): ToInstance]\r\n" + + "class Win32_PrivilegesStatus : __ExtendedStatus\r\n" + + "{\r\n" + + "\t[read: ToSubClass, MappingStrings{\"Win32API|AccessControl|Windows NT Privileges\"}: ToSubClass] string PrivilegesNotHeld[];\r\n" + + "\t[read: ToSubClass, MappingStrings{\"Win32API|AccessControl|Windows NT Privileges\"}: ToSubClass] string PrivilegesRequired[];\r\n" + + "};"; + var tokens = Lexer.Lex(SourceReader.From(sourceText)); + var tokensMof = TokenMofGenerator.ConvertToMof(tokens); + var ex = Assert.Throws( + () => + { + var astNodes = Parser.Parse(tokens); + } + ); + Assert.AreEqual( + "Unexpected token found at Position 13, Line Number 1, Column Number 14.\r\n" + + "Token Type: 'ColonToken'\r\n" + + "Token Text: ':'", + ex.Message + ); + } + + } + + #endregion + + } + +} \ No newline at end of file diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_RealValue.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_RealValue.cs index badb02e0..88297e0e 100644 --- a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_RealValue.cs +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_RealValue.cs @@ -14,67 +14,67 @@ public static class RealValueTests [Test] public static void RealValueShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "instance of GOLF_ClubMember\r\n" + "{\r\n" + "\tCaption = 0.5;\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test(Description = "https://github.com/mikeclayton/MofParser/issues/xx")] public static void PositiveRealValueShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "instance of GOLF_ClubMember\r\n" + "{\r\n" + "\tCaption = +0.5;\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test] public static void NegativeRealValueShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "instance of GOLF_ClubMember\r\n" + "{\r\n" + "\tCaption = -0.5;\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test(Description = "https://github.com/mikeclayton/MofParser/issues/xx")] public static void RealValueWithNoFractionShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "instance of GOLF_ClubMember\r\n" + "{\r\n" + "\tCaption = 5.0;\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test(Description = "https://github.com/mikeclayton/MofParser/issues/xx")] public static void RealValueWithTrailingZerosShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "instance of GOLF_ClubMember\r\n" + "{\r\n" + "\tCaption = 0.50;\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test(Description = "https://github.com/mikeclayton/MofParser/issues/xx")] public static void RealValueWithNoIntegerPartShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "instance 
of GOLF_ClubMember\r\n" + "{\r\n" + "\tCaption = .5;\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } } diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_StringValue.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_StringValue.cs index 0be64ba2..33817a7b 100644 --- a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_StringValue.cs +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_StringValue.cs @@ -1,9 +1,4 @@ -using Kingsland.MofParser.CodeGen; -using Kingsland.MofParser.Lexing; -using Kingsland.MofParser.Parsing; -using Kingsland.ParseFx.Parsing; -using Kingsland.ParseFx.Text; -using NUnit.Framework; +using NUnit.Framework; namespace Kingsland.MofParser.UnitTests.CodeGen { @@ -19,34 +14,34 @@ public static class StringValueTests [Test] public static void SingleStringValueShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "instance of GOLF_ClubMember\r\n" + "{\r\n" + "\tCaption = \"Instance of John Doe\";\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test] public static void MultistringValueShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "instance of GOLF_ClubMember\r\n" + "{\r\n" + "\tCaption = \"Instance\" \"of\" \"John\" \"Doe\";\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test(Description = "https://github.com/mikeclayton/MofParser/issues/20")] public static void StringValueWithSingleQuoteShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "instance of GOLF_ClubMember\r\n" + "{\r\n" + "\tCaption = \"Instance of John Doe\\\'s GOLF_ClubMember object\";\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } } diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_StructureDeclaration.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_StructureDeclaration.cs index 64e75153..29d60aef 100644 --- a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_StructureDeclaration.cs +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_StructureDeclaration.cs @@ -1,9 +1,4 @@ -using Kingsland.MofParser.CodeGen; -using Kingsland.MofParser.Lexing; -using Kingsland.MofParser.Parsing; -using Kingsland.ParseFx.Parsing; -using Kingsland.ParseFx.Text; -using NUnit.Framework; +using NUnit.Framework; namespace Kingsland.MofParser.UnitTests.CodeGen { @@ -19,112 +14,34 @@ public static class StructureDeclarationTests [Test] public static void EmptyStructureDeclarationShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "structure Sponsor\r\n" + "{\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test] public static void StructureDeclarationWithSuperstructureShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "structure Sponsor : GOLF_MySupestructure\r\n" + "{\r\n" + - "};" - ); + "};"; + RoundtripTests.AssertRoundtrip(sourceText); } [Test] public static void StructureDeclarationWithStructureFeaturesShouldRoundtrip() { - RoundtripTests.AssertRoundtrip( + var sourceText = "structure Sponsor\r\n" + "{\r\n" + "\tstring Name;\r\n" + "\tGOLF_Date ContractSignedDate;\r\n" + "\treal32 ContractAmount;\r\n" + - "};" - ); - } - - } - - public static class StructureFeatureTests - { - - [Test] - public static void StructureFeatureWithQualifierShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "structure Sponsor\r\n" + - "{\r\n" + - "\t[Description(\"Monthly salary in $US\")] string 
Name;\r\n" + - "};" - ); - } - - [Test] - public static void InvalidStructureFeatureShouldThrow() - { - - var sourceMof = - "structure Sponsor\r\n" + - "{\r\n" + - "\t100\r\n" + "};"; - var tokens = Lexer.Lex(SourceReader.From(sourceMof)); - var tokensMof = TokenMofGenerator.ConvertToMof(tokens); - Assert.AreEqual(sourceMof, tokensMof); - var ex = Assert.Throws( - () => { - var astNodes = Parser.Parse(tokens); - } - ); - Assert.AreEqual( - "Unexpected token found at Position 23, Line Number 3, Column Number 2.\r\n" + - "Token Type: 'IntegerLiteralToken'\r\n" + - "Token Text: '100'", - ex.Message - ); - } - - [Test] - public static void StructureFeatureWithStructureDeclarationShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "structure Sponsor\r\n" + - "{\r\n" + - "\tstructure Nested\r\n" + - "\t{\r\n" + - "\t};\r\n" + - "};" - ); - } - - [Test] - public static void StructureFeatureWithEnumerationDeclarationShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "structure Sponsor\r\n" + - "{\r\n" + - "\tenumeration MonthsEnum : Integer\r\n" + - "\t{\r\n" + - "\t};\r\n" + - "};" - ); - } - - [Test] - public static void StructureFeatureWithPropertyDeclarationShouldRoundtrip() - { - RoundtripTests.AssertRoundtrip( - "structure Sponsor\r\n" + - "{\r\n" + - "\tstring Name;\r\n" + - "};" - ); + RoundtripTests.AssertRoundtrip(sourceText); } } diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_StructureFeature.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_StructureFeature.cs new file mode 100644 index 00000000..ccd4a29c --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_StructureFeature.cs @@ -0,0 +1,98 @@ +using Kingsland.MofParser.CodeGen; +using Kingsland.MofParser.Lexing; +using Kingsland.MofParser.Parsing; +using Kingsland.ParseFx.Parsing; +using Kingsland.ParseFx.Text; +using NUnit.Framework; + +namespace Kingsland.MofParser.UnitTests.CodeGen +{ + + public static partial class RoundtripTests + { + + #region 7.5.1 Structure declaration + + public static class StructureFeatureTests + { + + [Test] + public static void StructureFeatureWithQualifierShouldRoundtrip() + { + var sourceText = + "structure Sponsor\r\n" + + "{\r\n" + + "\t[Description(\"Monthly salary in $US\")] string Name;\r\n" + + "};"; + RoundtripTests.AssertRoundtrip(sourceText); + } + + [Test] + public static void InvalidStructureFeatureShouldThrow() + { + + var sourceText = + "structure Sponsor\r\n" + + "{\r\n" + + "\t100\r\n" + + "};"; + var tokens = Lexer.Lex(SourceReader.From(sourceText)); + var tokensMof = TokenMofGenerator.ConvertToMof(tokens); + Assert.AreEqual(sourceText, tokensMof); + var ex = Assert.Throws( + () => { + var astNodes = Parser.Parse(tokens); + } + ); + Assert.AreEqual( + "Unexpected token found at Position 23, Line Number 3, Column Number 2.\r\n" + + "Token Type: 'IntegerLiteralToken'\r\n" + + "Token Text: '100'", + ex.Message + ); + } + + [Test] + public static void StructureFeatureWithStructureDeclarationShouldRoundtrip() + { + var sourceText = + "structure Sponsor\r\n" + + "{\r\n" + + "\tstructure Nested\r\n" + + "\t{\r\n" + + "\t};\r\n" + + "};"; + RoundtripTests.AssertRoundtrip(sourceText); + } + + [Test] + public static void StructureFeatureWithEnumerationDeclarationShouldRoundtrip() + { + var sourceText = + "structure Sponsor\r\n" + + "{\r\n" + + "\tenumeration MonthsEnum : Integer\r\n" + + "\t{\r\n" + + "\t};\r\n" + + "};"; + RoundtripTests.AssertRoundtrip(sourceText); + } + + [Test] + public static void 
StructureFeatureWithPropertyDeclarationShouldRoundtrip() + { + var sourceText = + "structure Sponsor\r\n" + + "{\r\n" + + "\tstring Name;\r\n" + + "};"; + RoundtripTests.AssertRoundtrip(sourceText); + } + + } + + #endregion + + } + +} \ No newline at end of file diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_StructureValueDeclarationTests.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_StructureValueDeclarationTests.cs new file mode 100644 index 00000000..98b00132 --- /dev/null +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_StructureValueDeclarationTests.cs @@ -0,0 +1,54 @@ +using NUnit.Framework; + +namespace Kingsland.MofParser.UnitTests.CodeGen +{ + + public static partial class RoundtripTests + { + + #region 7.6.2 Complex type value + + public static class StructureValueDeclarationTests + { + + [Test] + public static void StructureValueDeclarationWithNoPropertiesShouldRoundtrip() + { + var sourceText = + "value of GOLF_ClubMember as $MyAliasIdentifier\r\n" + + "{\r\n" + + "};"; + RoundtripTests.AssertRoundtrip(sourceText); + } + + [Test] + public static void StructureValueDeclarationWithChildPropertiesShouldRoundtrip() + { + var sourceText = + "value of GOLF_ClubMember as $MyAliasIdentifier\r\n" + + "{\r\n" + + "\tFirstName = \"John\";\r\n" + + "\tLastName = \"Doe\";\r\n" + + "};"; + RoundtripTests.AssertRoundtrip(sourceText); + } + + //[Test] + //public static void StructureValueDeclarationShouldRoundtrip() + //{ + // RoundtripTests.AssertRoundtrip( + // "value of GOLF_PhoneNumber as $JohnDoesPhoneNo\r\n" + + // "{\r\n" + + // "\tAreaCode = {\"9\", \"0\", \"7\"};\r\n" + + // "\tNumber = {\"7\", \"4\", \"7\", \"4\", \"8\", \"8\", \"4\"};\r\n" + + // "};"; + // ); + //} + + } + + #endregion + + } + +} \ No newline at end of file diff --git a/src/Kingsland.MofParser.UnitTests/Kingsland.MofParser.UnitTests.csproj b/src/Kingsland.MofParser.UnitTests/Kingsland.MofParser.UnitTests.csproj index e26f70b4..a082eb5d 100644 --- a/src/Kingsland.MofParser.UnitTests/Kingsland.MofParser.UnitTests.csproj +++ b/src/Kingsland.MofParser.UnitTests/Kingsland.MofParser.UnitTests.csproj @@ -35,13 +35,20 @@ + + + + + + + + - - + From a081e0791b5b3951c7ba6a8f679ab6379f7f6936 Mon Sep 17 00:00:00 2001 From: mikeclayton Date: Sat, 12 Sep 2020 23:12:45 +0100 Subject: [PATCH 07/11] More roundtrip test refactoring - de-duped common exception test code --- .../CodeGen/RoundtripTests.cs | 13 ++++++++++ .../CodeGen/RoundtripTests_ClassFeature.cs | 21 +++------------- .../CodeGen/RoundtripTests_EnumTypeValue.cs | 15 +++-------- .../RoundtripTests_EnumerationDeclaration.cs | 25 +++---------------- .../CodeGen/RoundtripTests_QualifierValue.cs | 21 +++------------- .../RoundtripTests_StructureFeature.cs | 23 +++-------------- 6 files changed, 32 insertions(+), 86 deletions(-) diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests.cs index 4a4a55cf..b04d9b33 100644 --- a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests.cs +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests.cs @@ -1,6 +1,7 @@ using Kingsland.MofParser.CodeGen; using Kingsland.MofParser.Lexing; using Kingsland.MofParser.Parsing; +using Kingsland.ParseFx.Parsing; using Kingsland.ParseFx.Text; using NUnit.Framework; @@ -58,6 +59,18 @@ private static void AssertRoundtrip(string sourceText, ParserQuirks parserQuirks Assert.AreEqual(sourceText, astMof); } + private static void AssertRoundtripException(string 
sourceText, string expectedMessage) + { + var tokens = Lexer.Lex(SourceReader.From(sourceText)); + var tokensMof = TokenMofGenerator.ConvertToMof(tokens); + var ex = Assert.Throws( + () => { + var astNodes = Parser.Parse(tokens); + } + ); + Assert.AreEqual(expectedMessage, ex.Message); + } + #endregion } diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_ClassFeature.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_ClassFeature.cs index 298ed51a..f2aaee7b 100644 --- a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_ClassFeature.cs +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_ClassFeature.cs @@ -1,9 +1,4 @@ -using Kingsland.MofParser.CodeGen; -using Kingsland.MofParser.Lexing; -using Kingsland.MofParser.Parsing; -using Kingsland.ParseFx.Parsing; -using Kingsland.ParseFx.Text; -using NUnit.Framework; +using NUnit.Framework; namespace Kingsland.MofParser.UnitTests.CodeGen { @@ -35,19 +30,11 @@ public static void InvalidClassFeatureShouldThrow() "{\r\n" + "\t100\r\n" + "};"; - var tokens = Lexer.Lex(SourceReader.From(sourceText)); - var tokensMof = TokenMofGenerator.ConvertToMof(tokens); - var ex = Assert.Throws( - () => { - var astNodes = Parser.Parse(tokens); - } - ); - Assert.AreEqual( + var expectedMessage = "Unexpected token found at Position 19, Line Number 3, Column Number 2.\r\n" + "Token Type: 'IntegerLiteralToken'\r\n" + - "Token Text: '100'", - ex.Message - ); + "Token Text: '100'"; + RoundtripTests.AssertRoundtripException(sourceText, expectedMessage); } [Test] diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_EnumTypeValue.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_EnumTypeValue.cs index 80590634..fb8d7a7b 100644 --- a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_EnumTypeValue.cs +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_EnumTypeValue.cs @@ -136,20 +136,11 @@ public static void EnumValueArrayWithQualifiedEnumValuesAndQuirksDisabledShouldT "{\r\n" + "\tMonth = {MonthEnums.July};\r\n" + "};"; - var tokens = Lexer.Lex(SourceReader.From(sourceText)); - var tokensMof = TokenMofGenerator.ConvertToMof(tokens); - var ex = Assert.Throws( - () => - { - var astNodes = Parser.Parse(tokens); - } - ); - Assert.AreEqual( + var expectedMessage = "Unexpected token found at Position 46, Line Number 3, Column Number 21.\r\n" + "Token Type: 'DotOperatorToken'\r\n" + - "Token Text: '.'", - ex.Message - ); + "Token Text: '.'"; + RoundtripTests.AssertRoundtripException(sourceText, expectedMessage); } } diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_EnumerationDeclaration.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_EnumerationDeclaration.cs index cc08c5f9..b5466b35 100644 --- a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_EnumerationDeclaration.cs +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_EnumerationDeclaration.cs @@ -1,9 +1,4 @@ -using Kingsland.MofParser.CodeGen; -using Kingsland.MofParser.Lexing; -using Kingsland.MofParser.Parsing; -using Kingsland.ParseFx.Parsing; -using Kingsland.ParseFx.Text; -using NUnit.Framework; +using NUnit.Framework; namespace Kingsland.MofParser.UnitTests.CodeGen { @@ -78,23 +73,11 @@ public static void EnumerationDeclarationDeprecatedMof300IntegerBaseAndQuirksEna "{\r\n" + "\tJuly = \"July\"\r\n" + "};"; - var tokens = Lexer.Lex(SourceReader.From(sourceText)); - var tokensMof = TokenMofGenerator.ConvertToMof(tokens); - var ex = Assert.Throws( - () => - { - var astNodes = 
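// Illustrative sketch (not part of this patch): the new RoundtripTests.AssertRoundtripException
// helper above wraps the boilerplate these hunks keep deleting -- lex the source text, regenerate
// MOF from the tokens, then assert that Parser.Parse throws the expected parser exception with the
// expected message. A de-duplicated failure test therefore reduces to roughly this shape (source
// text and message taken from the InvalidStructureFeatureShouldThrow case elsewhere in this patch):
//
[Test]
public static void ExampleInvalidFeatureShouldThrow()
{
    var sourceText =
        "structure Sponsor\r\n" +
        "{\r\n" +
        "\t100\r\n" +
        "};";
    var expectedMessage =
        "Unexpected token found at Position 23, Line Number 3, Column Number 2.\r\n" +
        "Token Type: 'IntegerLiteralToken'\r\n" +
        "Token Text: '100'";
    RoundtripTests.AssertRoundtripException(sourceText, expectedMessage);
}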
Parser.Parse( - tokens, - ParserQuirks.AllowDeprecatedMof300IntegerTypesAsEnumerationDeclarationsBase - ); - } - ); - Assert.AreEqual( + var expectedMessage = "Unexpected token found at Position 44, Line Number 3, Column Number 9.\r\n" + "Token Type: 'StringLiteralToken'\r\n" + - "Token Text: '\"July\"'", - ex.Message - ); + "Token Text: '\"July\"'"; + RoundtripTests.AssertRoundtripException(sourceText, expectedMessage); } [Test(Description = "https://github.com/mikeclayton/MofParser/issues/52")] diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_QualifierValue.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_QualifierValue.cs index 83bd58c7..580ba4a8 100644 --- a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_QualifierValue.cs +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_QualifierValue.cs @@ -1,8 +1,4 @@ -using Kingsland.MofParser.CodeGen; -using Kingsland.MofParser.Lexing; -using Kingsland.MofParser.Parsing; -using Kingsland.ParseFx.Parsing; -using Kingsland.ParseFx.Text; +using Kingsland.MofParser.Parsing; using NUnit.Framework; namespace Kingsland.MofParser.UnitTests.CodeGen @@ -42,20 +38,11 @@ public static void QualifierWithMofV2FlavorsAndQuirksDisabledShouldThrow() "\t[read: ToSubClass, MappingStrings{\"Win32API|AccessControl|Windows NT Privileges\"}: ToSubClass] string PrivilegesNotHeld[];\r\n" + "\t[read: ToSubClass, MappingStrings{\"Win32API|AccessControl|Windows NT Privileges\"}: ToSubClass] string PrivilegesRequired[];\r\n" + "};"; - var tokens = Lexer.Lex(SourceReader.From(sourceText)); - var tokensMof = TokenMofGenerator.ConvertToMof(tokens); - var ex = Assert.Throws( - () => - { - var astNodes = Parser.Parse(tokens); - } - ); - Assert.AreEqual( + var expectedMessage = "Unexpected token found at Position 13, Line Number 1, Column Number 14.\r\n" + "Token Type: 'ColonToken'\r\n" + - "Token Text: ':'", - ex.Message - ); + "Token Text: ':'"; + RoundtripTests.AssertRoundtripException(sourceText, expectedMessage); } } diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_StructureFeature.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_StructureFeature.cs index ccd4a29c..b84acaa4 100644 --- a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_StructureFeature.cs +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_StructureFeature.cs @@ -1,9 +1,4 @@ -using Kingsland.MofParser.CodeGen; -using Kingsland.MofParser.Lexing; -using Kingsland.MofParser.Parsing; -using Kingsland.ParseFx.Parsing; -using Kingsland.ParseFx.Text; -using NUnit.Framework; +using NUnit.Framework; namespace Kingsland.MofParser.UnitTests.CodeGen { @@ -30,26 +25,16 @@ public static void StructureFeatureWithQualifierShouldRoundtrip() [Test] public static void InvalidStructureFeatureShouldThrow() { - var sourceText = "structure Sponsor\r\n" + "{\r\n" + "\t100\r\n" + "};"; - var tokens = Lexer.Lex(SourceReader.From(sourceText)); - var tokensMof = TokenMofGenerator.ConvertToMof(tokens); - Assert.AreEqual(sourceText, tokensMof); - var ex = Assert.Throws( - () => { - var astNodes = Parser.Parse(tokens); - } - ); - Assert.AreEqual( + var expectedMessage = "Unexpected token found at Position 23, Line Number 3, Column Number 2.\r\n" + "Token Type: 'IntegerLiteralToken'\r\n" + - "Token Text: '100'", - ex.Message - ); + "Token Text: '100'"; + RoundtripTests.AssertRoundtripException(sourceText, expectedMessage); } [Test] From 287ba84bef9df3e8244de6256e535a60493756c4 Mon Sep 17 00:00:00 2001 From: mikeclayton Date: Sun, 13 
Sep 2020 09:02:37 +0100 Subject: [PATCH 08/11] Removed some deprecated files --- .../Lexing/LexerTests_AliasIdentifier.cs | 199 -------- .../Lexing/LexerTests_BooleanLiteral.cs | 143 ------ .../Lexing/LexerTests_Comments.cs | 371 -------------- .../Lexing/LexerTests_Identifier.cs | 64 --- .../Lexing/LexerTests_IntegerLiteral.cs | 471 ------------------ .../Lexing/LexerTests_NullLiteral.cs | 80 --- .../Lexing/LexerTests_Pragma.cs | 80 --- .../Lexing/LexerTests_RealLiteral.cs | 182 ------- .../Lexing/LexerTests_StringLiteral.cs | 83 --- .../Lexing/LexerTests_Symbols.cs | 320 ------------ .../Lexing/LexerTests_Whitespace.cs | 137 ----- 11 files changed, 2130 deletions(-) delete mode 100644 src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_AliasIdentifier.cs delete mode 100644 src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_BooleanLiteral.cs delete mode 100644 src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Comments.cs delete mode 100644 src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Identifier.cs delete mode 100644 src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_IntegerLiteral.cs delete mode 100644 src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_NullLiteral.cs delete mode 100644 src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Pragma.cs delete mode 100644 src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_RealLiteral.cs delete mode 100644 src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_StringLiteral.cs delete mode 100644 src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Symbols.cs delete mode 100644 src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Whitespace.cs diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_AliasIdentifier.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_AliasIdentifier.cs deleted file mode 100644 index 7edd8060..00000000 --- a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_AliasIdentifier.cs +++ /dev/null @@ -1,199 +0,0 @@ -using Kingsland.MofParser.Lexing; -using Kingsland.MofParser.Tokens; -using Kingsland.ParseFx.Syntax; -using Kingsland.ParseFx.Text; -using NUnit.Framework; -using System.Collections.Generic; - -namespace Kingsland.MofParser.UnitTests.Lexing -{ - - [TestFixture] - public static partial class LexerTests - { - - [TestFixture] - public static class ReadAliasIdentifierTokenMethod - { - - [Test] - public static void ShouldReadAliasIdentifierToken() - { - var actualTokens = Lexer.Lex( - SourceReader.From - ( - "$myAliasIdentifier\r\n" + - "$myAliasIdentifier2" - ) - ); - var expectedTokens = new List { - new AliasIdentifierToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(17, 1, 18), - "$myAliasIdentifier" - ), - "myAliasIdentifier" - ), - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(18, 1, 19), - new SourcePosition(19, 1, 20), - "\r\n" - ) - ), - new AliasIdentifierToken( - new SourceExtent - ( - new SourcePosition(20, 2, 1), - new SourcePosition(38, 2, 19), - "$myAliasIdentifier2" - ), - "myAliasIdentifier2" - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadInstanceWithAliasIdentifier() - { - // test case for https://github.com/mikeclayton/MofParser/issues/4 - var actualTokens = Lexer.Lex( - SourceReader.From - ( - "instance of cTentacleAgent as $cTentacleAgent1ref\r\n" + - "{\r\n" + - "};" - ) - ); - var expectedTokens = new List - { - new IdentifierToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(7, 1, 8), - "instance" - ), - "instance" - ), - new 
WhitespaceToken( - new SourceExtent - ( - new SourcePosition(8, 1, 9), - new SourcePosition(8, 1, 9), - " " - ) - ), - new IdentifierToken( - new SourceExtent - ( - new SourcePosition(9, 1, 10), - new SourcePosition(10, 1, 11), - "of" - ), - "of" - ), - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(11, 1, 12), - new SourcePosition(11, 1, 12), - " " - ) - ), - new IdentifierToken( - new SourceExtent - ( - new SourcePosition(12, 1, 13), - new SourcePosition(25, 1, 26), - "cTentacleAgent" - ), - "cTentacleAgent" - ), - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(26, 1, 27), - new SourcePosition(26, 1, 27), - " " - ) - ), - new IdentifierToken( - new SourceExtent - ( - new SourcePosition(27, 1, 28), - new SourcePosition(28, 1, 29), - "as" - ), - "as" - ), - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(29, 1, 30), - new SourcePosition(29, 1, 30), - " " - ) - ), - new AliasIdentifierToken( - new SourceExtent - ( - new SourcePosition(30, 1, 31), - new SourcePosition(48, 1, 49), - "$cTentacleAgent1ref" - ), - "cTentacleAgent1ref" - ), - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(49, 1, 50), - new SourcePosition(50, 1, 51), - "\r\n" - ) - ), - new BlockOpenToken( - new SourceExtent - ( - new SourcePosition(51, 2, 1), - new SourcePosition(51, 2, 1), - "{" - ) - ), - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(52, 2, 2), - new SourcePosition(53, 2, 3), - "\r\n" - ) - ), - new BlockCloseToken( - new SourceExtent - ( - new SourcePosition(54, 3, 1), - new SourcePosition(54, 3, 1), - "}" - ) - ), - new StatementEndToken( - new SourceExtent - ( - new SourcePosition(55, 3, 2), - new SourcePosition(55, 3, 2), - ";" - ) - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - } - - } - -} diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_BooleanLiteral.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_BooleanLiteral.cs deleted file mode 100644 index 6c911516..00000000 --- a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_BooleanLiteral.cs +++ /dev/null @@ -1,143 +0,0 @@ -using Kingsland.MofParser.Lexing; -using Kingsland.MofParser.Tokens; -using Kingsland.ParseFx.Syntax; -using Kingsland.ParseFx.Text; -using NUnit.Framework; -using System.Collections.Generic; - -namespace Kingsland.MofParser.UnitTests.Lexing -{ - - [TestFixture] - public static partial class LexerTests - { - - [TestFixture] - public static class ReadBooleanLiteralTokenMethod - { - - [Test] - public static void ShouldReadLowerCaseFalseToken() - { - var actualTokens = Lexer.Lex( - SourceReader.From("false") - ); - var expectedTokens = new List { - new BooleanLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(4, 1, 5), - "false" - ), - false - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadMixedCaseFalseToken() - { - var actualTokens = Lexer.Lex( - SourceReader.From("False") - ); - var expectedTokens = new List { - new BooleanLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(4, 1, 5), - "False" - ), - false - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadUpperCaseFalseToken() - { - var actualTokens = Lexer.Lex( - SourceReader.From("FALSE") - ); - var expectedTokens = new List { - new BooleanLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(4, 1, 5), - "FALSE" - ), - false 
- ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadLowerCaseTrueToken() - { - var actualTokens = Lexer.Lex( - SourceReader.From("true") - ); - var expectedTokens = new List { - new BooleanLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(3, 1, 4), - "true" - ), - true - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadMixedCaseTrueToken() - { - var actualTokens = Lexer.Lex( - SourceReader.From("True") - ); - var expectedTokens = new List { - new BooleanLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(3, 1, 4), - "True" - ), - true - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadUpperCaseTrueToken() - { - var actualTokens = Lexer.Lex( - SourceReader.From("TRUE") - ); - var expectedTokens = new List { - new BooleanLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(3, 1, 4), - "TRUE" - ), - true - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - } - - } - -} diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Comments.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Comments.cs deleted file mode 100644 index 1fcc920d..00000000 --- a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Comments.cs +++ /dev/null @@ -1,371 +0,0 @@ -using Kingsland.MofParser.Lexing; -using Kingsland.MofParser.Tokens; -using Kingsland.ParseFx.Syntax; -using Kingsland.ParseFx.Text; -using NUnit.Framework; -using System.Collections.Generic; - -namespace Kingsland.MofParser.UnitTests.Lexing -{ - - [TestFixture] - public static partial class LexerTests - { - - [TestFixture] - public static class ReadCommentTokenMethod - { - - [Test] - public static void ShouldReadSingleLineEofCommentToken() - { - var actualTokens = Lexer.Lex( - SourceReader.From("// single line comment") - ); - var expectedTokens = new List { - new CommentToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(21, 1, 22), - "// single line comment" - ) - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadSingleLineEolCommentToken() - { - var actualTokens = Lexer.Lex( - SourceReader.From("// single line comment\r\n") - ); - var expectedTokens = new List { - new CommentToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(21, 1, 22), - "// single line comment" - ) - ), - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(22, 1, 23), - new SourcePosition(23, 1, 24), - "\r\n" - ) - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadMultilineEofCommentToken() - { - var actualTokens = Lexer.Lex( - SourceReader.From( - "/*\r\n" + - "@TargetNode='MyServer'\r\n" + - "@GeneratedBy=mike.clayton\r\n" + - "@GenerationDate=07/19/2014 10:37:04\r\n" + - "@GenerationHost=MyDesktop\r\n" + - "*/" - ) - ); - var expectedTokens = new List { - new CommentToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(120, 6, 2), - "/*\r\n" + - "@TargetNode='MyServer'\r\n" + - "@GeneratedBy=mike.clayton\r\n" + - "@GenerationDate=07/19/2014 10:37:04\r\n" + - "@GenerationHost=MyDesktop\r\n" + - "*/" - ) - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadMultilineUnclosedCommentToken() - { - var 
actualTokens = Lexer.Lex( - SourceReader.From( - "/*\r\n" + - "@TargetNode='MyServer'\r\n" + - "@GeneratedBy=mike.clayton\r\n" + - "@GenerationDate=07/19/2014 10:37:04\r\n" + - "@GenerationHost=MyDesktop\r\n" - ) - ); - var expectedTokens = new List { - new CommentToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(118, 5, 27), - "/*\r\n" + - "@TargetNode='MyServer'\r\n" + - "@GeneratedBy=mike.clayton\r\n" + - "@GenerationDate=07/19/2014 10:37:04\r\n" + - "@GenerationHost=MyDesktop\r\n" - ) - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadMultilineInlineAsterisks() - { - var actualTokens = Lexer.Lex( - SourceReader.From( - "/*************\r\n" + - "@TargetNode='MyServer'\r\n" + - "@GeneratedBy=mike.clayton\r\n" + - "@GenerationDate=07/19/2014 10:37:04\r\n" + - "@GenerationHost=MyDesktop\r\n" + - "*************/" - ) - ); - var expectedTokens = new List { - new CommentToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(144, 6, 14), - "/*************\r\n" + - "@TargetNode='MyServer'\r\n" + - "@GeneratedBy=mike.clayton\r\n" + - "@GenerationDate=07/19/2014 10:37:04\r\n" + - "@GenerationHost=MyDesktop\r\n" + - "*************/" - ) - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadMultilineMultiple() - { - var actualTokens = Lexer.Lex( - SourceReader.From( - "/*\r\n" + - "@TargetNode='MyServer'\r\n" + - "@GeneratedBy=mike.clayton\r\n" + - "@GenerationDate=07/19/2014 10:37:04\r\n" + - "@GenerationHost=MyDesktop\r\n" + - "*//*\r\n" + - "@TargetNode='MyServer2'\r\n" + - "@GeneratedBy=mike.clayton\r\n" + - "@GenerationDate=07/19/2014 10:37:04\r\n" + - "@GenerationHost=MyDesktop\r\n" + - "*/" - ) - ); - var expectedTokens = new List { - new CommentToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(120, 6, 2), - "/*\r\n" + - "@TargetNode='MyServer'\r\n" + - "@GeneratedBy=mike.clayton\r\n" + - "@GenerationDate=07/19/2014 10:37:04\r\n" + - "@GenerationHost=MyDesktop\r\n" + - "*/" - ) - ), - new CommentToken( - new SourceExtent - ( - new SourcePosition(121, 6, 3), - new SourcePosition(242, 11, 2), - "/*\r\n" + - "@TargetNode='MyServer2'\r\n" + - "@GeneratedBy=mike.clayton\r\n" + - "@GenerationDate=07/19/2014 10:37:04\r\n" + - "@GenerationHost=MyDesktop\r\n" + - "*/" - ) - ) - - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadExample1CommentToken() - { - // see DSP0221_3.0.1.pdf "5.4 Comments" - var actualTokens = Lexer.Lex( - SourceReader.From("Integer MyProperty; // This is an example of a single-line comment") - ); - var expectedTokens = new List { - new IdentifierToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(6, 1, 7), - "Integer" - ), - "Integer" - ), - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(7, 1, 8), - new SourcePosition(7, 1, 8), - " " - ) - ), - new IdentifierToken( - new SourceExtent - ( - new SourcePosition(8, 1, 9), - new SourcePosition(17, 1, 18), - "MyProperty" - ), - "MyProperty" - ), - new StatementEndToken( - new SourceExtent - ( - new SourcePosition(18, 1, 19), - new SourcePosition(18, 1, 19), - ";" - ) - ), - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(19, 1, 20), - new SourcePosition(19, 1, 20), - " " - ) - ), - new CommentToken( - new SourceExtent - ( - new SourcePosition(20, 1, 21), - new SourcePosition(65, 1, 66), - "// This is an example of a 
single-line comment" - ) - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadExample2CommentToken() - { - // see DSP0221_3.0.1.pdf "5.4 Comments" - var actualTokens = Lexer.Lex( - SourceReader.From( - "/* example of a comment between property definition tokens and a multi-line comment */\r\n" + - "Integer /* 16-bit integer property */ MyProperty; /* and a multi-line\r\n" + - " comment */" - ) - ); - var expectedTokens = new List { - new CommentToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(85, 1, 86), - "/* example of a comment between property definition tokens and a multi-line comment */" - ) - ), - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(86, 1, 87), - new SourcePosition(87, 1, 88), - "\r\n" - ) - ), - new IdentifierToken( - new SourceExtent - ( - new SourcePosition(88, 2, 1), - new SourcePosition(94, 2, 7), - "Integer" - ), - "Integer" - ), - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(95, 2, 8), - new SourcePosition(95, 2, 8), - " " - ) - ), - new CommentToken( - new SourceExtent - ( - new SourcePosition(96, 2, 9), - new SourcePosition(124, 2, 37), - "/* 16-bit integer property */" - ) - ), - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(125, 2, 38), - new SourcePosition(125, 2, 38), - " " - ) - ), - new IdentifierToken( - new SourceExtent - ( - new SourcePosition(126, 2, 39), - new SourcePosition(135, 2, 48), - "MyProperty" - ), - "MyProperty" - ), - new StatementEndToken( - new SourceExtent - ( - new SourcePosition(136, 2, 49), - new SourcePosition(136, 2, 49), - ";" - ) - ), - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(137, 2, 50), - new SourcePosition(137, 2, 50), - " " - ) - ), - new CommentToken( - new SourceExtent - ( - new SourcePosition(138, 2, 51), - new SourcePosition(192, 3, 34), - "/* and a multi-line\r\n" + - " comment */" - ) - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - } - - } - -} diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Identifier.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Identifier.cs deleted file mode 100644 index 0b69c3f5..00000000 --- a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Identifier.cs +++ /dev/null @@ -1,64 +0,0 @@ -using Kingsland.MofParser.Lexing; -using Kingsland.MofParser.Tokens; -using Kingsland.ParseFx.Syntax; -using Kingsland.ParseFx.Text; -using NUnit.Framework; -using System.Collections.Generic; - -namespace Kingsland.MofParser.UnitTests.Lexing -{ - - [TestFixture] - public static partial class LexerTests - { - - [TestFixture] - public static class ReadIdentifierTokenMethod - { - - [Test] - public static void ShouldReadIdentifierToken() - { - var actualTokens = Lexer.Lex( - SourceReader.From - ( - "myIdentifier\r\n" + - "myIdentifier2" - ) - ); - var expectedTokens = new List { - new IdentifierToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(11, 1, 12), - "myIdentifier" - ), - "myIdentifier" - ), - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(12, 1, 13), - new SourcePosition(13, 1, 14), - "\r\n" - ) - ), - new IdentifierToken( - new SourceExtent - ( - new SourcePosition(14, 2, 1), - new SourcePosition(26, 2, 13), - "myIdentifier2" - ), - "myIdentifier2" - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - } - - } - -} diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_IntegerLiteral.cs 
b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_IntegerLiteral.cs deleted file mode 100644 index e694b0e8..00000000 --- a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_IntegerLiteral.cs +++ /dev/null @@ -1,471 +0,0 @@ -using Kingsland.MofParser.Lexing; -using Kingsland.MofParser.Tokens; -using Kingsland.ParseFx.Syntax; -using Kingsland.ParseFx.Text; -using NUnit.Framework; -using System.Collections.Generic; - -namespace Kingsland.MofParser.UnitTests.Lexing -{ - - [TestFixture] - public static partial class LexerTests - { - - [TestFixture] - public static class ReadIntegerLiteralTokenMethod - { - - // binaryValue - - [Test] - public static void ShouldReadBinaryValue0b() - { - var actualTokens = Lexer.Lex( - SourceReader.From("0b") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(1, 1, 2), - "0b" - ), - IntegerKind.BinaryValue, 0 - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadBinaryValue1b() - { - var actualTokens = Lexer.Lex( - SourceReader.From("1b") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(1, 1, 2), - "1b" - ), - IntegerKind.BinaryValue, 1 - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadBinaryValue00000b() - { - var actualTokens = Lexer.Lex( - SourceReader.From("00000b") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(5, 1, 6), - "00000b" - ), - IntegerKind.BinaryValue, 0 - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadBinaryValue10000b() - { - var actualTokens = Lexer.Lex( - SourceReader.From("10000b") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(5, 1, 6), - "10000b" - ), - IntegerKind.BinaryValue, 16 - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadBinaryValue11111b() - { - var actualTokens = Lexer.Lex( - SourceReader.From("11111b") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(5, 1, 6), - "11111b" - ), - IntegerKind.BinaryValue, 31 - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - // octalValue - - [Test] - public static void ShouldReadOctalValue00() - { - var actualTokens = Lexer.Lex( - SourceReader.From("00") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(1, 1, 2), - "00" - ), - IntegerKind.OctalValue, 0 - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadOctalValue01() - { - var actualTokens = Lexer.Lex( - SourceReader.From("01") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(1, 1, 2), - "01" - ), - IntegerKind.OctalValue, 1 - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadOctalValue00000() - { - var actualTokens = Lexer.Lex( - SourceReader.From("00000") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - 
new SourcePosition(0, 1, 1), - new SourcePosition(4, 1, 5), - "00000" - ), - IntegerKind.OctalValue, 0 - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadOctalValue01000() - { - var actualTokens = Lexer.Lex( - SourceReader.From("01000") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(4, 1, 5), - "01000" - ), - IntegerKind.OctalValue, 512 - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadOctalValue01111() - { - var actualTokens = Lexer.Lex( - SourceReader.From("01111") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(4, 1, 5), - "01111" - ), - IntegerKind.OctalValue, 585 - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadOctalValue04444() - { - var actualTokens = Lexer.Lex( - SourceReader.From("04444") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(4, 1, 5), - "04444" - ), - IntegerKind.OctalValue, 2340 - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadOctalValue07777() - { - var actualTokens = Lexer.Lex( - SourceReader.From("07777") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(4, 1, 5), - "07777" - ), - IntegerKind.OctalValue, 4095 - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - // hexValue - - [Test] - public static void ShouldReadHexValue0x0() - { - var actualTokens = Lexer.Lex( - SourceReader.From("0x0") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(2, 1, 3), - "0x0" - ), - IntegerKind.HexValue, 0 - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadHexValue0x0000() - { - var actualTokens = Lexer.Lex( - SourceReader.From("0x0000") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(5, 1, 6), - "0x0000" - ), - IntegerKind.HexValue, 0 - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadHexValue0x8888() - { - var actualTokens = Lexer.Lex( - SourceReader.From("0x8888") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(5, 1, 6), - "0x8888" - ), - IntegerKind.HexValue, 34952 - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadHexValue0xabcd() - { - var actualTokens = Lexer.Lex( - SourceReader.From("0xabcd") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(5, 1, 6), - "0xabcd" - ), - IntegerKind.HexValue, 43981 - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadHexValue0xABCD() - { - var actualTokens = Lexer.Lex( - SourceReader.From("0xABCD") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(5, 1, 6), 
- "0xABCD" - ), - IntegerKind.HexValue, 43981 - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - // decimalValue - - [Test] - public static void ShouldReadDecimalValue0() - { - var actualTokens = Lexer.Lex( - SourceReader.From("0") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(0, 1, 1), - "0" - ), - IntegerKind.DecimalValue, 0 - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadDecimalValue12345() - { - var actualTokens = Lexer.Lex( - SourceReader.From("12345") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(4, 1, 5), - "12345" - ), - IntegerKind.DecimalValue, 12345 - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadDecimalValuePlus12345() - { - var actualTokens = Lexer.Lex( - SourceReader.From("+12345") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(5, 1, 6), - "+12345" - ), - IntegerKind.DecimalValue, 12345 - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadDecimalValueMinus12345() - { - var actualTokens = Lexer.Lex( - SourceReader.From("-12345") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(5, 1, 6), - "-12345" - ), - IntegerKind.DecimalValue, -12345 - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadDecimalValue1234567890() - { - var actualTokens = Lexer.Lex( - SourceReader.From("1234567890") - ); - var expectedTokens = new List { - new IntegerLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(9, 1, 10), - "1234567890" - ), - IntegerKind.DecimalValue, 1234567890 - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - } - - } - -} diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_NullLiteral.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_NullLiteral.cs deleted file mode 100644 index 139f61ba..00000000 --- a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_NullLiteral.cs +++ /dev/null @@ -1,80 +0,0 @@ -using Kingsland.MofParser.Lexing; -using Kingsland.MofParser.Tokens; -using Kingsland.ParseFx.Syntax; -using Kingsland.ParseFx.Text; -using NUnit.Framework; -using System.Collections.Generic; - -namespace Kingsland.MofParser.UnitTests.Lexing -{ - - [TestFixture] - public static partial class LexerTests - { - - [TestFixture] - public static class ReadNullLiteralTokenMethod - { - - [Test] - public static void ShouldReadLowerCaseNullToken() - { - var actualTokens = Lexer.Lex( - SourceReader.From("null") - ); - var expectedTokens = new List { - new NullLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(3, 1, 4), - "null" - ) - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadMixedCaseNullToken() - { - var actualTokens = Lexer.Lex( - SourceReader.From("Null") - ); - var expectedTokens = new List { - new NullLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(3, 1, 4), - "Null" - ) - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public 
static void ShouldReadUpperCaseNullToken() - { - var actualTokens = Lexer.Lex( - SourceReader.From("NULL") - ); - var expectedTokens = new List { - new NullLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(3, 1, 4), - "NULL" - ) - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - } - - } - -} diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Pragma.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Pragma.cs deleted file mode 100644 index 35dc87c4..00000000 --- a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Pragma.cs +++ /dev/null @@ -1,80 +0,0 @@ -using Kingsland.MofParser.Lexing; -using Kingsland.MofParser.Tokens; -using Kingsland.ParseFx.Syntax; -using Kingsland.ParseFx.Text; -using NUnit.Framework; -using System.Collections.Generic; - -namespace Kingsland.MofParser.UnitTests.Lexing -{ - - [TestFixture] - public static partial class LexerTests - { - - [TestFixture] - public static class ReadPragmaTokenMethod - { - - [Test] - public static void ShouldReadLowerCasePragmaToken() - { - var actualTokens = Lexer.Lex( - SourceReader.From("#pragma") - ); - var expectedTokens = new List { - new PragmaToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(6, 1, 7), - "#pragma" - ) - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadMixedCasePragmaToken() - { - var actualTokens = Lexer.Lex( - SourceReader.From("#Pragma") - ); - var expectedTokens = new List { - new PragmaToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(6, 1, 7), - "#Pragma" - ) - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadUpperCasePragmaToken() - { - var actualTokens = Lexer.Lex( - SourceReader.From("#PRAGMA") - ); - var expectedTokens = new List { - new PragmaToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(6, 1, 7), - "#PRAGMA" - ) - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - } - - } - -} diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_RealLiteral.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_RealLiteral.cs deleted file mode 100644 index 6ffd7f45..00000000 --- a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_RealLiteral.cs +++ /dev/null @@ -1,182 +0,0 @@ -using Kingsland.MofParser.Lexing; -using Kingsland.MofParser.Tokens; -using Kingsland.ParseFx.Syntax; -using Kingsland.ParseFx.Text; -using NUnit.Framework; -using System.Collections.Generic; - -namespace Kingsland.MofParser.UnitTests.Lexing -{ - - [TestFixture] - public static partial class LexerTests - { - - [TestFixture] - public static class ReadRealLiteralTokenMethod - { - - [Test] - public static void ShouldReadRealValue0_0() - { - var actualTokens = Lexer.Lex( - SourceReader.From("0.0") - ); - var expectedTokens = new List { - new RealLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(2, 1, 3), - "0.0" - ), - 0 - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadRealValue123_45() - { - var actualTokens = Lexer.Lex( - SourceReader.From("123.45") - ); - var expectedTokens = new List { - new RealLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(5, 1, 6), - "123.45" - ), - 123.45 - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void 
ShouldReadRealValuePlus123_45() - { - var actualTokens = Lexer.Lex( - SourceReader.From("+123.45") - ); - var expectedTokens = new List { - new RealLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(6, 1, 7), - "+123.45" - ), - 123.45 - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadRealValueMinus123_45() - { - var actualTokens = Lexer.Lex( - SourceReader.From("-123.45") - ); - var expectedTokens = new List { - new RealLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(6, 1, 7), - "-123.45" - ), - -123.45 - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadRealValue1234567890_00() - { - var actualTokens = Lexer.Lex( - SourceReader.From("1234567890.00") - ); - var expectedTokens = new List { - new RealLiteralToken( - new SourceExtent( - new SourcePosition(0, 1, 1), - new SourcePosition(12, 1, 13), - "1234567890.00" - ), - 1234567890.00 - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadRealValue_45() - { - var actualTokens = Lexer.Lex( - SourceReader.From(".45") - ); - var expectedTokens = new List { - new RealLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(2, 1, 3), - ".45" - ), - 0.45 - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadRealValuePlus_45() - { - var actualTokens = Lexer.Lex( - SourceReader.From("+.45") - ); - var expectedTokens = new List { - new RealLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(3, 1, 4), - "+.45" - ), - 0.45 - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadRealValueMinus_45() - { - var actualTokens = Lexer.Lex( - SourceReader.From("-.45") - ); - var expectedTokens = new List { - new RealLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(3, 1, 4), - "-.45" - ), - -0.45 - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - } - - } - -} diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_StringLiteral.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_StringLiteral.cs deleted file mode 100644 index 205830cf..00000000 --- a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_StringLiteral.cs +++ /dev/null @@ -1,83 +0,0 @@ -using Kingsland.MofParser.Lexing; -using Kingsland.MofParser.Tokens; -using Kingsland.ParseFx.Syntax; -using Kingsland.ParseFx.Text; -using NUnit.Framework; -using System.Collections.Generic; - -namespace Kingsland.MofParser.UnitTests.Lexing -{ - - [TestFixture] - public static partial class LexerTests - { - - [TestFixture] - public static class ReadStringLiteralTokenMethod - { - - [Test] - public static void ShouldReadEmptyString() - { - var actualTokens = Lexer.Lex( - SourceReader.From("\"\"") - ); - var expectedTokens = new List { - new StringLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(1, 1, 2), - "\"\"" - ), - string.Empty - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadBasicString() - { - var actualTokens = Lexer.Lex( - SourceReader.From("\"my string literal\"") - ); - var expectedTokens = new List { - new StringLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(18, 1, 19), - 
"\"my string literal\"" - ), - "my string literal" - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadEscapedString() - { - var actualTokens = Lexer.Lex( - SourceReader.From(@"""my \\ string \"" literal \' with \b lots \t and \n lots \f of \r escapes""") - ); - var expectedTokens = new List { - new StringLiteralToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(72, 1, 73), - @"""my \\ string \"" literal \' with \b lots \t and \n lots \f of \r escapes""" - ), - "my \\ string \" literal \' with \b lots \t and \n lots \f of \r escapes" - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - } - - } - -} diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Symbols.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Symbols.cs deleted file mode 100644 index fca5ca60..00000000 --- a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Symbols.cs +++ /dev/null @@ -1,320 +0,0 @@ -using Kingsland.MofParser.Lexing; -using Kingsland.MofParser.Tokens; -using Kingsland.ParseFx.Syntax; -using Kingsland.ParseFx.Text; -using NUnit.Framework; -using System.Collections.Generic; - -namespace Kingsland.MofParser.UnitTests.Lexing -{ - - [TestFixture] - public static partial class LexerTests - { - - [TestFixture] - public static class ReadAttributeCloseTokenMethod - { - - [Test] - public static void ShouldReadAttributeCloseToken() - { - var actualTokens = Lexer.Lex( - SourceReader.From("]") - ); - var expectedTokens = new List { - new AttributeCloseToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(0, 1, 1), - "]" - ) - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadAttributeOpenTokenMethod - { - - [Test] - public static void ShouldReadAttributeOpenToken() - { - var actualTokens = Lexer.Lex( - SourceReader.From("[") - ); - var expectedTokens = new List { - new AttributeOpenToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(0, 1, 1), - "[" - ) - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadBlockCloseTokenMethod - { - - [Test] - public static void ShouldReadBlockCloseToken() - { - var actualTokens = Lexer.Lex( - SourceReader.From("}") - ); - var expectedTokens = new List { - new BlockCloseToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(0, 1, 1), - "}" - ) - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadBlockOpenTokenMethod - { - - [Test] - public static void ShouldReaBlockOpenToken() - { - var actualTokens = Lexer.Lex( - SourceReader.From("{") - ); - var expectedTokens = new List { - new BlockOpenToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(0, 1, 1), - "{" - ) - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadColonTokenMethod - { - - [Test] - public static void ShouldReadColonToken() - { - var actualTokens = Lexer.Lex( - SourceReader.From(":") - ); - var expectedTokens = new List { - new ColonToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(0, 1, 1), - ":" - ) - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadCommaTokenMethod - { - - [Test] - public static void ShouldReadCommaToken() - { - var 
actualTokens = Lexer.Lex( - SourceReader.From(",") - ); - var expectedTokens = new List { - new CommaToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(0, 1, 1), - "," - ) - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadDotOperatorTokenMethod - { - - [Test] - public static void ShouldReadDotOperatorToken() - { - var actualTokens = Lexer.Lex( - SourceReader.From(".") - ); - var expectedTokens = new List { - new DotOperatorToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(0, 1, 1), - "." - ) - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadDotOperatorTokenWithTrailingNonDecimalDigit() - { - var actualTokens = Lexer.Lex( - SourceReader.From(".abc") - ); - var expectedTokens = new List { - new DotOperatorToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(0, 1, 1), - "." - ) - ), - new IdentifierToken( - new SourceExtent - ( - new SourcePosition(1, 1, 2), - new SourcePosition(3, 1, 4), - "abc" - ), - "abc" - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadEqualsOperatorTokenMethod - { - - [Test] - public static void ShouldReadEqualsOperatorToken() - { - var actualTokens = Lexer.Lex( - SourceReader.From("=") - ); - var expectedTokens = new List { - new EqualsOperatorToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(0, 1, 1), - "=" - ) - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadParenthesesCloseTokenMethod - { - - [Test] - public static void ShouldReadEqualsOperatorToken() - { - var actualTokens = Lexer.Lex( - SourceReader.From(")") - ); - var expectedTokens = new List { - new ParenthesisCloseToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(0, 1, 1), - ")" - ) - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadParenthesesOpenTokenMethod - { - - [Test] - public static void ShouldReadParenthesesOpenToken() - { - var actualTokens = Lexer.Lex( - SourceReader.From("(") - ); - var expectedTokens = new List { - new ParenthesisOpenToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(0, 1, 1), - "(" - ) - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - } - - [TestFixture] - public static class ReadStatementEndTokenMethod - { - - [Test] - public static void ShouldReadStatementEndToken() - { - var actualTokens = Lexer.Lex( - SourceReader.From(";") - ); - var expectedTokens = new List { - new StatementEndToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(0, 1, 1), - ";" - ) - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - } - - } - -} diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Whitespace.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Whitespace.cs deleted file mode 100644 index 0493c6d2..00000000 --- a/src/Kingsland.MofParser.UnitTests/Lexing/LexerTests_Whitespace.cs +++ /dev/null @@ -1,137 +0,0 @@ -using Kingsland.MofParser.Lexing; -using Kingsland.MofParser.Tokens; -using Kingsland.ParseFx.Syntax; -using Kingsland.ParseFx.Text; -using NUnit.Framework; -using System.Collections.Generic; - -namespace Kingsland.MofParser.UnitTests.Lexing -{ - - [TestFixture] - public static partial class 
LexerTests - { - - [TestFixture] - public static class ReadWhitespaceTokenMethod - { - - [Test] - public static void ShouldReadSpaceWhitespaceToken() - { - var actualTokens = Lexer.Lex( - SourceReader.From(" ") - ); - var expectedTokens = new List { - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(4, 1, 5), - " " - ) - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadTabWhitespaceToken() - { - var actualTokens = Lexer.Lex( - SourceReader.From("\t\t\t\t\t") - ); - var expectedTokens = new List { - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(4, 1, 5), - "\t\t\t\t\t" - ) - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadCrWhitespaceToken() - { - var actualTokens = Lexer.Lex( - SourceReader.From("\r\r\r\r\r") - ); - var expectedTokens = new List { - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(4, 5, 1), - "\r\r\r\r\r" - ) - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadLfWhitespaceToken() - { - var actualTokens = Lexer.Lex( - SourceReader.From("\n\n\n\n\n") - ); - var expectedTokens = new List { - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(4, 5, 1), - "\n\n\n\n\n" - ) - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadCrLfWhitespaceToken() - { - var actualTokens = Lexer.Lex( - SourceReader.From("\r\n\r\n\r\n\r\n\r\n") - ); - var expectedTokens = new List { - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(9, 5, 2), - "\r\n\r\n\r\n\r\n\r\n" - ) - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - [Test] - public static void ShouldReadMixedWhitespaceToken() - { - var actualTokens = Lexer.Lex( - SourceReader.From(" \t\t\t\t\t\r\r\r\r\r\n\n\n\n\n\r\n\r\n\r\n\r\n\r\n") - ); - var expectedTokens = new List { - new WhitespaceToken( - new SourceExtent - ( - new SourcePosition(0, 1, 1), - new SourcePosition(29, 14, 2), - " \t\t\t\t\t\r\r\r\r\r\n\n\n\n\n\r\n\r\n\r\n\r\n\r\n" - ) - ) - }; - LexerAssert.AreEqual(expectedTokens, actualTokens); - } - - } - - } - -} From ca32a3ecb0d43ffe484595dff0b95276b96b3b6a Mon Sep 17 00:00:00 2001 From: mikeclayton Date: Sun, 13 Sep 2020 09:02:51 +0100 Subject: [PATCH 09/11] Fixed a failing test --- .../CodeGen/RoundtripTests.cs | 4 ++-- .../CodeGen/RoundtripTests_EnumerationDeclaration.cs | 8 ++++++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests.cs index b04d9b33..33fd0cf0 100644 --- a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests.cs +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests.cs @@ -59,13 +59,13 @@ private static void AssertRoundtrip(string sourceText, ParserQuirks parserQuirks Assert.AreEqual(sourceText, astMof); } - private static void AssertRoundtripException(string sourceText, string expectedMessage) + private static void AssertRoundtripException(string sourceText, string expectedMessage, ParserQuirks parserQuirks = ParserQuirks.None) { var tokens = Lexer.Lex(SourceReader.From(sourceText)); var tokensMof = TokenMofGenerator.ConvertToMof(tokens); var ex = Assert.Throws( () => { - var astNodes = 
Parser.Parse(tokens); + var astNodes = Parser.Parse(tokens, parserQuirks); } ); Assert.AreEqual(expectedMessage, ex.Message); diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_EnumerationDeclaration.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_EnumerationDeclaration.cs index b5466b35..37a9673d 100644 --- a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_EnumerationDeclaration.cs +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests_EnumerationDeclaration.cs @@ -1,4 +1,5 @@ -using NUnit.Framework; +using Kingsland.MofParser.Parsing; +using NUnit.Framework; namespace Kingsland.MofParser.UnitTests.CodeGen { @@ -77,7 +78,10 @@ public static void EnumerationDeclarationDeprecatedMof300IntegerBaseAndQuirksEna "Unexpected token found at Position 44, Line Number 3, Column Number 9.\r\n" + "Token Type: 'StringLiteralToken'\r\n" + "Token Text: '\"July\"'"; - RoundtripTests.AssertRoundtripException(sourceText, expectedMessage); + RoundtripTests.AssertRoundtripException( + sourceText, expectedMessage, + ParserQuirks.AllowDeprecatedMof300IntegerTypesAsEnumerationDeclarationsBase + ); } [Test(Description = "https://github.com/mikeclayton/MofParser/issues/52")] From 75f223808bbc50fd8c97080bd87602b11e84d90d Mon Sep 17 00:00:00 2001 From: mikeclayton Date: Mon, 14 Sep 2020 21:25:21 +0100 Subject: [PATCH 10/11] Small cleanup tweaks. --- .../CodeGen/RoundtripTests.cs | 12 ++- .../Lexing/LexerAssert.cs | 79 ++++++++--------- .../Tokens/TokenAssert.cs | 84 +++++++++---------- src/Kingsland.MofParser.sln | 7 ++ ...okenMofGenerator.cs => TokenSerializer.cs} | 14 +++- src/Kingsland.ParseFx/Syntax/SyntaxToken.cs | 15 +++- 6 files changed, 122 insertions(+), 89 deletions(-) rename src/Kingsland.MofParser/CodeGen/{TokenMofGenerator.cs => TokenSerializer.cs} (52%) diff --git a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests.cs b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests.cs index 33fd0cf0..395ff926 100644 --- a/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests.cs +++ b/src/Kingsland.MofParser.UnitTests/CodeGen/RoundtripTests.cs @@ -1,9 +1,13 @@ using Kingsland.MofParser.CodeGen; using Kingsland.MofParser.Lexing; using Kingsland.MofParser.Parsing; +using Kingsland.MofParser.UnitTests.Lexing; +using Kingsland.MofParser.UnitTests.Tokens; using Kingsland.ParseFx.Parsing; +using Kingsland.ParseFx.Syntax; using Kingsland.ParseFx.Text; using NUnit.Framework; +using System.Collections.Generic; namespace Kingsland.MofParser.UnitTests.CodeGen { @@ -50,11 +54,11 @@ public static partial class RoundtripTests private static void AssertRoundtrip(string sourceText, ParserQuirks parserQuirks = ParserQuirks.None) { // check the lexer tokens roundtrips ok - var tokens = Lexer.Lex(SourceReader.From(sourceText)); - var tokensMof = TokenMofGenerator.ConvertToMof(tokens); + var actualTokens = Lexer.Lex(SourceReader.From(sourceText)); + var tokensMof = TokenSerializer.ConvertToMofText(actualTokens); Assert.AreEqual(sourceText, tokensMof); // check the parser ast roundtrips ok - var astNodes = Parser.Parse(tokens, parserQuirks); + var astNodes = Parser.Parse(actualTokens, parserQuirks); var astMof = AstMofGenerator.ConvertToMof(astNodes); Assert.AreEqual(sourceText, astMof); } @@ -62,7 +66,7 @@ private static void AssertRoundtrip(string sourceText, ParserQuirks parserQuirks private static void AssertRoundtripException(string sourceText, string expectedMessage, ParserQuirks parserQuirks = ParserQuirks.None) { var tokens = 
Lexer.Lex(SourceReader.From(sourceText)); - var tokensMof = TokenMofGenerator.ConvertToMof(tokens); + var tokensMof = TokenSerializer.ConvertToMofText(tokens); var ex = Assert.Throws( () => { var astNodes = Parser.Parse(tokens, parserQuirks); diff --git a/src/Kingsland.MofParser.UnitTests/Lexing/LexerAssert.cs b/src/Kingsland.MofParser.UnitTests/Lexing/LexerAssert.cs index 1424b745..d12651f4 100644 --- a/src/Kingsland.MofParser.UnitTests/Lexing/LexerAssert.cs +++ b/src/Kingsland.MofParser.UnitTests/Lexing/LexerAssert.cs @@ -16,7 +16,7 @@ public static void AreEqual(SyntaxToken expectedToken, SyntaxToken actualToken) LexerAssert.AreEqualInternal(expectedToken, actualToken); } - public static void AreEqual(List expectedTokens, List actualTokens) + public static void AreEqual(List expectedTokens, List actualTokens, bool ignoreExtent = false) { if ((expectedTokens == null) && (actualTokens == null)) { @@ -32,12 +32,12 @@ public static void AreEqual(List expectedTokens, List } for (var i = 0; i < Math.Min(expectedTokens.Count, actualTokens.Count); i++) { - LexerAssert.AreEqualInternal(expectedTokens[i], actualTokens[i], i); + LexerAssert.AreEqualInternal(expectedTokens[i], actualTokens[i], ignoreExtent, i); } Assert.AreEqual(expectedTokens.Count, actualTokens.Count, "expected and actual are different lengths"); } - private static void AreEqualInternal(SyntaxToken expectedToken, SyntaxToken actualToken, int index = -1) + private static void AreEqualInternal(SyntaxToken expectedToken, SyntaxToken actualToken, bool ignoreExtent = false, int index = -1) { if ((expectedToken == null) && (actualToken == null)) { @@ -53,145 +53,148 @@ private static void AreEqualInternal(SyntaxToken expectedToken, SyntaxToken actu } Assert.AreEqual(expectedToken.GetType(), actualToken.GetType(), LexerAssert.GetAssertErrorMessage($"actual type does not match expected value", index)); - Assert.AreEqual(expectedToken.Extent.StartPosition.Position, actualToken.Extent.StartPosition.Position, - LexerAssert.GetAssertErrorMessage($"actual Start Position does not match expected value", index)); - Assert.AreEqual(expectedToken.Extent.StartPosition.LineNumber, actualToken.Extent.StartPosition.LineNumber, - LexerAssert.GetAssertErrorMessage($"actual Start Line does not match expected value", index)); - Assert.AreEqual(expectedToken.Extent.StartPosition.ColumnNumber, actualToken.Extent.StartPosition.ColumnNumber, - LexerAssert.GetAssertErrorMessage($"actual Start Column does not match expected value", index)); - Assert.AreEqual(expectedToken.Extent.EndPosition.Position, actualToken.Extent.EndPosition.Position, - LexerAssert.GetAssertErrorMessage($"actual End Position does not match expected value", index)); - Assert.AreEqual(expectedToken.Extent.EndPosition.LineNumber, actualToken.Extent.EndPosition.LineNumber, - LexerAssert.GetAssertErrorMessage($"actual End Line does not match expected value", index)); - Assert.AreEqual(expectedToken.Extent.EndPosition.ColumnNumber, actualToken.Extent.EndPosition.ColumnNumber, - LexerAssert.GetAssertErrorMessage($"actual End Column does not match expected value", index)); - Assert.AreEqual(expectedToken.Extent.Text, actualToken.Extent.Text, - LexerAssert.GetAssertErrorMessage($"actual Text does not match expected value", index)); + if (!ignoreExtent) + { + Assert.AreEqual(expectedToken.Extent.StartPosition.Position, actualToken.Extent.StartPosition.Position, + LexerAssert.GetAssertErrorMessage($"actual Start Position does not match expected value", index)); + 
Assert.AreEqual(expectedToken.Extent.StartPosition.LineNumber, actualToken.Extent.StartPosition.LineNumber, + LexerAssert.GetAssertErrorMessage($"actual Start Line does not match expected value", index)); + Assert.AreEqual(expectedToken.Extent.StartPosition.ColumnNumber, actualToken.Extent.StartPosition.ColumnNumber, + LexerAssert.GetAssertErrorMessage($"actual Start Column does not match expected value", index)); + Assert.AreEqual(expectedToken.Extent.EndPosition.Position, actualToken.Extent.EndPosition.Position, + LexerAssert.GetAssertErrorMessage($"actual End Position does not match expected value", index)); + Assert.AreEqual(expectedToken.Extent.EndPosition.LineNumber, actualToken.Extent.EndPosition.LineNumber, + LexerAssert.GetAssertErrorMessage($"actual End Line does not match expected value", index)); + Assert.AreEqual(expectedToken.Extent.EndPosition.ColumnNumber, actualToken.Extent.EndPosition.ColumnNumber, + LexerAssert.GetAssertErrorMessage($"actual End Column does not match expected value", index)); + Assert.AreEqual(expectedToken.Extent.Text, actualToken.Extent.Text, + LexerAssert.GetAssertErrorMessage($"actual Text does not match expected value", index)); + } switch (expectedToken) { case AliasIdentifierToken _: Assert.IsTrue( - TokenAssert.AreEqual((AliasIdentifierToken)expectedToken, (AliasIdentifierToken)actualToken), + TokenAssert.AreEqual((AliasIdentifierToken)expectedToken, (AliasIdentifierToken)actualToken, ignoreExtent), LexerAssert.GetAssertErrorMessage($"actual token does not match expected token", index) ); break; case AttributeCloseToken _: Assert.IsTrue( - TokenAssert.AreEqual((AttributeCloseToken)expectedToken, (AttributeCloseToken)actualToken), + TokenAssert.AreEqual((AttributeCloseToken)expectedToken, (AttributeCloseToken)actualToken, ignoreExtent), LexerAssert.GetAssertErrorMessage($"actual token does not match expected token", index) ); break; case AttributeOpenToken _: Assert.IsTrue( - TokenAssert.AreEqual((AttributeOpenToken)expectedToken, (AttributeOpenToken)actualToken), + TokenAssert.AreEqual((AttributeOpenToken)expectedToken, (AttributeOpenToken)actualToken, ignoreExtent), LexerAssert.GetAssertErrorMessage($"actual token does not match expected token", index) ); break; case BlockCloseToken _: Assert.IsTrue( - TokenAssert.AreEqual((BlockCloseToken)expectedToken, (BlockCloseToken)actualToken), + TokenAssert.AreEqual((BlockCloseToken)expectedToken, (BlockCloseToken)actualToken, ignoreExtent), LexerAssert.GetAssertErrorMessage($"actual token does not match expected token", index) ); break; case BlockOpenToken _: Assert.IsTrue( - TokenAssert.AreEqual((BlockOpenToken)expectedToken, (BlockOpenToken)actualToken), + TokenAssert.AreEqual((BlockOpenToken)expectedToken, (BlockOpenToken)actualToken, ignoreExtent), LexerAssert.GetAssertErrorMessage($"actual token does not match expected token", index) ); break; case BooleanLiteralToken _: Assert.IsTrue( - TokenAssert.AreEqual((BooleanLiteralToken)expectedToken, (BooleanLiteralToken)actualToken), + TokenAssert.AreEqual((BooleanLiteralToken)expectedToken, (BooleanLiteralToken)actualToken, ignoreExtent), LexerAssert.GetAssertErrorMessage($"actual token does not match expected token", index) ); break; case ColonToken _: Assert.IsTrue( - TokenAssert.AreEqual((ColonToken)expectedToken, (ColonToken)actualToken), + TokenAssert.AreEqual((ColonToken)expectedToken, (ColonToken)actualToken, ignoreExtent), LexerAssert.GetAssertErrorMessage($"actual token does not match expected token", index) ); break; case CommaToken _: 
Assert.IsTrue( - TokenAssert.AreEqual((CommaToken)expectedToken, (CommaToken)actualToken), + TokenAssert.AreEqual((CommaToken)expectedToken, (CommaToken)actualToken, ignoreExtent), LexerAssert.GetAssertErrorMessage($"actual token does not match expected token", index) ); break; case CommentToken _: Assert.IsTrue( - TokenAssert.AreEqual((CommentToken)expectedToken, (CommentToken)actualToken), + TokenAssert.AreEqual((CommentToken)expectedToken, (CommentToken)actualToken, ignoreExtent), LexerAssert.GetAssertErrorMessage($"actual token does not match expected token", index) ); break; case DotOperatorToken _: Assert.IsTrue( - TokenAssert.AreEqual((DotOperatorToken)expectedToken, (DotOperatorToken)actualToken), + TokenAssert.AreEqual((DotOperatorToken)expectedToken, (DotOperatorToken)actualToken, ignoreExtent), LexerAssert.GetAssertErrorMessage($"actual token does not match expected token", index) ); break; case EqualsOperatorToken _: Assert.IsTrue( - TokenAssert.AreEqual((EqualsOperatorToken)expectedToken, (EqualsOperatorToken)actualToken), + TokenAssert.AreEqual((EqualsOperatorToken)expectedToken, (EqualsOperatorToken)actualToken, ignoreExtent), LexerAssert.GetAssertErrorMessage($"actual token does not match expected token", index) ); break; case IdentifierToken _: Assert.IsTrue( - TokenAssert.AreEqual((IdentifierToken)expectedToken, (IdentifierToken)actualToken), + TokenAssert.AreEqual((IdentifierToken)expectedToken, (IdentifierToken)actualToken, ignoreExtent), LexerAssert.GetAssertErrorMessage($"actual token does not match expected token", index) ); break; case IntegerLiteralToken _: Assert.IsTrue( - TokenAssert.AreEqual((IntegerLiteralToken)expectedToken, (IntegerLiteralToken)actualToken), + TokenAssert.AreEqual((IntegerLiteralToken)expectedToken, (IntegerLiteralToken)actualToken, ignoreExtent), LexerAssert.GetAssertErrorMessage($"actual token does not match expected token", index) ); break; case NullLiteralToken _: Assert.IsTrue( - TokenAssert.AreEqual((NullLiteralToken)expectedToken, (NullLiteralToken)actualToken), + TokenAssert.AreEqual((NullLiteralToken)expectedToken, (NullLiteralToken)actualToken, ignoreExtent), LexerAssert.GetAssertErrorMessage($"actual token does not match expected token", index) ); break; case ParenthesisCloseToken _: Assert.IsTrue( - TokenAssert.AreEqual((ParenthesisCloseToken)expectedToken, (ParenthesisCloseToken)actualToken), + TokenAssert.AreEqual((ParenthesisCloseToken)expectedToken, (ParenthesisCloseToken)actualToken, ignoreExtent), LexerAssert.GetAssertErrorMessage($"actual token does not match expected token", index) ); break; case ParenthesisOpenToken _: Assert.IsTrue( - TokenAssert.AreEqual((ParenthesisOpenToken)expectedToken, (ParenthesisOpenToken)actualToken), + TokenAssert.AreEqual((ParenthesisOpenToken)expectedToken, (ParenthesisOpenToken)actualToken, ignoreExtent), LexerAssert.GetAssertErrorMessage($"actual token does not match expected token", index) ); break; case PragmaToken _: Assert.IsTrue( - TokenAssert.AreEqual((PragmaToken)expectedToken, (PragmaToken)actualToken), + TokenAssert.AreEqual((PragmaToken)expectedToken, (PragmaToken)actualToken, ignoreExtent), LexerAssert.GetAssertErrorMessage($"actual token does not match expected token", index) ); break; case RealLiteralToken _: Assert.IsTrue( - TokenAssert.AreEqual((RealLiteralToken)expectedToken, (RealLiteralToken)actualToken), + TokenAssert.AreEqual((RealLiteralToken)expectedToken, (RealLiteralToken)actualToken, ignoreExtent), LexerAssert.GetAssertErrorMessage($"actual token does not match 
expected token", index) ); break; case StatementEndToken _: Assert.IsTrue( - TokenAssert.AreEqual((StatementEndToken)expectedToken, (StatementEndToken)actualToken), + TokenAssert.AreEqual((StatementEndToken)expectedToken, (StatementEndToken)actualToken, ignoreExtent), LexerAssert.GetAssertErrorMessage($"actual token does not match expected token", index) ); break; case StringLiteralToken _: Assert.IsTrue( - TokenAssert.AreEqual((StringLiteralToken)expectedToken, (StringLiteralToken)actualToken), + TokenAssert.AreEqual((StringLiteralToken)expectedToken, (StringLiteralToken)actualToken, ignoreExtent), LexerAssert.GetAssertErrorMessage($"actual token does not match expected token", index) ); break; case WhitespaceToken _: Assert.IsTrue( - TokenAssert.AreEqual((WhitespaceToken)expectedToken, (WhitespaceToken)actualToken), + TokenAssert.AreEqual((WhitespaceToken)expectedToken, (WhitespaceToken)actualToken, ignoreExtent), LexerAssert.GetAssertErrorMessage($"actual token does not match expected token", index) ); break; diff --git a/src/Kingsland.MofParser.UnitTests/Tokens/TokenAssert.cs b/src/Kingsland.MofParser.UnitTests/Tokens/TokenAssert.cs index e623fa40..766bbf59 100644 --- a/src/Kingsland.MofParser.UnitTests/Tokens/TokenAssert.cs +++ b/src/Kingsland.MofParser.UnitTests/Tokens/TokenAssert.cs @@ -9,7 +9,7 @@ internal static class TokenAssert #region Token Comparison Methods - public static bool AreEqual(AliasIdentifierToken expected, AliasIdentifierToken actual) + public static bool AreEqual(AliasIdentifierToken expected, AliasIdentifierToken actual, bool ignoreExtent = false) { if ((expected == null) && (actual == null)) { @@ -21,12 +21,12 @@ public static bool AreEqual(AliasIdentifierToken expected, AliasIdentifierToken } else { - return TokenAssert.AreEqual(expected.Extent, actual.Extent) && + return (ignoreExtent || TokenAssert.AreEqual(expected.Extent, actual.Extent)) && (expected.Name == actual.Name); } } - public static bool AreEqual(AttributeCloseToken expected, AttributeCloseToken actual) + public static bool AreEqual(AttributeCloseToken expected, AttributeCloseToken actual, bool ignoreExtent = false) { if ((expected == null) && (actual == null)) { @@ -38,10 +38,10 @@ public static bool AreEqual(AttributeCloseToken expected, AttributeCloseToken ac } else { - return TokenAssert.AreEqual(expected.Extent, actual.Extent); + return (ignoreExtent || TokenAssert.AreEqual(expected.Extent, actual.Extent)); } } - public static bool AreEqual(AttributeOpenToken expected, AttributeOpenToken actual) + public static bool AreEqual(AttributeOpenToken expected, AttributeOpenToken actual, bool ignoreExtent = false) { if ((expected == null) && (actual == null)) { @@ -53,11 +53,11 @@ public static bool AreEqual(AttributeOpenToken expected, AttributeOpenToken actu } else { - return TokenAssert.AreEqual(expected.Extent, actual.Extent); + return (ignoreExtent || TokenAssert.AreEqual(expected.Extent, actual.Extent)); } } - public static bool AreEqual(BlockCloseToken expected, BlockCloseToken actual) + public static bool AreEqual(BlockCloseToken expected, BlockCloseToken actual, bool ignoreExtent = false) { if ((expected == null) && (actual == null)) { @@ -69,11 +69,11 @@ public static bool AreEqual(BlockCloseToken expected, BlockCloseToken actual) } else { - return TokenAssert.AreEqual(expected.Extent, actual.Extent); + return (ignoreExtent || TokenAssert.AreEqual(expected.Extent, actual.Extent)); } } - public static bool AreEqual(BlockOpenToken expected, BlockOpenToken actual) + public static bool 
AreEqual(BlockOpenToken expected, BlockOpenToken actual, bool ignoreExtent = false) { if ((expected == null) && (actual == null)) { @@ -85,11 +85,11 @@ public static bool AreEqual(BlockOpenToken expected, BlockOpenToken actual) } else { - return TokenAssert.AreEqual(expected.Extent, actual.Extent); + return (ignoreExtent || TokenAssert.AreEqual(expected.Extent, actual.Extent)); } } - public static bool AreEqual(BooleanLiteralToken expected, BooleanLiteralToken actual) + public static bool AreEqual(BooleanLiteralToken expected, BooleanLiteralToken actual, bool ignoreExtent = false) { if ((expected == null) && (actual == null)) { @@ -101,12 +101,12 @@ public static bool AreEqual(BooleanLiteralToken expected, BooleanLiteralToken ac } else { - return TokenAssert.AreEqual(expected.Extent, actual.Extent) && + return (ignoreExtent || TokenAssert.AreEqual(expected.Extent, actual.Extent)) && (expected.Value == actual.Value); } } - public static bool AreEqual(ColonToken expected, ColonToken actual) + public static bool AreEqual(ColonToken expected, ColonToken actual, bool ignoreExtent = false) { if ((expected == null) && (actual == null)) { @@ -118,11 +118,11 @@ public static bool AreEqual(ColonToken expected, ColonToken actual) } else { - return TokenAssert.AreEqual(expected.Extent, actual.Extent); + return (ignoreExtent || TokenAssert.AreEqual(expected.Extent, actual.Extent)); } } - public static bool AreEqual(CommaToken expected, CommaToken actual) + public static bool AreEqual(CommaToken expected, CommaToken actual, bool ignoreExtent = false) { if ((expected == null) && (actual == null)) { @@ -134,11 +134,11 @@ public static bool AreEqual(CommaToken expected, CommaToken actual) } else { - return TokenAssert.AreEqual(expected.Extent, actual.Extent); + return (ignoreExtent || TokenAssert.AreEqual(expected.Extent, actual.Extent)); } } - public static bool AreEqual(CommentToken expected, CommentToken actual) + public static bool AreEqual(CommentToken expected, CommentToken actual, bool ignoreExtent = false) { if ((expected == null) && (actual == null)) { @@ -150,11 +150,11 @@ public static bool AreEqual(CommentToken expected, CommentToken actual) } else { - return TokenAssert.AreEqual(expected.Extent, actual.Extent); + return (ignoreExtent || TokenAssert.AreEqual(expected.Extent, actual.Extent)); } } - public static bool AreEqual(DotOperatorToken expected, DotOperatorToken actual) + public static bool AreEqual(DotOperatorToken expected, DotOperatorToken actual, bool ignoreExtent = false) { if ((expected == null) && (actual == null)) { @@ -166,11 +166,11 @@ public static bool AreEqual(DotOperatorToken expected, DotOperatorToken actual) } else { - return TokenAssert.AreEqual(expected.Extent, actual.Extent); + return (ignoreExtent || TokenAssert.AreEqual(expected.Extent, actual.Extent)); } } - public static bool AreEqual(EqualsOperatorToken expected, EqualsOperatorToken actual) + public static bool AreEqual(EqualsOperatorToken expected, EqualsOperatorToken actual, bool ignoreExtent = false) { if ((expected == null) && (actual == null)) { @@ -182,11 +182,11 @@ public static bool AreEqual(EqualsOperatorToken expected, EqualsOperatorToken ac } else { - return TokenAssert.AreEqual(expected.Extent, actual.Extent); + return (ignoreExtent || TokenAssert.AreEqual(expected.Extent, actual.Extent)); } } - public static bool AreEqual(IdentifierToken expected, IdentifierToken actual) + public static bool AreEqual(IdentifierToken expected, IdentifierToken actual, bool ignoreExtent = false) { if ((expected == null) 
&& (actual == null)) { @@ -198,12 +198,12 @@ public static bool AreEqual(IdentifierToken expected, IdentifierToken actual) } else { - return TokenAssert.AreEqual(expected.Extent, actual.Extent) && + return (ignoreExtent || TokenAssert.AreEqual(expected.Extent, actual.Extent)) && (expected.Name == actual.Name); } } - public static bool AreEqual(IntegerLiteralToken expected, IntegerLiteralToken actual) + public static bool AreEqual(IntegerLiteralToken expected, IntegerLiteralToken actual, bool ignoreExtent = false) { if ((expected == null) && (actual == null)) { @@ -215,13 +215,13 @@ public static bool AreEqual(IntegerLiteralToken expected, IntegerLiteralToken ac } else { - return TokenAssert.AreEqual(expected.Extent, actual.Extent) && + return (ignoreExtent || TokenAssert.AreEqual(expected.Extent, actual.Extent)) && (expected.Kind == actual.Kind) && (expected.Value == actual.Value); } } - public static bool AreEqual(NullLiteralToken expected, NullLiteralToken actual) + public static bool AreEqual(NullLiteralToken expected, NullLiteralToken actual, bool ignoreExtent = false) { if ((expected == null) && (actual == null)) { @@ -233,11 +233,11 @@ public static bool AreEqual(NullLiteralToken expected, NullLiteralToken actual) } else { - return TokenAssert.AreEqual(expected.Extent, actual.Extent); + return (ignoreExtent || TokenAssert.AreEqual(expected.Extent, actual.Extent)); } } - public static bool AreEqual(ParenthesisCloseToken expected, ParenthesisCloseToken actual) + public static bool AreEqual(ParenthesisCloseToken expected, ParenthesisCloseToken actual, bool ignoreExtent = false) { if ((expected == null) && (actual == null)) { @@ -249,11 +249,11 @@ public static bool AreEqual(ParenthesisCloseToken expected, ParenthesisCloseToke } else { - return TokenAssert.AreEqual(expected.Extent, actual.Extent); + return (ignoreExtent || TokenAssert.AreEqual(expected.Extent, actual.Extent)); } } - public static bool AreEqual(ParenthesisOpenToken expected, ParenthesisOpenToken actual) + public static bool AreEqual(ParenthesisOpenToken expected, ParenthesisOpenToken actual, bool ignoreExtent = false) { if ((expected == null) && (actual == null)) { @@ -265,11 +265,11 @@ public static bool AreEqual(ParenthesisOpenToken expected, ParenthesisOpenToken } else { - return TokenAssert.AreEqual(expected.Extent, actual.Extent); + return (ignoreExtent || TokenAssert.AreEqual(expected.Extent, actual.Extent)); } } - public static bool AreEqual(PragmaToken expected, PragmaToken actual) + public static bool AreEqual(PragmaToken expected, PragmaToken actual, bool ignoreExtent = false) { if ((expected == null) && (actual == null)) { @@ -281,11 +281,11 @@ public static bool AreEqual(PragmaToken expected, PragmaToken actual) } else { - return TokenAssert.AreEqual(expected.Extent, actual.Extent); + return (ignoreExtent || TokenAssert.AreEqual(expected.Extent, actual.Extent)); } } - public static bool AreEqual(RealLiteralToken expected, RealLiteralToken actual) + public static bool AreEqual(RealLiteralToken expected, RealLiteralToken actual, bool ignoreExtent = false) { if ((expected == null) && (actual == null)) { @@ -297,12 +297,12 @@ public static bool AreEqual(RealLiteralToken expected, RealLiteralToken actual) } else { - return TokenAssert.AreEqual(expected.Extent, actual.Extent) && + return (ignoreExtent || TokenAssert.AreEqual(expected.Extent, actual.Extent)) && (expected.Value == actual.Value); } } - public static bool AreEqual(StatementEndToken expected, StatementEndToken actual) + public static bool 
AreEqual(StatementEndToken expected, StatementEndToken actual, bool ignoreExtent = false) { if ((expected == null) && (actual == null)) { @@ -314,11 +314,11 @@ public static bool AreEqual(StatementEndToken expected, StatementEndToken actual } else { - return TokenAssert.AreEqual(expected.Extent, actual.Extent); + return (ignoreExtent || TokenAssert.AreEqual(expected.Extent, actual.Extent)); } } - public static bool AreEqual(StringLiteralToken expected, StringLiteralToken actual) + public static bool AreEqual(StringLiteralToken expected, StringLiteralToken actual, bool ignoreExtent = false) { if ((expected == null) && (actual == null)) { @@ -330,12 +330,12 @@ public static bool AreEqual(StringLiteralToken expected, StringLiteralToken actu } else { - return TokenAssert.AreEqual(expected.Extent, actual.Extent) && + return (ignoreExtent || TokenAssert.AreEqual(expected.Extent, actual.Extent)) && (expected.Value == actual.Value); } } - public static bool AreEqual(WhitespaceToken expected, WhitespaceToken actual) + public static bool AreEqual(WhitespaceToken expected, WhitespaceToken actual, bool ignoreExtent = false) { if ((expected == null) && (actual == null)) { @@ -347,7 +347,7 @@ public static bool AreEqual(WhitespaceToken expected, WhitespaceToken actual) } else { - return TokenAssert.AreEqual(expected.Extent, actual.Extent); + return (ignoreExtent || TokenAssert.AreEqual(expected.Extent, actual.Extent)); } } diff --git a/src/Kingsland.MofParser.sln b/src/Kingsland.MofParser.sln index 9bfd5b0f..c5701f86 100644 --- a/src/Kingsland.MofParser.sln +++ b/src/Kingsland.MofParser.sln @@ -21,6 +21,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Kingsland.MofParser.NuGet", EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Kingsland.ParseFx", "Kingsland.ParseFx\Kingsland.ParseFx.csproj", "{4F2655F8-10A9-4B60-8B1A-4EE84C0021E1}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Kingsland.MofParser.EditMof", "Kingsland.MofParser.EditMof\Kingsland.MofParser.EditMof.csproj", "{ACF08AA4-D0DE-4B70-AC6D-1892852A4893}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -55,6 +57,10 @@ Global {4F2655F8-10A9-4B60-8B1A-4EE84C0021E1}.Debug|Any CPU.Build.0 = Debug|Any CPU {4F2655F8-10A9-4B60-8B1A-4EE84C0021E1}.Release|Any CPU.ActiveCfg = Release|Any CPU {4F2655F8-10A9-4B60-8B1A-4EE84C0021E1}.Release|Any CPU.Build.0 = Release|Any CPU + {ACF08AA4-D0DE-4B70-AC6D-1892852A4893}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {ACF08AA4-D0DE-4B70-AC6D-1892852A4893}.Debug|Any CPU.Build.0 = Debug|Any CPU + {ACF08AA4-D0DE-4B70-AC6D-1892852A4893}.Release|Any CPU.ActiveCfg = Release|Any CPU + {ACF08AA4-D0DE-4B70-AC6D-1892852A4893}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE @@ -64,6 +70,7 @@ Global {4CCBEEF4-4303-4392-BE3D-A4BB8218D6E0} = {895B3DFF-FD72-4C6C-A08F-BA7D46FB87AE} {A071F2FD-3C3D-43DE-9774-AACEE415EFA2} = {895B3DFF-FD72-4C6C-A08F-BA7D46FB87AE} {ECD9DAF2-1FDA-4435-B489-886A2CCA749E} = {895B3DFF-FD72-4C6C-A08F-BA7D46FB87AE} + {ACF08AA4-D0DE-4B70-AC6D-1892852A4893} = {895B3DFF-FD72-4C6C-A08F-BA7D46FB87AE} EndGlobalSection GlobalSection(ExtensibilityGlobals) = postSolution SolutionGuid = {02535FFA-CD2C-4FEC-997C-7A49EA77F34A} diff --git a/src/Kingsland.MofParser/CodeGen/TokenMofGenerator.cs b/src/Kingsland.MofParser/CodeGen/TokenSerializer.cs similarity index 52% rename from src/Kingsland.MofParser/CodeGen/TokenMofGenerator.cs rename to 
src/Kingsland.MofParser/CodeGen/TokenSerializer.cs index b84913bc..403c2113 100644 --- a/src/Kingsland.MofParser/CodeGen/TokenMofGenerator.cs +++ b/src/Kingsland.MofParser/CodeGen/TokenSerializer.cs @@ -1,25 +1,31 @@ using Kingsland.ParseFx.Syntax; +using System; using System.Collections.Generic; using System.Text; namespace Kingsland.MofParser.CodeGen { - public sealed class TokenMofGenerator + public sealed class TokenSerializer { #region Dispatcher - public static string ConvertToMof(IEnumerable tokens, MofQuirks quirks = MofQuirks.None) + public static string ConvertToMofText(SyntaxToken token) + { + return token.Extent.Text; + } + + public static string ConvertToMofText(IEnumerable tokens) { if (tokens == null) { - return null; + throw new ArgumentNullException(nameof(tokens)); } var source = new StringBuilder(); foreach (var token in tokens) { - source.Append(token.Extent.Text); + source.Append(TokenSerializer.ConvertToMofText(token)); } return source.ToString(); } diff --git a/src/Kingsland.ParseFx/Syntax/SyntaxToken.cs b/src/Kingsland.ParseFx/Syntax/SyntaxToken.cs index 0a9309f0..d8f18e1d 100644 --- a/src/Kingsland.ParseFx/Syntax/SyntaxToken.cs +++ b/src/Kingsland.ParseFx/Syntax/SyntaxToken.cs @@ -1,4 +1,5 @@ using Kingsland.ParseFx.Text; +using System; namespace Kingsland.ParseFx.Syntax { @@ -8,6 +9,10 @@ public abstract class SyntaxToken protected SyntaxToken(SourceExtent extent) { + if (extent == null) + { + throw new ArgumentNullException(nameof(extent)); + } this.Extent = extent; } @@ -17,9 +22,17 @@ public SourceExtent Extent private set; } + public string Text + { + get + { + return this.Extent.Text; + } + } + public override string ToString() { - return this.Extent.Text; + return this.Text; } } From d4f7b258dfe3a642abede096a21c283ddb01e4df Mon Sep 17 00:00:00 2001 From: mikeclayton Date: Mon, 14 Sep 2020 21:41:11 +0100 Subject: [PATCH 11/11] #65 - Updated sample and wiki notes --- README.md | 142 +++++----------------- src/Kingsland.MofParser.Sample/Program.cs | 24 ++-- src/Kingsland.MofParser.sln | 7 -- 3 files changed, 47 insertions(+), 126 deletions(-) diff --git a/README.md b/README.md index ca43c04b..b77f4346 100644 --- a/README.md +++ b/README.md @@ -31,126 +31,46 @@ Sample Code ----------- ```c# -const string filename = "..\\..\\dsc\\MyServer.mof"; +const string sourceText = @" +instance of MSFT_RoleResource as $MSFT_RoleResource1ref +{ + ResourceID = ""[WindowsFeature]IIS""; + Ensure = ""Present""; + SourceInfo = ""D:\\dsc\\MyServerConfig.ps1::6::9::WindowsFeature""; + Name = ""Web-Server""; + ModuleName = ""PSDesiredStateConfiguration""; + ModuleVersion = ""1.0""; +};"; // parse the mof file -var instances = PowerShellDscHelper.ParseMofFileInstances(filename); +var module = Parser.ParseText(sourceText); // display the instances -foreach (var instance in instances) +foreach (var instance in module.Instances) { - Console.WriteLine("--------------------------"); - if (string.IsNullOrEmpty(instance.Alias)) - { - Console.WriteLine(string.Format("instance of {0}", instance.ClassName)); - } - else - { - Console.WriteLine(string.Format("instance of {0} as ${1}", instance.ClassName, instance.Alias)); - } - foreach(var property in instance.Properties) + Console.WriteLine($"----------------------------------"); + Console.WriteLine($"typename = {instance.TypeName}"); + Console.WriteLine($"alias = {instance.Alias}"); + Console.WriteLine($"properties:"); + foreach (var property in instance.Properties) { - Console.WriteLine(" {0} = {1}", property.Key.PadRight(14), 
property.Value.ToString()); + Console.WriteLine(" {0} = {1}", property.Name.PadRight(13), property.Value); } - Console.WriteLine("--------------------------"); + Console.WriteLine($"----------------------------------"); } -``` - -Sample MOF ----------- - -``` -/* -@TargetNode='MyServer' -@GeneratedBy=mike.clayton -@GenerationDate=07/19/2014 10:37:04 -@GenerationHost=MyDesktop -*/ -instance of MSFT_RoleResource as $MSFT_RoleResource1ref -{ -ResourceID = "[WindowsFeature]IIS"; - Ensure = "Present"; - SourceInfo = "E:\\MofParser\\src\\Kingsland.MofParser.Sample\\dsc\\MyServerConfig.ps1::6::9::WindowsFeature"; - Name = "Web-Server"; - ModuleName = "PSDesiredStateConfiguration"; - ModuleVersion = "1.0"; - -}; - -instance of MSFT_RoleResource as $MSFT_RoleResource2ref -{ -ResourceID = "[WindowsFeature]ASP"; - Ensure = "Present"; - SourceInfo = "E:\\MofParser\\src\\Kingsland.MofParser.Sample\\dsc\\MyServerConfig.ps1::12::9::WindowsFeature"; - Name = "Web-Asp-Net45"; - ModuleName = "PSDesiredStateConfiguration"; - ModuleVersion = "1.0"; - -}; - -instance of MSFT_PackageResource as $MSFT_PackageResource1ref -{ -ResourceID = "[Package]7Zip"; - Path = "E:\\Installers\\Desktop Software\\7-Zip\\7z920-x64.msi"; - Ensure = "Present"; - ProductId = "23170F69-40C1-2702-0920-000001000000"; - SourceInfo = "E:\\MofParser\\src\\Kingsland.MofParser.Sample\\dsc\\MyServerConfig.ps1::18::9::Package"; - Name = "7-Zip 9.20 (x64 edition)"; - ModuleName = "PSDesiredStateConfiguration"; - ModuleVersion = "1.0"; - -}; - -instance of OMI_ConfigurationDocument -{ - Version="1.0.0"; - Author="mike.clayton"; - GenerationDate="07/19/2014 10:37:04"; - GenerationHost="MyDesktop"; -}; -``` - -Sample Output -------------- - -```text --------------------------- -instance of MSFT_RoleResource as $MSFT_RoleResource1ref - ResourceID = [WindowsFeature]IIS - Ensure = Present - SourceInfo = E:\MofParser\src\Kingsland.MofParser.Sample\dsc\MyServerConfig.ps1::6::9::WindowsFeature - Name = Web-Server - ModuleName = PSDesiredStateConfiguration - ModuleVersion = 1.0 --------------------------- --------------------------- -instance of MSFT_RoleResource as $MSFT_RoleResource2ref - ResourceID = [WindowsFeature]ASP - Ensure = Present - SourceInfo = E:\MofParser\src\Kingsland.MofParser.Sample\dsc\MyServerConfig.ps1::12::9::WindowsFeature - Name = Web-Asp-Net45 - ModuleName = PSDesiredStateConfiguration - ModuleVersion = 1.0 --------------------------- --------------------------- -instance of MSFT_PackageResource as $MSFT_PackageResource1ref - ResourceID = [Package]7Zip - Path = E:\Installers\Desktop Software\7-Zip\7z920-x64.msi - Ensure = Present - ProductId = 23170F69-40C1-2702-0920-000001000000 - SourceInfo = E:\MofParser\src\Kingsland.MofParser.Sample\dsc\MyServerConfig.ps1::18::9::Package - Name = 7-Zip 9.20 (x64 edition) - ModuleName = PSDesiredStateConfiguration - ModuleVersion = 1.0 --------------------------- --------------------------- -instance of OMI_ConfigurationDocument - Version = 1.0.0 - Author = mike.clayton - GenerationDate = 07/19/2014 10:37:04 - GenerationHost = MyDesktop --------------------------- +// output: +// ---------------------------------- +// typename = MSFT_RoleResource +// alias = MSFT_RoleResource1ref +// properties: +// ResourceID = [WindowsFeature]IIS +// Ensure = Present +// SourceInfo = D:\dsc\MyServerConfig.ps1::6::9::WindowsFeature +// Name = Web-Server +// ModuleName = PSDesiredStateConfiguration +// ModuleVersion = 1.0 +// ---------------------------------- ``` diff --git 
a/src/Kingsland.MofParser.Sample/Program.cs b/src/Kingsland.MofParser.Sample/Program.cs index 2bf930ab..3d015d6b 100644 --- a/src/Kingsland.MofParser.Sample/Program.cs +++ b/src/Kingsland.MofParser.Sample/Program.cs @@ -1,9 +1,5 @@ -using Kingsland.MofParser; -using Kingsland.MofParser.CodeGen; -using Kingsland.MofParser.Model; -using Kingsland.MofParser.Parsing; +using Kingsland.MofParser.Parsing; using System; -using System.Linq; namespace Kingsland.FileFormat.Mof.Tests { @@ -14,7 +10,7 @@ class Program static void Main(string[] args) { - const string mof = @" + const string sourceText = @" instance of MSFT_RoleResource as $MSFT_RoleResource1ref { ResourceID = ""[WindowsFeature]IIS""; @@ -26,7 +22,7 @@ instance of MSFT_RoleResource as $MSFT_RoleResource1ref };"; // parse the mof file - var module = Parser.ParseText(mof); + var module = Parser.ParseText(sourceText); // display the instances foreach (var instance in module.Instances) @@ -42,8 +38,20 @@ instance of MSFT_RoleResource as $MSFT_RoleResource1ref Console.WriteLine($"----------------------------------"); } + // ---------------------------------- + // typename = MSFT_RoleResource + // alias = MSFT_RoleResource1ref + // properties: + // ResourceID = [WindowsFeature]IIS + // Ensure = Present + // SourceInfo = D:\dsc\MyServerConfig.ps1::6::9::WindowsFeature + // Name = Web-Server + // ModuleName = PSDesiredStateConfiguration + // ModuleVersion = 1.0 + // ---------------------------------- + } } -} \ No newline at end of file +} diff --git a/src/Kingsland.MofParser.sln b/src/Kingsland.MofParser.sln index c5701f86..9bfd5b0f 100644 --- a/src/Kingsland.MofParser.sln +++ b/src/Kingsland.MofParser.sln @@ -21,8 +21,6 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Kingsland.MofParser.NuGet", EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Kingsland.ParseFx", "Kingsland.ParseFx\Kingsland.ParseFx.csproj", "{4F2655F8-10A9-4B60-8B1A-4EE84C0021E1}" EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Kingsland.MofParser.EditMof", "Kingsland.MofParser.EditMof\Kingsland.MofParser.EditMof.csproj", "{ACF08AA4-D0DE-4B70-AC6D-1892852A4893}" -EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -57,10 +55,6 @@ Global {4F2655F8-10A9-4B60-8B1A-4EE84C0021E1}.Debug|Any CPU.Build.0 = Debug|Any CPU {4F2655F8-10A9-4B60-8B1A-4EE84C0021E1}.Release|Any CPU.ActiveCfg = Release|Any CPU {4F2655F8-10A9-4B60-8B1A-4EE84C0021E1}.Release|Any CPU.Build.0 = Release|Any CPU - {ACF08AA4-D0DE-4B70-AC6D-1892852A4893}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {ACF08AA4-D0DE-4B70-AC6D-1892852A4893}.Debug|Any CPU.Build.0 = Debug|Any CPU - {ACF08AA4-D0DE-4B70-AC6D-1892852A4893}.Release|Any CPU.ActiveCfg = Release|Any CPU - {ACF08AA4-D0DE-4B70-AC6D-1892852A4893}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE @@ -70,7 +64,6 @@ Global {4CCBEEF4-4303-4392-BE3D-A4BB8218D6E0} = {895B3DFF-FD72-4C6C-A08F-BA7D46FB87AE} {A071F2FD-3C3D-43DE-9774-AACEE415EFA2} = {895B3DFF-FD72-4C6C-A08F-BA7D46FB87AE} {ECD9DAF2-1FDA-4435-B489-886A2CCA749E} = {895B3DFF-FD72-4C6C-A08F-BA7D46FB87AE} - {ACF08AA4-D0DE-4B70-AC6D-1892852A4893} = {895B3DFF-FD72-4C6C-A08F-BA7D46FB87AE} EndGlobalSection GlobalSection(ExtensibilityGlobals) = postSolution SolutionGuid = {02535FFA-CD2C-4FEC-997C-7A49EA77F34A}
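
For reference, here is a minimal sketch of the lexer/parser roundtrip that `RoundtripTests.AssertRoundtrip` performs after these changes. The sample MOF string and the console output are illustrative only; the `using` directives assume the namespaces shown in the diffs above.

```c#
using Kingsland.MofParser.CodeGen;
using Kingsland.MofParser.Lexing;
using Kingsland.MofParser.Parsing;
using Kingsland.ParseFx.Text;
using System;

class RoundtripSketch
{
    static void Main()
    {
        const string sourceText =
@"instance of MSFT_RoleResource as $MSFT_RoleResource1ref
{
    Name = ""Web-Server"";
};";

        // lex the source text into tokens, then serialize the tokens back to text
        var tokens = Lexer.Lex(SourceReader.From(sourceText));
        var tokensMof = TokenSerializer.ConvertToMofText(tokens);

        // parse the tokens into an ast, then generate mof text from the ast
        var astNodes = Parser.Parse(tokens, ParserQuirks.None);
        var astMof = AstMofGenerator.ConvertToMof(astNodes);

        // the roundtrip tests assert that both regenerated strings match the original source
        Console.WriteLine(tokensMof);
        Console.WriteLine(astMof);
    }
}
```

`TokenSerializer.ConvertToMofText` simply concatenates each token's `Extent.Text`, so the token-level roundtrip reproduces the source text verbatim.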