From 9a2cd33eaa6796d97e4c1d1048b33cd892394848 Mon Sep 17 00:00:00 2001
From: Christoph Lipka
Date: Thu, 31 May 2018 12:34:47 +0200
Subject: [PATCH 1/2] Minor refactoring of `#for` loop code.

---
 source/parser/parser.h             |  8 ++++----
 source/parser/parser_tokenizer.cpp | 15 ++++-----------
 2 files changed, 8 insertions(+), 15 deletions(-)

diff --git a/source/parser/parser.h b/source/parser/parser.h
index f2814a843..3e7c4ac87 100644
--- a/source/parser/parser.h
+++ b/source/parser/parser.h
@@ -409,7 +409,7 @@ class Parser : public SceneTask
         void Release_Entry_Reference (SYM_TABLE *table, SYM_ENTRY *Entry);
         SYM_ENTRY *Destroy_Entry (SYM_ENTRY *Entry, bool destroyName);
         bool Parse_Ifdef_Param ();
-        int Parse_For_Param (char**, DBL*, DBL*);
+        int Parse_For_Param (UTF8String&, DBL*, DBL*);

         // parstxtr.h/parstxtr.cpp
         TEXTURE *Parse_Texture (void);
@@ -598,11 +598,11 @@ class Parser : public SceneTask
            bool Macro_Same_Flag;
            bool Switch_Case_Ok_Flag;
            Macro *PMac;
-           char* Loop_Identifier;
+           UTF8String Loop_Identifier;
            DBL For_Loop_End;
            DBL For_Loop_Step;
-           CS_ENTRY() : Cond_Type(BUSY_COND), PMac(nullptr), Loop_Identifier(nullptr) {}
-           ~CS_ENTRY() { POV_PARSER_ASSERT(Loop_Identifier == nullptr); }
+           CS_ENTRY() : Cond_Type(BUSY_COND), PMac(nullptr) {}
+           ~CS_ENTRY() {}
         };

         vector<CS_ENTRY> Cond_Stack;
diff --git a/source/parser/parser_tokenizer.cpp b/source/parser/parser_tokenizer.cpp
index 4559997d4..6b880b782 100644
--- a/source/parser/parser_tokenizer.cpp
+++ b/source/parser/parser_tokenizer.cpp
@@ -326,7 +326,6 @@ void Parser::Get_Token ()
             continue;
         }

-        TokenId tokenId;
         switch (mToken.raw.GetTokenId())
         {
             case IDENTIFIER_TOKEN:
@@ -1081,14 +1080,12 @@ void Parser::Parse_Directive(int After_Hash)
                 }
                 else
                 {
-                    char* Identifier = nullptr;
                     DBL End, Step;
-                    if (Parse_For_Param (&Identifier, &End, &Step))
+                    if (Parse_For_Param (Cond_Stack.back().Loop_Identifier, &End, &Step))
                     {
                         // execute loop
                         Cond_Stack.back().Cond_Type = FOR_COND;
                         Cond_Stack.back().returnToBookmark = mTokenizer.GetHotBookmark();
-                        Cond_Stack.back().Loop_Identifier = Identifier;
                         Cond_Stack.back().For_Loop_End = End;
                         Cond_Stack.back().For_Loop_Step = Step;
                     }
@@ -1098,7 +1095,6 @@ void Parser::Parse_Directive(int After_Hash)
                         Cond_Stack.back().Cond_Type = SKIP_TIL_END_COND;
                         Skip_Tokens(SKIP_TIL_END_COND);
                         // need to do some cleanup otherwise deferred via the Cond_Stack
-                        POV_FREE(Identifier);
                     }
                 }
                 EXIT
@@ -1383,7 +1379,7 @@ void Parser::Parse_Directive(int After_Hash)
                 }

                 {
-                    SYM_ENTRY* Entry = Find_Symbol(Table_Index, Cond_Stack.back().Loop_Identifier);
+                    SYM_ENTRY* Entry = Find_Symbol(Table_Index, Cond_Stack.back().Loop_Identifier.c_str());
                     if ((Entry == nullptr) || (Entry->Token_Number != FLOAT_ID_TOKEN))
                         Error ("#for loop variable must remain defined and numerical during loop.");

@@ -1396,7 +1392,6 @@ void Parser::Parse_Directive(int After_Hash)
                    if ( ((Step > 0) && (*CurrentPtr > End + EPSILON)) ||
                         ((Step < 0) && (*CurrentPtr < End - EPSILON)) )
                    {
-                       POV_FREE(Cond_Stack.back().Loop_Identifier);
                        Cond_Stack.back().Cond_Type = SKIP_TIL_END_COND;
                        Skip_Tokens(SKIP_TIL_END_COND);
                    }
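
Note on the hunks above and the Parse_For_Param change below: the point is ownership, not behaviour. A minimal sketch of the difference, using made-up types rather than POV-Ray's actual CS_ENTRY:

    #include <string>

    // Raw-pointer member: every code path that pops the entry must remember
    // to free the identifier by hand, or a destructor assertion fires.
    struct OldEntry
    {
        char* loopIdentifier = nullptr;   // manually allocated, manually freed
    };

    // String member: the identifier is owned by the entry itself, so popping
    // the conditional stack (or unwinding after a parse error) releases it.
    struct NewEntry
    {
        std::string loopIdentifier;       // RAII, no explicit free needed
    };

With the string member, the explicit POV_FREE calls above become unnecessary, and Parse_For_Param below can simply assign the lexeme text instead of duplicating it into a malloc'd buffer.
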
@@ -3397,7 +3392,7 @@ bool Parser::Parse_Ifdef_Param ()
     return retval;
 }

-int Parser::Parse_For_Param (char** IdentifierPtr, DBL* EndPtr, DBL* StepPtr)
+int Parser::Parse_For_Param (UTF8String& identifierName, DBL* EndPtr, DBL* StepPtr)
 {
     TokenId Previous = NOT_A_TOKEN;
     SYM_ENTRY *Temp_Entry = nullptr;
@@ -3499,9 +3494,7 @@ int Parser::Parse_For_Param (char** IdentifierPtr, DBL* EndPtr, DBL* StepPtr)
             *mToken.DataPtr = reinterpret_cast<void *>(Create_Float());
             DBL* CurrentPtr = (reinterpret_cast<DBL *>(*mToken.DataPtr));
-            size_t len = strlen(mToken.raw.lexeme.text.c_str())+1;
-            *IdentifierPtr = reinterpret_cast<char *>(POV_MALLOC(len, "loop identifier"));
-            memcpy(*IdentifierPtr, mToken.raw.lexeme.text.c_str(), len);
+            identifierName = mToken.raw.lexeme.text;

             Parse_Comma();
             *CurrentPtr = Parse_Float();

From ac88d0e7c76a4bf5ad3232f19175968cd1d91897 Mon Sep 17 00:00:00 2001
From: Christoph Lipka
Date: Thu, 31 May 2018 16:35:59 +0200
Subject: [PATCH 2/2] Re-enable `#read` statement.

---
 source/core/coretypes.h              |  21 ++-
 source/parser/fncode.cpp             |   4 +-
 source/parser/parser.cpp             |  10 +-
 source/parser/parser.h               |  29 +++-
 source/parser/parser_expressions.cpp |  37 -----
 source/parser/parser_tokenizer.cpp   | 216 +++++++++++----------------
 source/parser/parsertypes.cpp        |   9 +-
 source/parser/parsertypes.h          |  10 +-
 8 files changed, 137 insertions(+), 199 deletions(-)

diff --git a/source/core/coretypes.h b/source/core/coretypes.h
index ac3e64d40..a5128d9e1 100644
--- a/source/core/coretypes.h
+++ b/source/core/coretypes.h
@@ -610,18 +610,27 @@ inline void intrusive_ptr_release(GenericFunctionContextFactory* f) { if (!(--f-
 typedef intrusive_ptr<GenericFunctionContextFactory> GenericFunctionContextFactoryIPtr;
 typedef GenericFunctionContextFactory* GenericFunctionContextFactoryTPtr;

-struct SourceInfo : MessageContext
+struct SourcePosition
 {
-    UCS2String fileName;
     POV_LONG line;
     POV_LONG column;
     POV_OFF_T offset;
+    SourcePosition() = default;
+    SourcePosition(const SourcePosition&) = default;
+    SourcePosition(POV_LONG l, POV_LONG c, POV_OFF_T o) : line(l), column(c), offset(o) {}
+};
+
+struct SourceInfo : MessageContext
+{
+    UCS2String fileName;
+    SourcePosition position;

     SourceInfo() = default;
-    SourceInfo(const MessageContext& o) : fileName(o.GetFileName()), line(o.GetLine()), column(o.GetColumn()), offset(o.GetOffset()) {}
+    SourceInfo(const MessageContext& o) : fileName(o.GetFileName()), position(o.GetLine(), o.GetColumn(), o.GetOffset()) {}
+    SourceInfo(const UCS2String& fn, SourcePosition& p) : fileName(fn), position(p) {}

     virtual UCS2String GetFileName() const override { return fileName; }
-    virtual POV_LONG GetLine() const override { return line; }
-    virtual POV_LONG GetColumn() const override { return column; }
-    virtual POV_OFF_T GetOffset() const override { return offset; }
+    virtual POV_LONG GetLine() const override { return position.line; }
+    virtual POV_LONG GetColumn() const override { return position.column; }
+    virtual POV_OFF_T GetOffset() const override { return position.offset; }
 };

 struct CustomFunctionSourceInfo : SourceInfo
diff --git a/source/parser/fncode.cpp b/source/parser/fncode.cpp
index 9a12be709..0e67d2d2f 100644
--- a/source/parser/fncode.cpp
+++ b/source/parser/fncode.cpp
@@ -124,9 +124,7 @@ FNCode::FNCode(Parser *pa, FunctionCode *f, bool is_local, const char *n)
     else
         function->sourceInfo.name = POV_STRDUP("");
     function->sourceInfo.fileName = parser->UCS2_strdup(parser->mToken.sourceFile->Name());
-    function->sourceInfo.line = parser->mToken.raw.lexeme.position.line;
-    function->sourceInfo.column = parser->mToken.raw.lexeme.position.column;
-    function->sourceInfo.offset = parser->mToken.raw.lexeme.position.offset;
+    function->sourceInfo.position = parser->mToken.raw.lexeme.position;
     function->flags = 0;
     function->private_copy_method = nullptr;
     function->private_destroy_method = nullptr;
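
Note: the coretypes.h hunk above groups the line/column/offset triple into its own SourcePosition struct, so that call sites such as the fncode.cpp hunk can copy a whole position in a single assignment. A minimal sketch of the same layout, with stand-in names rather than POV-Ray's types:

    #include <cstdint>
    #include <string>

    struct Position                       // stands in for SourcePosition
    {
        std::int64_t line = 1;
        std::int64_t column = 1;
        std::int64_t offset = 0;
    };

    struct Info                           // stands in for SourceInfo
    {
        std::string fileName;
        Position position;                // one member instead of three parallel fields
    };

    void RecordWhereSomethingStarts(Info& info, const Position& lexemePosition)
    {
        info.position = lexemePosition;   // single assignment, as in the fncode.cpp hunk
    }
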
diff --git a/source/parser/parser.cpp b/source/parser/parser.cpp
index 378fb7c1f..01bd88314 100644
--- a/source/parser/parser.cpp
+++ b/source/parser/parser.cpp
@@ -9014,7 +9014,7 @@ bool Parser::Parse_RValue (TokenId Previous, TokenId *NumberPtr, void **DataPtr,
     void *Temp_Data;
     POV_PARAM *New_Par;
     bool Found=true;
-    int Temp_Count=3000000;
+    int Temp_Count=3000000; // TODO FIXME - magic value!
     bool Old_Ok=Ok_To_Declare;
     int Terms;
     bool function_identifier;
@@ -9142,7 +9142,7 @@ bool Parser::Parse_RValue (TokenId Previous, TokenId *NumberPtr, void **DataPtr,
     // get the number of tokens found
     Temp_Count -= token_count;

-    // no tokens have been found or a fucntion call had no parameters in parenthesis
+    // no tokens have been found or a function call had no parameters in parenthesis
     if (!((Temp_Count==-1) || (Temp_Count==TOKEN_OVERFLOW_RESET_COUNT)) && had_callable_identifier)
         Error("Identifier expected, incomplete function call or spline call found instead.");

@@ -9457,7 +9457,6 @@ void Parser::Destroy_Ident_Data(void *Data, int Type)
 {
     int i;
     POV_ARRAY *a;
-    DATA_FILE *Temp_File;

     if(Data == nullptr)
         return;
@@ -9563,10 +9562,7 @@ void Parser::Destroy_Ident_Data(void *Data, int Type)
             POV_FREE(Data);
             break;
         case FILE_ID_TOKEN:
-            Temp_File = reinterpret_cast<DATA_FILE *>(Data);
-            Temp_File->In_File = nullptr;
-            Temp_File->Out_File = nullptr;
-            POV_FREE(Data);
+            delete reinterpret_cast<DATA_FILE *>(Data);
             break;
         case FUNCT_ID_TOKEN:
         case VECTFUNCT_ID_TOKEN:
diff --git a/source/parser/parser.h b/source/parser/parser.h
index 3e7c4ac87..2081ca0c3 100644
--- a/source/parser/parser.h
+++ b/source/parser/parser.h
@@ -305,10 +305,31 @@ class Parser : public SceneTask

         struct DATA_FILE
         {
-            shared_ptr In_File;
+            shared_ptr inTokenizer;
+            RawToken inToken;
             shared_ptr Out_File;
+            bool inUngetToken : 1;
             bool busyParsing  : 1; ///< `true` if parsing a statement related to the file, `false` otherwise.
-            bool R_Flag       : 1;
+
+            bool ReadNextToken()
+            {
+                POV_PARSER_ASSERT(inTokenizer != nullptr);
+                if (!inUngetToken && !inTokenizer->GetNextToken(inToken))
+                {
+                    inToken.id = END_OF_FILE_TOKEN;
+                    return false;
+                }
+                inUngetToken = false;
+                return true;
+            }
+            void UnReadToken()
+            {
+                POV_PARSER_ASSERT(!inUngetToken);
+                if (inToken.id != END_OF_FILE_TOKEN)
+                    inUngetToken = true;
+            }
+
+            DATA_FILE() : inUngetToken(false), busyParsing(false) {}
         };

         // constructor
@@ -471,7 +492,6 @@ class Parser : public SceneTask
         void Parse_Float_Param2 (DBL *Val1, DBL *Val2);
         void Init_Random_Generators (void);
         void Destroy_Random_Generators (void);
-        DBL Parse_Signed_Float(void);

         // function.h/function.cpp
         FUNCTION_PTR Parse_Function(void);
@@ -586,8 +606,6 @@ class Parser : public SceneTask

         int line_count;

-        bool readingExternalFile;
-
         vector maIncludeStack;

         struct CS_ENTRY
@@ -738,6 +756,7 @@ class Parser : public SceneTask
         void Parse_Read(void);
         void Parse_Write(void);
         int Parse_Read_Value(DATA_FILE *User_File, TokenId Previous, TokenId *NumberPtr, void **DataPtr);
+        bool Parse_Read_Float_Value(DBL& val, DATA_FILE *User_File);
         void Check_Macro_Vers(void);
         DBL Parse_Cond_Param(void);
         void Parse_Cond_Param2(DBL *V1,DBL *V2);
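
Note: the new ReadNextToken/UnReadToken pair in DATA_FILE is a single-slot token pushback, which is what lets the `#read` code peek at a separator and put it back. The same pattern, sketched with hypothetical Token/Lexer types (POV-Ray's RawToken/RawTokenizer interface differs in detail):

    #include <optional>

    template<typename Token, typename Lexer>
    class TokenReader
    {
    public:
        explicit TokenReader(Lexer& lexer) : mLexer(lexer) {}

        // Returns the pushed-back token first, otherwise pulls a fresh one.
        bool Read(Token& token)
        {
            if (mPending)
            {
                token = *mPending;
                mPending.reset();
                return true;
            }
            return mLexer.GetNextToken(token);
        }

        // At most one token of lookahead; callers must not push back twice in a row.
        void Unread(const Token& token)
        {
            mPending = token;
        }

    private:
        Lexer& mLexer;
        std::optional<Token> mPending;
    };

DATA_FILE folds the same idea into the inUngetToken flag plus the cached inToken, which is all the lookahead the `#read` grammar needs.
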
diff --git a/source/parser/parser_expressions.cpp b/source/parser/parser_expressions.cpp
index bfa66352b..25b8d66da 100644
--- a/source/parser/parser_expressions.cpp
+++ b/source/parser/parser_expressions.cpp
@@ -3904,41 +3904,4 @@ void Parser::Destroy_Random_Generators()
     Number_Of_Random_Generators = 0;
 }

-DBL Parser::Parse_Signed_Float(void)
-{
-    DBL Sign=1.0;
-    DBL Val=0.0;
-    bool old_allow_id = Allow_Identifier_In_Call;
-    Allow_Identifier_In_Call = false;
-
-    EXPECT
-        CASE (PLUS_TOKEN)
-        END_CASE
-
-        CASE (DASH_TOKEN)
-            Sign=-1.0;
-            Get_Token();
-            // FALLTHROUGH
-        CASE (FLOAT_FUNCT_TOKEN)
-            if (mToken.Function_Id==FLOAT_TOKEN)
-            {
-                Val = Sign * mToken.Token_Float;
-                EXIT
-            }
-            else
-            {
-                Parse_Error(FLOAT_TOKEN);
-            }
-        END_CASE
-
-        OTHERWISE
-            Parse_Error(FLOAT_TOKEN);
-        END_CASE
-    END_EXPECT
-
-    Allow_Identifier_In_Call = old_allow_id;
-
-    return(Val);
-}
-
 }
diff --git a/source/parser/parser_tokenizer.cpp b/source/parser/parser_tokenizer.cpp
index 6b880b782..74099beed 100644
--- a/source/parser/parser_tokenizer.cpp
+++ b/source/parser/parser_tokenizer.cpp
@@ -104,8 +104,6 @@ void Parser::Initialize_Tokenizer()
     mTokenizer.SetInputStream(rfile);
     mToken.sourceFile = mTokenizer.GetSource();

-    readingExternalFile = false;
-
     mHavePendingRawToken = false;

     Got_EOF = false;
@@ -289,17 +287,6 @@ void Parser::Get_Token ()

         if (!GetRawToken(mToken.raw, fastForwardToDirective))
         {
-            if (readingExternalFile)
-            {
-                // In `#read` statement.
-                mToken.Token_Id = END_OF_FILE_TOKEN;
-                mToken.is_array_elem = false;
-                mToken.is_mixed_array_elem = false;
-                mToken.is_dictionary_elem = false;
-                mToken.End_Of_File = true;
-                return;
-            }
-
             if (maIncludeStack.empty())
             {
                 if (Cond_Stack.size() != 1)
@@ -333,13 +320,6 @@ void Parser::Get_Token ()
                 break;

             case FLOAT_TOKEN:
-                /*
-                POV_PARSER_ASSERT(dynamic_pointer_cast(mToken.raw.value) != nullptr);
-                if (dynamic_pointer_cast(mToken.raw.value) == nullptr)
-                    /// @todo
-                    return;
-                */
-                //mToken.Token_Float = dynamic_pointer_cast(mToken.raw.value)->data;
                 mToken.Token_Float = mToken.raw.floatValue;
                 Write_Token(mToken.raw);
                 break;
@@ -2571,7 +2551,6 @@ void Parser::Invoke_Macro()
         /* Not in same file */
         Cond_Stack.back().Macro_Same_Flag = false;
         Got_EOF=false;
-        POV_PARSER_ASSERT(!readingExternalFile);
         shared_ptr is;
         if (PMac->Cache)
         {
@@ -2611,8 +2590,6 @@ void Parser::Return_From_Macro()
 {
     Check_Macro_Vers();

-    POV_PARSER_ASSERT(!readingExternalFile);
-
     Got_EOF=false;

     if (!mTokenizer.GoToBookmark(Cond_Stack.back().returnToBookmark))
@@ -2872,9 +2849,7 @@ void Parser::Parse_Fopen(void)
     UCS2String ign;
     SYM_ENTRY *Entry;

-    New=reinterpret_cast<DATA_FILE *>(POV_MALLOC(sizeof(DATA_FILE),"user file"));
-    New->In_File=nullptr;
-    New->Out_File=nullptr;
+    New = new DATA_FILE;

     // Safeguard against accidental nesting of other file access directives inside the `#fopen`
     // directive (or the user forgetting portions of the directive).
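
Note: switching from POV_MALLOC to plain `new` in Parse_Fopen above (and to `delete` in Destroy_Ident_Data earlier) is what makes it legal for DATA_FILE to carry non-trivial members such as the tokenizer and the cached token. A small illustration with stand-in types, not the real DATA_FILE:

    #include <cstdlib>
    #include <memory>

    struct SlotWithRawPointers { void* in; void* out; bool busy; };

    struct SlotWithRaii
    {
        std::shared_ptr<int> in;   // placeholder for any non-trivial member
        bool busy = false;
    };

    void Demo()
    {
        // malloc returns uninitialized storage and never runs constructors,
        // so every field must be set by hand and RAII members are off-limits.
        SlotWithRawPointers* a =
            static_cast<SlotWithRawPointers*>(std::malloc(sizeof(SlotWithRawPointers)));
        a->in = nullptr; a->out = nullptr; a->busy = false;
        std::free(a);

        // new runs the constructor, so in-class defaults suffice, and
        // delete runs the destructor, releasing any owning members.
        SlotWithRaii* b = new SlotWithRaii;
        delete b;
    }
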
@@ -2890,19 +2865,15 @@ void Parser::Parse_Fopen(void)
     EXPECT_ONE
         CASE(READ_TOKEN)
-            New->R_Flag = true;
+            New->inTokenizer = std::make_shared();
             rfile = Locate_File(fileName.c_str(), POV_File_Text_User, ign, true);
             if (rfile != nullptr)
-                New->In_File = std::make_shared(fileName.c_str(), rfile.get());
+                New->inTokenizer->SetInputStream(rfile);
             else
-                New->In_File = nullptr;
-
-            if(New->In_File == nullptr)
                 Error ("Cannot open user file %s (read).", UCS2toASCIIString(fileName).c_str());
         END_CASE

         CASE(WRITE_TOKEN)
-            New->R_Flag = false;
             wfile = CreateFile(fileName.c_str(), POV_File_Text_User, false);
             if(wfile != nullptr)
                 New->Out_File = std::make_shared(fileName.c_str(), wfile);
             else
@@ -2914,7 +2885,6 @@ void Parser::Parse_Fopen(void)
         END_CASE

         CASE(APPEND_TOKEN)
-            New->R_Flag = false;
             wfile = CreateFile(fileName.c_str(), POV_File_Text_User, true);
             if(wfile != nullptr)
                 New->Out_File = std::make_shared(fileName.c_str(), wfile);
@@ -2945,7 +2915,7 @@ void Parser::Parse_Fclose(void)
             // NB no need to set Data->busyParsing, as we're not reading any tokens where the
             // tokenizer might stumble upon nested file access directives
             Got_EOF=false;
-            Data->In_File = nullptr;
+            Data->inTokenizer = nullptr;
             Data->Out_File = nullptr;
             Remove_Symbol (SYM_TABLE_GLOBAL,mToken.raw.lexeme.text.c_str(),false,nullptr,0);
         END_CASE
@@ -2973,7 +2943,7 @@ void Parser::Parse_Read()
     if (User_File->busyParsing)
         Error ("Can't nest directives accessing the same file.");
     File_Id=POV_STRDUP(mToken.raw.lexeme.text.c_str());
-    if(User_File->In_File == nullptr)
+    if(User_File->inTokenizer == nullptr)
         Error("Cannot read from file %s because the file is open for writing only.", UCS2toASCIIString(UCS2String(User_File->Out_File->name())).c_str());
     // Safeguard against accidental nesting of other file access directives inside the `#fopen`
     // directive (or the user forgetting portions of the directive).
@@ -3051,7 +3021,7 @@ void Parser::Parse_Read()
     if (End_File)
     {
         Got_EOF=false;
-        User_File->In_File = nullptr;
+        User_File->inTokenizer = nullptr;
         Remove_Symbol (SYM_TABLE_GLOBAL,File_Id,false,nullptr,0);
     }
     POV_FREE(File_Id);
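
Note: the rewritten Parse_Read_Value below reads each item and then an optional trailing comma, pushing back whatever else follows. The shape of that loop, shown here over a plain std::istream instead of the parser's tokenizer:

    #include <istream>
    #include <vector>

    // Reads data of the form "1, 2.5, -3" -- commas between values are
    // optional, mirroring how #read treats separators in a data file.
    std::vector<double> ReadFloats(std::istream& in)
    {
        std::vector<double> result;
        double value;
        while (in >> value)
        {
            result.push_back(value);
            char separator;
            if (in >> separator && separator != ',')
                in.putback(separator);   // one-character "unget", like UnReadToken()
        }
        return result;
    }
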
@@ -3059,137 +3029,127 @@ int Parser::Parse_Read_Value(DATA_FILE *User_File, TokenId Previous, TokenId *NumberPtr, void **DataPtr)
 {
-    /// @todo Re-enable reading of external files.
-#if 1
-    return true;
-#else
-    shared_ptr Temp;
-    bool Temp_R_Flag;
-    DBL Val;
     int End_File=false;
-    int i;
+    int numComponents;
     EXPRESS Express;
+    DBL sign = 1.0;
+    DBL val = 0.0;
+    bool ungetToken = false;

-    Temp = Input_File->inFile;
-    POV_PARSER_ASSERT(!readingExternalFile);
-    Input_File->inFile = User_File->In_File;
-    readingExternalFile = User_File->R_Flag;
-    if(User_File->In_File == nullptr)
+    if(User_File->inTokenizer == nullptr)
         Error("Cannot read from file '%s' because the file is open for writing only.", UCS2toASCIIString(UCS2String(User_File->Out_File->name())).c_str());
-    POV_PARSER_ASSERT(readingExternalFile);
-    User_File->In_File = nullptr; // take control over pointer

-    try
+    if (User_File->ReadNextToken())
     {
-        EXPECT_ONE
-            CASE3 (PLUS_TOKEN,DASH_TOKEN,FLOAT_FUNCT_TOKEN)
-                UNGET
-                Val=Parse_Signed_Float();
+        switch (User_File->inToken.GetTokenId())
+        {
+            case DASH_TOKEN:
+            case PLUS_TOKEN:
+            case FLOAT_TOKEN:
+                User_File->UnReadToken();
+                if (!Parse_Read_Float_Value(val, User_File))
+                    POV_PARSER_ASSERT(false);
                 *NumberPtr = FLOAT_ID_TOKEN;
                 Test_Redefine(Previous,NumberPtr,*DataPtr);
                 *DataPtr = reinterpret_cast<void *>(Create_Float());
-                *(reinterpret_cast<DBL *>(*DataPtr)) = Val;
-                Parse_Comma(); /* data file comma between 2 data items */
-            END_CASE
-
-            CASE (LEFT_ANGLE_TOKEN)
-                UNGET
-                Parse_Angle_Begin();
-
-                i=1;
-                Express[X]=Parse_Signed_Float(); Parse_Comma();
-                Express[Y]=Parse_Signed_Float(); Parse_Comma();
-
-                EXPECT
-                    CASE3 (PLUS_TOKEN,DASH_TOKEN,FLOAT_FUNCT_TOKEN)
-                        UNGET
-                        if (++i>4)
-                        {
-                            Error("Vector data too long");
-                        }
-                        Express[i]=Parse_Signed_Float(); Parse_Comma();
-                    END_CASE
-
-                    CASE (RIGHT_ANGLE_TOKEN)
-                        UNGET
-                        EXIT
-                    END_CASE
-
-                    OTHERWISE
-                        Expectation_Error("vector");
-                    END_CASE
-                END_EXPECT
+                *(reinterpret_cast<DBL *>(*DataPtr)) = val;
+                break;

-                Parse_Angle_End();
+            case LEFT_ANGLE_TOKEN:
+                numComponents = 0;
+                while (Parse_Read_Float_Value(val, User_File))
+                {
+                    if (numComponents == 5)
+                        Error(SourceInfo(User_File->inTokenizer->GetSourceName(), User_File->inToken.lexeme.position), "Too many components in vector.");
+                    Express[numComponents++] = val;
+                    if (!User_File->ReadNextToken())
+                        Error(SourceInfo(User_File->inTokenizer->GetSourceName(), User_File->inToken.lexeme.position), "Incomplete vector.");
+                    if (User_File->inToken.GetTokenId() != COMMA_TOKEN)
+                        User_File->UnReadToken();
+                }
+                if (!User_File->ReadNextToken() || (User_File->inToken.GetTokenId() != RIGHT_ANGLE_TOKEN))
+                    Error(SourceInfo(User_File->inTokenizer->GetSourceName(), User_File->inToken.lexeme.position), "Expected vector component or '>'.");
+                if (numComponents < 2)
+                    Error(SourceInfo(User_File->inTokenizer->GetSourceName(), User_File->inToken.lexeme.position), "Not enough components in vector.");

-                switch(i)
+                switch (numComponents)
                 {
-                    case 1:
+                    case 2:
                         *NumberPtr = UV_ID_TOKEN;
                         Test_Redefine(Previous,NumberPtr,*DataPtr);
                         *DataPtr = reinterpret_cast<void *>(new Vector2d(Express));
                         break;

-                    case 2:
+                    case 3:
                         *NumberPtr = VECTOR_ID_TOKEN;
                         Test_Redefine(Previous,NumberPtr,*DataPtr);
                         *DataPtr = reinterpret_cast<void *>(new Vector3d(Express));
                         break;

-                    case 3:
+                    case 4:
                         *NumberPtr = VECTOR_4D_ID_TOKEN;
                         Test_Redefine(Previous,NumberPtr,*DataPtr);
                         *DataPtr = reinterpret_cast<void *>(Create_Vector_4D());
                         Assign_Vector_4D(reinterpret_cast<DBL *>(*DataPtr), Express);
                         break;

-                    case 4:
-                        *NumberPtr = COLOUR_ID_TOKEN;
+                    case 5:
+                        *NumberPtr = COLOUR_ID_TOKEN;
                         Test_Redefine(Previous,NumberPtr,*DataPtr);
-                        *DataPtr = reinterpret_cast<void *>(Create_Colour());
+                        *DataPtr = reinterpret_cast<void *>(Create_Colour());
                         (*reinterpret_cast(*DataPtr)).Set(Express, 5); /* NK fix assign_colour bug */
                         break;
-                }
-
-                Parse_Comma(); // data file comma between 2 data items
-            END_CASE
+
+                    default:
+                        POV_PARSER_ASSERT(false);
+                        break;
+                }
+                break;

-            CASE(STRING_LITERAL_TOKEN)
+            case STRING_LITERAL_TOKEN:
                 *NumberPtr = STRING_ID_TOKEN;
                 Test_Redefine(Previous,NumberPtr,*DataPtr);
-                *DataPtr = String_Literal_To_UCS2(mToken.raw.lexeme.text.c_str(), false);
-                Parse_Comma(); // data file comma between 2 data items
-            END_CASE
+                POV_PARSER_ASSERT(dynamic_pointer_cast(User_File->inToken.value) != nullptr);
+                *DataPtr = UCS2_strdup(dynamic_pointer_cast(User_File->inToken.value)->GetData().c_str());
+                break;

-            CASE (END_OF_FILE_TOKEN)
-            END_CASE
+            default:
+                Error(SourceInfo(User_File->inTokenizer->GetSourceName(), User_File->inToken.lexeme.position), "Expected float, vector, or string literal");
+                break;
+        }

-            OTHERWISE
-                Expectation_Error ("float, vector, or string literal");
-            END_CASE
-        END_EXPECT
-    }
-    catch (...)
-    {
-        // re-assign the file pointers so that they are properly disposed of later on
-        User_File->In_File = Input_File->inFile;
-        Input_File->inFile = Temp;
-        readingExternalFile = false;
-        throw;
+        if (User_File->ReadNextToken() && (User_File->inToken.GetTokenId() != COMMA_TOKEN))
+            User_File->UnReadToken();
     }

-    if (mToken.Token_Id==END_OF_FILE_TOKEN)
-        End_File = true;
+    /// @todo Returning `true` in case of end-of-file is counter-intuitive.
+    return (User_File->inToken.id == END_OF_FILE_TOKEN);
+}

-    mToken.End_Of_File = false;
-    mToken.Unget_Token = false;
-    Got_EOF = false;
-    User_File->In_File = Input_File->inFile; // return control over pointer
-    Input_File->inFile = Temp;
-    readingExternalFile = false;
+bool Parser::Parse_Read_Float_Value(DBL& val, DATA_FILE* User_File)
+{
+    DBL sign = 1.0;

-    return End_File;
-#endif
+    if (!User_File->ReadNextToken())
+        return false;
+
+    switch (User_File->inToken.GetTokenId())
+    {
+        case DASH_TOKEN:
+            sign = -1.0;
+            // FALLTHROUGH
+        case PLUS_TOKEN:
+            if (!User_File->ReadNextToken() || (User_File->inToken.GetTokenId() != FLOAT_TOKEN))
+                Error(SourceInfo(User_File->inTokenizer->GetSourceName(), User_File->inToken.lexeme.position), "Expected float literal");
+            // FALLTHROUGH
+        case FLOAT_TOKEN:
+            val = sign * User_File->inToken.floatValue;
+            return true;
+
+        default:
+            User_File->UnReadToken();
+            return false;
+    }
 }

 void Parser::Parse_Write(void)
@@ -3207,7 +3167,7 @@ void Parser::Parse_Write(void)
     if (User_File->busyParsing)
         Error ("Can't nest directives accessing the same file.");
     if(User_File->Out_File == nullptr)
-        Error("Cannot write to file %s because the file is open for reading only.", UCS2toASCIIString(UCS2String(User_File->In_File->name())).c_str());
+        Error("Cannot write to file %s because the file is open for reading only.", UCS2toASCIIString(User_File->inTokenizer->GetSourceName()).c_str());

     // Safeguard against accidental nesting of other file access directives inside the `#fopen`
     // directive (or the user forgetting portions of the directive).
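
Note: Parse_Read_Float_Value above folds an optional sign into the following float by falling through the DASH/PLUS cases. The same logic, restated without the tokenizer machinery (the token type here is made up for illustration):

    enum class Tok { Dash, Plus, Float, Other };

    struct SimpleToken { Tok id; double value; };

    // Accepts "-1.5", "+2", "3.0" as (optional sign, float); rejects anything else.
    bool ParseSignedFloat(const SimpleToken* tokens, int count, double& out)
    {
        if (count == 0)
            return false;
        double sign = 1.0;
        int i = 0;
        if (tokens[0].id == Tok::Dash || tokens[0].id == Tok::Plus)
        {
            if (tokens[0].id == Tok::Dash)
                sign = -1.0;
            i = 1;                        // consume the sign token
        }
        if (i >= count || tokens[i].id != Tok::Float)
            return false;                 // a lone sign is not a number
        out = sign * tokens[i].value;
        return true;
    }
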
@@ -3545,8 +3505,6 @@ void Parser::IncludeHeader(const UCS2String& formalFileName)

     mTokenizer.SetInputStream(is);

-    POV_PARSER_ASSERT(!readingExternalFile);
-
     Add_Sym_Table();

     mToken.sourceFile = mTokenizer.GetSource();
diff --git a/source/parser/parsertypes.cpp b/source/parser/parsertypes.cpp
index a1a5ea636..8a7ba6ed9 100644
--- a/source/parser/parsertypes.cpp
+++ b/source/parser/parsertypes.cpp
@@ -53,6 +53,9 @@ namespace pov_parser
 {

+LexemePosition::LexemePosition() : SourcePosition(1, 1, 0)
+{}
+
 bool LexemePosition::operator==(const LexemePosition& o) const
 {
     bool result = (offset == o.offset);
@@ -60,12 +63,6 @@ bool LexemePosition::operator==(const LexemePosition& o) const
     return result;
 }

-LexemePosition::LexemePosition() :
-    line(1),
-    column(1),
-    offset(0)
-{}
-
 POV_OFF_T LexemePosition::operator-(const LexemePosition& o) const
 {
     return offset - o.offset;
diff --git a/source/parser/parsertypes.h b/source/parser/parsertypes.h
index 21b57f652..d195480bf 100644
--- a/source/parser/parsertypes.h
+++ b/source/parser/parsertypes.h
@@ -48,6 +48,9 @@
 #include "base/types.h"
 #include "base/messenger.h"

+// POV-Ray header files (core module)
+#include "core/coretypes.h"
+
 namespace pov_base
 {
 class IStream;
@@ -67,14 +70,9 @@
 using ConstSourcePtr = shared_ptr;

 //------------------------------------------------------------------------------

-struct LexemePosition
+struct LexemePosition : pov::SourcePosition
 {
-    POV_LONG line;
-    POV_LONG column;
-    POV_OFF_T offset;
-
     LexemePosition();
-    LexemePosition(const LexemePosition& o) = default;
     bool operator==(const LexemePosition& o) const;
     POV_OFF_T operator-(const LexemePosition& o) const;
 };
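
Note: with LexemePosition now deriving from pov::SourcePosition, the line/column/offset triple lives in a single struct, and a lexeme position can be handed straight to anything that stores a SourcePosition (such as the SourceInfo-based Error() calls added in parser_tokenizer.cpp). A minimal sketch of that consolidation, using stand-in names:

    #include <cstdint>

    struct BasePosition                    // stands in for pov::SourcePosition
    {
        std::int64_t line;
        std::int64_t column;
        std::int64_t offset;
        BasePosition(std::int64_t l, std::int64_t c, std::int64_t o)
            : line(l), column(c), offset(o) {}
    };

    struct TokenPosition : BasePosition    // stands in for LexemePosition
    {
        TokenPosition() : BasePosition(1, 1, 0) {}   // lexemes start counting at 1:1

        std::int64_t operator-(const TokenPosition& rhs) const
        {
            return offset - rhs.offset;    // distance in bytes within the stream
        }
    };

    // Because TokenPosition *is a* BasePosition, it can be stored or passed
    // wherever a BasePosition is expected, with no field-by-field copying.
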