diff --git a/internal/bundler_tests/bundler_css_test.go b/internal/bundler_tests/bundler_css_test.go
index e031e458515..a3c98e78a68 100644
--- a/internal/bundler_tests/bundler_css_test.go
+++ b/internal/bundler_tests/bundler_css_test.go
@@ -1464,3 +1464,40 @@ entry.js: WARNING: Import "foo" will always be undefined because there is no mat
 `,
 	})
 }
+
+func TestCSSMalformedAtImport(t *testing.T) {
+	css_suite.expectBundled(t, bundled{
+		files: map[string]string{
+			"/entry.css": `
+				@import "./url-token-eof.css";
+				@import "./url-token-whitespace-eof.css";
+				@import "./function-token-eof.css";
+				@import "./function-token-whitespace-eof.css";
+			`,
+			"/url-token-eof.css": `@import url(https://example.com/url-token-eof.css`,
+			"/url-token-whitespace-eof.css": `
+				@import url(https://example.com/url-token-whitespace-eof.css
+			`,
+			"/function-token-eof.css": `@import url("https://example.com/function-token-eof.css"`,
+			"/function-token-whitespace-eof.css": `
+				@import url("https://example.com/function-token-whitespace-eof.css"
+			`,
+		},
+		entryPaths: []string{"/entry.css"},
+		options: config.Options{
+			Mode:         config.ModeBundle,
+			AbsOutputDir: "/out",
+		},
+		expectedScanLog: `function-token-eof.css: WARNING: Expected ")" to go with "("
+function-token-eof.css: NOTE: The unbalanced "(" is here:
+function-token-whitespace-eof.css: WARNING: Expected ")" to go with "("
+function-token-whitespace-eof.css: NOTE: The unbalanced "(" is here:
+url-token-eof.css: WARNING: Expected ")" to end URL token
+url-token-eof.css: NOTE: The unbalanced "(" is here:
+url-token-eof.css: WARNING: Expected ";" but found end of file
+url-token-whitespace-eof.css: WARNING: Expected ")" to end URL token
+url-token-whitespace-eof.css: NOTE: The unbalanced "(" is here:
+url-token-whitespace-eof.css: WARNING: Expected ";" but found end of file
+`,
+	})
+}
diff --git a/internal/bundler_tests/snapshots/snapshots_css.txt b/internal/bundler_tests/snapshots/snapshots_css.txt
index cb4d4fffcec..3a7b2967b22 100644
--- a/internal/bundler_tests/snapshots/snapshots_css.txt
+++ b/internal/bundler_tests/snapshots/snapshots_css.txt
@@ -178,6 +178,24 @@ console.log(void 0);
   color: red;
 }
 
+================================================================================
+TestCSSMalformedAtImport
+---------- /out/entry.css ----------
+@import "https://example.com/url-token-eof.css";
+@import "https://example.com/url-token-whitespace-eof.css";
+@import "https://example.com/function-token-eof.css";
+@import "https://example.com/function-token-whitespace-eof.css";
+
+/* url-token-eof.css */
+
+/* url-token-whitespace-eof.css */
+
+/* function-token-eof.css */
+
+/* function-token-whitespace-eof.css */
+
+/* entry.css */
+
 ================================================================================
 TestCSSNestingOldBrowser
 ---------- /out/two-type-selectors.css ----------
diff --git a/internal/css_lexer/css_lexer.go b/internal/css_lexer/css_lexer.go
index c2702ab229c..b63ff733def 100644
--- a/internal/css_lexer/css_lexer.go
+++ b/internal/css_lexer/css_lexer.go
@@ -148,7 +148,12 @@ func (token Token) DecodedText(contents string) string {
 
 	case TURL:
 		start := 4
-		end := len(raw) - 1
+		end := len(raw)
+
+		// Note: URL tokens with syntax errors may not have a trailing ")"
+		if raw[end-1] == ')' {
+			end--
+		}
 
 		// Trim leading and trailing whitespace
 		for start < end && isWhitespace(rune(raw[start])) {
@@ -753,6 +758,7 @@ func (lexer *lexer) consumeIdentLike() T {
 	name := lexer.consumeName()
 
 	if lexer.codePoint == '(' {
+		matchingLoc := logger.Loc{Start: lexer.Token.Range.End()}
 		lexer.step()
 		if len(name) == 3 {
 			u, r, l := name[0], name[1], name[2]
@@ -761,7 +767,7 @@ func (lexer *lexer) consumeIdentLike() T {
 				lexer.step()
 			}
 			if lexer.codePoint != '"' && lexer.codePoint != '\'' {
-				return lexer.consumeURL()
+				return lexer.consumeURL(matchingLoc)
 			}
 		}
 	}
@@ -771,7 +777,7 @@ func (lexer *lexer) consumeIdentLike() T {
 	return TIdent
 }
 
-func (lexer *lexer) consumeURL() T {
+func (lexer *lexer) consumeURL(matchingLoc logger.Loc) T {
 validURL:
 	for {
 		switch lexer.codePoint {
@@ -781,8 +787,9 @@ validURL:
 
 		case eof:
 			loc := logger.Loc{Start: lexer.Token.Range.End()}
-			lexer.log.AddError(&lexer.tracker, logger.Range{Loc: loc}, "Expected \")\" to end URL token")
-			return TBadURL
+			lexer.log.AddIDWithNotes(logger.MsgID_CSS_CSSSyntaxError, logger.Warning, &lexer.tracker, logger.Range{Loc: loc}, "Expected \")\" to end URL token",
+				[]logger.MsgData{lexer.tracker.MsgData(logger.Range{Loc: matchingLoc, Len: 1}, "The unbalanced \"(\" is here:")})
+			return TURL
 
 		case ' ', '\t', '\n', '\r', '\f':
 			lexer.step()
@@ -791,7 +798,11 @@ validURL:
 			}
 			if lexer.codePoint != ')' {
 				loc := logger.Loc{Start: lexer.Token.Range.End()}
-				lexer.log.AddError(&lexer.tracker, logger.Range{Loc: loc}, "Expected \")\" to end URL token")
+				lexer.log.AddIDWithNotes(logger.MsgID_CSS_CSSSyntaxError, logger.Warning, &lexer.tracker, logger.Range{Loc: loc}, "Expected \")\" to end URL token",
+					[]logger.MsgData{lexer.tracker.MsgData(logger.Range{Loc: matchingLoc, Len: 1}, "The unbalanced \"(\" is here:")})
+				if lexer.codePoint == eof {
+					return TURL
+				}
 				break validURL
 			}
 			lexer.step()
@@ -799,13 +810,14 @@ validURL:
 
 		case '"', '\'', '(':
 			r := logger.Range{Loc: logger.Loc{Start: lexer.Token.Range.End()}, Len: 1}
-			lexer.log.AddError(&lexer.tracker, r, "Expected \")\" to end URL token")
+			lexer.log.AddIDWithNotes(logger.MsgID_CSS_CSSSyntaxError, logger.Warning, &lexer.tracker, r, "Expected \")\" to end URL token",
+				[]logger.MsgData{lexer.tracker.MsgData(logger.Range{Loc: matchingLoc, Len: 1}, "The unbalanced \"(\" is here:")})
 			break validURL
 
 		case '\\':
 			if !lexer.isValidEscape() {
 				r := logger.Range{Loc: logger.Loc{Start: lexer.Token.Range.End()}, Len: 1}
-				lexer.log.AddError(&lexer.tracker, r, "Invalid escape")
+				lexer.log.AddID(logger.MsgID_CSS_CSSSyntaxError, logger.Warning, &lexer.tracker, r, "Invalid escape")
 				break validURL
 			}
 			lexer.consumeEscape()
@@ -813,7 +825,8 @@ validURL:
 		default:
 			if isNonPrintable(lexer.codePoint) {
 				r := logger.Range{Loc: logger.Loc{Start: lexer.Token.Range.End()}, Len: 1}
-				lexer.log.AddError(&lexer.tracker, r, "Unexpected non-printable character in URL token")
+				lexer.log.AddID(logger.MsgID_CSS_CSSSyntaxError, logger.Warning, &lexer.tracker, r, "Unexpected non-printable character in URL token")
+				break validURL
 			}
 			lexer.step()
 		}
diff --git a/internal/css_parser/css_parser.go b/internal/css_parser/css_parser.go
index 6a29f4340c4..94fedf8126d 100644
--- a/internal/css_parser/css_parser.go
+++ b/internal/css_parser/css_parser.go
@@ -911,7 +911,8 @@ func (p *parser) parseURLOrString() (string, logger.Range, bool) {
 			p.advance()
 			t = p.current()
 			text := p.decoded()
-			if p.expect(css_lexer.TString) && p.expectWithMatchingLoc(css_lexer.TCloseParen, matchingLoc) {
+			if p.expect(css_lexer.TString) {
+				p.expectWithMatchingLoc(css_lexer.TCloseParen, matchingLoc)
 				return text, t.Range, true
 			}
 		}
diff --git a/internal/css_parser/css_parser_test.go b/internal/css_parser/css_parser_test.go
index d8abcb577f4..dc3f6d40547 100644
--- a/internal/css_parser/css_parser_test.go
+++ b/internal/css_parser/css_parser_test.go
@@ -1287,10 +1287,10 @@ func TestAtImport(t *testing.T) {
 	expectPrinted(t, "@import;", "@import;\n", ": WARNING: Expected URL token but found \";\"\n")
 	expectPrinted(t, "@import ;", "@import;\n", ": WARNING: Expected URL token but found \";\"\n")
 	expectPrinted(t, "@import \"foo.css\"", "@import \"foo.css\";\n", ": WARNING: Expected \";\" but found end of file\n")
-	expectPrinted(t, "@import url(\"foo.css\";", "@import url(foo.css);\n", ": WARNING: Expected \")\" to go with \"(\"\n: NOTE: The unbalanced \"(\" is here:\n")
+	expectPrinted(t, "@import url(\"foo.css\";", "@import \"foo.css\";\n", ": WARNING: Expected \")\" to go with \"(\"\n: NOTE: The unbalanced \"(\" is here:\n")
 	expectPrinted(t, "@import noturl(\"foo.css\");", "@import noturl(\"foo.css\");\n", ": WARNING: Expected URL token but found \"noturl(\"\n")
-	expectPrinted(t, "@import url(", "@import url(;\n", `: WARNING: Expected URL token but found bad URL token
-: ERROR: Expected ")" to end URL token
+	expectPrinted(t, "@import url(foo.css", "@import \"foo.css\";\n", `: WARNING: Expected ")" to end URL token
+: NOTE: The unbalanced "(" is here:
 : WARNING: Expected ";" but found end of file
 `)
 
@@ -2222,10 +2222,14 @@ func TestParseErrorRecovery(t *testing.T) {
 	expectPrinted(t, "x { y: {", "x {\n  y: {};\n}\n", ": WARNING: Expected \"}\" to go with \"{\"\n: NOTE: The unbalanced \"{\" is here:\n")
 	expectPrinted(t, "x { y: z(", "x {\n  y: z();\n}\n", ": WARNING: Expected \")\" to go with \"(\"\n: NOTE: The unbalanced \"(\" is here:\n")
 	expectPrinted(t, "x { y: z(abc", "x {\n  y: z(abc);\n}\n", ": WARNING: Expected \")\" to go with \"(\"\n: NOTE: The unbalanced \"(\" is here:\n")
-	expectPrinted(t, "x { y: url(", "x {\n  y: url(;\n}\n",
-		": ERROR: Expected \")\" to end URL token\n: WARNING: Expected \"}\" to go with \"{\"\n: NOTE: The unbalanced \"{\" is here:\n")
-	expectPrinted(t, "x { y: url(abc", "x {\n  y: url(abc;\n}\n",
-		": ERROR: Expected \")\" to end URL token\n: WARNING: Expected \"}\" to go with \"{\"\n: NOTE: The unbalanced \"{\" is here:\n")
+	expectPrinted(t, "x { y: url(", "x {\n  y: url();\n}\n",
+		": WARNING: Expected \")\" to end URL token\n: NOTE: The unbalanced \"(\" is here:\n: WARNING: Expected \"}\" to go with \"{\"\n: NOTE: The unbalanced \"{\" is here:\n")
+	expectPrinted(t, "x { y: url(abc", "x {\n  y: url(abc);\n}\n",
+		": WARNING: Expected \")\" to end URL token\n: NOTE: The unbalanced \"(\" is here:\n: WARNING: Expected \"}\" to go with \"{\"\n: NOTE: The unbalanced \"{\" is here:\n")
+	expectPrinted(t, "x { y: url(; }", "x {\n  y: url(; };\n}\n",
+		": WARNING: Expected \")\" to end URL token\n: NOTE: The unbalanced \"(\" is here:\n: WARNING: Expected \"}\" to go with \"{\"\n: NOTE: The unbalanced \"{\" is here:\n")
+	expectPrinted(t, "x { y: url(abc;", "x {\n  y: url(abc;);\n}\n",
+		": WARNING: Expected \")\" to end URL token\n: NOTE: The unbalanced \"(\" is here:\n: WARNING: Expected \"}\" to go with \"{\"\n: NOTE: The unbalanced \"{\" is here:\n")
 	expectPrinted(t, "/* @license */ x {} /* @preserve", "/* @license */\nx {\n}\n",
 		": ERROR: Expected \"*/\" to terminate multi-line comment\n: NOTE: The multi-line comment starts here:\n")
 	expectPrinted(t, "a { b: c; d: 'e\n f: g; h: i }", "a {\n  b: c;\n  d: 'e\n f: g;\n  h: i;\n}\n", ": WARNING: Unterminated string token\n")
diff --git a/internal/resolver/resolver.go b/internal/resolver/resolver.go
index 45ba7d5b05c..99698321e94 100644
--- a/internal/resolver/resolver.go
+++ b/internal/resolver/resolver.go
@@ -829,7 +829,7 @@ func (r resolverQuery) resolveWithoutSymlinks(sourceDir string, sourceDirInfo *d
 	// Check both relative and package paths for CSS URL tokens, with relative
 	// paths taking precedence over package paths to match Webpack behavior.
 	isPackagePath := IsPackagePath(importPath)
-	checkRelative := !isPackagePath || r.kind == ast.ImportURL || r.kind == ast.ImportAt
+	checkRelative := !isPackagePath || r.kind == ast.ImportURL || r.kind == ast.ImportAt || r.kind == ast.ImportAtConditional
 	checkPackage := isPackagePath
 
 	if checkRelative {