diff --git a/lib/modules/manager/gradle/common.ts b/lib/modules/manager/gradle/common.ts
deleted file mode 100644
index 054cd5f63e9578..00000000000000
--- a/lib/modules/manager/gradle/common.ts
+++ /dev/null
@@ -1,52 +0,0 @@
-export { MAVEN_REPO } from '../../datasource/maven/common';
-
-export const JCENTER_REPO = 'https://jcenter.bintray.com/';
-export const GOOGLE_REPO = 'https://dl.google.com/android/maven2/';
-export const GRADLE_PLUGIN_PORTAL_REPO = 'https://plugins.gradle.org/m2/';
-
-// TODO: convert to types
-// eslint-disable-next-line typescript-enum/no-enum
-export enum TokenType {
-  Space = 'space',
-  LineComment = 'lineComment',
-  MultiComment = 'multiComment',
-  Newline = 'newline',
-
-  Semicolon = 'semicolon',
-  Colon = 'colon',
-  Dot = 'dot',
-  Comma = 'comma',
-  Operator = 'operator',
-
-  Assignment = 'assignment',
-
-  Word = 'word',
-
-  LeftParen = 'leftParen',
-  RightParen = 'rightParen',
-
-  LeftBracket = 'leftBracket',
-  RightBracket = 'rightBracket',
-
-  LeftBrace = 'leftBrace',
-  RightBrace = 'rightBrace',
-
-  SingleQuotedStart = 'singleQuotedStart',
-  SingleQuotedFinish = 'singleQuotedFinish',
-
-  DoubleQuotedStart = 'doubleQuotedStart',
-  StringInterpolation = 'interpolation',
-  IgnoredInterpolationStart = 'ignoredInterpolation',
-  Variable = 'variable',
-  DoubleQuotedFinish = 'doubleQuotedFinish',
-
-  TripleSingleQuotedStart = 'tripleQuotedStart',
-  TripleDoubleQuotedStart = 'tripleDoubleQuotedStart',
-  TripleQuotedFinish = 'tripleQuotedFinish',
-
-  Chars = 'chars',
-  EscapedChar = 'escapedChar',
-  String = 'string',
-
-  UnknownFragment = 'unknownFragment',
-}
diff --git a/lib/modules/manager/gradle/parser.spec.ts b/lib/modules/manager/gradle/parser.spec.ts
index 2bfc8ce166a8a8..194333c029a7cf 100644
--- a/lib/modules/manager/gradle/parser.spec.ts
+++ b/lib/modules/manager/gradle/parser.spec.ts
@@ -1,12 +1,7 @@
 import { Fixtures } from '../../../../test/fixtures';
 import { fs, logger } from '../../../../test/util';
-import {
-  GOOGLE_REPO,
-  GRADLE_PLUGIN_PORTAL_REPO,
-  JCENTER_REPO,
-  MAVEN_REPO,
-} from './common';
 import { parseGradle, parseProps } from './parser';
+import { REGISTRY_URLS } from './parser/common';

 jest.mock('../../../util/fs');

@@ -140,11 +135,11 @@ describe('modules/manager/gradle/parser', () => {
   describe('predefined registries', () => {
     test.each`
       input                                          | output
-      ${'mavenCentral()'}                            | ${MAVEN_REPO}
-      ${'google()'}                                  | ${GOOGLE_REPO}
-      ${'google { content { includeGroup "foo" } }'} | ${GOOGLE_REPO}
-      ${'gradlePluginPortal()'}                      | ${GRADLE_PLUGIN_PORTAL_REPO}
-      ${'jcenter()'}                                 | ${JCENTER_REPO}
+      ${'mavenCentral()'}                            | ${REGISTRY_URLS.mavenCentral}
+      ${'google()'}                                  | ${REGISTRY_URLS.google}
+      ${'google { content { includeGroup "foo" } }'} | ${REGISTRY_URLS.google}
+      ${'gradlePluginPortal()'}                      | ${REGISTRY_URLS.gradlePluginPortal}
+      ${'jcenter()'}                                 | ${REGISTRY_URLS.jcenter}
     `('$input', ({ input, output }) => {
       const { urls } = parseGradle(input);
       expect(urls).toStrictEqual([output].filter(Boolean));
diff --git a/lib/modules/manager/gradle/tokenizer.spec.ts b/lib/modules/manager/gradle/tokenizer.spec.ts
deleted file mode 100644
index 1095e7f6596083..00000000000000
--- a/lib/modules/manager/gradle/tokenizer.spec.ts
+++ /dev/null
@@ -1,183 +0,0 @@
-import { TokenType } from './common';
-import { extractRawTokens, tokenize } from './tokenizer';
-
-function tokenTypes(input: string): string[] {
-  return extractRawTokens(input).map((token) => token.type);
-}
-
-describe('modules/manager/gradle/tokenizer', () => {
-  it('extractTokens', () => {
-    const samples = {
-      ' ': [TokenType.Space],
-      '\t': [TokenType.Space],
-      '\r': [TokenType.Space],
-      '\t\r': [TokenType.Space],
-      '\r\t': [TokenType.Space],
-      '// foobar': [TokenType.LineComment],
-      '/* foobar */': [TokenType.MultiComment],
-      '/* foo *//* bar */': [TokenType.MultiComment, TokenType.MultiComment],
-      '/* foo\nbar\nbaz */': [TokenType.MultiComment],
-      '/* foo\r\nbar\r\nbaz */': [TokenType.MultiComment],
-      '\n\n': [TokenType.Newline, TokenType.Newline],
-      ':': [TokenType.Colon],
-      ';': [TokenType.Semicolon],
-      '.': [TokenType.Dot],
-      '==': [TokenType.Operator],
-      '=': [TokenType.Assignment],
-      foo: [TokenType.Word],
-      'foo.bar': [TokenType.Word, TokenType.Dot, TokenType.Word],
-      'foo()': [TokenType.Word, TokenType.LeftParen, TokenType.RightParen],
-      'foo[]': [TokenType.Word, TokenType.LeftBracket, TokenType.RightBracket],
-      '{{}}': [
-        TokenType.LeftBrace,
-        TokenType.LeftBrace,
-        TokenType.RightBrace,
-        TokenType.RightBrace,
-      ],
-      '@': [TokenType.UnknownFragment],
-      "'\\''": [
-        TokenType.SingleQuotedStart,
-        TokenType.EscapedChar,
-        TokenType.SingleQuotedFinish,
-      ],
-      "'\\\"'": [
-        TokenType.SingleQuotedStart,
-        TokenType.EscapedChar,
-        TokenType.SingleQuotedFinish,
-      ],
-      "'\\'\\\"'": [
-        TokenType.SingleQuotedStart,
-        TokenType.EscapedChar,
-        TokenType.EscapedChar,
-        TokenType.SingleQuotedFinish,
-      ],
-      "'x'": [
-        TokenType.SingleQuotedStart,
-        TokenType.Chars,
-        TokenType.SingleQuotedFinish,
-      ],
-      "'\n'": [
-        TokenType.SingleQuotedStart,
-        TokenType.Chars,
-        TokenType.SingleQuotedFinish,
-      ],
-      "'$x'": [
-        TokenType.SingleQuotedStart,
-        TokenType.Chars,
-        TokenType.SingleQuotedFinish,
-      ],
-      "''''''": ['tripleQuotedStart', 'tripleQuotedFinish'],
-      "'''x'''": ['tripleQuotedStart', TokenType.Chars, 'tripleQuotedFinish'],
-      "'''\n'''": ['tripleQuotedStart', TokenType.Chars, 'tripleQuotedFinish'],
-      "'''\\''''": [
-        'tripleQuotedStart',
-        TokenType.EscapedChar,
-        'tripleQuotedFinish',
-      ],
-      "'''\\\"'''": [
-        'tripleQuotedStart',
-        TokenType.EscapedChar,
-        'tripleQuotedFinish',
-      ],
-      "'''\\'\\\"'''": [
-        'tripleQuotedStart',
-        TokenType.EscapedChar,
-        TokenType.EscapedChar,
-        'tripleQuotedFinish',
-      ],
-      '""': [TokenType.DoubleQuotedStart, TokenType.DoubleQuotedFinish],
-      '"\\""': [
-        TokenType.DoubleQuotedStart,
-        TokenType.EscapedChar,
-        TokenType.DoubleQuotedFinish,
-      ],
-      '"\\\'"': [
-        TokenType.DoubleQuotedStart,
-        TokenType.EscapedChar,
-        TokenType.DoubleQuotedFinish,
-      ],
-      '"\\"\\\'"': [
-        TokenType.DoubleQuotedStart,
-        TokenType.EscapedChar,
-        TokenType.EscapedChar,
-        TokenType.DoubleQuotedFinish,
-      ],
-      '"x"': [
-        TokenType.DoubleQuotedStart,
-        TokenType.Chars,
-        TokenType.DoubleQuotedFinish,
-      ],
-      '"\n"': [
-        TokenType.DoubleQuotedStart,
-        TokenType.Chars,
-        TokenType.DoubleQuotedFinish,
-      ],
-
-      '"${x}"': [
-        TokenType.DoubleQuotedStart,
-        TokenType.Variable,
-        TokenType.DoubleQuotedFinish,
-      ],
-
-      '"${foo}"': [
-        TokenType.DoubleQuotedStart,
-        TokenType.Variable,
-        TokenType.DoubleQuotedFinish,
-      ],
-
-      '"${x()}"': [
-        TokenType.DoubleQuotedStart,
-        TokenType.IgnoredInterpolationStart,
-        TokenType.UnknownFragment,
-        TokenType.RightBrace,
-        TokenType.DoubleQuotedFinish,
-      ],
-
-      '"${x{}}"': [
-        TokenType.DoubleQuotedStart,
-        TokenType.IgnoredInterpolationStart,
-        TokenType.UnknownFragment,
-        TokenType.LeftBrace,
-        TokenType.RightBrace,
-        TokenType.RightBrace,
-        TokenType.DoubleQuotedFinish,
-      ],
-    };
-    for (const [str, result] of Object.entries(samples)) {
-      expect(tokenTypes(str)).toStrictEqual(result);
-    }
-  });
-
-  it('tokenize', () => {
-    const samples = {
-      '@': [{ type: TokenType.UnknownFragment }],
-      '@@@': [{ type: TokenType.UnknownFragment }],
-      "'foobar'": [{ type: TokenType.String, value: 'foobar' }],
-      "'\\b'": [{ type: TokenType.String, value: '\b' }],
-      "'''foobar'''": [{ type: TokenType.String, value: 'foobar' }],
-      '"foobar"': [{ type: TokenType.String, value: 'foobar' }],
-      '"$foo"': [
-        {
-          type: TokenType.StringInterpolation,
-          children: [{ type: TokenType.Variable }],
-        },
-      ],
-
-      '" foo ${ bar } baz "': [
-        {
-          type: TokenType.StringInterpolation,
-          children: [
-            { type: TokenType.String, value: ' foo ' },
-            { type: TokenType.Variable, value: 'bar' },
-            { type: TokenType.String, value: ' baz ' },
-          ],
-        },
-      ],
-
-      '"${ x + y }"': [{ type: TokenType.StringInterpolation, isValid: false }],
-    };
-    for (const [str, result] of Object.entries(samples)) {
-      expect(tokenize(str)).toMatchObject(result);
-    }
-  });
-});
diff --git a/lib/modules/manager/gradle/tokenizer.ts b/lib/modules/manager/gradle/tokenizer.ts
deleted file mode 100644
index 4a6a19e909dd6a..00000000000000
--- a/lib/modules/manager/gradle/tokenizer.ts
+++ /dev/null
@@ -1,219 +0,0 @@
-import moo from 'moo';
-import { regEx } from '../../../util/regex';
-import { TokenType } from './common';
-import type { StringInterpolation, Token } from './types';
-
-const escapedCharRegex = /\\['"bfnrt\\]/; // TODO #12870
-const escapedChars = {
-  [TokenType.EscapedChar]: {
-    match: escapedCharRegex,
-    value: (x: string): string =>
-      /* istanbul ignore next */
-      ({
-        "\\'": "'",
-        '\\"': '"',
-        '\\b': '\b',
-        '\\f': '\f',
-        '\\n': '\n',
-        '\\r': '\r',
-        '\\t': '\t',
-        '\\\\': '\\',
-      }[x] ?? x),
-  },
-};
-
-const lexer = moo.states({
-  // Top-level Groovy lexemes
-  main: {
-    [TokenType.LineComment]: { match: /\/\/.*?$/ }, // TODO #12870
-    [TokenType.MultiComment]: { match: /\/\*[^]*?\*\//, lineBreaks: true }, // TODO #12870
-    [TokenType.Newline]: { match: /\r?\n/, lineBreaks: true }, // TODO #12870
-    [TokenType.Space]: { match: /[ \t\r]+/ }, // TODO #12870
-    [TokenType.Semicolon]: ';',
-    [TokenType.Colon]: ':',
-    [TokenType.Dot]: '.',
-    [TokenType.Comma]: ',',
-    [TokenType.Operator]: /(?:==|\+=?|-=?|\/=?|\*\*?|\.+|:)/, // TODO #12870
-    [TokenType.Assignment]: '=',
-    [TokenType.Word]: { match: /[a-zA-Z$_][a-zA-Z0-9$_]*/ }, // TODO #12870
-    [TokenType.LeftParen]: { match: '(' },
-    [TokenType.RightParen]: { match: ')' },
-    [TokenType.LeftBracket]: { match: '[' },
-    [TokenType.RightBracket]: { match: ']' },
-    [TokenType.LeftBrace]: { match: '{', push: 'main' },
-    [TokenType.RightBrace]: { match: '}', pop: 1 },
-    [TokenType.TripleSingleQuotedStart]: {
-      match: "'''",
-      push: TokenType.TripleSingleQuotedStart,
-    },
-    [TokenType.TripleDoubleQuotedStart]: {
-      match: '"""',
-      push: TokenType.TripleDoubleQuotedStart,
-    },
-    [TokenType.SingleQuotedStart]: {
-      match: "'",
-      push: TokenType.SingleQuotedStart,
-    },
-    [TokenType.DoubleQuotedStart]: {
-      match: '"',
-      push: TokenType.DoubleQuotedStart,
-    },
-    [TokenType.UnknownFragment]: moo.fallback,
-  },
-
-  // Tokenize triple-quoted string literal characters
-  [TokenType.TripleSingleQuotedStart]: {
-    ...escapedChars,
-    [TokenType.TripleQuotedFinish]: { match: "'''", pop: 1 },
-    [TokenType.Chars]: moo.fallback,
-  },
-  [TokenType.TripleDoubleQuotedStart]: {
-    ...escapedChars,
-    [TokenType.TripleQuotedFinish]: { match: '"""', pop: 1 },
-    [TokenType.Chars]: moo.fallback,
-  },
-
-  // Tokenize single-quoted string literal characters
-  [TokenType.SingleQuotedStart]: {
-    ...escapedChars,
-    [TokenType.SingleQuotedFinish]: { match: "'", pop: 1 },
-    [TokenType.Chars]: moo.fallback,
-  },
-
-  // Tokenize double-quoted string literal chars and interpolations
-  [TokenType.DoubleQuotedStart]: {
-    ...escapedChars,
-    [TokenType.DoubleQuotedFinish]: { match: '"', pop: 1 },
-    variable: {
-      // Supported: ${foo}, $foo, ${ foo.bar.baz }, $foo.bar.baz
-      match:
-        /\${\s*[a-zA-Z_][a-zA-Z0-9_]*(?:\s*\.\s*[a-zA-Z_][a-zA-Z0-9_]*)*\s*}|\$[a-zA-Z_][a-zA-Z0-9_]*(?:\.[a-zA-Z_][a-zA-Z0-9_]*)*/, // TODO #12870
-      value: (x: string): string =>
-        x.replace(regEx(/^\${?\s*/), '').replace(regEx(/\s*}$/), ''),
-    },
-    [TokenType.IgnoredInterpolationStart]: {
-      match: /\${/, // TODO #12870
-      push: TokenType.IgnoredInterpolationStart,
-    },
-    [TokenType.Chars]: moo.fallback,
-  },
-
-  // Ignore interpolation of complex expressions,
-  // but track the balance of braces to find the end of interpolation.
-  [TokenType.IgnoredInterpolationStart]: {
-    [TokenType.LeftBrace]: {
-      match: '{',
-      push: TokenType.IgnoredInterpolationStart,
-    },
-    [TokenType.RightBrace]: { match: '}', pop: 1 },
-    [TokenType.UnknownFragment]: moo.fallback,
-  },
-});
-
-//
-// Turn substrings of chars and escaped chars into single String token
-//
-function processChars(acc: Token[], token: Token): Token[] {
-  const tokenType = token.type;
-  const prevToken: Token = acc[acc.length - 1];
-  if ([TokenType.Chars, TokenType.EscapedChar].includes(tokenType)) {
-    // istanbul ignore if
-    if (prevToken?.type === TokenType.String) {
-      prevToken.value += token.value;
-    } else {
-      acc.push({ ...token, type: TokenType.String });
-    }
-  } else {
-    acc.push(token);
-  }
-  return acc;
-}
-
-export function isInterpolationToken(
-  token: Token
-): token is StringInterpolation {
-  return token?.type === TokenType.StringInterpolation;
-}
-
-//
-// Turn all tokens between double quote pairs into StringInterpolation token
-//
-function processInterpolation(acc: Token[], token: Token): Token[] {
-  if (token.type === TokenType.DoubleQuotedStart) {
-    // This token will accumulate further strings and variables
-    const interpolationToken: StringInterpolation = {
-      type: TokenType.StringInterpolation,
-      children: [],
-      isValid: true,
-      isComplete: false,
-      offset: token.offset + 1,
-      value: '',
-    };
-    acc.push(interpolationToken);
-    return acc;
-  }
-
-  const prevToken: Token = acc[acc.length - 1];
-  if (isInterpolationToken(prevToken) && !prevToken.isComplete) {
-    const type = token.type;
-    if (type === TokenType.DoubleQuotedFinish) {
-      if (
-        prevToken.isValid &&
-        prevToken.children.every(({ type: t }) => t === TokenType.String)
-      ) {
-        // Nothing to interpolate, replace to String
-        acc[acc.length - 1] = {
-          type: TokenType.String,
-          value: prevToken.children.map(({ value }) => value).join(''),
-          offset: prevToken.offset,
-        };
-        return acc;
-      }
-      prevToken.isComplete = true;
-    } else if (type === TokenType.String || type === TokenType.Variable) {
-      prevToken.children.push(token);
-    } else {
-      prevToken.children.push(token);
-      prevToken.isValid = false;
-    }
-  } else {
-    acc.push(token);
-  }
-  return acc;
-}
-
-const filteredTokens = [
-  TokenType.Space,
-  TokenType.LineComment,
-  TokenType.MultiComment,
-  TokenType.Newline,
-  TokenType.Semicolon,
-  TokenType.SingleQuotedStart,
-  TokenType.SingleQuotedFinish,
-  TokenType.DoubleQuotedFinish,
-  TokenType.TripleSingleQuotedStart,
-  TokenType.TripleDoubleQuotedStart,
-  TokenType.TripleQuotedFinish,
-];
-
-function filterTokens({ type }: Token): boolean {
-  return !filteredTokens.includes(type);
-}
-
-export function extractRawTokens(input: string): Token[] {
-  lexer.reset(input);
-  return Array.from(lexer).map(
-    ({ type, offset, value }) => ({ type, offset, value } as Token)
-  );
-}
-
-export function processTokens(tokens: Token[]): Token[] {
-  return tokens
-    .reduce(processChars, [])
-    .reduce(processInterpolation, [])
-    .filter(filterTokens);
-}
-
-export function tokenize(input: string): Token[] {
-  return processTokens(extractRawTokens(input));
-}
diff --git a/lib/modules/manager/gradle/types.ts b/lib/modules/manager/gradle/types.ts
index dce472166a0ae0..b455142ce84d63 100644
--- a/lib/modules/manager/gradle/types.ts
+++ b/lib/modules/manager/gradle/types.ts
@@ -1,6 +1,5 @@
 import type { lexer } from 'good-enough-parser';
 import type { PackageDependency } from '../types';
-import type { TokenType } from './common';

 export interface GradleManagerData {
   fileReplacePosition?: number;
@@ -15,53 +14,6 @@ export interface VariableData extends GradleManagerData {
 export type PackageVariables = Record<string, VariableData>;
 export type VariableRegistry = Record<string, PackageVariables>;

-export interface Token {
-  type: TokenType;
-  value: string;
-  offset: number;
-}
-
-export interface StringInterpolation extends Token {
-  type: TokenType.StringInterpolation;
-  children: Token[]; // Tokens inside double-quoted string that are subject of interpolation
-  isComplete: boolean; // True if token has parsed completely
-  isValid: boolean; // False if string contains something unprocessable
-}
-
-// Matcher on single token
-export interface SyntaxMatcher {
-  matchType: TokenType | TokenType[];
-  matchValue?: string | string[];
-  lookahead?: boolean;
-  tokenMapKey?: string;
-}
-
-export type TokenMap = Record<string, Token>;
-
-export interface SyntaxHandlerInput {
-  packageFile?: string;
-  variables: PackageVariables;
-  tokenMap: TokenMap;
-}
-
-export type SyntaxHandlerOutput = {
-  deps?: PackageDependency[];
-  vars?: PackageVariables;
-  urls?: string[];
-  scriptFile?: string | null;
-} | null;
-
-export interface SyntaxMatchConfig {
-  matchers: SyntaxMatcher[];
-  handler: (_: SyntaxHandlerInput) => SyntaxHandlerOutput;
-}
-
-export interface MatchConfig {
-  tokens: Token[];
-  variables: PackageVariables;
-  packageFile?: string;
-}
-
 export interface ParseGradleResult {
   deps: PackageDependency[];
   urls: string[];
diff --git a/lib/modules/manager/gradle/utils.spec.ts b/lib/modules/manager/gradle/utils.spec.ts
index cef7ca76d90682..dde376e318fb25 100644
--- a/lib/modules/manager/gradle/utils.spec.ts
+++ b/lib/modules/manager/gradle/utils.spec.ts
@@ -1,8 +1,6 @@
-import { TokenType } from './common';
 import type { VariableRegistry } from './types';
 import {
   getVars,
-  interpolateString,
   isDependencyString,
   parseDependencyString,
   reorderFiles,
@@ -76,34 +74,6 @@ describe('modules/manager/gradle/utils', () => {
     expect(parseDependencyString('-Xep:ParameterName:OFF')).toBeNull();
   });

-  it('interpolateString', () => {
-    expect(interpolateString([], {})).toBeEmptyString();
-    expect(
-      interpolateString(
-        [
-          { type: TokenType.String, value: 'foo' },
-          { type: TokenType.Variable, value: 'bar' },
-          { type: TokenType.String, value: 'baz' },
-        ] as never,
-        {
-          bar: { value: 'BAR' },
-        } as never
-      )
-    ).toBe('fooBARbaz');
-    expect(
-      interpolateString(
-        [{ type: TokenType.Variable, value: 'foo' }] as never,
-        {} as never
-      )
-    ).toBeNull();
-    expect(
-      interpolateString(
-        [{ type: TokenType.UnknownFragment, value: 'foo' }] as never,
-        {} as never
-      )
-    ).toBeNull();
-  });
-
   it('reorderFiles', () => {
     expect(
       reorderFiles([
diff --git a/lib/modules/manager/gradle/utils.ts b/lib/modules/manager/gradle/utils.ts
index c34b9bbce21db2..8fe72d9e088106 100644
--- a/lib/modules/manager/gradle/utils.ts
+++ b/lib/modules/manager/gradle/utils.ts
@@ -1,11 +1,9 @@
 import upath from 'upath';
 import { regEx } from '../../../util/regex';
 import type { PackageDependency } from '../types';
-import { TokenType } from './common';
 import type {
   GradleManagerData,
   PackageVariables,
-  Token,
   VariableRegistry,
 } from './types';

@@ -83,30 +81,6 @@ export function parseDependencyString(
   };
 }

-export function interpolateString(
-  childTokens: Token[],
-  variables: PackageVariables
-): string | null {
-  const resolvedSubstrings: string[] = [];
-  for (const childToken of childTokens) {
-    const type = childToken.type;
-    if (type === TokenType.String) {
-      resolvedSubstrings.push(childToken.value);
-    } else if (type === TokenType.Variable) {
-      const varName = childToken.value;
-      const varData = variables[varName];
-      if (varData) {
-        resolvedSubstrings.push(varData.value);
-      } else {
-        return null;
-      }
-    } else {
-      return null;
-    }
-  }
-  return resolvedSubstrings.join('');
-}
-
 const gradleVersionsFileRegex = regEx('^versions\\.gradle(?:\\.kts)?$', 'i');
 const gradleBuildFileRegex = regEx('^build\\.gradle(?:\\.kts)?$', 'i');
diff --git a/lib/modules/manager/sbt/extract.ts b/lib/modules/manager/sbt/extract.ts
index 2d63a18243c199..49cce676574be6 100644
--- a/lib/modules/manager/sbt/extract.ts
+++ b/lib/modules/manager/sbt/extract.ts
@@ -8,7 +8,7 @@ import {
   SBT_PLUGINS_REPO,
   SbtPluginDatasource,
 } from '../../datasource/sbt-plugin';
-import { MAVEN_REPO } from '../gradle/common';
+import { REGISTRY_URLS } from '../gradle/parser/common';
 import type { PackageDependency, PackageFile } from '../types';
 import { normalizeScalaVersion } from './util';

@@ -274,7 +274,7 @@ export function extractPackageFile(
     parsedResult = scala.query(content, query, {
       vars: {},
       deps: [],
-      registryUrls: [MAVEN_REPO],
+      registryUrls: [REGISTRY_URLS.mavenCentral],
     });
   } catch (err) /* istanbul ignore next */ {
     logger.warn({ err }, 'Sbt parsing error');
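Not part of the diff: the `REGISTRY_URLS` object that `parser.spec.ts` and `sbt/extract.ts` now import from `lib/modules/manager/gradle/parser/common.ts` is never shown above. Assuming it simply consolidates the registry constants deleted from `common.ts` under the property names the tests reference, a minimal sketch of what that module would need to export is shown below; the exact contents (for example, whether `mavenCentral` re-exports `MAVEN_REPO` or inlines the URL) are an assumption.

```ts
// Hypothetical sketch of lib/modules/manager/gradle/parser/common.ts,
// reconstructed from the constants deleted above and the property names
// used in parser.spec.ts and sbt/extract.ts — not the actual file contents.
import { MAVEN_REPO } from '../../../datasource/maven/common';

export const REGISTRY_URLS = {
  google: 'https://dl.google.com/android/maven2/',
  gradlePluginPortal: 'https://plugins.gradle.org/m2/',
  jcenter: 'https://jcenter.bintray.com/',
  mavenCentral: MAVEN_REPO,
};
```

With that shape, `REGISTRY_URLS.mavenCentral` resolves to the same URL the removed `MAVEN_REPO` re-export provided, so the default registry used by the sbt extractor is unchanged.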