Skip to content

Commit

Permalink
move outline from Compat + move semanticdbTextDocument to `Compilers`
Browse files Browse the repository at this point in the history
  • Loading branch information
kasiaMarek committed Apr 17, 2024
1 parent 555c133 commit 5cd84e8
Show file tree
Hide file tree
Showing 14 changed files with 193 additions and 316 deletions.
101 changes: 100 additions & 1 deletion metals/src/main/scala/scala/meta/internal/metals/Compilers.scala
Expand Up @@ -16,10 +16,12 @@ import scala.util.control.NonFatal

import scala.meta.inputs.Input
import scala.meta.inputs.Position
import scala.meta.internal.builds.SbtBuildTool
import scala.meta.internal.metals.CompilerOffsetParamsUtils
import scala.meta.internal.metals.CompilerRangeParamsUtils
import scala.meta.internal.metals.Compilers.PresentationCompilerKey
import scala.meta.internal.metals.MetalsEnrichments._
import scala.meta.internal.mtags.MD5
import scala.meta.internal.parsing.Trees
import scala.meta.internal.pc.EmptySymbolSearch
import scala.meta.internal.pc.JavaPresentationCompiler
Expand All @@ -28,6 +30,7 @@ import scala.meta.internal.pc.PcSymbolInformation
import scala.meta.internal.pc.ScalaPresentationCompiler
import scala.meta.internal.worksheets.WorksheetPcData
import scala.meta.internal.worksheets.WorksheetProvider
import scala.meta.internal.{semanticdb => s}
import scala.meta.io.AbsolutePath
import scala.meta.pc.AutoImportsResult
import scala.meta.pc.CancelToken
Expand Down Expand Up @@ -88,7 +91,8 @@ class Compilers(
)(implicit ec: ExecutionContextExecutorService, rc: ReportContext)
extends Cancelable {
val plugins = new CompilerPlugins()
val outlineFilesProvider = new OutlineFilesProvider(buildTargets, buffers)
private val outlineFilesProvider =
new OutlineFilesProvider(buildTargets, buffers)

// Not a TrieMap because we want to avoid loading duplicate compilers for the same build target.
// Not a `j.u.c.ConcurrentHashMap` because it can deadlock in `computeIfAbsent` when the absent
Expand Down Expand Up @@ -1540,6 +1544,101 @@ class Compilers(
debugItem
}

def semanticdbTextDocument(
    source: AbsolutePath,
    text: String,
): s.TextDocument = {
  // Prefer the presentation compiler of the build target that owns `source`;
  // fall back to the standalone compiler when no target is found.
  val (pc, optBuildTarget) =
    loadCompiler(source).getOrElse((fallbackCompiler(source), None))

  // For *.sbt files the compiler needs the sbt auto-imports prepended.
  // Remember how many lines were added so ranges can be shifted back later.
  val sbtImports =
    if (source.isSbt) buildTargets.sbtAutoImports(source) else None
  val (prependedLinesSize, modifiedText) = sbtImports match {
    case Some(imports) =>
      (imports.size, SbtBuildTool.prependAutoImports(text, imports))
    case None =>
      (0, text)
  }

  // NOTE(olafur): it's unfortunate that we block on `semanticdbTextDocument`
  // here but to avoid it we would need to refactor the `Semanticdbs` trait,
  // which requires more effort than it's worth.
  val params = new CompilerVirtualFileParams(
    source.toURI,
    modifiedText,
    token = EmptyCancelToken,
    outlineFiles = outlineFilesProvider.getOutlineFiles(optBuildTarget),
  )
  val bytes = pc
    .semanticdbTextDocument(params)
    .get(
      config.initialConfig.compilers.timeoutDelay,
      config.initialConfig.compilers.timeoutUnit,
    )
  val parsed = s.TextDocument.parseFrom(bytes)
  // The compiler may leave the text empty; restore the original input.
  val textDocument =
    if (parsed.text.isEmpty()) parsed.withText(text) else parsed
  if (prependedLinesSize == 0) textDocument
  else cleanupAutoImports(textDocument, text, prependedLinesSize)
}

/** Undoes the effect of the prepended sbt auto-imports on a SemanticDB
 *  document: restores the original text/MD5 and shifts all ranges up by
 *  `linesSize` lines, dropping any range that falls inside the prepended
 *  region.
 */
private def cleanupAutoImports(
    document: s.TextDocument,
    originalText: String,
    linesSize: Int,
): s.TextDocument = {

  // Shift a range up by `linesSize`; None when the range would end before
  // line 0 (i.e. it belonged to the prepended imports).
  def shift(range: s.Range): Option[s.Range] = {
    val newEndLine = range.endLine - linesSize
    if (newEndLine < 0) None
    else
      Some(
        range.copy(
          startLine = range.startLine - linesSize,
          endLine = newEndLine,
        )
      )
  }

  val shiftedOccurrences = document.occurrences.flatMap { occ =>
    occ.range.flatMap(shift).map(r => occ.copy(range = Some(r)))
  }

  val shiftedDiagnostics = document.diagnostics.flatMap { diag =>
    diag.range.flatMap(shift).map(r => diag.copy(range = Some(r)))
  }

  val shiftedSynthetics = document.synthetics.flatMap { syn =>
    syn.range.flatMap(shift).map(r => syn.copy(range = Some(r)))
  }

  s.TextDocument(
    schema = document.schema,
    uri = document.uri,
    text = originalText,
    md5 = MD5.compute(originalText),
    language = document.language,
    symbols = document.symbols,
    occurrences = shiftedOccurrences,
    diagnostics = shiftedDiagnostics,
    synthetics = shiftedSynthetics,
  )
}

}

object Compilers {
Expand Down
Expand Up @@ -6,7 +6,6 @@ import java.util.Collections
import scala.util.Success
import scala.util.Try

import scala.meta.internal.builds.SbtBuildTool
import scala.meta.internal.metals.MetalsEnrichments._
import scala.meta.internal.mtags.MD5
import scala.meta.internal.mtags.Semanticdbs
Expand Down Expand Up @@ -139,120 +138,7 @@ final class InteractiveSemanticdbs(
javaInteractiveSemanticdb.fold(s.TextDocument())(
_.textDocument(source, text)
)
else scalaCompile(source, text)
}

// Compiles `source` with the best-matching presentation compiler and returns
// the resulting SemanticDB document for `text`.
private def scalaCompile(
source: AbsolutePath,
text: String,
): s.TextDocument = {
// Worksheets use their dedicated worksheet presentation compiler.
def worksheetCompiler =
if (source.isWorksheet) compilers().loadWorksheetCompiler(source)
else None
// Otherwise use the compiler of the build target that owns `source`.
def fromTarget = for {
buildTarget <- buildTargets.inverseSources(source)
pc <- compilers().loadCompiler(buildTarget)
} yield (pc, Some(buildTarget))

// Selection order matters: worksheet compiler first, then the owning build
// target, then worksheet-definition sources, finally the standalone fallback.
val (pc, optBuildTarget) = worksheetCompiler
.map((_, None))
.orElse(fromTarget)
.orElse {
// load presentation compiler for sources that were created by a worksheet definition request
tables.worksheetSources
.getWorksheet(source)
.flatMap(compilers().loadWorksheetCompiler)
.map((_, None))
}
.getOrElse((compilers().fallbackCompiler(source), None))

// For *.sbt files, prepend the sbt auto-imports so the compiler can resolve
// sbt DSL names; track how many lines were added so ranges can be shifted
// back afterwards.
val (prependedLinesSize, modifiedText) =
Option
.when(source.isSbt)(
buildTargets
.sbtAutoImports(source)
)
.flatten
.fold((0, text))(imports =>
(imports.size, SbtBuildTool.prependAutoImports(text, imports))
)

// NOTE(olafur): it's unfortunate that we block on `semanticdbTextDocument`
// here but to avoid it we would need to refactor the `Semanticdbs` trait,
// which requires more effort than it's worth.
val params = new CompilerVirtualFileParams(
source.toURI,
modifiedText,
token = EmptyCancelToken,
outlineFiles =
compilers().outlineFilesProvider.getOutlineFiles(optBuildTarget),
)
val bytes = pc
.semanticdbTextDocument(params)
.get(
clientConfig.initialConfig.compilers.timeoutDelay,
clientConfig.initialConfig.compilers.timeoutUnit,
)
// The compiler may omit the document text; restore the original input.
val textDocument = {
val doc = s.TextDocument.parseFrom(bytes)
if (doc.text.isEmpty()) doc.withText(text)
else doc
}
// Undo the auto-import prefix (shift ranges, restore original text).
if (prependedLinesSize > 0)
cleanupAutoImports(textDocument, text, prependedLinesSize)
else textDocument
}

private def cleanupAutoImports(
document: s.TextDocument,
originalText: String,
linesSize: Int,
): s.TextDocument = {

def adjustRange(range: s.Range): Option[s.Range] = {
val nextStartLine = range.startLine - linesSize
val nextEndLine = range.endLine - linesSize
if (nextEndLine >= 0) {
val nextRange = range.copy(
startLine = nextStartLine,
endLine = nextEndLine,
)
Some(nextRange)
} else None
}

val adjustedOccurences =
document.occurrences.flatMap { occurence =>
occurence.range
.flatMap(adjustRange)
.map(r => occurence.copy(range = Some(r)))
}

val adjustedDiagnostic =
document.diagnostics.flatMap { diagnostic =>
diagnostic.range
.flatMap(adjustRange)
.map(r => diagnostic.copy(range = Some(r)))
}

val adjustedSynthetic =
document.synthetics.flatMap { synthetic =>
synthetic.range
.flatMap(adjustRange)
.map(r => synthetic.copy(range = Some(r)))
}

s.TextDocument(
schema = document.schema,
uri = document.uri,
text = originalText,
md5 = MD5.compute(originalText),
language = document.language,
symbols = document.symbols,
occurrences = adjustedOccurences,
diagnostics = adjustedDiagnostic,
synthetics = adjustedSynthetic,
)
else compilers().semanticdbTextDocument(source, text)
}

}
Expand Up @@ -93,18 +93,8 @@ class OutlineFilesProvider(

def enrichWithOutlineFiles(
path: AbsolutePath
)(vFile: CompilerVirtualFileParams): CompilerVirtualFileParams = {
val optOutlineFiles =
for {
bt <- buildTargets.inferBuildTarget(path)
provider <- outlineFiles.get(bt)
outlineFiles <- provider.outlineFiles()
} yield outlineFiles

optOutlineFiles
.map(outlineFiles => vFile.copy(outlineFiles = Optional.of(outlineFiles)))
.getOrElse(vFile)
}
)(vFile: CompilerVirtualFileParams): CompilerVirtualFileParams =
enrichWithOutlineFiles(buildTargets.inferBuildTarget(path))(vFile)

def clear(): Unit = {
outlineFiles.clear()
Expand Down
6 changes: 0 additions & 6 deletions mtags/src/main/scala-2.11/scala/meta/internal/pc/Compat.scala
Expand Up @@ -3,8 +3,6 @@ package scala.meta.internal.pc
import scala.tools.nsc.reporters.Reporter
import scala.tools.nsc.reporters.StoreReporter

import scala.meta.pc.OutlineFiles

trait Compat { this: MetalsGlobal =>
def metalsFunctionArgTypes(tpe: Type): List[Type] = {
val dealiased = tpe.dealiasWiden
Expand All @@ -21,8 +19,4 @@ trait Compat { this: MetalsGlobal =>
def isAliasCompletion(m: Member): Boolean = false

def constantType(c: ConstantType): ConstantType = c

def runOutline(files: OutlineFiles): Unit = {
// no outline compilation for 2.11
}
}
@@ -0,0 +1,9 @@
package scala.meta.internal.pc

import scala.meta.pc.OutlineFiles

/** Outline-compilation hook for the Scala 2.11 presentation compiler.
 *  Outline compilation is not supported on 2.11, so this is a no-op.
 */
trait Outline { this: MetalsGlobal =>
def runOutline(files: OutlineFiles): Unit = {
// no outline compilation for 2.11
}
}
37 changes: 1 addition & 36 deletions mtags/src/main/scala-2.12/scala/meta/internal/pc/Compat.scala
@@ -1,14 +1,8 @@
package scala.meta.internal.pc

import java.{util => ju}

import scala.reflect.internal.Reporter
import scala.tools.nsc.reporters.Reporter
import scala.tools.nsc.reporters.StoreReporter

import scala.meta.internal.jdk.CollectionConverters._
import scala.meta.pc.OutlineFiles
import scala.meta.pc.VirtualFileParams

trait Compat { this: MetalsGlobal =>
def metalsFunctionArgTypes(tpe: Type): List[Type] =
definitions.functionOrSamArgTypes(tpe)
Expand All @@ -22,33 +16,4 @@ trait Compat { this: MetalsGlobal =>
def isAliasCompletion(m: Member): Boolean = false

def constantType(c: ConstantType): ConstantType = c

/** Outline-compiles `files` with `-Youtline` temporarily enabled.
 *  The flag is switched on before and off after the run; on the first
 *  compile substitute the files are compiled twice (see comment below).
 */
def runOutline(files: OutlineFiles): Unit = {
this.settings.Youtline.value = true
runOutline(files.files)
if (files.isFirstCompileSubstitute()) {
// if first compilation substitute we compile all files twice
// first to emit symbols, second so signatures have information about those symbols
// this isn't a perfect strategy but much better than single compile
runOutline(files.files, forceNewUnit = true)
}
this.settings.Youtline.value = false
}

/** Type-checks each file as an outline compilation unit and stores the unit
 *  in `richCompilationCache`, keyed by the file URI.
 *
 *  @param forceNewUnit when true, creates a fresh compilation unit even if
 *                      one already exists for the file (used for the second
 *                      pass of a first-compile substitute)
 */
private def runOutline(
files: ju.List[VirtualFileParams],
forceNewUnit: Boolean = false
): Unit = {
files.asScala.foreach { params =>
val unit = this.addCompilationUnit(
params.text(),
params.uri.toString(),
cursor = None,
isOutline = true,
forceNew = forceNewUnit
)
this.typeCheck(unit)
this.richCompilationCache.put(params.uri().toString(), unit)
}
}
}

0 comments on commit 5cd84e8

Please sign in to comment.