diff --git a/metals/src/main/resources/shared-db/migration/V1__Jar_type_hierarchy.sql b/metals/src/main/resources/shared-db/migration/V1__Jar_type_hierarchy.sql new file mode 100644 index 00000000000..33dab7f2df2 --- /dev/null +++ b/metals/src/main/resources/shared-db/migration/V1__Jar_type_hierarchy.sql @@ -0,0 +1,18 @@ +-- Indexed jars, the MD5 digest of path, modified time and size as key +create table indexed_jar( + id int auto_increment unique, + md5 varchar primary key, + type_hierarchy_indexed bit +); + +-- Type hierarchy information, e.g. symbol: "a/MyException#", extended_name: "Exception" +create table type_hierarchy( + symbol varchar not null, + parent_name varchar not null, + path varchar not null, + jar int, + is_resolved bit, + foreign key (jar) references indexed_jar (id) on delete cascade +); + +create index type_hierarchy_jar on type_hierarchy(jar); diff --git a/metals/src/main/scala/scala/meta/internal/metals/H2ConnectionProvider.scala b/metals/src/main/scala/scala/meta/internal/metals/H2ConnectionProvider.scala new file mode 100644 index 00000000000..669f104d25b --- /dev/null +++ b/metals/src/main/scala/scala/meta/internal/metals/H2ConnectionProvider.scala @@ -0,0 +1,194 @@ +package scala.meta.internal.metals + +import java.nio.file.Files +import java.sql.Connection +import java.sql.DriverManager +import java.sql.SQLException +import java.util.Properties +import java.util.concurrent.atomic.AtomicReference + +import scala.util.Try +import scala.util.control.NonFatal + +import scala.meta.internal.metals.MetalsEnrichments._ +import scala.meta.internal.metals.Tables.ConnectionState +import scala.meta.internal.pc.InterruptException +import scala.meta.io.AbsolutePath + +import org.flywaydb.core.Flyway +import org.flywaydb.core.api.FlywayException +import org.h2.mvstore.DataUtils +import org.h2.mvstore.MVStoreException +import org.h2.tools.Upgrade + +abstract class H2ConnectionProvider( + directory: => AbsolutePath, + name: String, + migrations: String, +) extends Cancelable { + + protected val ref: AtomicReference[ConnectionState] = + new AtomicReference(ConnectionState.Empty) + private val user = "sa" + + protected def connection: Connection = connect() + + protected def optDirectory: Option[AbsolutePath] = Try(directory).toOption + protected def databasePath: Option[AbsolutePath] = + optDirectory.map(_.resolve("metals.h2.db")) + + def connect(): Connection = { + ref.get() match { + case empty @ ConnectionState.Empty => + if (ref.compareAndSet(empty, ConnectionState.InProgress)) { + val conn = tryAutoServer() + ref.set(ConnectionState.Connected(conn)) + conn + } else + connect() + case Tables.ConnectionState.InProgress => + Thread.sleep(100) + connect() + case Tables.ConnectionState.Connected(conn) => + conn + } + } + + // The try/catch dodge-ball court in these methods is not glamorous, I'm sure it can be refactored for more + // readability and extensibility but it seems to get the job done for now. The most important goals are: + // 1. Never fail to establish a connection, even if that means using an in-memory database with degraded UX. + // 2. Log helpful error message with actionable advice on how to fix the problem. + private def tryAutoServer(): Connection = { + try persistentConnection(isAutoServer = true) + catch { + case NonFatal(e) => + val message = + s"unable to setup persistent H2 database with AUTO_SERVER=true, falling back to AUTO_SERVER=false." 
+ e match { + case InterruptException() => + scribe.info(message) + case _ => + scribe.error(e) + } + tryNoAutoServer() + } + } + + protected def tryNoAutoServer(): Connection = { + try { + persistentConnection(isAutoServer = false) + } catch { + case NonFatal(e) => + scribe.error(e) + inMemoryConnection() + } + } + + protected def inMemoryConnection(): Connection = { + tryUrl(s"jdbc:h2:mem:${name};DB_CLOSE_DELAY=-1") + } + + protected def persistentConnection(isAutoServer: Boolean): Connection = { + val autoServer = + if (isAutoServer) ";AUTO_SERVER=TRUE" + else "" + val dbfile = directory.resolve("metals") + // from "h2" % "2.0.206" the only option is the MVStore, which uses `metals.mv.db` file + val oldDbfile = directory.resolve("metals.h2.db") + if (oldDbfile.exists) { + scribe.info(s"Deleting old database format $oldDbfile") + oldDbfile.delete() + } + Files.createDirectories(dbfile.toNIO.getParent) + System.setProperty( + "h2.bindAddress", + System.getProperty("h2.bindAddress", "127.0.0.1"), + ) + val url = s"jdbc:h2:file:$dbfile$autoServer" + upgradeIfNeeded(url) + tryUrl(url) + } + + private def tryUrl(url: String): Connection = { + val flyway = + Flyway.configure + .dataSource(url, user, null) + .locations(s"classpath:$migrations") + .cleanDisabled(false) + .load() + migrateOrRestart(flyway) + DriverManager.getConnection(url, user, null) + } + + /** + * Between h2 "2.1.x" and "2.2.x" write/read formats in MVStore changed + * (https://github.com/h2database/h2database/pull/3834) + */ + private def upgradeIfNeeded(url: String): Unit = { + val oldVersion = 214 + val formatVersionChangedMessage = + "The write format 2 is smaller than the supported format 3" + try { + DriverManager.getConnection(url, user, null).close() + } catch { + case e: SQLException if e.getErrorCode() == 90048 => + e.getCause() match { + case e: MVStoreException + if e.getErrorCode() == DataUtils.ERROR_UNSUPPORTED_FORMAT && + e.getMessage().startsWith(formatVersionChangedMessage) => + val info: Properties = new Properties() + info.put("user", user) + try { + val didUpgrade = Upgrade.upgrade(url, info, oldVersion) + if (didUpgrade) scribe.info(s"Upgraded H2 database.") + else deleteDatabase() + } catch { + case NonFatal(_) => deleteDatabase() + } + + case e => throw e + } + } + } + + private def migrateOrRestart( + flyway: Flyway + ): Unit = { + try { + flyway.migrate() + } catch { + case _: FlywayException => + scribe.warn(s"resetting database: $databasePath") + flyway.clean() + flyway.migrate() + } + } + + private def deleteDatabase() = + optDirectory.foreach { directory => + val dbFile = directory.resolve("metals.mv.db") + if (dbFile.exists) { + scribe.warn( + s"Deleting old database, due to failed database upgrade. Non-default build tool and build server choices will be lost." 
+ ) + dbFile.delete() + } + } + + def databaseExists(): Boolean = + databasePath.exists(_.exists) + + def cancel(): Unit = { + ref.get() match { + case v @ ConnectionState.Connected(conn) => + if (ref.compareAndSet(v, ConnectionState.Empty)) { + conn.close() + } + case ConnectionState.InProgress => + Thread.sleep(100) + cancel() + case _ => + } + } + +} diff --git a/metals/src/main/scala/scala/meta/internal/metals/Indexer.scala b/metals/src/main/scala/scala/meta/internal/metals/Indexer.scala index bd9cd93e28a..ba4e72db7e9 100644 --- a/metals/src/main/scala/scala/meta/internal/metals/Indexer.scala +++ b/metals/src/main/scala/scala/meta/internal/metals/Indexer.scala @@ -15,6 +15,7 @@ import scala.concurrent.Future import scala.concurrent.Promise import scala.util.control.NonFatal +import scala.meta.Dialect import scala.meta.dialects._ import scala.meta.inputs.Input import scala.meta.internal.bsp.BspSession @@ -78,6 +79,7 @@ final case class Indexer( workspaceFolder: AbsolutePath, implementationProvider: ImplementationProvider, resetService: () => Unit, + sharedIndices: SqlSharedIndices, )(implicit rc: ReportContext) { private implicit def ec: ExecutionContextExecutorService = executionContext @@ -497,7 +499,18 @@ final case class Indexer( case Right(zip) => scribe.debug(s"Indexing JDK sources from $zip") usedJars += zip - definitionIndex.addJDKSources(zip) + val dialect = ScalaVersions.dialectForDependencyJar(zip.filename) + sharedIndices.jvmTypeHierarchy.getTypeHierarchy(zip) match { + case Some(overrides) => + definitionIndex.addIndexedSourceJar(zip, Nil, dialect) + implementationProvider.addTypeHierarchyElements(overrides) + case None => + val (_, overrides) = indexJar(zip, dialect) + sharedIndices.jvmTypeHierarchy.addTypeHierarchyInfo( + zip, + overrides, + ) + } case Left(notFound) => val candidates = notFound.candidates.mkString(", ") scribe.warn( @@ -507,9 +520,9 @@ final case class Indexer( for { item <- dependencySources.getItems.asScala } { - jdkSources.foreach(source => + jdkSources.foreach { source => data.addDependencySource(source, item.getTarget) - ) + } } usedJars.toSet } @@ -612,22 +625,6 @@ final case class Indexer( */ private def addSourceJarSymbols(path: AbsolutePath): Unit = { val dialect = ScalaVersions.dialectForDependencyJar(path.filename) - def indexJar() = { - val indexResult = definitionIndex.addSourceJar(path, dialect) - val toplevels = indexResult.flatMap { - case IndexingResult(path, toplevels, _) => - toplevels.map((_, path)) - } - val overrides = indexResult.flatMap { - case IndexingResult(path, _, list) => - list.flatMap { case (symbol, overridden) => - overridden.map((path, symbol, _)) - } - } - implementationProvider.addTypeHierarchyElements(overrides) - (toplevels, overrides) - } - tables.jarSymbols.getTopLevels(path) match { case Some(toplevels) => tables.jarSymbols.getTypeHierarchy(path) match { @@ -635,15 +632,30 @@ final case class Indexer( definitionIndex.addIndexedSourceJar(path, toplevels, dialect) implementationProvider.addTypeHierarchyElements(overrides) case None => - val (_, overrides) = indexJar() + val (_, overrides) = indexJar(path, dialect) tables.jarSymbols.addTypeHierarchyInfo(path, overrides) } case None => - val (toplevels, overrides) = indexJar() + val (toplevels, overrides) = indexJar(path, dialect) tables.jarSymbols.putJarIndexingInfo(path, toplevels, overrides) } } + private def indexJar(path: AbsolutePath, dialect: Dialect) = { + val indexResult = definitionIndex.addSourceJar(path, dialect) + val toplevels = indexResult.flatMap { + 
case IndexingResult(path, toplevels, _) => + toplevels.map((_, path)) + } + val overrides = indexResult.flatMap { case IndexingResult(path, _, list) => + list.flatMap { case (symbol, overridden) => + overridden.map((path, symbol, _)) + } + } + implementationProvider.addTypeHierarchyElements(overrides) + (toplevels, overrides) + } + def reindexWorkspaceSources( paths: Seq[AbsolutePath] ): Unit = { diff --git a/metals/src/main/scala/scala/meta/internal/metals/JarTopLevels.scala b/metals/src/main/scala/scala/meta/internal/metals/JarTopLevels.scala index 2d218d9189e..07e8d1d0317 100644 --- a/metals/src/main/scala/scala/meta/internal/metals/JarTopLevels.scala +++ b/metals/src/main/scala/scala/meta/internal/metals/JarTopLevels.scala @@ -19,12 +19,16 @@ import scala.meta.internal.mtags.ResolvedOverriddenSymbol import scala.meta.internal.mtags.UnresolvedOverriddenSymbol import scala.meta.io.AbsolutePath +import org.h2.jdbc.JdbcBatchUpdateException +import org.h2.jdbc.JdbcSQLIntegrityConstraintViolationException + /** * Handles caching of Jar Top Level Symbols in H2 * * Wrapper around the indexed_jar and toplevel_symbol sql tables. */ -final class JarTopLevels(conn: () => Connection) { +final class JarTopLevels(conn: () => Connection) + extends JarTypeHierarchy(conn) { /** * Retrieves top level Scala symbols of a jar from H2 @@ -60,42 +64,6 @@ final class JarTopLevels(conn: () => Connection) { None } - def getTypeHierarchy( - jar: AbsolutePath - ): Option[List[(AbsolutePath, String, OverriddenSymbol)]] = - try { - val fs = getFileSystem(jar) - val toplevels = List.newBuilder[(AbsolutePath, String, OverriddenSymbol)] - conn() - .query( - """select th.symbol, th.parent_name, th.path, th.is_resolved - |from indexed_jar ij - |left join type_hierarchy th - |on ij.id=th.jar - |where ij.type_hierarchy_indexed=true and ij.md5=?""".stripMargin - ) { _.setString(1, getMD5Digest(jar)) } { rs => - if ( - rs.getString(1) != null && rs - .getString(2) != null && rs.getString(4) != null - ) { - val symbol = rs.getString(1) - val parentName = rs.getString(2) - val path = AbsolutePath(fs.getPath(rs.getString(3))) - val isResolved = rs.getBoolean(4) - val overridden = - if (isResolved) ResolvedOverriddenSymbol(parentName) - else UnresolvedOverriddenSymbol(parentName) - toplevels += ((path, symbol, overridden)) - } - } - .headOption - .map(_ => toplevels.result) - } catch { - case error @ (_: ZipError | _: ZipException) => - scribe.warn(s"corrupted jar $jar: $error") - None - } - /** * Stores the top level symbols for the Jar * @@ -111,27 +79,19 @@ final class JarTopLevels(conn: () => Connection) { if (toplevels.isEmpty && type_hierarchy.isEmpty) 0 else { // Add jar to H2 - var jarStmt: PreparedStatement = null - val jar = - try { - jarStmt = conn().prepareStatement( - s"insert into indexed_jar (md5, type_hierarchy_indexed) values (?, ?)", - Statement.RETURN_GENERATED_KEYS, + val jar = addJar(path) + jar + .map(jar => + putToplevels(jar, toplevels) + putTypeHierarchyInfo( + jar, + type_hierarchy, ) - jarStmt.setString(1, getMD5Digest(path)) - jarStmt.setBoolean(2, true) - jarStmt.executeUpdate() - val rs = jarStmt.getGeneratedKeys - rs.next() - rs.getInt("id") - } finally { - if (jarStmt != null) jarStmt.close() - } - putToplevels(jar, toplevels) + putTypeHierarchyInfo(jar, type_hierarchy) + ) + .getOrElse(0) } } - def addTypeHierarchyInfo( + override def addTypeHierarchyInfo( path: AbsolutePath, type_hierarchy: List[(AbsolutePath, String, OverriddenSymbol)], ): Int = { @@ -177,12 +137,110 @@ final class 
JarTopLevels(conn: () => Connection) { } // Return number of rows inserted symbolStmt.executeBatch().sum + } catch { + case e: JdbcBatchUpdateException => + scribe.warn(e) + 0 } finally { if (symbolStmt != null) symbolStmt.close() } } else 0 - private def putTypeHierarchyInfo( + /** + * Delete the jars that are not used and their top level symbols + * + * @param usedPaths paths of the used Jars + * @return number of jars deleted + */ + def deleteNotUsedTopLevels(usedPaths: Array[AbsolutePath]): Int = { + val md5s = usedPaths.map(getMD5Digest).map("'" + _ + "'").mkString(",") + conn().update { + s"delete from indexed_jar where md5 not in ($md5s)" + } { _ => () } + } + + def clearAll(): Unit = { + val statement1 = conn().prepareStatement("truncate table toplevel_symbol") + statement1.execute() + val statement2 = + conn().prepareStatement("truncate table type_hierarchy") + statement2.execute() + val statement3 = conn().prepareStatement("delete from indexed_jar") + statement3.execute() + } + +} + +class JarTypeHierarchy(conn: () => Connection) { + + def getTypeHierarchy( + jar: AbsolutePath + ): Option[List[(AbsolutePath, String, OverriddenSymbol)]] = + try { + val fs = getFileSystem(jar) + val toplevels = List.newBuilder[(AbsolutePath, String, OverriddenSymbol)] + conn() + .query( + """select th.symbol, th.parent_name, th.path, th.is_resolved + |from indexed_jar ij + |left join type_hierarchy th + |on ij.id=th.jar + |where ij.type_hierarchy_indexed=true and ij.md5=?""".stripMargin + ) { _.setString(1, getMD5Digest(jar)) } { rs => + if ( + rs.getString(1) != null && rs + .getString(2) != null && rs.getString(4) != null + ) { + val symbol = rs.getString(1) + val parentName = rs.getString(2) + val path = AbsolutePath(fs.getPath(rs.getString(3))) + val isResolved = rs.getBoolean(4) + val overridden = + if (isResolved) ResolvedOverriddenSymbol(parentName) + else UnresolvedOverriddenSymbol(parentName) + toplevels += ((path, symbol, overridden)) + } + } + .headOption + .map(_ => toplevels.result) + } catch { + case error @ (_: ZipError | _: ZipException) => + scribe.warn(s"corrupted jar $jar: $error") + None + } + + def addTypeHierarchyInfo( + path: AbsolutePath, + type_hierarchy: List[(AbsolutePath, String, OverriddenSymbol)], + ): Int = { + val jar = addJar(path) + jar.map(putTypeHierarchyInfo(_, type_hierarchy)).getOrElse(0) + } + + protected def addJar(path: AbsolutePath): Option[Int] = { + var jarStmt: PreparedStatement = null + try { + jarStmt = conn().prepareStatement( + s"insert into indexed_jar (md5, type_hierarchy_indexed) values (?, ?)", + Statement.RETURN_GENERATED_KEYS, + ) + jarStmt.setString(1, getMD5Digest(path)) + jarStmt.setBoolean(2, true) + jarStmt.executeUpdate() + val rs = jarStmt.getGeneratedKeys + rs.next() + Some(rs.getInt("id")) + } catch { + case e: JdbcSQLIntegrityConstraintViolationException => + // since we don't synchronize we might end up adding the same jar twice + scribe.warn(e) + None + } finally { + if (jarStmt != null) jarStmt.close() + } + } + + protected def putTypeHierarchyInfo( jar: Int, type_hierarchy: List[(AbsolutePath, String, OverriddenSymbol)], ): Int = @@ -210,34 +268,15 @@ final class JarTopLevels(conn: () => Connection) { } // Return number of rows inserted symbolStmt.executeBatch().sum + } catch { + case e: JdbcBatchUpdateException => + scribe.warn(e) + 0 } finally { if (symbolStmt != null) symbolStmt.close() } } else 0 - /** - * Delete the jars that are not used and their top level symbols - * - * @param usedPaths paths of the used Jars - * @return 
number of jars deleted - */ - def deleteNotUsedTopLevels(usedPaths: Array[AbsolutePath]): Int = { - val md5s = usedPaths.map(getMD5Digest).map("'" + _ + "'").mkString(",") - conn().update { - s"delete from indexed_jar where md5 not in ($md5s)" - } { _ => () } - } - - def clearAll(): Unit = { - val statement1 = conn().prepareStatement("truncate table toplevel_symbol") - statement1.execute() - val statement2 = - conn().prepareStatement("truncate table type_hierarchy") - statement2.execute() - val statement3 = conn().prepareStatement("delete from indexed_jar") - statement3.execute() - } - def getMD5Digest(path: AbsolutePath): String = { val attributes = Files .getFileAttributeView(path.toNIO, classOf[BasicFileAttributeView]) diff --git a/metals/src/main/scala/scala/meta/internal/metals/MetalsLspService.scala b/metals/src/main/scala/scala/meta/internal/metals/MetalsLspService.scala index c9d4c1929f9..133edbb1ec6 100644 --- a/metals/src/main/scala/scala/meta/internal/metals/MetalsLspService.scala +++ b/metals/src/main/scala/scala/meta/internal/metals/MetalsLspService.scala @@ -1560,6 +1560,8 @@ abstract class MetalsLspService( def fileWatcher: FileWatcher + private val sharedIndices = new SqlSharedIndices + protected val indexer: Indexer = Indexer( () => workspaceReload, check, @@ -1594,6 +1596,7 @@ abstract class MetalsLspService( folder, implementationProvider, resetService, + sharedIndices, ) def projectInfo: MetalsServiceInfo diff --git a/metals/src/main/scala/scala/meta/internal/metals/SqlSharedIndices.scala b/metals/src/main/scala/scala/meta/internal/metals/SqlSharedIndices.scala new file mode 100644 index 00000000000..1a642a590a1 --- /dev/null +++ b/metals/src/main/scala/scala/meta/internal/metals/SqlSharedIndices.scala @@ -0,0 +1,16 @@ +package scala.meta.internal.metals + +import java.nio.file.Paths + +import scala.meta.io.AbsolutePath + +class SqlSharedIndices + extends H2ConnectionProvider( + directory = + AbsolutePath(Paths.get(sys.props("user.home"))).resolve(".metals"), + name = "metals-shared", + migrations = "/shared-db/migration", + ) { + + val jvmTypeHierarchy: JarTypeHierarchy = new JarTypeHierarchy(() => connect) +} diff --git a/metals/src/main/scala/scala/meta/internal/metals/Tables.scala b/metals/src/main/scala/scala/meta/internal/metals/Tables.scala index 7b8f10f8f35..7f3d7969ad8 100644 --- a/metals/src/main/scala/scala/meta/internal/metals/Tables.scala +++ b/metals/src/main/scala/scala/meta/internal/metals/Tables.scala @@ -1,31 +1,20 @@ package scala.meta.internal.metals -import java.nio.file.Files import java.sql.Connection -import java.sql.DriverManager -import java.sql.SQLException -import java.util.Properties -import java.util.concurrent.atomic.AtomicReference import scala.util.control.NonFatal import scala.meta.internal.builds.Digests -import scala.meta.internal.metals.MetalsEnrichments._ -import scala.meta.internal.pc.InterruptException import scala.meta.io.AbsolutePath -import org.flywaydb.core.Flyway -import org.flywaydb.core.api.FlywayException -import org.h2.mvstore.DataUtils -import org.h2.mvstore.MVStoreException -import org.h2.tools.Upgrade - final class Tables( workspace: AbsolutePath, time: Time, -) extends Cancelable { - - import Tables.ConnectionState +) extends H2ConnectionProvider( + directory = workspace.resolve(".metals"), + name = "metals", + migrations = "/db/migration", + ) { val jarSymbols = new JarTopLevels(() => connection) val digests = @@ -43,67 +32,7 @@ final class Tables( val fingerprints = new Fingerprints(() => connection) - private val 
ref: AtomicReference[ConnectionState] = - new AtomicReference(ConnectionState.Empty) - - private val user = "sa" - - def connect(): Connection = { - ref.get() match { - case empty @ ConnectionState.Empty => - if (ref.compareAndSet(empty, ConnectionState.InProgress)) { - val conn = tryAutoServer() - ref.set(ConnectionState.Connected(conn)) - conn - } else - connect() - case Tables.ConnectionState.InProgress => - Thread.sleep(100) - connect() - case Tables.ConnectionState.Connected(conn) => - conn - } - } - - def cancel(): Unit = { - ref.get() match { - case v @ ConnectionState.Connected(conn) => - if (ref.compareAndSet(v, ConnectionState.Empty)) { - conn.close() - } - case ConnectionState.InProgress => - Thread.sleep(100) - cancel() - case _ => - } - } - - def databaseExists(): Boolean = - databasePath.exists - - private def connection: Connection = connect() - - // The try/catch dodge-ball court in these methods is not glamorous, I'm sure it can be refactored for more - // readability and extensibility but it seems to get the job done for now. The most important goals are: - // 1. Never fail to establish a connection, even if that means using an in-memory database with degraded UX. - // 2. Log helpful error message with actionable advice on how to fix the problem. - private def tryAutoServer(): Connection = { - try persistentConnection(isAutoServer = true) - catch { - case NonFatal(e) => - val message = - s"unable to setup persistent H2 database with AUTO_SERVER=true, falling back to AUTO_SERVER=false." - e match { - case InterruptException() => - scribe.info(message) - case _ => - scribe.error(e) - } - tryNoAutoServer() - } - } - - private def tryNoAutoServer(): Connection = { + override protected def tryNoAutoServer(): Connection = { try { persistentConnection(isAutoServer = true) } catch { @@ -121,95 +50,6 @@ final class Tables( } } - private def databasePath: AbsolutePath = - workspace.resolve(Directories.database) - - private def inMemoryConnection(): Connection = { - tryUrl("jdbc:h2:mem:metals;DB_CLOSE_DELAY=-1") - } - - private def persistentConnection(isAutoServer: Boolean): Connection = { - val autoServer = - if (isAutoServer) ";AUTO_SERVER=TRUE" - else "" - val dbfile = workspace.resolve(".metals").resolve("metals") - // from "h2" % "2.0.206" the only option is the MVStore, which uses `metals.mv.db` file - val oldDbfile = workspace.resolve(".metals").resolve("metals.h2.db") - if (oldDbfile.exists) { - scribe.info(s"Deleting old database format $oldDbfile") - oldDbfile.delete() - } - Files.createDirectories(dbfile.toNIO.getParent) - System.setProperty( - "h2.bindAddress", - System.getProperty("h2.bindAddress", "127.0.0.1"), - ) - val url = s"jdbc:h2:file:$dbfile$autoServer" - upgradeIfNeeded(url) - tryUrl(url) - } - - private def tryUrl(url: String): Connection = { - val flyway = - Flyway.configure.dataSource(url, user, null).cleanDisabled(false).load() - migrateOrRestart(flyway) - DriverManager.getConnection(url, user, null) - } - - private def migrateOrRestart( - flyway: Flyway - ): Unit = { - try { - flyway.migrate() - } catch { - case _: FlywayException => - scribe.warn(s"resetting database: $databasePath") - flyway.clean() - flyway.migrate() - } - } - - /** - * Between h2 "2.1.x" and "2.2.x" write/read formats in MVStore changed - * (https://github.com/h2database/h2database/pull/3834) - */ - private def upgradeIfNeeded(url: String): Unit = { - val oldVersion = 214 - val formatVersionChangedMessage = - "The write format 2 is smaller than the supported format 3" - try { - 
DriverManager.getConnection(url, user, null).close() - } catch { - case e: SQLException if e.getErrorCode() == 90048 => - e.getCause() match { - case e: MVStoreException - if e.getErrorCode() == DataUtils.ERROR_UNSUPPORTED_FORMAT && - e.getMessage().startsWith(formatVersionChangedMessage) => - val info: Properties = new Properties() - info.put("user", user) - try { - val didUpgrade = Upgrade.upgrade(url, info, oldVersion) - if (didUpgrade) scribe.info(s"Upgraded H2 database.") - else deleteDatabase() - } catch { - case NonFatal(_) => deleteDatabase() - } - - case e => throw e - } - } - } - - private def deleteDatabase() = { - val dbFile = workspace.resolve(".metals").resolve("metals.mv.db") - if (dbFile.exists) { - scribe.warn( - s"Deleting old database, due to failed database upgrade. Non-default build tool and build server choices will be lost." - ) - dbFile.delete() - } - } - def cleanAll(): Unit = { try { jarSymbols.clearAll() diff --git a/mtags/src/main/scala/scala/meta/internal/mtags/GlobalSymbolIndex.scala b/mtags/src/main/scala/scala/meta/internal/mtags/GlobalSymbolIndex.scala index c8c9be77bf8..27cc008ba38 100644 --- a/mtags/src/main/scala/scala/meta/internal/mtags/GlobalSymbolIndex.scala +++ b/mtags/src/main/scala/scala/meta/internal/mtags/GlobalSymbolIndex.scala @@ -90,8 +90,6 @@ trait GlobalSymbolIndex { dialect: Dialect ): List[IndexingResult] - def addJDKSources(jar: AbsolutePath): Unit - /** * The same as `addSourceJar` except for directories */ diff --git a/mtags/src/main/scala/scala/meta/internal/mtags/OnDemandSymbolIndex.scala b/mtags/src/main/scala/scala/meta/internal/mtags/OnDemandSymbolIndex.scala index 6640523e0ac..c6699b08814 100644 --- a/mtags/src/main/scala/scala/meta/internal/mtags/OnDemandSymbolIndex.scala +++ b/mtags/src/main/scala/scala/meta/internal/mtags/OnDemandSymbolIndex.scala @@ -94,23 +94,6 @@ final class OnDemandSymbolIndex( } ) - override def addJDKSources(jar: AbsolutePath): Unit = - tryRun( - jar, - List.empty, { - try { - getOrCreateBucket(dialects.Scala213Source3).addJDKSources(jar) - } catch { - case e: ZipError => - onError(new IndexingExceptions.InvalidJarException(jar, e)) - List.empty - case e: ZipException => - onError(new IndexingExceptions.InvalidJarException(jar, e)) - List.empty - } - } - ) - // Used to add cached toplevel symbols to index def addIndexedSourceJar( jar: AbsolutePath, diff --git a/mtags/src/main/scala/scala/meta/internal/mtags/SymbolIndexBucket.scala b/mtags/src/main/scala/scala/meta/internal/mtags/SymbolIndexBucket.scala index 2957fe0704a..f84162fed37 100644 --- a/mtags/src/main/scala/scala/meta/internal/mtags/SymbolIndexBucket.scala +++ b/mtags/src/main/scala/scala/meta/internal/mtags/SymbolIndexBucket.scala @@ -60,13 +60,6 @@ class SymbolIndexBucket( } else List.empty } - def addJDKSources( - jar: AbsolutePath - ): Unit = { - sourceJars.addEntry(jar.toNIO) - PlatformFileIO.newJarFileSystem(jar, create = false) - } - def addSourceJar( jar: AbsolutePath ): List[IndexingResult] = { @@ -102,6 +95,7 @@ class SymbolIndexBucket( } } } + PlatformFileIO.newJarFileSystem(jar, create = false) } def addSourceFile( diff --git a/tests/mtest/src/main/scala/tests/DelegatingGlobalSymbolIndex.scala b/tests/mtest/src/main/scala/tests/DelegatingGlobalSymbolIndex.scala index 4d7f7e16d59..73e38a87dbd 100644 --- a/tests/mtest/src/main/scala/tests/DelegatingGlobalSymbolIndex.scala +++ b/tests/mtest/src/main/scala/tests/DelegatingGlobalSymbolIndex.scala @@ -12,7 +12,7 @@ import scala.meta.io.AbsolutePath * Symbol index that delegates all methods 
to an underlying implementation */ class DelegatingGlobalSymbolIndex( - var underlying: GlobalSymbolIndex = + var underlying: OnDemandSymbolIndex = OnDemandSymbolIndex.empty()(EmptyReportContext) ) extends GlobalSymbolIndex { @@ -36,10 +36,6 @@ class DelegatingGlobalSymbolIndex( underlying.addSourceJar(jar, dialect) } - def addJDKSources(jar: AbsolutePath): Unit = { - underlying.addJDKSources(jar) - } - def addSourceDirectory( dir: AbsolutePath, dialect: Dialect diff --git a/tests/mtest/src/main/scala/tests/PCSuite.scala b/tests/mtest/src/main/scala/tests/PCSuite.scala index b62c5e14805..a027d144dd8 100644 --- a/tests/mtest/src/main/scala/tests/PCSuite.scala +++ b/tests/mtest/src/main/scala/tests/PCSuite.scala @@ -37,7 +37,9 @@ trait PCSuite { .withRepositories(allRepos: _*) protected def indexJdkSources: Unit = JdkSources() match { - case Right(jdk) => index.addJDKSources(jdk) + case Right(jdk) => + // We don't actually need to index, since java toplevels are trivial. + index.underlying.addIndexedSourceJar(jdk, Nil, dialect) case _ => } diff --git a/tests/slow/src/test/scala/tests/sbt/SbtServerSuite.scala b/tests/slow/src/test/scala/tests/sbt/SbtServerSuite.scala index 9a5a5b4f98a..4689e879066 100644 --- a/tests/slow/src/test/scala/tests/sbt/SbtServerSuite.scala +++ b/tests/slow/src/test/scala/tests/sbt/SbtServerSuite.scala @@ -118,7 +118,7 @@ class SbtServerSuite |object A { | val foo: Int = "aaa" |} - |/.metals/ + |/.metals/a.txt | |""".stripMargin, V.scala213, diff --git a/tests/unit/src/test/scala/tests/ImplementationLspSuite.scala b/tests/unit/src/test/scala/tests/ImplementationLspSuite.scala index 03780d9ec87..9e855042e70 100644 --- a/tests/unit/src/test/scala/tests/ImplementationLspSuite.scala +++ b/tests/unit/src/test/scala/tests/ImplementationLspSuite.scala @@ -254,9 +254,8 @@ class ImplementationLspSuite extends BaseImplementationSuite("implementation") { |""".stripMargin, ) - // we currently don't index JDK sources check( - "java-classes".ignore, + "java-classes", """|/a/src/main/scala/a/Main.scala |package a |class <> extends Exce@@ption @@ -641,23 +640,64 @@ class ImplementationLspSuite extends BaseImplementationSuite("implementation") { |""".stripMargin, ) - if (!isJava11) { + if (isJava17) { checkSymbols( "exception", """package a |class MyException extends Excep@@tion |""".stripMargin, """|a/MyException# - |scala/ScalaReflectionException# - |scala/reflect/internal/FatalError# - |scala/reflect/internal/MissingRequirementError# - |scala/reflect/internal/Positions#ValidateException# - |scala/reflect/macros/Enclosures#EnclosureException# - |scala/reflect/macros/ParseException# - |scala/reflect/macros/ReificationException# - |scala/reflect/macros/TypecheckException# - |scala/reflect/macros/UnexpectedReificationException# + |com/sun/beans/finder/SignatureException# + |com/sun/imageio/plugins/jpeg/JFIFMarkerSegment#IllegalThumbException# + |com/sun/jdi/AbsentInformationException# + |com/sun/jdi/ClassNotLoadedException# + |com/sun/jdi/ClassNotPreparedException# + |com/sun/jdi/IncompatibleThreadStateException# + |com/sun/jdi/InconsistentDebugInfoException# + |com/sun/jdi/InternalException# + |com/sun/jdi/InvalidCodeIndexException# + |com/sun/jdi/InvalidLineNumberException# + |com/sun/jdi/InvalidModuleException# + |com/sun/jdi/InvalidStackFrameException# + |com/sun/jdi/InvalidTypeException# + |com/sun/jdi/InvocationException# + |com/sun/jdi/NativeMethodException# + |com/sun/jdi/ObjectCollectedException# + |com/sun/jdi/VMCannotBeModifiedException# + 
|com/sun/jdi/VMDisconnectedException# + |com/sun/jdi/VMMismatchException# + |com/sun/jdi/VMOutOfMemoryException# + |com/sun/jdi/connect/IllegalConnectorArgumentsException# + |com/sun/jdi/connect/TransportTimeoutException# + |com/sun/jdi/connect/VMStartException# + |com/sun/jdi/connect/spi/ClosedConnectionException# + |com/sun/jdi/request/DuplicateRequestException# + |com/sun/jdi/request/InvalidRequestStateException# + |com/sun/jndi/ldap/LdapReferralException# + |com/sun/media/sound/InvalidDataException# + |com/sun/media/sound/InvalidFormatException# + |com/sun/media/sound/RIFFInvalidDataException# + |com/sun/media/sound/RIFFInvalidFormatException# + |com/sun/nio/sctp/IllegalReceiveException# + |com/sun/nio/sctp/IllegalUnbindException# + |com/sun/nio/sctp/InvalidStreamException# + |com/sun/org/apache/bcel/internal/classfile/ClassFormatException# + |com/sun/org/apache/bcel/internal/generic/ClassGenException# + |com/sun/org/apache/bcel/internal/generic/TargetLostException# + |com/sun/org/apache/xalan/internal/xsltc/TransletException# + |com/sun/org/apache/xalan/internal/xsltc/compiler/CompilerException# + |com/sun/org/apache/xalan/internal/xsltc/compiler/IllegalCharException# + |com/sun/org/apache/xalan/internal/xsltc/compiler/util/TypeCheckError# + |com/sun/org/apache/xerces/internal/dom/AbortException# + |com/sun/org/apache/xerces/internal/dom/RangeExceptionImpl# + |com/sun/org/apache/xerces/internal/impl/dv/DVFactoryException# + |com/sun/org/apache/xerces/internal/impl/dv/DatatypeException# + |com/sun/org/apache/xerces/internal/impl/dv/InvalidDatatypeFacetException# + |com/sun/org/apache/xerces/internal/impl/dv/InvalidDatatypeValueException# + |com/sun/org/apache/xerces/internal/impl/dv/xs/SchemaDateTimeException# + |com/sun/org/apache/xerces/internal/impl/io/MalformedByteSequenceException# |""".stripMargin, + topLines = Some(50), ) }
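
The `indexed_jar` table comment describes its key as "the MD5 digest of path, modified time and size". As a rough, stand-alone approximation of that keying scheme (the actual logic lives in `JarTopLevels.getMD5Digest`; the `JarCacheKey` object and `jarCacheKey` method below are hypothetical names, not part of this patch), such a digest could be computed like this:

```scala
import java.math.BigInteger
import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.attribute.BasicFileAttributeView
import java.security.MessageDigest

object JarCacheKey {
  // Digest the jar's path together with its last-modified time and size, so the
  // cached type-hierarchy rows are invalidated whenever the jar file changes.
  def jarCacheKey(path: Path): String = {
    val attrs = Files
      .getFileAttributeView(path, classOf[BasicFileAttributeView])
      .readAttributes()
    val identity = s"$path:${attrs.lastModifiedTime().toMillis}:${attrs.size()}"
    val md5 = MessageDigest.getInstance("MD5").digest(identity.getBytes("UTF-8"))
    String.format("%032x", new BigInteger(1, md5))
  }
}
```

The Indexer change for JDK sources is a cache-or-index flow against the new shared database under `~/.metals`: look the jar up in the shared type-hierarchy cache first, and only run the full source-jar indexing pass on a miss, persisting the result so other workspaces can reuse it. Below is a minimal, self-contained sketch of that flow; `TypeHierarchyCache`, `SourceJarIndexer`, and `TypeHierarchyRow` are simplified stand-ins for the real `JarTypeHierarchy`, definition index, and implementation provider, not the actual Metals APIs.

```scala
import java.nio.file.Path

object SharedJdkIndexingSketch {
  final case class TypeHierarchyRow(sourceFile: Path, symbol: String, parent: String)

  trait TypeHierarchyCache {
    def get(jar: Path): Option[List[TypeHierarchyRow]]
    def put(jar: Path, rows: List[TypeHierarchyRow]): Unit
  }

  trait SourceJarIndexer {
    // Register the jar as indexed without re-reading its sources.
    def markIndexed(jar: Path): Unit
    // Full (slow) indexing pass over the source jar.
    def indexSources(jar: Path): List[TypeHierarchyRow]
  }

  def indexJdkSources(
      jar: Path,
      cache: TypeHierarchyCache,
      indexer: SourceJarIndexer,
      registerHierarchy: List[TypeHierarchyRow] => Unit,
  ): Unit =
    cache.get(jar) match {
      case Some(rows) =>
        // Cache hit: skip re-indexing and feed the cached rows straight to the
        // implementation provider.
        indexer.markIndexed(jar)
        registerHierarchy(rows)
      case None =>
        // Cache miss: index once, register the result, and persist it for
        // subsequent workspaces.
        val rows = indexer.indexSources(jar)
        registerHierarchy(rows)
        cache.put(jar, rows)
    }
}
```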