- rework library file imports
  - split into separate library file scanning and importing tasks
  - use a reference-counted zip filesystem wrapper to avoid issues when multiple instances of this task run in parallel on the same library files (see the sketch below)
  - use coroutines to run the work in parallel
- various file operation cleanup
jpenilla committed Dec 21, 2024
1 parent 741bc34 commit a931fbe
Showing 8 changed files with 297 additions and 303 deletions.
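The reference-counted zip filesystem wrapper mentioned in the commit message is not itself part of the hunks shown here; the tasks only reach it through openZipSafe(). As a rough sketch of the idea only (not the actual paperweight implementation; RefCountedZipFs, Handle, and acquire are hypothetical names, and the single-argument FileSystems.newFileSystem(Path) assumes Java 13+):

import java.nio.file.FileSystem
import java.nio.file.FileSystems
import java.nio.file.Path

// Sketch: a shared, reference-counted zip FileSystem cache. Concurrent users of the same
// jar receive the same FileSystem, and it is closed only when the last handle is closed.
object RefCountedZipFs {
    private class Entry(val fs: FileSystem, var refCount: Int = 0)

    private val open = mutableMapOf<Path, Entry>()

    // Each handle must be closed exactly once; closing the last handle closes the FileSystem.
    class Handle internal constructor(val fs: FileSystem, private val onClose: () -> Unit) : AutoCloseable {
        override fun close() = onClose()
    }

    @Synchronized
    fun acquire(zipPath: Path): Handle {
        val key = zipPath.toAbsolutePath().normalize()
        val entry = open.getOrPut(key) { Entry(FileSystems.newFileSystem(key)) }
        entry.refCount++
        return Handle(entry.fs) { release(key) }
    }

    @Synchronized
    private fun release(key: Path) {
        val entry = open[key] ?: return
        if (--entry.refCount == 0) {
            open.remove(key)
            entry.fs.close()
        }
    }
}

A caller would acquire the jar once per use, e.g. RefCountedZipFs.acquire(jar).use { handle -> /* read from handle.fs */ }, so two tasks reading the same sources jar in parallel never race to open and close the same zip filesystem.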
@@ -147,7 +147,8 @@ class ServerPatchingTasks(
val importLibFiles = tasks.register<ImportLibraryFiles>("import${namePart()}LibraryFiles") {
patches.from(config.featurePatchDir, config.sourcePatchDir)
devImports.set(config.devImports.fileExists(project))
libraries.from(softspoon.importLibraryFiles.map { it.libraries })
libraryFileIndex.set(softspoon.indexLibraryFiles.flatMap { it.outputFile })
libraries.from(softspoon.indexLibraryFiles.map { it.libraries })
}

val setup = tasks.register<ForkSetup>("run${namePart()}VanillaSetup") {
@@ -29,6 +29,7 @@ import io.papermc.paperweight.tasks.mache.DecompileJar
import io.papermc.paperweight.tasks.mache.RemapJar
import io.papermc.paperweight.tasks.mache.SetupVanilla
import io.papermc.paperweight.tasks.softspoon.ImportLibraryFiles
import io.papermc.paperweight.tasks.softspoon.IndexLibraryFiles
import io.papermc.paperweight.tasks.softspoon.SetupPaperScript
import io.papermc.paperweight.util.*
import io.papermc.paperweight.util.constants.*
@@ -112,15 +113,20 @@ class SoftSpoonTasks(
secondFile.set(collectPaperATsFromPatches.flatMap { it.outputFile })
}

val importLibraryFiles = tasks.register<ImportLibraryFiles>("importPaperLibraryFiles") {
patches.from(project.coreExt.paper.sourcePatchDir, project.coreExt.paper.featurePatchDir)
devImports.set(project.coreExt.paper.devImports.fileExists(project))
val indexLibraryFiles = tasks.register<IndexLibraryFiles>("indexLibraryFiles") {
libraries.from(
allTasks.downloadPaperLibrariesSources.flatMap { it.outputDir },
allTasks.downloadMcLibrariesSources.flatMap { it.outputDir }
)
}

val importLibraryFiles = tasks.register<ImportLibraryFiles>("importPaperLibraryFiles") {
patches.from(project.coreExt.paper.sourcePatchDir, project.coreExt.paper.featurePatchDir)
devImports.set(project.coreExt.paper.devImports.fileExists(project))
libraryFileIndex.set(indexLibraryFiles.flatMap { it.outputFile })
libraries.from(indexLibraryFiles.map { it.libraries })
}

private fun SetupVanilla.configureSetupMacheSources() {
group = "mache"

@@ -112,23 +112,25 @@ abstract class SetupVanilla : JavaLauncherTask() {
}

println("Copy initial sources...")
inputFile.convertToPath().openZip().walk()
.filter(predicate.get())
.forEach {
val target = outputPath.resolve(it.toString().substring(1))
target.parent.createDirectories()
if (it.toString().endsWith(".nbt")) {
// nbt files are binary, so we can just copy them
it.copyTo(target)
} else {
// for text files we make sure we have a trailing newline
var content = it.readText()
if (!content.endsWith("\n")) {
content += "\n"
inputFile.path.openZip().use { inputFileFs ->
inputFileFs.walkSequence()
.filter(predicate.get()::test)
.forEach {
val target = outputPath.resolve(it.toString().substring(1))
target.parent.createDirectories()
if (it.toString().endsWith(".nbt")) {
// nbt files are binary, so we can just copy them
it.copyTo(target)
} else {
// for text files we make sure we have a trailing newline
var content = it.readText()
if (!content.endsWith("\n")) {
content += "\n"
}
target.writeText(content)
}
target.writeText(content)
}
}
}

println("Setup git repo...")
if (!macheOld.isPresent) {
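The new block above also closes the zip filesystem once the copy is done, instead of leaving it open after walking it. A standalone sketch of that pattern without the paperweight helpers (forEachZipEntry is a hypothetical name; the single-argument FileSystems.newFileSystem(Path) assumes Java 13+):

import java.nio.file.FileSystems
import java.nio.file.Files
import java.nio.file.Path
import kotlin.io.path.isRegularFile
import kotlin.streams.asSequence

// Sketch: open a jar as a zip FileSystem, walk its entries lazily, and deterministically
// close both the walk stream and the filesystem when the traversal finishes.
fun forEachZipEntry(jar: Path, action: (Path) -> Unit) {
    FileSystems.newFileSystem(jar).use { fs ->
        Files.walk(fs.getPath("/")).use { stream ->
            stream.asSequence()
                .filter { it.isRegularFile() }
                .forEach(action)
        }
    }
}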
@@ -22,25 +22,80 @@

package io.papermc.paperweight.tasks.softspoon

import com.github.salomonbrys.kotson.typeToken
import io.papermc.paperweight.PaperweightException
import io.papermc.paperweight.tasks.*
import io.papermc.paperweight.util.*
import io.papermc.paperweight.util.constants.*
import java.nio.file.Path
import java.util.concurrent.ConcurrentHashMap
import kotlin.io.path.*
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.coroutineScope
import kotlinx.coroutines.launch
import kotlinx.coroutines.runBlocking
import org.gradle.api.file.ConfigurableFileCollection
import org.gradle.api.file.DirectoryProperty
import org.gradle.api.file.FileCollection
import org.gradle.api.file.RegularFileProperty
import org.gradle.api.logging.LogLevel
import org.gradle.api.tasks.InputFile
import org.gradle.api.tasks.InputFiles
import org.gradle.api.tasks.Optional
import org.gradle.api.tasks.OutputDirectory
import org.gradle.api.tasks.OutputFile
import org.gradle.api.tasks.TaskAction

private data class LibraryImport(val libraryFileName: String, val importFilePath: String)

abstract class IndexLibraryFiles : BaseTask() {

@get:InputFiles
abstract val libraries: ConfigurableFileCollection

@get:OutputFile
abstract val outputFile: RegularFileProperty

override fun init() {
super.init()
outputFile.set(layout.cache.resolve(paperTaskOutput("json")))
}

@TaskAction
fun run() {
val possible = findPossibleLibraryImports(libraries.sourcesJars())
outputFile.path.cleanFile().bufferedWriter().use { writer ->
gson.toJson(possible, writer)
}
}

private fun findPossibleLibraryImports(libFiles: List<Path>): Collection<LibraryImport> = runBlocking {
val found = ConcurrentHashMap.newKeySet<LibraryImport>()
val suffix = ".java"
libFiles.forEach { libFile ->
launch(Dispatchers.IO) {
libFile.openZipSafe().use { zipFile ->
zipFile.walkSequence()
.filter { it.isRegularFile() && it.name.endsWith(suffix) }
.map { sourceFile ->
LibraryImport(libFile.name, sourceFile.toString().substring(1))
}
.forEach(found::add)
}
}
}
return@runBlocking found
}
}

abstract class ImportLibraryFiles : BaseTask() {

@get:Optional
@get:InputFiles
abstract val libraries: ConfigurableFileCollection

@get:InputFile
abstract val libraryFileIndex: RegularFileProperty

@get:Optional
@get:InputFiles
abstract val patches: ConfigurableFileCollection
@@ -62,17 +117,115 @@ abstract class ImportLibraryFiles : BaseTask() {
outputDir.path.deleteRecursive()
outputDir.path.createDirectories()
if (!libraries.isEmpty && !patches.isEmpty) {
val patchFiles = patches.files.flatMap { it.toPath().walk().filter { path -> path.toString().endsWith(".patch") }.toList() }
McDev.importMcDev(
val index = libraryFileIndex.path.bufferedReader().use { reader ->
gson.fromJson<Set<LibraryImport>>(reader, typeToken<Set<LibraryImport>>())
}
val patchFiles = patches.files.flatMap { it.toPath().filesMatchingRecursive("*.patch") }
importLibraryFiles(
patchFiles,
null,
devImports.pathOrNull,
outputDir.path,
null,
libraries.files.map { it.toPath() },
true,
""
libraries.sourcesJars(),
index,
true
)
}
}

private fun importLibraryFiles(
patches: Iterable<Path>,
importsFile: Path?,
targetDir: Path,
libFiles: List<Path>,
index: Set<LibraryImport>,
printOutput: Boolean,
) = runBlocking {
// Import library classes
val allImports = findLibraryImports(importsFile, libFiles, index, patches)
val importsByLib = allImports.groupBy { it.libraryFileName }
logger.log(if (printOutput) LogLevel.LIFECYCLE else LogLevel.DEBUG, "Importing {} classes from library sources...", allImports.size)

for ((libraryFileName, imports) in importsByLib) {
val libFile = libFiles.firstOrNull { it.name == libraryFileName }
?: throw PaperweightException("Failed to find library: $libraryFileName for classes ${imports.map { it.importFilePath }}")
launch(Dispatchers.IO) {
libFile.openZipSafe().use { zipFile ->
for (import in imports) {
val outputFile = targetDir.resolve(import.importFilePath)
if (outputFile.exists()) {
continue
}
outputFile.parent.createDirectories()

val libEntry = zipFile.getPath(import.importFilePath)
libEntry.copyTo(outputFile)
}
}
}
}
}

private suspend fun usePatchLines(patches: Iterable<Path>, consumer: (String) -> Unit) = coroutineScope {
for (patch in patches) {
launch(Dispatchers.IO) {
patch.useLines { lines ->
lines.forEach { consumer(it) }
}
}
}
}

private suspend fun findLibraryImports(
libraryImports: Path?,
libFiles: List<Path>,
index: Set<LibraryImport>,
patchFiles: Iterable<Path>
): Set<LibraryImport> {
val result = hashSetOf<LibraryImport>()

// Imports from library-imports.txt
libraryImports?.useLines { lines ->
lines.filterNot { it.startsWith('#') }
.map { it.split(' ') }
.filter { it.size == 2 }
.mapTo(result) { parts ->
val libFileName = libFiles.firstOrNull { it.name.startsWith(parts[0]) }?.name
?: throw PaperweightException("Failed to read library line '${parts[0]} ${parts[1]}', no library file was found.")
LibraryImport(libFileName, parts[1].removeSuffix(".java").replace('.', '/') + ".java")
}
}

// Scan patches for necessary imports
result += findNeededLibraryImports(patchFiles, index)

return result
}

private suspend fun findNeededLibraryImports(
patchFiles: Iterable<Path>,
index: Set<LibraryImport>,
): Set<LibraryImport> {
val knownImportMap = index.associateBy { it.importFilePath }
val prefix = "+++ b/"
val needed = ConcurrentHashMap.newKeySet<LibraryImport>()
usePatchLines(patchFiles) { line ->
if (!line.startsWith(prefix)) {
return@usePatchLines
}
val key = line.substring(prefix.length)
val value = knownImportMap[key]
if (value != null) {
needed += value
}
}
return needed
}
}

private fun FileCollection.sourcesJars(): List<Path> {
val libFiles = files.map { it.toPath() }.flatMap { it.filesMatchingRecursive("*-sources.jar") }
if (libFiles.isEmpty()) {
throw PaperweightException("No library files found")
}
return libFiles
}
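As a small illustration of the data these tasks pass around: each line of the optional dev imports file names a library by file-name prefix plus a fully qualified class, and findLibraryImports above resolves it to a LibraryImport keyed by the entry path inside the sources jar. The concrete values below are hypothetical, not taken from this commit:

// Hypothetical example of the mapping performed by findLibraryImports above.
val line = "authlib com.mojang.authlib.GameProfile"
val (prefix, className) = line.split(' ')
// a sources jar whose file name starts with the prefix is picked from the indexed jars
val import = LibraryImport(
    libraryFileName = "$prefix-1.0-sources.jar",
    importFilePath = className.removeSuffix(".java").replace('.', '/') + ".java"
)
// import.importFilePath == "com/mojang/authlib/GameProfile.java"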