Add super classes to decompiler cache ClassEntry (#1118)
* Add super classes to decompiler cache ClassEntry

* And implement

* Add existing classes jar for the decompiler
modmuss50 authored May 14, 2024
1 parent db65759 commit 8110523
Showing 8 changed files with 499 additions and 134 deletions.
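In short, the per-class cache key for decompiled sources now also covers the class's super classes and interfaces, so changing a parent class invalidates its children's cached sources. A rough, self-contained sketch of the keying idea (names are illustrative only; the real code uses Loom's Checksum.sha256Hex and ClassEntry.hashSuperHierarchy as shown in the diff below):

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.HexFormat;
import java.util.List;
import java.util.Map;
import java.util.StringJoiner;

class SuperHierarchyKeySketch {
    // Cache key = job-level base hash + "/" + hash(own class hash + known super class hashes).
    static String cacheKey(String baseHash, String name, List<String> superClasses, Map<String, String> hashes) throws NoSuchAlgorithmException {
        StringJoiner joiner = new StringJoiner(",");
        joiner.add(hashes.get(name));

        for (String superClass : superClasses) {
            // Super classes outside the input jar (e.g. JDK classes) are simply absent and skipped.
            String superHash = hashes.get(superClass + ".class");

            if (superHash != null) {
                joiner.add(superHash);
            }
        }

        return baseHash + "/" + sha256Hex(joiner.toString().getBytes(StandardCharsets.UTF_8));
    }

    static String sha256Hex(byte[] bytes) throws NoSuchAlgorithmException {
        return HexFormat.of().formatHex(MessageDigest.getInstance("SHA-256").digest(bytes));
    }
}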
@@ -49,11 +49,13 @@ public WorkRequest prepareJob(Path inputJar) throws IOException {
boolean hasSomeExisting = false;

Path incompleteJar = Files.createTempFile("loom-cache-incomplete", ".jar");
Path existingJar = Files.createTempFile("loom-cache-existing", ".jar");
Path existingClassesJar = Files.createTempFile("loom-cache-existingClasses", ".jar");
Path existingSourcesJar = Files.createTempFile("loom-cache-existingSources", ".jar");

// We must delete the empty files, so they can be created as a zip
Files.delete(incompleteJar);
Files.delete(existingJar);
Files.delete(existingClassesJar);
Files.delete(existingSourcesJar);

// Sources name -> hash
Map<String, String> outputNameMap = new HashMap<>();
@@ -64,12 +66,14 @@ public WorkRequest prepareJob(Path inputJar) throws IOException {

try (FileSystemUtil.Delegate inputFs = FileSystemUtil.getJarFileSystem(inputJar, false);
FileSystemUtil.Delegate incompleteFs = FileSystemUtil.getJarFileSystem(incompleteJar, true);
FileSystemUtil.Delegate existingFs = FileSystemUtil.getJarFileSystem(existingJar, true)) {
FileSystemUtil.Delegate existingSourcesFs = FileSystemUtil.getJarFileSystem(existingSourcesJar, true);
FileSystemUtil.Delegate existingClassesFs = FileSystemUtil.getJarFileSystem(existingClassesJar, true)) {
final List<ClassEntry> inputClasses = JarWalker.findClasses(inputFs);
final Map<String, String> rawEntryHashes = getEntryHashes(inputClasses, inputFs.getRoot());

for (ClassEntry entry : inputClasses) {
String outputFileName = entry.sourcesFileName();
String fullHash = baseHash + "/" + entry.hash(inputFs.getRoot());
String fullHash = baseHash + "/" + entry.hashSuperHierarchy(rawEntryHashes);

final CachedData entryData = fileStore.getEntry(fullHash);

@@ -82,10 +86,12 @@ public WorkRequest prepareJob(Path inputJar) throws IOException {
LOGGER.debug("Cached entry ({}) not found, going to process {}", fullHash, outputFileName);
misses++;
} else {
final Path outputPath = existingFs.getPath(outputFileName);
final Path outputPath = existingSourcesFs.getPath(outputFileName);
Files.createDirectories(outputPath.getParent());
Files.writeString(outputPath, entryData.sources());

entry.copyTo(inputFs.getRoot(), existingClassesFs.getRoot());

if (entryData.lineNumbers() != null) {
lineNumbersMap.put(entryData.className(), entryData.lineNumbers());
} else {
@@ -110,25 +116,42 @@ public WorkRequest prepareJob(Path inputJar) throws IOException {
if (isIncomplete && !hasSomeExisting) {
// The cache contained nothing of use, fully process the input jar
Files.delete(incompleteJar);
Files.delete(existingJar);
Files.delete(existingClassesJar);
Files.delete(existingSourcesJar);

LOGGER.info("No cached entries found, going to process the whole jar");
return new FullWorkJob(inputJar, outputJar, outputNameMap)
.asRequest(stats, lineNumbers);
} else if (isIncomplete) {
// The cache did not contain everything so we have some work to do
LOGGER.info("Some cached entries found, using partial work job");
return new PartialWorkJob(incompleteJar, existingJar, outputJar, outputNameMap)
return new PartialWorkJob(incompleteJar, existingSourcesJar, existingClassesJar, outputJar, outputNameMap)
.asRequest(stats, lineNumbers);
} else {
// The cache contained everything we need, so the existing jar is the output
LOGGER.info("All cached entries found, using completed work job");
Files.delete(incompleteJar);
return new CompletedWorkJob(existingJar)
Files.delete(existingClassesJar);
return new CompletedWorkJob(existingSourcesJar)
.asRequest(stats, lineNumbers);
}
}

private static Map<String, String> getEntryHashes(List<ClassEntry> entries, Path root) throws IOException {
final Map<String, String> rawEntryHashes = new HashMap<>();

for (ClassEntry entry : entries) {
String hash = entry.hash(root);
rawEntryHashes.put(entry.name(), hash);

for (String s : entry.innerClasses()) {
rawEntryHashes.put(s, hash);
}
}

return Collections.unmodifiableMap(rawEntryHashes);
}

public void completeJob(Path output, WorkJob workJob, ClassLineNumbers lineNumbers) throws IOException {
if (workJob instanceof CompletedWorkJob completedWorkJob) {
// Fully complete, nothing new to cache
@@ -189,7 +212,7 @@ public void completeJob(Path output, WorkJob workJob, ClassLineNumbers lineNumbe
if (workJob instanceof PartialWorkJob partialWorkJob) {
// Copy all the existing items to the output jar
try (FileSystemUtil.Delegate outputFs = FileSystemUtil.getJarFileSystem(partialWorkJob.output(), false);
FileSystemUtil.Delegate existingFs = FileSystemUtil.getJarFileSystem(partialWorkJob.existing(), false);
FileSystemUtil.Delegate existingFs = FileSystemUtil.getJarFileSystem(partialWorkJob.existingSources(), false);
Stream<Path> walk = Files.walk(existingFs.getRoot())) {
Iterator<Path> iterator = walk.iterator();

@@ -208,7 +231,8 @@ public void completeJob(Path output, WorkJob workJob, ClassLineNumbe
}
}

Files.delete(partialWorkJob.existing());
Files.delete(partialWorkJob.existingSources());
Files.delete(partialWorkJob.existingClasses());
Files.move(partialWorkJob.output(), output);
} else if (workJob instanceof FullWorkJob fullWorkJob) {
// Nothing to merge, just use the output jar
@@ -259,11 +283,12 @@ public record CompletedWorkJob(Path completed) implements WorkJob {
* Some work needs to be done.
*
* @param incomplete A path to a jar file containing all the classes to be processed
* @param existing A path pointing to a jar containing existing classes that have previously been processed
* @param existingSources A path pointing to a jar containing existing sources that have previously been processed
* @param existingClasses A path pointing to a jar containing existing classes that have previously been processed
* @param output A path to a temporary jar where work output should be written to
* @param outputNameMap A map of sources name to hash
*/
public record PartialWorkJob(Path incomplete, Path existing, Path output, Map<String, String> outputNameMap) implements WorkToDoJob {
public record PartialWorkJob(Path incomplete, Path existingSources, Path existingClasses, Path output, Map<String, String> outputNameMap) implements WorkToDoJob {
}

/**
49 changes: 44 additions & 5 deletions src/main/java/net/fabricmc/loom/decompilers/cache/ClassEntry.java
@@ -28,11 +28,23 @@
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.StringJoiner;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import net.fabricmc.loom.util.Checksum;

public record ClassEntry(String parentClass, List<String> innerClasses) {
/**
* @param name The class name
* @param innerClasses A list of inner class names
* @param superClasses A list of parent classes (super and interface) from the class and all inner classes
*/
public record ClassEntry(String name, List<String> innerClasses, List<String> superClasses) {
private static final Logger LOGGER = LoggerFactory.getLogger(ClassEntry.class);

/**
* Copy the class and its inner classes to the target root.
* @param sourceRoot The root of the source jar
@@ -41,9 +53,9 @@ public record ClassEntry(String parentClass, List<String> innerClasses) {
* @throws IOException If an error occurs while copying the files
*/
public void copyTo(Path sourceRoot, Path targetRoot) throws IOException {
Path targetPath = targetRoot.resolve(parentClass);
Path targetPath = targetRoot.resolve(name);
Files.createDirectories(targetPath.getParent());
Files.copy(sourceRoot.resolve(parentClass), targetPath);
Files.copy(sourceRoot.resolve(name), targetPath);

for (String innerClass : innerClasses) {
Files.copy(sourceRoot.resolve(innerClass), targetRoot.resolve(innerClass));
@@ -60,7 +72,7 @@ public void copyTo(Path sourceRoot, Path targetRoot) throws IOException {
public String hash(Path root) throws IOException {
StringJoiner joiner = new StringJoiner(",");

joiner.add(Checksum.sha256Hex(Files.readAllBytes(root.resolve(parentClass))));
joiner.add(Checksum.sha256Hex(Files.readAllBytes(root.resolve(name))));

for (String innerClass : innerClasses) {
joiner.add(Checksum.sha256Hex(Files.readAllBytes(root.resolve(innerClass))));
@@ -69,7 +81,34 @@ public String hash(Path root) throws IOException {
return Checksum.sha256Hex(joiner.toString().getBytes());
}

/**
* Return a hash of the class and its super classes.
*/
public String hashSuperHierarchy(Map<String, String> hashes) throws IOException {
final String selfHash = Objects.requireNonNull(hashes.get(name), "Hash for own class not found");

if (superClasses.isEmpty()) {
return selfHash;
}

StringJoiner joiner = new StringJoiner(",");
joiner.add(selfHash);

for (String superClass : superClasses) {
final String superHash = hashes.get(superClass + ".class");

if (superHash != null) {
joiner.add(superHash);
} else if (!superClass.startsWith("java/")) {
// This will happen if the super class is not part of the input jar
LOGGER.debug("Hash for super class {} of {} not found", superClass, name);
}
}

return Checksum.sha256Hex(joiner.toString().getBytes());
}

public String sourcesFileName() {
return parentClass.replace(".class", ".java");
return name.replace(".class", ".java");
}
}
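Paired with getEntryHashes in the cache job above, the lookup map keys every class file by name, with inner classes reusing their outer class's hash; hashSuperHierarchy then mixes in whatever super-class hashes are actually present in the jar. A hypothetical usage sketch (class names and hash strings are made up; assumes Loom's ClassEntry is on the classpath):

import java.io.IOException;
import java.util.List;
import java.util.Map;

import net.fabricmc.loom.decompilers.cache.ClassEntry;

class HierarchyHashExample {
    static String cacheKey(String baseHash) throws IOException {
        // Hypothetical entries; the values would really be SHA-256 hex of the class bytes.
        Map<String, String> hashes = Map.of(
                "com/example/Base.class", "hashOfBase",
                "com/example/Child.class", "hashOfChild",
                "com/example/Child$Inner.class", "hashOfChild"); // inner classes reuse the outer class's hash

        ClassEntry child = new ClassEntry(
                "com/example/Child.class",
                List.of("com/example/Child$Inner.class"),
                List.of("com/example/Base", "java/util/function/Supplier"));

        // Mixes hashOfChild with hashOfBase; Supplier is not in the input jar, so it is skipped
        // (only misses outside java/* are logged, at debug level).
        return baseHash + "/" + child.hashSuperHierarchy(hashes);
    }
}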
85 changes: 81 additions & 4 deletions src/main/java/net/fabricmc/loom/decompilers/cache/JarWalker.java
@@ -25,6 +25,10 @@
package net.fabricmc.loom.decompilers.cache;

import java.io.IOException;
import java.io.InputStream;
import java.io.UncheckedIOException;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
@@ -33,11 +37,22 @@
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.stream.Stream;

import org.gradle.api.JavaVersion;
import org.objectweb.asm.ClassReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import net.fabricmc.loom.util.CompletableFutureCollector;
import net.fabricmc.loom.util.FileSystemUtil;

public final class JarWalker {
@@ -88,7 +103,8 @@ public static List<ClassEntry> findClasses(FileSystemUtil.Delegate fs) throws IO

Collections.sort(outerClasses);

List<ClassEntry> classEntries = new ArrayList<>();
final Executor executor = getExecutor();
List<CompletableFuture<ClassEntry>> classEntries = new ArrayList<>();

for (String outerClass : outerClasses) {
List<String> innerClasList = innerClasses.get(outerClass);
@@ -99,10 +115,71 @@ public static List<ClassEntry> findClasses(FileSystemUtil.Delegate fs) throws IO
Collections.sort(innerClasList);
}

ClassEntry classEntry = new ClassEntry(outerClass, Collections.unmodifiableList(innerClasList));
classEntries.add(classEntry);
classEntries.add(getClassEntry(outerClass, innerClasList, fs, executor));
}

return Collections.unmodifiableList(classEntries);
try {
return classEntries.stream()
.collect(CompletableFutureCollector.allOf())
.get(10, TimeUnit.MINUTES);
} catch (InterruptedException | ExecutionException | TimeoutException e) {
throw new RuntimeException("Failed to get class entries", e);
}
}

private static CompletableFuture<ClassEntry> getClassEntry(String outerClass, List<String> innerClasses, FileSystemUtil.Delegate fs, Executor executor) {
List<CompletableFuture<List<String>>> parentClassesFutures = new ArrayList<>();

// Get the super classes of the outer class and any inner classes
parentClassesFutures.add(CompletableFuture.supplyAsync(() -> getSuperClasses(outerClass, fs), executor));

for (String innerClass : innerClasses) {
parentClassesFutures.add(CompletableFuture.supplyAsync(() -> getSuperClasses(innerClass, fs), executor));
}

return parentClassesFutures.stream()
.collect(CompletableFutureCollector.allOf())
.thenApply(lists -> lists.stream()
.flatMap(List::stream)
.filter(JarWalker::isNotReservedClass)
.distinct()
.toList())
.thenApply(parentClasses -> new ClassEntry(outerClass, innerClasses, parentClasses));
}

private static List<String> getSuperClasses(String classFile, FileSystemUtil.Delegate fs) {
try (InputStream is = Files.newInputStream(fs.getPath(classFile))) {
final ClassReader reader = new ClassReader(is);

List<String> parentClasses = new ArrayList<>();
String superName = reader.getSuperName();

if (superName != null) {
parentClasses.add(superName);
}

Collections.addAll(parentClasses, reader.getInterfaces());
return Collections.unmodifiableList(parentClasses);
} catch (IOException e) {
throw new UncheckedIOException("Failed to read class file: " + classFile, e);
}
}

private static Executor getExecutor() {
if (JavaVersion.current().isCompatibleWith(JavaVersion.VERSION_21)) {
try {
Method m = Executors.class.getMethod("newVirtualThreadPerTaskExecutor");
return (ExecutorService) m.invoke(null);
} catch (NoSuchMethodException | InvocationTargetException | IllegalAccessException e) {
throw new RuntimeException("Failed to create virtual thread executor", e);
}
}

return ForkJoinPool.commonPool();
}

// Slight optimization: skip over Object
private static boolean isNotReservedClass(String name) {
return !"java/lang/Object".equals(name);
}
}
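A note on getExecutor above: the virtual-thread executor is obtained reflectively, presumably because the project still compiles against a Java release older than 21, and it falls back to ForkJoinPool.commonPool() on older JVMs. On a Java 21+ toolchain the equivalent direct call would be:

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

// Java 21+ only; each submitted class-parsing task runs on its own virtual thread.
ExecutorService executor = Executors.newVirtualThreadPerTaskExecutor();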