Skip to content

Commit

Permalink
Fix up some issues and switch mappings backend
Browse files Browse the repository at this point in the history
  • Loading branch information
lukebemish committed Oct 18, 2024
1 parent 8fa4a69 commit 3eac523
Show file tree
Hide file tree
Showing 11 changed files with 746 additions and 73 deletions.
5 changes: 4 additions & 1 deletion build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -181,7 +181,10 @@ dependencies {
implementation 'info.picocli:picocli:4.7.6'
implementation 'org.slf4j:slf4j-simple:2.0.13'
implementation 'org.ow2.asm:asm:9.7'
implementation 'net.neoforged:srgutils:1.0.9'
implementation 'net.fabricmc:mapping-io:0.6.1'
// For reading/writing parchment json files
implementation "org.parchmentmc:feather:1.1.0"
implementation "org.parchmentmc.feather:io-gson:1.1.0"
annotationProcessor 'info.picocli:picocli-codegen:4.7.6'
implementation(project(':')) {
capabilities {
Expand Down
7 changes: 7 additions & 0 deletions settings.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,13 @@ plugins {
dependencyResolutionManagement {
repositories {
mavenCentral()
maven {
name 'ParchmentMC'
url 'https://maven.parchmentmc.org/'
content {
includeGroupAndSubgroups 'org.parchmentmc'
}
}
}

repositoriesMode = RepositoriesMode.FAIL_ON_PROJECT_REPOS
Expand Down
21 changes: 20 additions & 1 deletion src/main/java/dev/lukebemish/taskgraphrunner/runtime/Task.java
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,7 @@
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;

Expand Down Expand Up @@ -182,10 +183,24 @@ private static void executeNode(Context context, GraphNode node) {
node.task.taskFuture.complete(null);
} catch (Throwable t) {
node.task.taskFuture.completeExceptionally(t);
node.task.killDependents(node.dependents);
}
});
}

/**
 * Marker exception used to complete a task's future exceptionally when one of
 * the tasks it depends on has already failed; callers recognize this type and
 * skip it so that only the root failure is reported.
 */
public static class DependencyFailedToExecuteException extends RuntimeException {
}

/**
 * Propagates a failure of this task to every not-yet-completed transitive
 * dependent, completing each dependent's future exceptionally with a
 * {@link DependencyFailedToExecuteException}. Dependents whose futures are
 * already done are left untouched and are not traversed further.
 *
 * @param nodes the direct dependents of the node whose task failed
 */
private void killDependents(List<GraphNode> nodes) {
    for (var dependent : nodes) {
        var task = dependent.task;
        // completeExceptionally is atomic and returns false if the future was
        // already completed (successfully or otherwise). Using its result both
        // closes the check-then-act race of a separate isDone() check and
        // avoids re-traversing dependents of an already-completed task.
        if (task.taskFuture.completeExceptionally(new DependencyFailedToExecuteException())) {
            task.killDependents(dependent.dependents);
        }
    }
}

static void executeTasks(Context context, Map<String, Map<String, Path>> actions) {
var originalNodes = assemble(context, actions);
for (var node : originalNodes) {
Expand All @@ -197,6 +212,11 @@ static void executeTasks(Context context, Map<String, Map<String, Path>> actions
for (var node : originalNodes) {
try {
node.task.taskFuture.get();
} catch (ExecutionException e) {
if (e.getCause() instanceof DependencyFailedToExecuteException) {
continue;
}
suppressed.add(e.getCause());
} catch (Throwable t) {
suppressed.add(t);
}
Expand Down Expand Up @@ -288,7 +308,6 @@ private void execute(Context context) {
if (allOutputsMatch && upToDate(lastExecuted, context)) {
JsonElement newInputState = recordedValue(context);
if (newInputState.equals(existingInputState)) {
executed.set(true);
LOGGER.debug("Task `" + name + "` is up-to-date.");
return;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,33 +7,66 @@
import dev.lukebemish.taskgraphrunner.model.WorkItem;
import dev.lukebemish.taskgraphrunner.runtime.Context;
import dev.lukebemish.taskgraphrunner.runtime.TaskInput;
import net.neoforged.srgutils.IMappingBuilder;
import net.neoforged.srgutils.IMappingFile;
import net.fabricmc.mappingio.MappingReader;
import net.fabricmc.mappingio.MappingWriter;
import net.fabricmc.mappingio.format.MappingFormat;
import net.fabricmc.mappingio.tree.MappingTree;
import net.fabricmc.mappingio.tree.MemoryMappingTree;
import net.fabricmc.mappingio.tree.VisitableMappingTree;

import java.io.IOException;
import java.io.UncheckedIOException;
import java.io.Writer;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import java.util.zip.ZipFile;

public sealed interface MappingsSourceImpl {
IMappingFile makeMappings(Context context);
MappingTree makeMappings(Context context);

List<TaskInput> inputs();

static IMappingFile.Format getFormat(MappingsFormat format) {
/**
 * A closeable sink that a mapping tree can be written into. Narrows
 * {@link AutoCloseable#close()} to throw only {@link IOException}.
 */
interface MappingConsumer extends AutoCloseable {
    /**
     * Writes the given mapping tree into this consumer.
     *
     * @param mappings the mappings to write
     * @throws IOException if writing fails
     */
    void accept(MappingTree mappings) throws IOException;

    @Override
    void close() throws IOException;

    /**
     * Adapts a mapping-io {@link MappingWriter} to this interface; closing the
     * returned consumer closes the wrapped writer.
     *
     * @param writer the writer to delegate to
     * @return a consumer that replays accepted trees into {@code writer}
     */
    static MappingConsumer wrap(MappingWriter writer) {
        var delegating = new MappingConsumer() {
            @Override
            public void accept(MappingTree mappings) throws IOException {
                // A MappingTree is itself a visitor source; replay it into the writer.
                mappings.accept(writer);
            }

            @Override
            public void close() throws IOException {
                writer.close();
            }
        };
        return delegating;
    }
}

static MappingConsumer getWriter(Writer writer, MappingsFormat format) throws IOException {
return switch (format) {
case SRG -> IMappingFile.Format.SRG;
case XSRG -> IMappingFile.Format.XSRG;
case CSRG -> IMappingFile.Format.CSRG;
case TSRG -> IMappingFile.Format.TSRG;
case TSRG2 -> IMappingFile.Format.TSRG2;
case PROGUARD -> IMappingFile.Format.PG;
case TINY1 -> IMappingFile.Format.TINY1;
case TINY2 -> IMappingFile.Format.TINY;
case SRG -> MappingConsumer.wrap(MappingWriter.create(writer, MappingFormat.SRG_FILE));
case XSRG -> MappingConsumer.wrap(MappingWriter.create(writer, MappingFormat.XSRG_FILE));
case CSRG -> MappingConsumer.wrap(MappingWriter.create(writer, MappingFormat.CSRG_FILE));
case TSRG -> MappingConsumer.wrap(MappingWriter.create(writer, MappingFormat.TSRG_FILE));
case TSRG2 -> MappingConsumer.wrap(MappingWriter.create(writer, MappingFormat.TSRG_2_FILE));
case PROGUARD -> MappingConsumer.wrap(MappingWriter.create(writer, MappingFormat.PROGUARD_FILE));
case TINY -> MappingConsumer.wrap(MappingWriter.create(writer, MappingFormat.TINY_FILE));
case TINY2 -> MappingConsumer.wrap(MappingWriter.create(writer, MappingFormat.TINY_2_FILE));
case ENIGMA -> MappingConsumer.wrap(MappingWriter.create(writer, MappingFormat.ENIGMA_FILE));
case JAM -> MappingConsumer.wrap(MappingWriter.create(writer, MappingFormat.JAM_FILE));
case RECAF -> MappingConsumer.wrap(MappingWriter.create(writer, MappingFormat.RECAF_SIMPLE_FILE));
case JOBF -> MappingConsumer.wrap(MappingWriter.create(writer, MappingFormat.JOBF_FILE));
case PARCHMENT -> new ParchmentMappingWriter(writer);
};
}

Expand Down Expand Up @@ -74,16 +107,16 @@ public ChainedFiles(int andIncrement, TaskInput.FileListInput files) {
}

@Override
public IMappingFile makeMappings(Context context) {
return files.paths(context).stream()
public MappingTree makeMappings(Context context) {
return MappingsUtil.chain(files.paths(context).stream()
.map(p -> {
try {
return IMappingFile.load(p.toFile());
return loadMappings(p);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
})
.reduce(IMappingFile::chain).orElse(IMappingBuilder.create("source", "target").build().getMap("source", "target"));
.toList());
}

@Override
Expand All @@ -92,6 +125,36 @@ public List<TaskInput> inputs() {
}
}

/**
 * Loads a mapping tree from a file, dispatching on its extension.
 *
 * <p>Archives ({@code .zip}/{@code .jar}) are searched for a bundled
 * {@code mappings/mappings.tiny} entry or a {@code parchment.json} entry,
 * which is extracted to a temporary file and loaded recursively. Bare
 * {@code .json} files are read as parchment data; anything else is handed to
 * mapping-io's format auto-detection.
 *
 * @param path the file to read mappings from
 * @return the loaded mappings, fully resident in memory
 * @throws IOException if the file cannot be read or parsed
 */
private static MappingTree loadMappings(Path path) throws IOException {
    var fileName = path.getFileName().toString();
    // A name with no '.' yields the whole name here, which simply fails the
    // extension checks and falls through to mapping-io auto-detection below.
    var extension = fileName.substring(fileName.lastIndexOf('.') + 1);
    if ("zip".equals(extension) || "jar".equals(extension)) {
        // It's an archive file; open it and look for a known mappings entry.
        try (var zip = new ZipFile(path.toFile())) {
            var tinyMappings = zip.getEntry("mappings/mappings.tiny");
            if (tinyMappings != null) {
                return loadExtractedMappings(zip, tinyMappings, ".tiny");
            }
            var parchmentJson = zip.getEntry("parchment.json");
            if (parchmentJson != null) {
                return loadExtractedMappings(zip, parchmentJson, ".json");
            }
        }
        // No recognized entry: fall through and let mapping-io attempt the
        // file directly, preserving the original fallback behavior.
    } else if ("json".equals(extension)) {
        return ParchmentMappingsReader.loadMappings(path);
    }
    VisitableMappingTree tree = new MemoryMappingTree();
    MappingReader.read(path, tree);
    return tree;
}

/**
 * Extracts a single archive entry to a temporary file, loads mappings from
 * it, and deletes the temporary file afterwards (previously the extracted
 * copy was leaked on every archive load).
 */
private static MappingTree loadExtractedMappings(ZipFile zip, java.util.zip.ZipEntry entry, String suffix) throws IOException {
    var tempFile = Files.createTempFile("crochet-extracted", suffix);
    try {
        try (var input = zip.getInputStream(entry)) {
            Files.copy(input, tempFile, StandardCopyOption.REPLACE_EXISTING);
        }
        return loadMappings(tempFile);
    } finally {
        // The tree is fully in memory by now; the extracted copy is no longer needed.
        Files.deleteIfExists(tempFile);
    }
}

final class ChainedSource implements MappingsSourceImpl {
private final TaskInput.ValueInput label;
private final List<MappingsSourceImpl> sources;
Expand All @@ -102,10 +165,10 @@ public ChainedSource(int andIncrement, List<MappingsSourceImpl> sources) {
}

@Override
public IMappingFile makeMappings(Context context) {
return sources.stream()
public MappingTree makeMappings(Context context) {
return MappingsUtil.chain(sources.stream()
.map(s -> s.makeMappings(context))
.reduce(IMappingFile::chain).orElse(IMappingBuilder.create("source", "target").build().getMap("source", "target"));
.toList());
}

@Override
Expand All @@ -128,16 +191,16 @@ public MergedFiles(int andIncrement, TaskInput.FileListInput files) {
}

@Override
public IMappingFile makeMappings(Context context) {
return files.paths(context).stream()
public MappingTree makeMappings(Context context) {
return MappingsUtil.merge(files.paths(context).stream()
.map(p -> {
try {
return IMappingFile.load(p.toFile());
return loadMappings(p);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
})
.reduce(IMappingFile::merge).orElse(IMappingBuilder.create("source", "target").build().getMap("source", "target"));
.toList());
}

@Override
Expand All @@ -156,10 +219,10 @@ public MergedSource(int andIncrement, List<MappingsSourceImpl> sources) {
}

@Override
public IMappingFile makeMappings(Context context) {
return sources.stream()
public MappingTree makeMappings(Context context) {
return MappingsUtil.merge(sources.stream()
.map(s -> s.makeMappings(context))
.reduce(IMappingFile::merge).orElse(IMappingBuilder.create("source", "target").build().getMap("source", "target"));
.toList());
}

@Override
Expand All @@ -182,9 +245,9 @@ public ReverseSource(int andIncrement, MappingsSourceImpl source) {
}

@Override
public IMappingFile makeMappings(Context context) {
IMappingFile mappings = source.makeMappings(context);
return mappings.reverse();
public MappingTree makeMappings(Context context) {
MappingTree mappings = source.makeMappings(context);
return MappingsUtil.reverse(mappings);
}

@Override
Expand All @@ -205,32 +268,10 @@ public FileSource(int andIncrement, TaskInput.HasFileInput input) {
}

@Override
public IMappingFile makeMappings(Context context) {
public MappingTree makeMappings(Context context) {
try {
var path = input.path(context);
var extension = path.getFileName().toString().substring(path.getFileName().toString().lastIndexOf('.') + 1);
if ("zip".equals(extension) || "jar".equals(extension)) {
// It's an archive file; let's open it and find the entry we need, if we can, saving it to a temp file
try (var zip = new ZipFile(path.toFile())) {
var tinyMappings = zip.getEntry("mappings/mappings.tiny");
if (tinyMappings != null) {
var tempFile = Files.createTempFile("crochet-extracted", ".tiny");
try (var input = zip.getInputStream(tinyMappings)) {
Files.copy(input, tempFile);
}
return IMappingFile.load(tempFile.toFile());
}
var parchmentJson = zip.getEntry("parchment.json");
if (parchmentJson != null) {
var tempFile = Files.createTempFile("crochet-extracted", ".json");
try (var input = zip.getInputStream(parchmentJson)) {
Files.copy(input, tempFile);
}
return IMappingFile.load(tempFile.toFile());
}
}
}
return IMappingFile.load(input.path(context).toFile());
return loadMappings(path);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
Expand Down
Loading

0 comments on commit 3eac523

Please sign in to comment.