Skip to content

Commit

Permalink
de-synched things, added progress, fixed some edge cases. Export free…
Browse files Browse the repository at this point in the history
…zes at item #7224, no idea why
  • Loading branch information
Hermanoid committed Jan 14, 2024
1 parent 271be98 commit c7ced80
Show file tree
Hide file tree
Showing 3 changed files with 83 additions and 28 deletions.
74 changes: 51 additions & 23 deletions src/main/java/com/hermanoid/nerd/RecipeDumper.java
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

import codechicken.nei.ItemList;
import codechicken.nei.NEIClientConfig;
import codechicken.nei.NEIClientUtils;
import codechicken.nei.PositionedStack;
import codechicken.nei.config.DataDumper;
import codechicken.nei.recipe.GuiCraftingRecipe;
Expand All @@ -20,11 +21,14 @@
import net.minecraft.util.ChatComponentTranslation;
import net.minecraft.util.RegistryNamespaced;
import org.apache.commons.lang3.NotImplementedException;
import org.jetbrains.annotations.NotNull;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.List;
import java.util.Timer;
import java.util.TimerTask;
import java.util.stream.Stream;

// This dumper will likely be pretty heavy when run on a large modpack
Expand All @@ -39,19 +43,20 @@ public RecipeDumper(String name) {
}

public String version = "1.0";
long progressInterval = 2500L;

public int totalQueries = -1;
public int dumpedQueries = -1;
private boolean dumpActive = false;
private Timer timer = new Timer();

private Multimap<String, IRecipeInfoExtractor> recipeInfoExtractors = HashMultimap.create();
private final Multimap<String, IRecipeInfoExtractor> recipeInfoExtractors = HashMultimap.create();

public void registerRecipeInfoExtractor(IRecipeInfoExtractor extractor){
    // Register the extractor under every handler ID it declares support for,
    // so lookups by handler ID can find all applicable extractors.
    for (String handlerId : extractor.getCompatibleHandlers()) {
        recipeInfoExtractors.put(handlerId, extractor);
    }
}



@Override
public String[] header() {
return new String[] { "Name", "ID", "NBT", "Handler Name", "Handler Recipe Index", "Ingredients", "Other Items",
Expand All @@ -76,7 +81,6 @@ private JsonObject stackToDetailedJson(ItemStack stack) {
private JsonArray stacksToJsonArray(List<PositionedStack> stacks) {
JsonArray arr = new JsonArray();
for (PositionedStack stack : stacks) {
Item item = stack.item.getItem();
JsonObject itemObj = stackToDetailedJson(stack.item);
arr.add(itemObj);
}
Expand Down Expand Up @@ -119,9 +123,6 @@ private JsonObject extractJsonRecipeData(QueryResult queryResult){
for (int recipeIndex = 0; recipeIndex < handler.numRecipes(); recipeIndex++) {
JsonObject recipeDump = new JsonObject();
// Collapse Ingredient Lists into JSON format to keep CSV file sizes from going *completely* crazy
// List<> ingredients = handler.getIngredientStacks(recipeIndex).stream().map(
// pos_stack -> pos_stack.item.getItem()
// ).collect(Collectors.toList());
recipeDump.add("ingredients", stacksToJsonArray(handler.getIngredientStacks(recipeIndex)));
recipeDump.add("other_stacks", stacksToJsonArray(handler.getOtherStacks(recipeIndex)));
if (handler.getResultStack(recipeIndex) != null) {
Expand All @@ -146,7 +147,7 @@ public Stream<JsonObject> getQueryDumps(List<ItemStack> items) {
// Since the bulk of work here is the query, which is already parallel,
// I'm not sure how much performance gain (if any) this would cause.
return items.stream()
.limit(500)
// .limit(500)
.map(this::performQuery)
// .parallel()
.map(this::extractJsonRecipeData);
Expand All @@ -162,11 +163,6 @@ public String renderName() {
@Override
public String getFileExtension() {
    // Dumps are always written as JSON; only a single output mode exists
    // (modeCount() returns 1), so no mode switch is needed here.
    // (Removed commented-out CSV/JSON switch left over from the old multi-mode code.)
    return ".json";
}

@Override
Expand All @@ -191,8 +187,7 @@ public void dumpTo(File file) throws IOException {
dumpJson(file);
}

// If you don't wanna hold all this crap in memory at once, you're going to have to work for it.
public void dumpJson(File file) throws IOException {
private void doDumpJson(File file){
final String[] header = header();
final FileWriter writer;
final JsonWriter jsonWriter;
Expand All @@ -211,9 +206,6 @@ public void dumpJson(File file) throws IOException {

jsonWriter.name("queries").beginArray();
Object lock = new Object();

// AtomicReference<IOException> error = new AtomicReference<>(null);

getQueryDumps(items).forEach(obj ->
{
synchronized (lock){
Expand All @@ -222,11 +214,6 @@ public void dumpJson(File file) throws IOException {
}
});

// // Super cool error handling.
// if (error.get() != null){
// throw error.get();
// }

jsonWriter.endArray();

jsonWriter.endObject();
Expand All @@ -239,6 +226,47 @@ public void dumpJson(File file) throws IOException {
dumpedQueries = -1;
}

// If you don't wanna hold all this crap in memory at once, you're going to have to work for it.
public void dumpJson(File file) throws IOException {
    // Check-and-set atomically: the original unsynchronized "if (dumpActive)" guard
    // was a check-then-act race — two rapid dump requests could both pass the check
    // before either set the flag, starting two concurrent dumps over the same file.
    synchronized (this) {
        if (dumpActive) {
            NEIClientUtils.printChatMessage(new ChatComponentTranslation(
                "nei.options.tools.dump.recipes.duplicate"
            ));
            return;
        }
        dumpActive = true;
    }
    TimerTask progressTask = getProgressTask();
    Thread workerThread = new Thread(() -> {
        try {
            doDumpJson(file);
        } finally {
            // Always clear the active flag and stop the progress chat messages,
            // even if the dump threw partway through.
            dumpActive = false;
            progressTask.cancel();
        }
        // Only reached when doDumpJson completed without throwing.
        NEIClientUtils.printChatMessage(new ChatComponentTranslation(
            "nei.options.tools.dump.recipes.complete"
        ));
    }, "NERD-RecipeDumper"); // named thread makes thread dumps easier to read
    workerThread.start();
}

@NotNull
private TimerTask getProgressTask() {
    // Schedules (on the shared timer) and returns a task that periodically reports
    // dump progress to chat; the caller cancels it when the dump finishes.
    TimerTask progressTask = new TimerTask() {
        @Override
        public void run() {
            // Guard: totalQueries starts at -1 and the first tick fires at delay 0,
            // so without this check the first message could read "-1/-1 (100.00%)"
            // or divide by zero in the percentage. Skip until real totals exist.
            if (totalQueries <= 0 || dumpedQueries < 0) {
                return;
            }
            NEIClientUtils.printChatMessage(new ChatComponentTranslation(
                "nei.options.tools.dump.recipes.progress",
                dumpedQueries,
                totalQueries,
                (float) dumpedQueries / totalQueries * 100
            ));
        }
    };
    timer.schedule(progressTask, 0, progressInterval);
    return progressTask;
}

@Override
public int modeCount() {
return 1; // Only JSON
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,13 +3,18 @@
import codechicken.nei.recipe.ICraftingHandler;
import codechicken.nei.util.NBTJson;
import com.google.gson.*;
import com.google.gson.reflect.TypeToken;
import gregtech.api.util.GT_Recipe;
import gregtech.common.fluid.GT_Fluid;
import gregtech.nei.GT_NEI_DefaultHandler;
import net.minecraft.item.ItemStack;
import net.minecraftforge.fluids.Fluid;
import net.minecraftforge.fluids.FluidStack;

import java.lang.reflect.Type;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class GTDefaultRecipeInfoExtractor implements IRecipeInfoExtractor {
Expand All @@ -35,9 +40,12 @@ private static class GTRecipeExclusionStrategy implements ExclusionStrategy {
// The automatic serializer doesn't like the rarity enum, I dunno why
"rarity",
// I don't think a fluid's corresponding block is useful, and it causes breaky recursion
"block"
"block",
// Some recipes are GT_Recipe_WithAlt, which have more evil ItemStacks we can't serialize.
"mOreDictAlt"

));
List<Type> badTypes = Arrays.asList(GT_NEI_DefaultHandler.class, ItemStack.class, FluidStack.class);
@Override
public boolean shouldSkipField(FieldAttributes f) {

Expand All @@ -46,11 +54,12 @@ public boolean shouldSkipField(FieldAttributes f) {

@Override
public boolean shouldSkipClass(Class<?> clazz) {
    // Skip classes that either cause breaky recursion (GT_NEI_DefaultHandler) or
    // are handled by dedicated serializers instead (ItemStack, FluidStack) —
    // see the badTypes list. (Removed the stale duplicate return statement that
    // made the second return unreachable.)
    return badTypes.contains(clazz);
}
}

private class FluidStackSerializer implements JsonSerializer<FluidStack>{
private static final Type fluidType = new TypeToken<Fluid>(){}.getType();
@Override
public JsonElement serialize(FluidStack src, Type typeOfSrc, JsonSerializationContext context) {
// Fluids have some goofy unserializable things, similar to ItemStacks
Expand All @@ -59,12 +68,22 @@ public JsonElement serialize(FluidStack src, Type typeOfSrc, JsonSerializationCo
if(src.tag != null){
root.add("tag", NBTJson.toJsonObject(src.tag));
}
JsonObject fluid = (JsonObject) gson.toJsonTree(src.getFluid());
// Some fluids (like water) are defined using anonymous types
// I think that specifying the type for GT_Fluids would throw away information,
// but for non-GT_Fluids, we'll need to un-anonymize this beeswax.
JsonObject fluid = null;
if(src.getFluid().getClass().equals(GT_Fluid.class)){
fluid = (JsonObject) gson.toJsonTree(src.getFluid());
}else{
fluid = (JsonObject) gson.toJsonTree(src.getFluid(), fluidType);
}
// Manually serialize rarity bc wierdness
fluid.addProperty("rarity", src.getFluid().getRarity().toString());
// Slap on some info that's only available via method calls
fluid.addProperty("id", src.getFluidID());
root.add("fluid", fluid);


return root;
}
}
Expand All @@ -85,7 +104,12 @@ public GTDefaultRecipeInfoExtractor(){
public JsonElement extractInfo(ICraftingHandler handler, int recipeIndex) {
    GT_NEI_DefaultHandler gthandler = (GT_NEI_DefaultHandler) handler;
    GT_Recipe recipe = gthandler.getCache().get(recipeIndex).mRecipe;
    try {
        return gson.toJsonTree(recipe);
    } catch (Exception e) {
        // Some recipes carry fields Gson cannot serialize; skip the recipe rather
        // than aborting the whole dump. The original swallowed the exception with a
        // useless "O poop" message — log real context + stack trace for diagnosis.
        System.err.println("Failed to serialize GT recipe at index " + recipeIndex
            + " from handler " + handler.getClass().getName());
        e.printStackTrace();
        return null;
    }
}

@Override
Expand Down
5 changes: 4 additions & 1 deletion src/main/resources/assets/nei/lang/en_US.lang
Original file line number Diff line number Diff line change
@@ -1,4 +1,7 @@
nei.options.tools.dump.recipes=NEI Recipes
nei.options.tools.dump.recipes.dumped=recipes were dumped to %s
nei.options.tools.dump.recipes.dumped=Recipe dump started to %s
nei.options.tools.dump.recipes.mode.0=CSV
nei.options.tools.dump.recipes.mode.1=Json
nei.options.tools.dump.recipes.progress=Recipe Dump Progress %d/%d (%.2f%%)
nei.options.tools.dump.recipes.complete=Recipe Dump Complete!
nei.options.tools.dump.recipes.duplicate=Recipe Dump is already active, ignoring request

0 comments on commit c7ced80

Please sign in to comment.