Compare commits

...

6 Commits

Author SHA1 Message Date
akastijn 42c9530348 Refactor reload method and remove redundant debug logs 2025-07-15 21:12:27 +02:00
akastijn ab86d77069 Automatically load particles that are added to or updated in the particles folder 2025-07-14 21:53:31 +02:00
Teriuihi 02c4f818a0 Adjust particle loading to work with webui coordinates 2025-06-23 00:45:07 +02:00
Teriuihi ed91cf6810 Add Jenkins pipeline for building, archiving, and sending Discord notifications 2025-06-22 23:38:21 +02:00
Teriuihi 8aa22a3e7a Add support for color gradients and sizes in particle configurations
Enhanced `ParticleConfig` to handle color gradients and sizes for particles with `DustOptions` and `DustTransition`. Updated `ParticleInfo` to include `colorGradientEnd` and `size` properties. Refactored particle data handling for improved flexibility.
2025-06-22 22:07:10 +02:00
Teriuihi 481cb007bf Correct directory name 2025-06-22 21:47:28 +02:00
12 changed files with 397 additions and 56 deletions

Jenkinsfile (vendored, new file)

@@ -0,0 +1,20 @@
pipeline {
    agent any
    stages {
        stage('Gradle') {
            steps {
                sh 'bash gradlew shadowJar'
            }
        }
        stage('Archive') {
            steps {
                archiveArtifacts artifacts: 'build/libs/', followSymlinks: false
            }
        }
        stage('discord') {
            steps {
                discordSend description: "Build: ${BUILD_NUMBER}", showChangeset: true, result: currentBuild.currentResult, title: currentBuild.fullProjectName, webhookURL: env.discordwebhook
            }
        }
    }
}

AltitudeParticles.java

@@ -7,16 +7,23 @@ import com.alttd.config.ParticleConfig;
 import com.alttd.database.Database;
 import com.alttd.listeners.*;
 import com.alttd.objects.APartType;
+import com.alttd.storage.AutoReload;
 import com.alttd.util.Logger;
 import lombok.Getter;
 import org.bukkit.plugin.PluginManager;
 import org.bukkit.plugin.java.JavaPlugin;
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Path;
 
 public class AltitudeParticles extends JavaPlugin {
 
     @Getter
     public static AltitudeParticles instance;
+    private static AutoReload autoReload = null;
 
     @Override
     public void onLoad() {
         instance = this;
@@ -46,9 +53,30 @@ public class AltitudeParticles extends JavaPlugin {
     }
 
     public void reload() {
+        Logger.info("Reloading AltitudeParticles...");
         Config.reload();
         DatabaseConfig.reload();
         ParticleConfig.reload();
+        startAutoReload();
+    }
+
+    private static void startAutoReload() {
+        Path path = Path.of(Config.AUTO_RELOAD_PATH);
+        File file = path.toFile();
+        if (file.exists() && file.isDirectory()) {
+            try {
+                if (autoReload != null) {
+                    autoReload.stop();
+                }
+                autoReload = new AutoReload(path);
+                autoReload.startWatching();
+            } catch (IOException e) {
+                Logger.severe("Failed to start AutoReload at path %", Config.AUTO_RELOAD_PATH);
+                Logger.error("Failed to start AutoReload", e);
+            }
+        } else {
+            Logger.severe("Failed to start AutoReload at path %", Config.AUTO_RELOAD_PATH);
+        }
     }
 }

Config.java

@@ -98,4 +98,9 @@ public final class Config extends AbstractConfig {
         CLICK_BLOCK_COOL_DOWN = config.getInt("cool_down.click-block", CLICK_BLOCK_COOL_DOWN);
         TELEPORT_ARRIVE_COOL_DOWN = config.getInt("cool_down.teleport-arrive", TELEPORT_ARRIVE_COOL_DOWN);
     }
+
+    public static String AUTO_RELOAD_PATH = "/mnt/configs/AltitudeParticles/particles";
+    private static void loadAutoReload() {
+        AUTO_RELOAD_PATH = config.getString("auto-reload.path", AUTO_RELOAD_PATH);
+    }
 }

ParticleConfig.java

@@ -8,26 +8,32 @@ import com.alttd.objects.ParticleSet;
 import com.alttd.storage.ParticleStorage;
 import com.alttd.util.Logger;
 import com.destroystokyo.paper.ParticleBuilder;
+import com.fasterxml.jackson.databind.DeserializationFeature;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import org.bukkit.Color;
 import org.bukkit.Material;
 import org.bukkit.Particle;
 import org.bukkit.block.data.BlockData;
 import org.bukkit.inventory.ItemStack;
+import org.jetbrains.annotations.NotNull;
 
 import java.io.File;
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HexFormat;
-import java.util.List;
-import java.util.Map;
+import java.nio.file.*;
+import java.nio.file.attribute.BasicFileAttributes;
+import java.util.*;
 
 public class ParticleConfig {
 
+    private static final int MAX_DEPTH = 1;
     private static final File particlesDir = new File(File.separator + "mnt" + File.separator + "configs"
             + File.separator + "AltitudeParticles" + File.separator + "particles");
     private static ParticleConfig instance = null;
     private static final ObjectMapper objectMapper = new ObjectMapper();
+
+    static {
+        objectMapper.disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES);
+        objectMapper.disable(DeserializationFeature.FAIL_ON_INVALID_SUBTYPE);
+    }
 
     private static ParticleConfig getInstance() {
         if (instance == null)
@@ -37,28 +43,92 @@ public class ParticleConfig {
     /**
      * Finds all files in particles directory that are valid .json files
+     * Only searches one level deep into subdirectories
+     *
      * @return all files found
      */
    private List<File> getJsonFiles() {
         List<File> files = new ArrayList<>();
+        // Ensure particles directory exists
+        if (!ensureParticlesDirectoryExists()) {
+            return files;
+        }
+        try {
+            Files.walkFileTree(particlesDir.toPath(), getJsonFileVistor(files));
+        } catch (IOException e) {
+            Logger.warning("Error while traversing directory: " + e.getMessage());
+        }
+        return files;
+    }
+
+    private FileVisitor<? super @NotNull Path> getJsonFileVistor(List<File> files) {
+        return new SimpleFileVisitor<>() {
+            private int depth = 0;
+
+            @Override
+            public @NotNull FileVisitResult preVisitDirectory(@NotNull Path dir, @NotNull BasicFileAttributes attrs) {
+                if (depth > ParticleConfig.MAX_DEPTH) {
+                    return FileVisitResult.SKIP_SUBTREE;
+                }
+                depth++;
+                return FileVisitResult.CONTINUE;
+            }
+
+            @Override
+            public @NotNull FileVisitResult visitFile(@NotNull Path file, @NotNull BasicFileAttributes attrs) {
+                File physicalFile = file.toFile();
+                if (isValidJsonFile(physicalFile)) {
+                    files.add(physicalFile);
+                }
+                return FileVisitResult.CONTINUE;
+            }
+
+            @Override
+            public @NotNull FileVisitResult postVisitDirectory(@NotNull Path dir, IOException exc) {
+                depth--;
+                return FileVisitResult.CONTINUE;
+            }
+        };
+    }
+
+    /**
+     * Ensures that the particles directory exists and is a directory
+     *
+     * @return true if directory exists or was created successfully, false otherwise
+     */
+    private boolean ensureParticlesDirectoryExists() {
         if (!particlesDir.exists()) {
-            if (!particlesDir.mkdir())
+            if (!particlesDir.mkdirs()) {
                 Logger.warning("Unable to create particles directory");
-            return files;
+                return false;
+            }
+            return true;
         }
         if (!particlesDir.isDirectory()) {
-            Logger.warning("Particles directory doesn't exist (it's a file??)");
-            return files;
+            Logger.warning("Particles path exists but is not a directory: " + particlesDir.getAbsolutePath());
+            return false;
         }
-        File[] validFiles = particlesDir.listFiles(file -> file.isFile() && file.canRead() && file.getName().endsWith(".json"));
-        if (validFiles == null)
-            return files;
-        files.addAll(List.of(validFiles));
-        return files;
+        return true;
+    }
+
+    /**
+     * Checks if a file is a valid JSON file
+     *
+     * @param file the file to check
+     * @return true if the file is a valid JSON file
+     */
+    private boolean isValidJsonFile(File file) {
+        return file.isFile() && file.canRead() && file.getName().endsWith(".json");
     }
 
     /**
      * Converts a ParticleData object to a ParticleSet
+     *
      * @param particleData The ParticleData object to convert
      * @return A ParticleSet created from the ParticleData
      */
@@ -78,30 +148,41 @@
             double z = particleInfo.getZ();
             ParticleBuilder particleBuilder = new ParticleBuilder(particleType);
+            Class<?> dataType = particleType.getDataType();
 
             // Handle different particle data types
-            if (particleType.getDataType().equals(Particle.DustOptions.class) && particleInfo.getColor() != null) {
-                int rgb = HexFormat.fromHexDigits(particleInfo.getColor());
-                particleBuilder.data(new Particle.DustOptions(Color.fromRGB(rgb), 1));
-            }
-//            else if (particleType.getDataType().equals(MaterialData.class)) {
-//                //TODO implement
-//            }
-            else if (particleType.getDataType().equals(BlockData.class)) {
-                //TODO implement
-            } else if (particleType.getDataType().equals(Integer.class)) {
-                //TODO implement
-            } else if (particleType.getDataType().equals(Float.class)) {
-                //TODO implement
-            } else if (particleType.getDataType().equals(Particle.DustTransition.class)) {
-                //TODO implement
-            } else if (particleType.getDataType().equals(ItemStack.class)) {
-                //TODO implement
+            if (dataType.equals(Particle.DustOptions.class)) {
+                if (particleInfo.getColor() != null) {
+                    particleBuilder.color(getColor(particleInfo.getColor()),
+                            particleInfo.getSize());
+                }
+            } else if (dataType.equals(Particle.DustTransition.class)) {
+                if (particleInfo.getColorGradientEnd() != null) {
+                    particleBuilder.colorTransition(getColor(particleInfo.getColor()),
+                            getColor(particleInfo.getColorGradientEnd()),
+                            particleInfo.getSize());
+                }
+            }
+            else if (dataType.equals(Color.class)) {
+                particleBuilder.color(getColor(particleInfo.getColor()));
+            } else if (dataType.equals(BlockData.class)) {
+                particleBuilder.data(Material.STONE.createBlockData());
+                //TODO implement
+            } else if (dataType.equals(Integer.class)) {
+                particleBuilder.data(1);
+                //TODO implement
+            } else if (dataType.equals(Float.class)) {
+                particleBuilder.data(1f);
+                //TODO implement
+            } else if (dataType.equals(ItemStack.class)) {
+                particleBuilder.data(new ItemStack(Material.STONE));
+                //TODO implement
             } else if (particleInfo.getExtra() != null) {
                 particleBuilder.extra(particleInfo.getExtra());
             }
 
-            aParticleList.add(new AParticle(x, y, z, randomOffset, particleBuilder));
+            //Add 0.2 to adjust for the player model being 1.6 blocks high
+            aParticleList.add(new AParticle(x, y + 0.2, z, randomOffset, particleBuilder));
         }
 
         loadedFrames.add(new Frame(aParticleList));
@@ -125,11 +206,26 @@
         );
     }
 
+    private Color getColor(String hexColor) {
+        int color = HexFormat.fromHexDigits(hexColor);
+        if (hexColor.length() == 6) {
+            return Color.fromARGB(color);
+        } else {
+            return Color.fromRGB(color);
+        }
+    }
+
     public static void reload() {
         ParticleStorage.clear();
-        ParticleConfig instance = getInstance();
+        instance = getInstance();
         for (File file : instance.getJsonFiles()) {
+            loadParticleFromFile(file);
+        }
+    }
+
+    public static void loadParticleFromFile(File file) {
+        instance = getInstance();
         try {
             ParticleData particleData = objectMapper.readValue(file, ParticleData.class);
@@ -143,4 +239,3 @@ public class ParticleConfig {
             }
-        }
     }
 }

FrameSpawnerLocation.java

@@ -1,4 +1,4 @@
-package com.alttd.frameSpawners;
+package com.alttd.frame_spawners;
 
 import com.alttd.AltitudeParticles;
 import com.alttd.config.Config;

FrameSpawnerPlayer.java

@@ -1,4 +1,4 @@
-package com.alttd.frameSpawners;
+package com.alttd.frame_spawners;
 
 import com.alttd.AltitudeParticles;
 import com.alttd.config.Config;

ParticleData.java

@@ -36,6 +36,14 @@ import java.util.List;
 @Setter
 @Getter
 public class ParticleData {
+    // TODO add optional property for a list of users that can use the particle
+    // If that list is present the particle should be loaded as a dev particle
+    // Dev particles should disable all others while in use and all be grouped together
+    // (since the dev should know what each particle is and does)
+    // Seeing dev particles should require a permission
+    @JsonProperty("user_list")
+    private List<String> userList;
+
     @JsonProperty("particle_name")
     private String particleName;
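
Going by the Jackson annotations in this hunk, a particle .json can now carry a "user_list" alongside "particle_name" at the ParticleData level. A minimal fragment for illustration only; the player names are hypothetical and the rest of the ParticleData fields are omitted because they are not shown in this compare:

{
  "particle_name": "example_particle",
  "user_list": ["SomeDeveloper", "AnotherDeveloper"]
}

Per the TODO above, files carrying a user_list are intended to be loaded as dev particles and gated behind a permission.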

ParticleInfo.java

@@ -32,6 +32,12 @@ public class ParticleInfo {
     // For DustOptions
     private String color;
+    @JsonProperty("color_gradient_end")
+    private String colorGradientEnd;
+
+    // For DustOptions
+    @JsonProperty(value = "size", defaultValue = "1")
+    private int size;
 
     // For other particle types
     private Double extra;
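
Together with the existing "color" field, a single dust-transition entry can now specify both gradient endpoints and a size. A sketch of just these fields, with hypothetical hex values (getColor() in ParticleConfig parses them as plain hex digits) and the surrounding frame/particle nesting left out because it is not visible in this compare:

{
  "color": "ff5500",
  "color_gradient_end": "220044",
  "size": 1
}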

(file in com.alttd.objects)

@@ -2,8 +2,8 @@ package com.alttd.objects;
 
 import com.alttd.AltitudeParticles;
 import com.alttd.config.Config;
-import com.alttd.frameSpawners.FrameSpawnerLocation;
-import com.alttd.frameSpawners.FrameSpawnerPlayer;
+import com.alttd.frame_spawners.FrameSpawnerLocation;
+import com.alttd.frame_spawners.FrameSpawnerPlayer;
 import com.alttd.storage.PlayerSettings;
 import com.alttd.util.Logger;
 import de.myzelyam.api.vanish.VanishAPI;

AutoReload.java (new file)

@@ -0,0 +1,171 @@
package com.alttd.storage;

import com.alttd.config.ParticleConfig;
import lombok.extern.slf4j.Slf4j;
import org.jetbrains.annotations.NotNull;

import java.io.IOException;
import java.nio.file.*;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.HashMap;
import java.util.Map;

@Slf4j
public class AutoReload {

    private final WatchService watchService;
    private final Map<WatchKey, Path> keys;
    private final Path rootDirectory;
    private volatile boolean running = true;

    public AutoReload(Path directory) throws IOException {
        this.watchService = FileSystems.getDefault().newWatchService();
        this.keys = new HashMap<>();
        this.rootDirectory = directory;
        register(directory);
        registerAll(directory);
    }

    private void registerAll(Path start) throws IOException {
        Files.walkFileTree(start, new SimpleFileVisitor<>() {
            @Override
            public @NotNull FileVisitResult preVisitDirectory(@NotNull Path path, @NotNull BasicFileAttributes attrs) throws IOException {
                if (path.toFile().isDirectory()) {
                    register(path);
                }
                return FileVisitResult.CONTINUE;
            }
        });
    }

    private void register(@NotNull Path dir) throws IOException {
        WatchKey key = dir.register(watchService,
                StandardWatchEventKinds.ENTRY_CREATE,
                StandardWatchEventKinds.ENTRY_DELETE,
                StandardWatchEventKinds.ENTRY_MODIFY);
        keys.put(key, dir);
    }

    public void startWatching() {
        log.info("Starting watch thread.");
        Thread watchThread = new Thread(() -> {
            log.info("Watch thread started.");
            while (running) {
                log.info("Watch thread loop start");
                WatchKey key;
                try {
                    key = watchService.take();
                    log.info("Watch thread loop key {}", key.toString());
                } catch (InterruptedException e) {
                    log.error("Interrupted while waiting for key", e);
                    return;
                }
                if (!running) {
                    log.info("Exiting watch thread.");
                    return;
                }
                Path dir = keys.get(key);
                if (dir == null) {
                    log.warn("Detected unknown key: {}. Ignoring.", key.toString());
                    continue;
                }
                detectChanges(key, dir);
                if (!key.reset()) {
                    keys.remove(key);
                    if (keys.isEmpty()) {
                        log.info("No longer watching any directories. Exiting.");
                        break;
                    }
                }
            }
        });
        watchThread.start();
    }

    private void detectChanges(@NotNull WatchKey key, Path dir) {
        for (WatchEvent<?> event : key.pollEvents()) {
            WatchEvent.Kind<?> kind = event.kind();
            if (kind == StandardWatchEventKinds.OVERFLOW) {
                log.warn("Detected overflow event. Ignoring.");
                continue;
            }
            Path child = resolveEventPath(event, dir);
            boolean isDirectory = Files.isDirectory(child);
            if (shouldIgnoreDirectoryEvent(isDirectory, dir)) {
                continue;
            }
            if (kind == StandardWatchEventKinds.ENTRY_CREATE && isDirectory) {
                handleNewDirectoryCreation(child);
                continue;
            }
            if (isDirectory) {
                continue;
            }
            handleFileEvent(kind, child);
        }
    }

    private @NotNull Path resolveEventPath(@NotNull WatchEvent<?> event, Path dir) {
        Object context = event.context();
        if (!(context instanceof Path path)) {
            throw new IllegalArgumentException("Expected event context to be a Path, but got: " + context);
        }
        return dir.resolve(path);
    }

    private boolean shouldIgnoreDirectoryEvent(boolean isDirectory, Path dir) {
        if (isDirectory && !dir.equals(rootDirectory)) {
            log.warn("Detected directory {} outside of root directory. Ignoring.", dir);
            return true;
        }
        return false;
    }

    private void handleNewDirectoryCreation(Path child) {
        try {
            log.info("Registering new directory: {}", child);
            registerAll(child);
        } catch (IOException e) {
            log.error("Failed to register directory: {}", child);
        }
    }

    private void handleFileEvent(WatchEvent.Kind<?> kind, Path child) {
        if (kind == StandardWatchEventKinds.ENTRY_MODIFY) {
            log.debug("Detected file modification: {}", child);
            reloadFile(child);
        } else if (kind == StandardWatchEventKinds.ENTRY_DELETE) {
            log.debug("Detected file deletion: {}", child);
            handleFileDeletion();
        } else if (kind == StandardWatchEventKinds.ENTRY_CREATE) {
            log.debug("Detected file creation: {}", child);
            reloadFile(child);
        } else {
            log.warn("Unknown event kind: {}", kind);
        }
    }

    private void reloadFile(Path child) {
        ParticleConfig.loadParticleFromFile(child.toFile());
    }

    private void handleFileDeletion() {
        log.info("Detected file deletion. Reloading all particles.");
        ParticleConfig.reload();
    }

    public void stop() {
        running = false;
    }
}

ParticleStorage.java

@@ -2,18 +2,26 @@ package com.alttd.storage;
 
 import com.alttd.objects.APartType;
 import com.alttd.objects.ParticleSet;
+import com.alttd.util.Logger;
 
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Optional;
 
 public class ParticleStorage {
 
     private static final HashMap<APartType, List<ParticleSet>> particles = new HashMap<>();
 
     public static void addParticleSet(APartType aPartType, ParticleSet particleSet) {
         List<ParticleSet> particleSets = particles.getOrDefault(aPartType, new ArrayList<>());
-        if (particleSets.contains(particleSet))
-            return;
+        Optional<ParticleSet> existingParticleSet = particleSets.stream()
+                .filter(p -> p.getParticleId().equalsIgnoreCase(particleSet.getParticleId()))
+                .findAny();
+
+        if (existingParticleSet.isPresent()) {
+            Logger.warning("Overwriting particle set %", particleSet.getParticleId());
+            particleSets.remove(existingParticleSet.get());
+        }
         particleSets.add(particleSet);
         particles.put(aPartType, particleSets);
     }