Use the plugin logger instead of the global logger

Author: Brokkonaut
Date: 2018-07-31 14:16:34 +02:00
Parent: 76500f2e51
Commit: a1d622ebbd
5 changed files with 93 additions and 100 deletions
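
Every call site below replaces the static Bukkit.getLogger() (the server-wide logger) with logblock.getLogger(), the plugin-scoped logger that LogBlock inherits from JavaPlugin. Bukkit wraps a plugin's logger in a PluginLogger that prefixes each message with the plugin's name, so output is attributable to LogBlock without going through the global logger. A minimal sketch of the difference; the ExamplePlugin class and the messages are illustrative and not part of this commit:

    // Illustrative sketch only; not part of this commit.
    import java.util.logging.Logger;

    import org.bukkit.Bukkit;
    import org.bukkit.plugin.java.JavaPlugin;

    public class ExamplePlugin extends JavaPlugin {
        @Override
        public void onEnable() {
            // Global server logger: messages carry no per-plugin prefix.
            Logger global = Bukkit.getLogger();
            global.info("enabled");   // typically logged as: enabled

            // Plugin logger: Bukkit's PluginLogger prepends the plugin name
            // declared in plugin.yml.
            Logger scoped = getLogger();
            scoped.info("enabled");   // typically logged as: [ExamplePlugin] enabled
        }
    }

The classes touched by this commit are not the plugin main class, so they reach the same plugin logger through their existing logblock field.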

View File

@ -36,8 +36,6 @@ import static de.diddiz.util.BukkitUtils.giveTool;
import static de.diddiz.util.BukkitUtils.saveSpawnHeight;
import static de.diddiz.util.Utils.isInt;
import static de.diddiz.util.Utils.listing;
import static org.bukkit.Bukkit.getLogger;
import static org.bukkit.Bukkit.getServer;
public class CommandsHandler implements CommandExecutor {
private final LogBlock logblock;
@ -114,7 +112,7 @@ public class CommandsHandler implements CommandExecutor {
if (logblock.hasPermission(sender, "logblock.lookup")) {
World world = null;
if (args.length > 1) {
world = getServer().getWorld(args[1]);
world = logblock.getServer().getWorld(args[1]);
} else if (sender instanceof Player) {
world = ((Player) sender).getWorld();
}
@ -356,7 +354,7 @@ public class CommandsHandler implements CommandExecutor {
sender.sendMessage(ChatColor.RED + "Not enough arguments given");
} catch (final Exception ex) {
sender.sendMessage(ChatColor.RED + "Error, check server.log");
getLogger().log(Level.WARNING, "Exception in commands handler: ", ex);
logblock.getLogger().log(Level.WARNING, "Exception in commands handler: ", ex);
}
return true;
}
@ -427,7 +425,7 @@ public class CommandsHandler implements CommandExecutor {
rs.close();
}
} catch (final SQLException ex) {
getLogger().log(Level.SEVERE, "[CommandsHandler] SQL exception on close", ex);
logblock.getLogger().log(Level.SEVERE, "[CommandsHandler] SQL exception on close", ex);
}
}
}
@ -492,7 +490,7 @@ public class CommandsHandler implements CommandExecutor {
}
} catch (final Exception ex) {
sender.sendMessage(ChatColor.RED + "Exception, check error log");
getLogger().log(Level.SEVERE, "[Lookup] " + params.getQuery() + ": ", ex);
logblock.getLogger().log(Level.SEVERE, "[Lookup] " + params.getQuery() + ": ", ex);
} finally {
close();
}
@ -548,7 +546,7 @@ public class CommandsHandler implements CommandExecutor {
sender.sendMessage(ChatColor.GREEN + "Wrote " + counter + " lines.");
} catch (final Exception ex) {
sender.sendMessage(ChatColor.RED + "Exception, check error log");
getLogger().log(Level.SEVERE, "[WriteLogFile] " + params.getQuery() + " (file was " + file.getAbsolutePath() + "): ", ex);
logblock.getLogger().log(Level.SEVERE, "[WriteLogFile] " + params.getQuery() + " (file was " + file.getAbsolutePath() + "): ", ex);
} finally {
close();
}
@ -621,7 +619,7 @@ public class CommandsHandler implements CommandExecutor {
}
} catch (final Exception ex) {
sender.sendMessage(ChatColor.RED + "Exception, check error log");
getLogger().log(Level.SEVERE, "[Teleport] " + params.getQuery() + ": ", ex);
logblock.getLogger().log(Level.SEVERE, "[Teleport] " + params.getQuery() + ": ", ex);
} finally {
close();
}
@ -699,7 +697,7 @@ public class CommandsHandler implements CommandExecutor {
}
} catch (final Exception ex) {
sender.sendMessage(ChatColor.RED + "Exception, check error log");
getLogger().log(Level.SEVERE, "[Rollback] " + params.getQuery() + ": ", ex);
logblock.getLogger().log(Level.SEVERE, "[Rollback] " + params.getQuery() + ": ", ex);
} finally {
close();
}
@ -760,7 +758,7 @@ public class CommandsHandler implements CommandExecutor {
sender.sendMessage(ChatColor.GREEN + "Redo finished successfully (" + editor.getElapsedTime() + " ms, " + editor.getSuccesses() + "/" + changes + " blocks" + (editor.getErrors() > 0 ? ", " + ChatColor.RED + editor.getErrors() + " errors" + ChatColor.GREEN : "") + (editor.getBlacklistCollisions() > 0 ? ", " + editor.getBlacklistCollisions() + " blacklist collisions" : "") + ")");
} catch (final Exception ex) {
sender.sendMessage(ChatColor.RED + "Exception, check error log");
getLogger().log(Level.SEVERE, "[Redo] " + params.getQuery() + ": ", ex);
logblock.getLogger().log(Level.SEVERE, "[Redo] " + params.getQuery() + ": ", ex);
} finally {
close();
}
@ -808,7 +806,7 @@ public class CommandsHandler implements CommandExecutor {
state.execute("SELECT * FROM `" + table + "-blocks` " + join + params.getWhere() + "INTO OUTFILE '" + new File(dumpFolder, time + " " + table + " " + params.getTitle().replace(":", ".") + ".csv").getAbsolutePath().replace("\\", "\\\\") + "' FIELDS TERMINATED BY ',' OPTIONALLY ENCLOSED BY '\"' LINES TERMINATED BY '\n'");
} catch (final SQLException ex) {
sender.sendMessage(ChatColor.RED + "Error while dumping log. Make sure your MySQL user has access to the LogBlock folder, or disable clearlog.dumpDeletedLog");
getLogger().log(Level.SEVERE, "[ClearLog] Exception while dumping log: ", ex);
logblock.getLogger().log(Level.SEVERE, "[ClearLog] Exception while dumping log: ", ex);
return;
}
}
@ -835,18 +833,18 @@ public class CommandsHandler implements CommandExecutor {
}
} catch (final Exception ex) {
sender.sendMessage(ChatColor.RED + "Exception, check error log");
getLogger().log(Level.SEVERE, "[ClearLog] Exception: ", ex);
logblock.getLogger().log(Level.SEVERE, "[ClearLog] Exception: ", ex);
} finally {
close();
}
}
}
private static ResultSet executeQuery(Statement state, String query) throws SQLException {
private ResultSet executeQuery(Statement state, String query) throws SQLException {
if (Config.debug) {
long startTime = System.currentTimeMillis();
ResultSet rs = state.executeQuery(query);
getLogger().log(Level.INFO, "[LogBlock Debug] Time Taken: " + (System.currentTimeMillis() - startTime) + " milliseconds. Query: " + query);
logblock.getLogger().log(Level.INFO, "[LogBlock Debug] Time Taken: " + (System.currentTimeMillis() - startTime) + " milliseconds. Query: " + query);
return rs;
} else {
return state.executeQuery(query);
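
Editorial note: executeQuery above loses its static modifier (as does createTable in the Updater diff further down) as a knock-on effect of this change; a static method has no instance through which to read the logblock field. A minimal sketch of the constraint, with illustrative names:

    // Illustrative sketch only; not part of this commit.
    import java.util.logging.Logger;

    class Example {
        private final Logger logger = Logger.getLogger("Example");

        void instanceMethod() {
            logger.info("ok");     // compiles: logger is read via the instance
        }

        // static void staticMethod() {
        //     logger.info("no");  // would not compile: a non-static field cannot
        // }                       // be referenced from a static context
    }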

View File

@ -33,7 +33,6 @@ import java.util.logging.Level;
import static de.diddiz.LogBlock.config.Config.*;
import static de.diddiz.util.Utils.mysqlTextEscape;
import static de.diddiz.util.BukkitUtils.*;
import static org.bukkit.Bukkit.getLogger;
public class Consumer extends TimerTask {
private final Queue<Row> queue = new LinkedBlockingQueue<Row>();
@ -418,7 +417,7 @@ public class Consumer extends TimerTask {
conn = logblock.getConnection();
if (Config.queueWarningSize > 0 && queue.size() >= Config.queueWarningSize) {
getLogger().info("[Consumer] Queue overloaded. Size: " + getQueueSize());
logblock.getLogger().info("[Consumer] Queue overloaded. Size: " + getQueueSize());
}
if (conn == null) {
@ -437,7 +436,7 @@ public class Consumer extends TimerTask {
if (!addPlayer(state, actor)) {
if (!failedPlayers.contains(actor)) {
failedPlayers.add(actor);
getLogger().warning("[Consumer] Failed to add player " + actor.getName());
logblock.getLogger().warning("[Consumer] Failed to add player " + actor.getName());
}
continue process;
}
@ -477,7 +476,7 @@ public class Consumer extends TimerTask {
try {
PSRow.executeStatements();
} catch (final SQLException ex) {
getLogger().log(Level.SEVERE, "[Consumer] SQL exception on insertion: ", ex);
logblock.getLogger().log(Level.SEVERE, "[Consumer] SQL exception on insertion: ", ex);
break;
}
} else {
@ -485,7 +484,7 @@ public class Consumer extends TimerTask {
try {
state.execute(insert);
} catch (final SQLException ex) {
getLogger().log(Level.SEVERE, "[Consumer] SQL exception on " + insert + ": ", ex);
logblock.getLogger().log(Level.SEVERE, "[Consumer] SQL exception on " + insert + ": ", ex);
break process;
}
}
@ -495,7 +494,7 @@ public class Consumer extends TimerTask {
}
conn.commit();
} catch (final SQLException ex) {
getLogger().log(Level.SEVERE, "[Consumer] SQL exception", ex);
logblock.getLogger().log(Level.SEVERE, "[Consumer] SQL exception", ex);
} finally {
try {
if (state != null) {
@ -505,15 +504,15 @@ public class Consumer extends TimerTask {
conn.close();
}
} catch (final SQLException ex) {
getLogger().log(Level.SEVERE, "[Consumer] SQL exception on close", ex);
logblock.getLogger().log(Level.SEVERE, "[Consumer] SQL exception on close", ex);
}
lock.unlock();
if (debug) {
long timeElapsed = System.currentTimeMillis() - startTime;
float rowPerTime = count / timeElapsed;
getLogger().log(Level.INFO, "[Consumer] Finished consumer cycle in " + timeElapsed + " milliseconds.");
getLogger().log(Level.INFO, "[Consumer] Total rows processed: " + count + ". row/time: " + String.format("%.4f", rowPerTime));
logblock.getLogger().log(Level.INFO, "[Consumer] Finished consumer cycle in " + timeElapsed + " milliseconds.");
logblock.getLogger().log(Level.INFO, "[Consumer] Total rows processed: " + count + ". row/time: " + String.format("%.4f", rowPerTime));
}
}
}
@ -741,10 +740,10 @@ public class Consumer extends TimerTask {
}
} catch (final SQLException ex) {
if (ps1 != null) {
getLogger().log(Level.SEVERE, "[Consumer] Troublesome query: " + ps1.toString());
logblock.getLogger().log(Level.SEVERE, "[Consumer] Troublesome query: " + ps1.toString());
}
if (ps != null) {
getLogger().log(Level.SEVERE, "[Consumer] Troublesome query: " + ps.toString());
logblock.getLogger().log(Level.SEVERE, "[Consumer] Troublesome query: " + ps.toString());
}
throw ex;
} finally {
@ -831,7 +830,7 @@ public class Consumer extends TimerTask {
ps.executeBatch();
} catch (final SQLException ex) {
if (ps != null) {
getLogger().log(Level.SEVERE, "[Consumer] Troublesome query: " + ps.toString());
logblock.getLogger().log(Level.SEVERE, "[Consumer] Troublesome query: " + ps.toString());
}
throw ex;
} finally {

View File

@ -9,7 +9,6 @@ import java.sql.Statement;
import java.util.logging.Level;
import static de.diddiz.util.Utils.newline;
import static org.bukkit.Bukkit.getLogger;
public class DumpedLogImporter implements Runnable {
private final LogBlock logblock;
@ -22,7 +21,7 @@ public class DumpedLogImporter implements Runnable {
public void run() {
final File[] imports = new File("plugins/LogBlock/import/").listFiles(new ExtensionFilenameFilter("sql"));
if (imports != null && imports.length > 0) {
getLogger().info("Found " + imports.length + " imports.");
logblock.getLogger().info("Found " + imports.length + " imports.");
Connection conn = null;
try {
conn = logblock.getConnection();
@ -34,7 +33,7 @@ public class DumpedLogImporter implements Runnable {
final BufferedWriter writer = new BufferedWriter(new FileWriter(new File(logblock.getDataFolder(), "import/failed.txt")));
int successes = 0, errors = 0;
for (final File sqlFile : imports) {
getLogger().info("Trying to import " + sqlFile.getName() + " ...");
logblock.getLogger().info("Trying to import " + sqlFile.getName() + " ...");
final BufferedReader reader = new BufferedReader(new FileReader(sqlFile));
String line;
while ((line = reader.readLine()) != null) {
@ -42,7 +41,7 @@ public class DumpedLogImporter implements Runnable {
st.execute(line);
successes++;
} catch (final Exception ex) {
getLogger().warning("Error while importing: '" + line + "': " + ex.getMessage());
logblock.getLogger().warning("Error while importing: '" + line + "': " + ex.getMessage());
writer.write(line + newline);
errors++;
}
@ -50,13 +49,13 @@ public class DumpedLogImporter implements Runnable {
conn.commit();
reader.close();
sqlFile.delete();
getLogger().info("Successfully imported " + sqlFile.getName() + ".");
logblock.getLogger().info("Successfully imported " + sqlFile.getName() + ".");
}
writer.close();
st.close();
getLogger().info("Successfully imported stored queue. (" + successes + " rows imported, " + errors + " errors)");
logblock.getLogger().info("Successfully imported stored queue. (" + successes + " rows imported, " + errors + " errors)");
} catch (final Exception ex) {
getLogger().log(Level.WARNING, "Error while importing: ", ex);
logblock.getLogger().log(Level.WARNING, "Error while importing: ", ex);
} finally {
if (conn != null) {
try {

View File

@ -29,8 +29,6 @@ import static de.diddiz.util.BukkitUtils.friendlyWorldname;
import de.diddiz.util.ComparableVersion;
import java.util.regex.Pattern;
import static org.bukkit.Bukkit.getLogger;
class Updater {
private final LogBlock logblock;
final int UUID_CONVERT_BATCH_SIZE = 100;
@ -52,7 +50,7 @@ class Updater {
return false;
}
if (configVersion.compareTo(new ComparableVersion("1.2.7")) < 0) {
getLogger().info("Updating tables to 1.2.7 ...");
logblock.getLogger().info("Updating tables to 1.2.7 ...");
if (isLogging(Logging.CHAT)) {
final Connection conn = logblock.getConnection();
try {
@ -62,14 +60,14 @@ class Updater {
st.close();
conn.close();
} catch (final SQLException ex) {
Bukkit.getLogger().log(Level.SEVERE, "[Updater] Error: ", ex);
logblock.getLogger().log(Level.SEVERE, "[Updater] Error: ", ex);
return false;
}
}
config.set("version", "1.2.7");
}
if (configVersion.compareTo(new ComparableVersion("1.3")) < 0) {
getLogger().info("Updating config to 1.3.0 ...");
logblock.getLogger().info("Updating config to 1.3.0 ...");
for (final String tool : config.getConfigurationSection("tools").getKeys(false)) {
if (config.get("tools." + tool + ".permissionDefault") == null) {
config.set("tools." + tool + ".permissionDefault", "OP");
@ -78,7 +76,7 @@ class Updater {
config.set("version", "1.3.0");
}
if (configVersion.compareTo(new ComparableVersion("1.3.1")) < 0) {
getLogger().info("Updating tables to 1.3.1 ...");
logblock.getLogger().info("Updating tables to 1.3.1 ...");
final Connection conn = logblock.getConnection();
try {
conn.setAutoCommit(true);
@ -87,13 +85,13 @@ class Updater {
st.close();
conn.close();
} catch (final SQLException ex) {
Bukkit.getLogger().log(Level.SEVERE, "[Updater] Error: ", ex);
logblock.getLogger().log(Level.SEVERE, "[Updater] Error: ", ex);
return false;
}
config.set("version", "1.3.1");
}
if (configVersion.compareTo(new ComparableVersion("1.3.2")) < 0) {
getLogger().info("Updating tables to 1.3.2 ...");
logblock.getLogger().info("Updating tables to 1.3.2 ...");
final Connection conn = logblock.getConnection();
try {
conn.setAutoCommit(true);
@ -102,18 +100,18 @@ class Updater {
st.close();
conn.close();
} catch (final SQLException ex) {
Bukkit.getLogger().log(Level.SEVERE, "[Updater] Error: ", ex);
logblock.getLogger().log(Level.SEVERE, "[Updater] Error: ", ex);
return false;
}
config.set("version", "1.3.2");
}
if (configVersion.compareTo(new ComparableVersion("1.4")) < 0) {
getLogger().info("Updating config to 1.4.0 ...");
logblock.getLogger().info("Updating config to 1.4.0 ...");
config.set("clearlog.keepLogDays", null);
config.set("version", "1.4.0");
}
if (configVersion.compareTo(new ComparableVersion("1.4.2")) < 0) {
getLogger().info("Updating config to 1.4.2 ...");
logblock.getLogger().info("Updating config to 1.4.2 ...");
for (final String world : config.getStringList("loggedWorlds")) {
final File file = new File(logblock.getDataFolder(), friendlyWorldname(world) + ".yml");
final YamlConfiguration wcfg = YamlConfiguration.loadConfiguration(file);
@ -192,14 +190,14 @@ class Updater {
try {
wcfg.save(file);
} catch (final IOException ex) {
Bukkit.getLogger().log(Level.SEVERE, "[Updater] Error: ", ex);
logblock.getLogger().log(Level.SEVERE, "[Updater] Error: ", ex);
}
}
config.set("clearlog.keepLogDays", null);
config.set("version", "1.4.2");
}
if (configVersion.compareTo(new ComparableVersion("1.5.1")) < 0) {
getLogger().info("Updating tables to 1.5.1 ...");
logblock.getLogger().info("Updating tables to 1.5.1 ...");
final Connection conn = logblock.getConnection();
try {
conn.setAutoCommit(true);
@ -212,13 +210,13 @@ class Updater {
st.close();
conn.close();
} catch (final SQLException ex) {
Bukkit.getLogger().log(Level.SEVERE, "[Updater] Error: ", ex);
logblock.getLogger().log(Level.SEVERE, "[Updater] Error: ", ex);
return false;
}
config.set("version", "1.5.1");
}
if (configVersion.compareTo(new ComparableVersion("1.5.2")) < 0) {
getLogger().info("Updating tables to 1.5.2 ...");
logblock.getLogger().info("Updating tables to 1.5.2 ...");
final Connection conn = logblock.getConnection();
try {
conn.setAutoCommit(true);
@ -230,18 +228,18 @@ class Updater {
st.execute("ALTER TABLE `lb-players` DROP onlinetime");
st.execute("ALTER TABLE `lb-players` CHANGE onlinetime2 onlinetime INT UNSIGNED NOT NULL");
} else {
getLogger().info("Column lb-players was already modified, skipping it.");
logblock.getLogger().info("Column lb-players was already modified, skipping it.");
}
st.close();
conn.close();
} catch (final SQLException ex) {
Bukkit.getLogger().log(Level.SEVERE, "[Updater] Error: ", ex);
logblock.getLogger().log(Level.SEVERE, "[Updater] Error: ", ex);
return false;
}
config.set("version", "1.5.2");
}
if (configVersion.compareTo(new ComparableVersion("1.8.1")) < 0) {
getLogger().info("Updating tables to 1.8.1 ...");
logblock.getLogger().info("Updating tables to 1.8.1 ...");
final Connection conn = logblock.getConnection();
try {
conn.setAutoCommit(true);
@ -249,21 +247,21 @@ class Updater {
for (final WorldConfig wcfg : getLoggedWorlds()) {
if (wcfg.isLogging(Logging.CHESTACCESS)) {
st.execute("ALTER TABLE `" + wcfg.table + "-chest` CHANGE itemdata itemdata SMALLINT NOT NULL");
getLogger().info("Table " + wcfg.table + "-chest modified");
logblock.getLogger().info("Table " + wcfg.table + "-chest modified");
}
}
st.close();
conn.close();
} catch (final SQLException ex) {
Bukkit.getLogger().log(Level.SEVERE, "[Updater] Error: ", ex);
logblock.getLogger().log(Level.SEVERE, "[Updater] Error: ", ex);
return false;
}
config.set("version", "1.8.1");
}
if (configVersion.compareTo(new ComparableVersion("1.9")) < 0) {
getLogger().info("Updating tables to 1.9.0 ...");
getLogger().info("Importing UUIDs for large databases may take some time");
logblock.getLogger().info("Updating tables to 1.9.0 ...");
logblock.getLogger().info("Importing UUIDs for large databases may take some time");
final Connection conn = logblock.getConnection();
try {
conn.setAutoCommit(true);
@ -272,7 +270,7 @@ class Updater {
} catch (final SQLException ex) {
// Error 1060 is MySQL error "column already exists". We want to continue with import if we get that error
if (ex.getErrorCode() != 1060) {
Bukkit.getLogger().log(Level.SEVERE, "[Updater] Error: ", ex);
logblock.getLogger().log(Level.SEVERE, "[Updater] Error: ", ex);
return false;
}
}
@ -292,7 +290,7 @@ class Updater {
rs = st.executeQuery("SELECT COUNT(playername) FROM `lb-players` WHERE LENGTH(UUID)=0");
rs.next();
String total = Integer.toString(rs.getInt(1));
getLogger().info(total + " players to convert");
logblock.getLogger().info(total + " players to convert");
int done = 0;
conn.setAutoCommit(false);
@ -311,7 +309,7 @@ class Updater {
for (Map.Entry<String, Integer> entry : players.entrySet()) {
if (response.get(entry.getKey()) == null) {
theUUID = unimportedPrefix + entry.getKey();
getLogger().warning(entry.getKey() + " not found - giving UUID of " + theUUID);
logblock.getLogger().warning(entry.getKey() + " not found - giving UUID of " + theUUID);
} else {
theUUID = response.get(entry.getKey()).toString();
}
@ -322,7 +320,7 @@ class Updater {
conn.commit();
players.clear();
names.clear();
getLogger().info("Processed " + Integer.toString(done) + " out of " + total);
logblock.getLogger().info("Processed " + Integer.toString(done) + " out of " + total);
rs.close();
rs = st.executeQuery("SELECT playerid,playername FROM `lb-players` WHERE LENGTH(UUID)=0 LIMIT " + Integer.toString(UUID_CONVERT_BATCH_SIZE));
}
@ -332,16 +330,16 @@ class Updater {
conn.close();
} catch (final SQLException ex) {
Bukkit.getLogger().log(Level.SEVERE, "[Updater] Error: ", ex);
logblock.getLogger().log(Level.SEVERE, "[Updater] Error: ", ex);
return false;
} catch (Exception ex) {
Bukkit.getLogger().log(Level.SEVERE, "[UUID importer]", ex);
logblock.getLogger().log(Level.SEVERE, "[UUID importer]", ex);
return false;
}
config.set("version", "1.9.0");
}
if (configVersion.compareTo(new ComparableVersion("1.9.4")) < 0) {
getLogger().info("Updating tables to 1.9.4 ...");
logblock.getLogger().info("Updating tables to 1.9.4 ...");
final Connection conn = logblock.getConnection();
try {
conn.setAutoCommit(true);
@ -351,7 +349,7 @@ class Updater {
st.execute("DROP INDEX UUID ON `lb-players`");
} catch (final SQLException ex) {
if (ex.getErrorCode() != 1091) {
Bukkit.getLogger().log(Level.SEVERE, "[Updater] Error: ", ex);
logblock.getLogger().log(Level.SEVERE, "[Updater] Error: ", ex);
return false;
}
}
@ -359,7 +357,7 @@ class Updater {
st.execute("DROP INDEX playername ON `lb-players`");
} catch (final SQLException ex) {
if (ex.getErrorCode() != 1091) {
Bukkit.getLogger().log(Level.SEVERE, "[Updater] Error: ", ex);
logblock.getLogger().log(Level.SEVERE, "[Updater] Error: ", ex);
return false;
}
}
@ -368,7 +366,7 @@ class Updater {
st.close();
conn.close();
} catch (final SQLException ex) {
Bukkit.getLogger().log(Level.SEVERE, "[Updater] Error: ", ex);
logblock.getLogger().log(Level.SEVERE, "[Updater] Error: ", ex);
return false;
}
config.set("version", "1.9.4");
@ -376,7 +374,7 @@ class Updater {
// Ensure charset for free-text fields is UTF-8, or UTF8-mb4 if possible
// As this may be an expensive operation and the database default may already be this, check on a table-by-table basis before converting
if (configVersion.compareTo(new ComparableVersion("1.10.0")) < 0) {
getLogger().info("Updating tables to 1.10.0 ...");
logblock.getLogger().info("Updating tables to 1.10.0 ...");
final Connection conn = logblock.getConnection();
try {
conn.setAutoCommit(true);
@ -393,14 +391,14 @@ class Updater {
st.close();
conn.close();
} catch (final SQLException ex) {
Bukkit.getLogger().log(Level.SEVERE, "[Updater] Error: ", ex);
logblock.getLogger().log(Level.SEVERE, "[Updater] Error: ", ex);
return false;
}
config.set("version", "1.10.0");
}
if (configVersion.compareTo(new ComparableVersion("1.12.0")) < 0) {
getLogger().info("Updating tables to 1.12.0 ...");
logblock.getLogger().info("Updating tables to 1.12.0 ...");
if (isLogging(Logging.CHAT)) {
final Connection conn = logblock.getConnection();
try {
@ -410,23 +408,23 @@ class Updater {
st.close();
conn.close();
} catch (final SQLException ex) {
Bukkit.getLogger().log(Level.SEVERE, "[Updater] Error: ", ex);
logblock.getLogger().log(Level.SEVERE, "[Updater] Error: ", ex);
return false;
}
}
config.set("version", "1.12.0");
}
if (configVersion.compareTo(new ComparableVersion("1.13.0")) < 0) {
getLogger().info("Updating tables to 1.13.0 ...");
logblock.getLogger().info("Updating tables to 1.13.0 ...");
try {
MaterialUpdater1_13 materialUpdater = new MaterialUpdater1_13(logblock);
getLogger().info("Convertig BlockId to BlockData. This can take a while ...");
logblock.getLogger().info("Convertig BlockId to BlockData. This can take a while ...");
final Connection conn = logblock.getConnection();
conn.setAutoCommit(false);
final Statement st = conn.createStatement();
for (final WorldConfig wcfg : getLoggedWorlds()) {
getLogger().info("Processing world " + wcfg.world + "...");
getLogger().info("Processing block changes...");
logblock.getLogger().info("Processing world " + wcfg.world + "...");
logblock.getLogger().info("Processing block changes...");
boolean hadRow = true;
int rowsToConvert = 0;
int done = 0;
@ -434,7 +432,7 @@ class Updater {
ResultSet rs = st.executeQuery("SELECT count(*) as rowcount FROM `" + wcfg.table + "`");
if (rs.next()) {
rowsToConvert = rs.getInt(1);
getLogger().info("Converting " + rowsToConvert + " entries in " + wcfg.table);
logblock.getLogger().info("Converting " + rowsToConvert + " entries in " + wcfg.table);
}
rs.close();
@ -481,7 +479,7 @@ class Updater {
insertStatement.setInt(10, z);
insertStatement.addBatch();
} catch (Exception e) {
getLogger().info("Exception in entry " + id + " (" + replaced + ":" + data + "->" + type + ":" + data + "): " + e.getMessage());
logblock.getLogger().info("Exception in entry " + id + " (" + replaced + ":" + data + "->" + type + ":" + data + "): " + e.getMessage());
}
deleteStatement.setInt(1, id);
deleteStatement.addBatch();
@ -495,22 +493,22 @@ class Updater {
}
conn.commit();
logblock.getConsumer().run(); // force a consumer run to save new material mappings
getLogger().info("Done: " + done + "/" + rowsToConvert + " (" + (rowsToConvert > 0 ? (done * 100 / rowsToConvert) : 100) + "%)");
logblock.getLogger().info("Done: " + done + "/" + rowsToConvert + " (" + (rowsToConvert > 0 ? (done * 100 / rowsToConvert) : 100) + "%)");
}
insertStatement.close();
deleteStatement.close();
} catch (SQLException e) {
getLogger().info("Could not convert " + wcfg.table + ": " + e.getMessage());
logblock.getLogger().info("Could not convert " + wcfg.table + ": " + e.getMessage());
}
getLogger().info("Processing chests...");
logblock.getLogger().info("Processing chests...");
rowsToConvert = 0;
done = 0;
try {
ResultSet rs = st.executeQuery("SELECT count(*) as rowcount FROM `" + wcfg.table + "-chest`");
if (rs.next()) {
rowsToConvert = rs.getInt(1);
getLogger().info("Converting " + rowsToConvert + " entries in " + wcfg.table + "-chest");
logblock.getLogger().info("Converting " + rowsToConvert + " entries in " + wcfg.table + "-chest");
}
rs.close();
@ -547,23 +545,23 @@ class Updater {
insertChestData.executeBatch();
deleteChest.executeBatch();
conn.commit();
getLogger().info("Done: " + done + "/" + rowsToConvert + " (" + (rowsToConvert > 0 ? (done * 100 / rowsToConvert) : 100) + "%)");
logblock.getLogger().info("Done: " + done + "/" + rowsToConvert + " (" + (rowsToConvert > 0 ? (done * 100 / rowsToConvert) : 100) + "%)");
}
insertChestData.close();
deleteChest.close();
} catch (SQLException e) {
getLogger().info("Could not convert " + wcfg.table + "-chest: " + e.getMessage());
logblock.getLogger().info("Could not convert " + wcfg.table + "-chest: " + e.getMessage());
}
if (wcfg.isLogging(Logging.KILL)) {
getLogger().info("Processing kills...");
logblock.getLogger().info("Processing kills...");
rowsToConvert = 0;
done = 0;
try {
ResultSet rs = st.executeQuery("SELECT count(*) as rowcount FROM `" + wcfg.table + "-kills`");
if (rs.next()) {
rowsToConvert = rs.getInt(1);
getLogger().info("Converting " + rowsToConvert + " entries in " + wcfg.table + "-kills");
logblock.getLogger().info("Converting " + rowsToConvert + " entries in " + wcfg.table + "-kills");
}
rs.close();
@ -595,21 +593,21 @@ class Updater {
conn.commit();
logblock.getConsumer().run(); // force a consumer run to save new material mappings
}
getLogger().info("Done: " + done + "/" + rowsToConvert + " (" + (rowsToConvert > 0 ? (done * 100 / rowsToConvert) : 100) + "%)");
logblock.getLogger().info("Done: " + done + "/" + rowsToConvert + " (" + (rowsToConvert > 0 ? (done * 100 / rowsToConvert) : 100) + "%)");
if (!anyRow) {
break;
}
}
updateWeaponStatement.close();
} catch (SQLException e) {
getLogger().info("Could not convert " + wcfg.table + "-kills: " + e.getMessage());
logblock.getLogger().info("Could not convert " + wcfg.table + "-kills: " + e.getMessage());
}
}
}
st.close();
conn.close();
getLogger().info("Updating config to 1.13.0 ...");
logblock.getLogger().info("Updating config to 1.13.0 ...");
config.set("logging.hiddenBlocks", materialUpdater.convertMaterials(config.getStringList("logging.hiddenBlocks")));
config.set("rollback.dontRollback", materialUpdater.convertMaterials(config.getStringList("rollback.dontRollback")));
config.set("rollback.replaceAnyway", materialUpdater.convertMaterials(config.getStringList("rollback.replaceAnyway")));
@ -619,20 +617,20 @@ class Updater {
tSec.set("item", materialUpdater.convertMaterial(tSec.getString("item", "OAK_LOG")));
}
} catch (final SQLException | IOException ex) {
Bukkit.getLogger().log(Level.SEVERE, "[Updater] Error: ", ex);
logblock.getLogger().log(Level.SEVERE, "[Updater] Error: ", ex);
return false;
}
config.set("version", "1.13.0");
}
if (configVersion.compareTo(new ComparableVersion("1.13.1")) < 0) {
getLogger().info("Updating tables to 1.13.1 ...");
logblock.getLogger().info("Updating tables to 1.13.1 ...");
try {
final Connection conn = logblock.getConnection();
conn.setAutoCommit(false);
final Statement st = conn.createStatement();
for (final WorldConfig wcfg : getLoggedWorlds()) {
getLogger().info("Processing world " + wcfg.world + "...");
logblock.getLogger().info("Processing world " + wcfg.world + "...");
ResultSet rsCol = st.executeQuery("SHOW COLUMNS FROM `" + wcfg.table + "-chestdata` LIKE 'itemtype'");
if (!rsCol.next()) {
st.execute("ALTER TABLE `" + wcfg.table + "-chestdata` ADD COLUMN `itemtype` SMALLINT NOT NULL DEFAULT '0'");
@ -646,7 +644,7 @@ class Updater {
ResultSet rs = st.executeQuery("SELECT count(*) as rowcount FROM `" + wcfg.table + "-sign`");
if (rs.next()) {
rowsToConvert = rs.getInt(1);
getLogger().info("Converting " + rowsToConvert + " entries in " + wcfg.table + "-sign");
logblock.getLogger().info("Converting " + rowsToConvert + " entries in " + wcfg.table + "-sign");
}
rs.close();
@ -689,12 +687,12 @@ class Updater {
insertSignState.executeBatch();
deleteSign.executeBatch();
conn.commit();
getLogger().info("Done: " + done + "/" + rowsToConvert + " (" + (rowsToConvert > 0 ? (done * 100 / rowsToConvert) : 100) + "%)");
logblock.getLogger().info("Done: " + done + "/" + rowsToConvert + " (" + (rowsToConvert > 0 ? (done * 100 / rowsToConvert) : 100) + "%)");
}
insertSignState.close();
deleteSign.close();
} catch (SQLException e) {
getLogger().info("Could not convert " + wcfg.table + "-sign: " + e.getMessage());
logblock.getLogger().info("Could not convert " + wcfg.table + "-sign: " + e.getMessage());
}
}
}
@ -702,7 +700,7 @@ class Updater {
st.close();
conn.close();
} catch (final SQLException ex) {
Bukkit.getLogger().log(Level.SEVERE, "[Updater] Error: ", ex);
logblock.getLogger().log(Level.SEVERE, "[Updater] Error: ", ex);
return false;
}
@ -722,7 +720,7 @@ class Updater {
st.close();
conn.close();
} catch (final SQLException ex) {
Bukkit.getLogger().log(Level.SEVERE, "[Updater] Error: ", ex);
logblock.getLogger().log(Level.SEVERE, "[Updater] Error: ", ex);
return false;
}
@ -738,9 +736,9 @@ class Updater {
}
if (rs.next() && !rs.getString("Collation").substring(0, charset.length()).equalsIgnoreCase(charset)) {
st.execute("ALTER TABLE `" + table + "` CONVERT TO CHARSET " + charset);
getLogger().info("Table " + table + " modified");
logblock.getLogger().info("Table " + table + " modified");
} else if (!silent) {
getLogger().info("Table " + table + " already fine, skipping it");
logblock.getLogger().info("Table " + table + " already fine, skipping it");
}
}
@ -781,9 +779,9 @@ class Updater {
conn.close();
}
private static void createTable(DatabaseMetaData dbm, Statement state, String table, String query) throws SQLException {
private void createTable(DatabaseMetaData dbm, Statement state, String table, String query) throws SQLException {
if (!dbm.getTables(null, null, table, null).next()) {
getLogger().log(Level.INFO, "Creating table " + table + ".");
logblock.getLogger().log(Level.INFO, "Creating table " + table + ".");
state.execute("CREATE TABLE `" + table + "` " + query);
if (!dbm.getTables(null, null, table, null).next()) {
throw new SQLException("Table " + table + " not found and failed to create");

View File

@ -34,7 +34,6 @@ import java.util.logging.Level;
import static de.diddiz.LogBlock.config.Config.dontRollback;
import static de.diddiz.LogBlock.config.Config.replaceAnyway;
import static de.diddiz.util.BukkitUtils.*;
import static org.bukkit.Bukkit.getLogger;
public class WorldEditor implements Runnable {
private final LogBlock logblock;
@ -118,7 +117,7 @@ public class WorldEditor implements Runnable {
} catch (final WorldEditorException ex) {
errorList.add(ex);
} catch (final Exception ex) {
getLogger().log(Level.WARNING, "[WorldEditor] Exeption: ", ex);
logblock.getLogger().log(Level.WARNING, "[WorldEditor] Exeption: ", ex);
}
counter++;
if (sender != null) {