Shifted HistoryManager and LoggingService to Guava's DirectExecutor

Rohit Awate 2018-04-27 19:26:18 +05:30
parent 1c0bc2b7b9
commit b70bcd2353
5 changed files with 178 additions and 148 deletions
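For context: Guava's MoreExecutors.directExecutor() returns an Executor that runs each submitted task synchronously, on the thread that calls execute(). This commit uses it to replace both the ad-hoc "History Saver Thread" in HistoryManager and the single-thread ExecutorService in LoggingService. A minimal sketch of the difference, not part of this commit; the demo class name is made up for illustration:

import com.google.common.util.concurrent.MoreExecutors;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

// Illustrative only: contrasts the asynchronous single-thread pool being
// removed with the direct executor being adopted.
public class ExecutorComparisonDemo {
    public static void main(String[] args) {
        // Old approach: the task runs later, on a worker thread such as "pool-1-thread-1".
        ExecutorService pooled = Executors.newSingleThreadExecutor();
        pooled.execute(() ->
                System.out.println("pooled executor thread: " + Thread.currentThread().getName()));
        pooled.shutdown();

        // New approach: directExecutor() runs the task on the calling thread
        // (e.g. "main") before execute() returns.
        Executor direct = MoreExecutors.directExecutor();
        direct.execute(() ->
                System.out.println("direct executor thread: " + Thread.currentThread().getName()));
    }
}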


@@ -30,5 +30,11 @@
<orderEntry type="library" name="Maven: com.fasterxml.jackson.core:jackson-databind:2.9.3" level="project" />
<orderEntry type="library" name="Maven: com.fasterxml.jackson.core:jackson-annotations:2.9.0" level="project" />
<orderEntry type="library" name="Maven: org.xerial:sqlite-jdbc:3.21.0.1" level="project" />
<orderEntry type="library" name="Maven: com.google.guava:guava:24.1-jre" level="project" />
<orderEntry type="library" name="Maven: com.google.code.findbugs:jsr305:1.3.9" level="project" />
<orderEntry type="library" name="Maven: org.checkerframework:checker-compat-qual:2.0.0" level="project" />
<orderEntry type="library" name="Maven: com.google.errorprone:error_prone_annotations:2.1.3" level="project" />
<orderEntry type="library" name="Maven: com.google.j2objc:j2objc-annotations:1.1" level="project" />
<orderEntry type="library" name="Maven: org.codehaus.mojo:animal-sniffer-annotations:1.14" level="project" />
</component>
</module>


@@ -102,5 +102,11 @@
<artifactId>sqlite-jdbc</artifactId>
<version>3.21.0.1</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.google.guava/guava -->
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>24.1-jre</version>
</dependency>
</dependencies>
</project>


@@ -16,21 +16,26 @@
package com.rohitawate.everest.util;
import com.google.common.util.concurrent.MoreExecutors;
import com.rohitawate.everest.controllers.HomeWindowController;
import com.rohitawate.everest.util.history.HistoryManager;
import com.rohitawate.everest.util.logging.Level;
import com.rohitawate.everest.util.logging.LoggingService;
import java.util.concurrent.Executor;
public class Services {
public static Thread startServicesThread;
public static HistoryManager historyManager;
public static LoggingService loggingService;
public static HomeWindowController homeWindowController;
public static Executor singleExecutor;
public static void start() {
startServicesThread = new Thread(() -> {
loggingService = new LoggingService(Level.INFO);
historyManager = new HistoryManager();
singleExecutor = MoreExecutors.directExecutor();
});
startServicesThread.start();


@@ -25,6 +25,7 @@ import com.rohitawate.everest.util.settings.Settings;
import javax.ws.rs.core.MediaType;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.sql.*;
@@ -39,6 +40,7 @@ public class HistoryManager {
private Connection conn;
private JsonNode queries;
private PreparedStatement statement;
private HistorySaver historySaver = new HistorySaver();
public HistoryManager() {
try {
@@ -47,7 +49,22 @@ public class HistoryManager {
configFolder.mkdirs();
conn = DriverManager.getConnection("jdbc:sqlite:Everest/config/history.sqlite");
initDatabase();
} catch (Exception E) {
Services.loggingService.logSevere("Exception while initializing HistoryManager.", E, LocalDateTime.now());
} finally {
System.out.println("Connected to database.");
}
}
/**
* Creates and initializes the database with necessary tables if not already done.
*
* @throws IOException
* @throws SQLException
*/
private void initDatabase() throws IOException, SQLException {
// Read all queries from Queries.json
InputStream queriesFile = getClass().getResourceAsStream("/sql/Queries.json");
ObjectMapper mapper = new ObjectMapper();
@@ -72,11 +89,6 @@ public class HistoryManager {
statement =
conn.prepareStatement(EverestUtilities.trimString(queries.get("createTuplesTable").toString()));
statement.execute();
} catch (Exception E) {
Services.loggingService.logSevere("Exception while initializing HistoryManager.", E, LocalDateTime.now());
} finally {
System.out.println("Connected to database.");
}
}
// Method is made synchronized to allow only one database transaction at a time.
@@ -90,118 +102,8 @@ public class HistoryManager {
if (isDuplicate(state))
return;
new Thread(() -> {
try {
statement =
conn.prepareStatement(EverestUtilities.trimString(queries.get("saveRequest").toString()));
statement.setString(1, state.getHttpMethod());
statement.setString(2, String.valueOf(state.getTarget()));
statement.setString(3, LocalDate.now().toString());
statement.executeUpdate();
// Get latest RequestID to insert into Headers table
statement = conn.prepareStatement("SELECT MAX(ID) AS MaxID FROM Requests");
ResultSet RS = statement.executeQuery();
int requestID = -1;
if (RS.next())
requestID = RS.getInt("MaxID");
if (state.getHeaders().size() > 0) {
// Saves request headers
statement = conn.prepareStatement(EverestUtilities.trimString(queries.get("saveHeader").toString()));
for (Entry entry : state.getHeaders().entrySet()) {
statement.setInt(1, requestID);
statement.setString(2, entry.getKey().toString());
statement.setString(3, entry.getValue().toString());
statement.executeUpdate();
}
}
if (state.getParams().size() > 0) {
// Saves request parameters
statement = conn.prepareStatement(EverestUtilities.trimString(queries.get("saveTuple").toString()));
for (Entry entry : state.getParams().entrySet()) {
statement.setInt(1, requestID);
statement.setString(2, "Param");
statement.setString(3, entry.getKey().toString());
statement.setString(4, entry.getValue().toString());
statement.executeUpdate();
}
}
if (!(state.getHttpMethod().equals("GET") || state.getHttpMethod().equals("DELETE"))) {
// Maps the request to its ContentType for faster recovery
statement = conn.prepareStatement(EverestUtilities.trimString(queries.get("saveRequestContentPair").toString()));
statement.setInt(1, requestID);
statement.setString(2, state.getContentType());
statement.executeUpdate();
// Determines where to fetch the body from, based on the ContentType
switch (state.getContentType()) {
case MediaType.TEXT_PLAIN:
case MediaType.APPLICATION_JSON:
case MediaType.APPLICATION_XML:
case MediaType.TEXT_HTML:
case MediaType.APPLICATION_OCTET_STREAM:
// Saves the body in case of raw content, or the file location in case of binary
statement = conn.prepareStatement(EverestUtilities.trimString(queries.get("saveBody").toString()));
statement.setInt(1, requestID);
statement.setString(2, state.getBody());
statement.executeUpdate();
break;
case MediaType.APPLICATION_FORM_URLENCODED:
if (state.getStringTuples().size() > 0) {
for (Entry<String, String> entry : state.getStringTuples().entrySet()) {
// Saves the string tuples
statement = conn.prepareStatement(EverestUtilities.trimString(queries.get("saveTuple").toString()));
statement.setInt(1, requestID);
statement.setString(2, "String");
statement.setString(3, entry.getKey());
statement.setString(4, entry.getValue());
statement.executeUpdate();
}
}
break;
case MediaType.MULTIPART_FORM_DATA:
if (state.getStringTuples().size() > 0) {
for (Entry<String, String> entry : state.getStringTuples().entrySet()) {
// Saves the string tuples
statement = conn.prepareStatement(EverestUtilities.trimString(queries.get("saveTuple").toString()));
statement.setInt(1, requestID);
statement.setString(2, "String");
statement.setString(3, entry.getKey());
statement.setString(4, entry.getValue());
statement.executeUpdate();
}
}
if (state.getFileTuples().size() > 0) {
for (Entry<String, String> entry : state.getFileTuples().entrySet()) {
// Saves the file tuples
statement = conn.prepareStatement(EverestUtilities.trimString(queries.get("saveTuple").toString()));
statement.setInt(1, requestID);
statement.setString(2, "File");
statement.setString(3, entry.getKey());
statement.setString(4, entry.getValue());
statement.executeUpdate();
}
}
break;
}
}
} catch (SQLException e) {
Services.loggingService.logWarning("Database error.", e, LocalDateTime.now());
}
}, "History Saver Thread").start();
historySaver.state = state;
Services.singleExecutor.execute(historySaver);
// Appends this history item to the HistoryTab
Services.homeWindowController.addHistoryItem(state);
@@ -431,4 +333,122 @@ public class HistoryManager {
}
return true;
}
private class HistorySaver implements Runnable {
private DashboardState state;
@Override
public void run() {
try {
statement =
conn.prepareStatement(EverestUtilities.trimString(queries.get("saveRequest").toString()));
statement.setString(1, state.getHttpMethod());
statement.setString(2, String.valueOf(state.getTarget()));
statement.setString(3, LocalDate.now().toString());
statement.executeUpdate();
// Get latest RequestID to insert into Headers table
statement = conn.prepareStatement("SELECT MAX(ID) AS MaxID FROM Requests");
ResultSet RS = statement.executeQuery();
int requestID = -1;
if (RS.next())
requestID = RS.getInt("MaxID");
if (state.getHeaders().size() > 0) {
// Saves request headers
statement = conn.prepareStatement(EverestUtilities.trimString(queries.get("saveHeader").toString()));
for (Entry entry : state.getHeaders().entrySet()) {
statement.setInt(1, requestID);
statement.setString(2, entry.getKey().toString());
statement.setString(3, entry.getValue().toString());
statement.executeUpdate();
}
}
if (state.getParams().size() > 0) {
// Saves request parameters
statement = conn.prepareStatement(EverestUtilities.trimString(queries.get("saveTuple").toString()));
for (Entry entry : state.getParams().entrySet()) {
statement.setInt(1, requestID);
statement.setString(2, "Param");
statement.setString(3, entry.getKey().toString());
statement.setString(4, entry.getValue().toString());
statement.executeUpdate();
}
}
if (!(state.getHttpMethod().equals("GET") || state.getHttpMethod().equals("DELETE"))) {
// Maps the request to its ContentType for faster recovery
statement = conn.prepareStatement(EverestUtilities.trimString(queries.get("saveRequestContentPair").toString()));
statement.setInt(1, requestID);
statement.setString(2, state.getContentType());
statement.executeUpdate();
// Determines where to fetch the body from, based on the ContentType
switch (state.getContentType()) {
case MediaType.TEXT_PLAIN:
case MediaType.APPLICATION_JSON:
case MediaType.APPLICATION_XML:
case MediaType.TEXT_HTML:
case MediaType.APPLICATION_OCTET_STREAM:
// Saves the body in case of raw content, or the file location in case of binary
statement = conn.prepareStatement(EverestUtilities.trimString(queries.get("saveBody").toString()));
statement.setInt(1, requestID);
statement.setString(2, state.getBody());
statement.executeUpdate();
break;
case MediaType.APPLICATION_FORM_URLENCODED:
if (state.getStringTuples().size() > 0) {
for (Entry<String, String> entry : state.getStringTuples().entrySet()) {
// Saves the string tuples
statement = conn.prepareStatement(EverestUtilities.trimString(queries.get("saveTuple").toString()));
statement.setInt(1, requestID);
statement.setString(2, "String");
statement.setString(3, entry.getKey());
statement.setString(4, entry.getValue());
statement.executeUpdate();
}
}
break;
case MediaType.MULTIPART_FORM_DATA:
if (state.getStringTuples().size() > 0) {
for (Entry<String, String> entry : state.getStringTuples().entrySet()) {
// Saves the string tuples
statement = conn.prepareStatement(EverestUtilities.trimString(queries.get("saveTuple").toString()));
statement.setInt(1, requestID);
statement.setString(2, "String");
statement.setString(3, entry.getKey());
statement.setString(4, entry.getValue());
statement.executeUpdate();
}
}
if (state.getFileTuples().size() > 0) {
for (Entry<String, String> entry : state.getFileTuples().entrySet()) {
// Saves the file tuples
statement = conn.prepareStatement(EverestUtilities.trimString(queries.get("saveTuple").toString()));
statement.setInt(1, requestID);
statement.setString(2, "File");
statement.setString(3, entry.getKey());
statement.setString(4, entry.getValue());
statement.executeUpdate();
}
}
break;
}
}
} catch (SQLException e) {
Services.loggingService.logWarning("Database error.", e, LocalDateTime.now());
}
}
}
}
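In short, the per-call "History Saver Thread" is replaced by a reusable HistorySaver Runnable dispatched through Services.singleExecutor; since that executor is now directExecutor(), the database write runs on the caller's thread and completes before execute() returns. A stripped-down sketch of the same dispatch pattern, with illustrative class and field names that are not the project's:

import com.google.common.util.concurrent.MoreExecutors;
import java.util.concurrent.Executor;

// Illustrative sketch: a long-lived Runnable whose input is set before each
// dispatch, submitted through a shared Executor. With directExecutor(), run()
// finishes before execute() returns.
public class SaverSketch {
    private static final Executor SINGLE_EXECUTOR = MoreExecutors.directExecutor();

    private final Saver saver = new Saver();

    public void save(String state) {
        saver.state = state;            // hand the work item to the Runnable
        SINGLE_EXECUTOR.execute(saver); // runs inline, synchronously
    }

    private static class Saver implements Runnable {
        private volatile String state;

        @Override
        public void run() {
            System.out.println("Saving: " + state);
        }
    }

    public static void main(String[] args) {
        new SaverSketch().save("GET https://example.com");
    }
}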


@@ -16,10 +16,10 @@
package com.rohitawate.everest.util.logging;
import com.rohitawate.everest.util.Services;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
public class LoggingService {
private Logger logger;
@@ -27,7 +27,6 @@ public class LoggingService {
private String message;
private Exception exception;
private LocalDateTime time;
private ExecutorService executorService;
private SevereLogger severeLogger = new SevereLogger();
private WarningLogger warningLogger = new WarningLogger();
@@ -36,22 +35,21 @@ public class LoggingService {
public LoggingService(Level writerLevel) {
logger = new Logger(writerLevel);
dateFormat = DateTimeFormatter.ofPattern("dd/MM/yyyy HH:mm:ss");
executorService = Executors.newSingleThreadExecutor();
}
public void logSevere(String message, Exception exception, LocalDateTime time) {
setValues(message, exception, time);
executorService.execute(severeLogger);
Services.singleExecutor.execute(severeLogger);
}
public void logWarning(String message, Exception exception, LocalDateTime time) {
setValues(message, exception, time);
executorService.execute(warningLogger);
Services.singleExecutor.execute(warningLogger);
}
public void logInfo(String message, LocalDateTime time) {
setValues(message, null, time);
executorService.execute(infoLogger);
Services.singleExecutor.execute(infoLogger);
}
private void setValues(String message, Exception exception, LocalDateTime time) {
@@ -60,11 +58,6 @@ public class LoggingService {
this.time = time;
}
@Override
protected void finalize() {
executorService.shutdown();
}
private class SevereLogger implements Runnable {
@Override
public void run() {