Version: 3.2.1 Update
This commit is contained in:
47
src/main/java/hae/utils/project/FileProcessor.java
Normal file
47
src/main/java/hae/utils/project/FileProcessor.java
Normal file
@@ -0,0 +1,47 @@
|
||||
package hae.utils.project;
|
||||
|
||||
import java.io.File;
|
||||
import java.nio.file.DirectoryStream;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.Comparator;
|
||||
|
||||
public class FileProcessor {

    /**
     * Recursively deletes {@code pathToBeDeleted} and everything inside it.
     * Best-effort: any I/O failure is deliberately swallowed so callers can
     * treat cleanup as optional. A {@code null} path is a no-op.
     *
     * @param pathToBeDeleted root of the directory tree to delete; may be null
     */
    public static void deleteDirectoryWithContents(Path pathToBeDeleted) {
        if (pathToBeDeleted != null) {
            // Files.walk returns a lazily-populated Stream backed by open
            // directory handles; the original never closed it, leaking the
            // handles. try-with-resources fixes that.
            try (var walk = Files.walk(pathToBeDeleted)) {
                walk.sorted(Comparator.reverseOrder()) // deepest first: children before parents
                        .map(Path::toFile)
                        .forEach(File::delete);
            } catch (Exception ignored) {
                // Deliberately best-effort; deletion failures are ignored.
            }
        }
    }

    /**
     * Reads the whole content of {@code basePath/fileName}, then removes the
     * file and — if that left {@code basePath} empty — the directory as well
     * (consume-once semantics for temporarily extracted files).
     *
     * @param basePath directory containing the file
     * @param fileName name of the file to read and then delete
     * @return the file's bytes, or an empty array on any failure
     */
    public static byte[] readFileContent(String basePath, String fileName) {
        Path filePath = Paths.get(basePath, fileName);
        Path baseDir = Paths.get(basePath);
        try {
            byte[] fileContent = Files.readAllBytes(filePath);

            // Consume-once: the file is removed after a successful read.
            Files.deleteIfExists(filePath);

            // Clean up the containing directory once it holds nothing else.
            if (isDirectoryEmpty(baseDir)) {
                Files.deleteIfExists(baseDir);
            }

            return fileContent;
        } catch (Exception e) {
            // Missing file, unreadable directory, etc. all collapse to "no content".
            return new byte[0];
        }
    }

    /**
     * Returns {@code true} if {@code directory} contains no entries at all.
     *
     * @param directory directory to probe
     * @return true when the directory is empty
     * @throws Exception if the directory cannot be opened
     */
    private static boolean isDirectoryEmpty(Path directory) throws Exception {
        try (DirectoryStream<Path> dirStream = Files.newDirectoryStream(directory)) {
            return !dirStream.iterator().hasNext();
        }
    }
}
|
||||
@@ -2,14 +2,21 @@ package hae.utils.project;
|
||||
|
||||
import burp.api.montoya.MontoyaApi;
|
||||
import hae.utils.project.model.HaeFileContent;
|
||||
import org.yaml.snakeyaml.LoaderOptions;
|
||||
import org.yaml.snakeyaml.Yaml;
|
||||
|
||||
import java.io.*;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.StandardCopyOption;
|
||||
import java.util.*;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import java.util.concurrent.ExecutorService;
|
||||
import java.util.concurrent.Executors;
|
||||
import java.util.concurrent.Future;
|
||||
import java.util.zip.ZipEntry;
|
||||
import java.util.zip.ZipInputStream;
|
||||
import java.util.zip.ZipFile;
|
||||
import java.util.zip.ZipOutputStream;
|
||||
|
||||
public class ProjectProcessor {
|
||||
@@ -19,59 +26,169 @@ public class ProjectProcessor {
|
||||
this.api = api;
|
||||
}
|
||||
|
||||
public boolean createHaeFile(String haeFilePath, String host, Map<String, List<String>> dataMap, Map<String, Map<String, String>> httpMap) {
|
||||
public boolean createHaeFile(String haeFilePath, String host, Map<String, List<String>> dataMap, Map<String, Map<String, Object>> urlMap, Map<String, Map<String, Object>> httpMap) {
|
||||
ExecutorService executorService = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
|
||||
List<Future<?>> futures = new ArrayList<>();
|
||||
|
||||
ByteArrayOutputStream dataYamlStream = new ByteArrayOutputStream();
|
||||
ByteArrayOutputStream httpYamlStream = new ByteArrayOutputStream();
|
||||
ByteArrayOutputStream urlYamlStream = new ByteArrayOutputStream();
|
||||
Yaml yaml = new Yaml();
|
||||
|
||||
yaml.dump(dataMap, new OutputStreamWriter(dataYamlStream, StandardCharsets.UTF_8));
|
||||
yaml.dump(httpMap, new OutputStreamWriter(httpYamlStream, StandardCharsets.UTF_8));
|
||||
yaml.dump(urlMap, new OutputStreamWriter(urlYamlStream, StandardCharsets.UTF_8));
|
||||
|
||||
try (ZipOutputStream zipOut = new ZipOutputStream(new FileOutputStream(haeFilePath))) {
|
||||
zipOut.putNextEntry(new ZipEntry("info"));
|
||||
zipOut.write(host.getBytes(StandardCharsets.UTF_8));
|
||||
zipOut.closeEntry();
|
||||
|
||||
zipOut.putNextEntry(new ZipEntry("data.yml"));
|
||||
zipOut.putNextEntry(new ZipEntry("data"));
|
||||
zipOut.write(dataYamlStream.toByteArray());
|
||||
zipOut.closeEntry();
|
||||
|
||||
zipOut.putNextEntry(new ZipEntry("http.yml"));
|
||||
zipOut.write(httpYamlStream.toByteArray());
|
||||
zipOut.putNextEntry(new ZipEntry("url"));
|
||||
zipOut.write(urlYamlStream.toByteArray());
|
||||
zipOut.closeEntry();
|
||||
|
||||
for (String httpHash : httpMap.keySet()) {
|
||||
Map<String, Object> httpItem = httpMap.get(httpHash);
|
||||
futures.add(executorService.submit(() -> {
|
||||
try {
|
||||
ByteArrayOutputStream httpOutStream = new ByteArrayOutputStream();
|
||||
byte[] request = (byte[]) httpItem.get("request");
|
||||
byte[] response = (byte[]) httpItem.get("response");
|
||||
|
||||
httpOutStream.write(response);
|
||||
httpOutStream.write(request);
|
||||
|
||||
synchronized (zipOut) {
|
||||
zipOut.putNextEntry(new ZipEntry(String.format("http/%s", httpHash)));
|
||||
zipOut.write(httpOutStream.toByteArray());
|
||||
zipOut.closeEntry();
|
||||
}
|
||||
} catch (Exception e) {
|
||||
api.logging().logToError("createHaeFile: " + e.getMessage());
|
||||
}
|
||||
}));
|
||||
}
|
||||
|
||||
for (Future<?> future : futures) {
|
||||
try {
|
||||
future.get();
|
||||
} catch (InterruptedException | ExecutionException e) {
|
||||
Thread.currentThread().interrupt();
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
api.logging().logToOutput(e.getMessage());
|
||||
api.logging().logToError("createHaeFile: " + e.getMessage());
|
||||
return false;
|
||||
} finally {
|
||||
executorService.shutdown();
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
public HaeFileContent readHaeFile(String haeFilePath) {
|
||||
HaeFileContent haeFileContent = new HaeFileContent(api);
|
||||
Yaml yaml = new Yaml();
|
||||
ExecutorService executorService = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
|
||||
List<Future<?>> futures = new ArrayList<>();
|
||||
|
||||
try (ZipInputStream zipIn = new ZipInputStream(new FileInputStream(haeFilePath))) {
|
||||
ZipEntry entry;
|
||||
while ((entry = zipIn.getNextEntry()) != null) {
|
||||
switch (entry.getName()) {
|
||||
case "info":
|
||||
haeFileContent.setHost(new String(zipIn.readAllBytes(), StandardCharsets.UTF_8));
|
||||
break;
|
||||
case "data.yml":
|
||||
haeFileContent.setDataMap(yaml.load(new InputStreamReader(zipIn, StandardCharsets.UTF_8)));
|
||||
break;
|
||||
case "http.yml":
|
||||
haeFileContent.setHttpMap(yaml.load(new InputStreamReader(zipIn, StandardCharsets.UTF_8)));
|
||||
break;
|
||||
HaeFileContent haeFileContent = new HaeFileContent(api); // 假设api是正确的
|
||||
LoaderOptions loaderOptions = new LoaderOptions();
|
||||
loaderOptions.setMaxAliasesForCollections(Integer.MAX_VALUE);
|
||||
loaderOptions.setCodePointLimit(Integer.MAX_VALUE);
|
||||
Yaml yaml = new Yaml(loaderOptions);
|
||||
Path tempDirectory = null;
|
||||
|
||||
try {
|
||||
if (hasValidStructure(haeFilePath)) {
|
||||
tempDirectory = Files.createTempDirectory("hae");
|
||||
haeFileContent.setHttpPath(tempDirectory.toString());
|
||||
|
||||
try (ZipFile zipFile = new ZipFile(haeFilePath)) {
|
||||
Enumeration<? extends ZipEntry> entries = zipFile.entries();
|
||||
while (entries.hasMoreElements()) {
|
||||
ZipEntry entry = entries.nextElement();
|
||||
String fileName = entry.getName();
|
||||
if (fileName.startsWith("http/")) {
|
||||
Path filePath = tempDirectory.resolve(fileName.substring("http/".length()));
|
||||
futures.add(executorService.submit(() -> {
|
||||
try (InputStream in = zipFile.getInputStream(entry)) {
|
||||
Files.copy(in, filePath, StandardCopyOption.REPLACE_EXISTING);
|
||||
} catch (IOException e) {
|
||||
api.logging().logToError("readHaeFile: " + e.getMessage());
|
||||
}
|
||||
}));
|
||||
} else {
|
||||
try (InputStream in = zipFile.getInputStream(entry)) {
|
||||
switch (fileName) {
|
||||
case "info" ->
|
||||
haeFileContent.setHost(new String(in.readAllBytes(), StandardCharsets.UTF_8));
|
||||
case "data" ->
|
||||
haeFileContent.setDataMap(yaml.load(new InputStreamReader(in, StandardCharsets.UTF_8)));
|
||||
case "url" ->
|
||||
haeFileContent.setUrlMap(yaml.load(new InputStreamReader(in, StandardCharsets.UTF_8)));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (Future<?> future : futures) {
|
||||
try {
|
||||
future.get();
|
||||
} catch (InterruptedException | ExecutionException e) {
|
||||
Thread.currentThread().interrupt();
|
||||
}
|
||||
}
|
||||
}
|
||||
zipIn.closeEntry();
|
||||
}
|
||||
} catch (Exception e) {
|
||||
api.logging().logToOutput(e.getMessage());
|
||||
return null;
|
||||
api.logging().logToError("readHaeFile: " + e.getMessage());
|
||||
if (tempDirectory != null) {
|
||||
FileProcessor.deleteDirectoryWithContents(tempDirectory);
|
||||
}
|
||||
haeFileContent = null;
|
||||
} finally {
|
||||
executorService.shutdown();
|
||||
}
|
||||
|
||||
return haeFileContent;
|
||||
}
|
||||
|
||||
private boolean hasValidStructure(String zipFilePath) {
|
||||
Set<String> requiredRootEntries = new HashSet<>();
|
||||
requiredRootEntries.add("info");
|
||||
requiredRootEntries.add("data");
|
||||
requiredRootEntries.add("url");
|
||||
|
||||
boolean hasHttpDirectoryWithFiles = false;
|
||||
|
||||
try {
|
||||
ZipFile zipFile = new ZipFile(zipFilePath);
|
||||
Enumeration<? extends ZipEntry> entries = zipFile.entries();
|
||||
|
||||
while (entries.hasMoreElements()) {
|
||||
ZipEntry entry = entries.nextElement();
|
||||
String name = entry.getName();
|
||||
|
||||
if (!entry.isDirectory() && !name.contains("/")) {
|
||||
requiredRootEntries.remove(name);
|
||||
}
|
||||
|
||||
if (name.startsWith("http/") && !entry.isDirectory()) {
|
||||
hasHttpDirectoryWithFiles = true;
|
||||
}
|
||||
|
||||
if (requiredRootEntries.isEmpty() && hasHttpDirectoryWithFiles) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
zipFile.close();
|
||||
} catch (Exception ignored) {
|
||||
}
|
||||
|
||||
return requiredRootEntries.isEmpty() && hasHttpDirectoryWithFiles;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -11,13 +11,14 @@ import java.util.Map;
|
||||
public class HaeFileContent {
|
||||
private final MontoyaApi api;
|
||||
private String host;
|
||||
private String httpPath;
|
||||
private final Map<String, List<String>> dataMap;
|
||||
private final Map<String, Map<String, String>> httpMap;
|
||||
private final Map<String, Map<String, String>> urlMap;
|
||||
|
||||
public HaeFileContent(MontoyaApi api) {
|
||||
this.api = api;
|
||||
this.dataMap = new HashMap<>();
|
||||
this.httpMap = new HashMap<>();
|
||||
this.urlMap = new HashMap<>();
|
||||
}
|
||||
|
||||
public String getHost() {
|
||||
@@ -28,14 +29,22 @@ public class HaeFileContent {
|
||||
return dataMap;
|
||||
}
|
||||
|
||||
public Map<String, Map<String, String>> getHttpMap() {
|
||||
return httpMap;
|
||||
public Map<String, Map<String, String>> getUrlMap() {
|
||||
return urlMap;
|
||||
}
|
||||
|
||||
public String getHttpPath() {
|
||||
return httpPath;
|
||||
}
|
||||
|
||||
public void setHost(String host) {
|
||||
this.host = host;
|
||||
}
|
||||
|
||||
public void setHttpPath(String path) {
|
||||
this.httpPath = path;
|
||||
}
|
||||
|
||||
public void setDataMap(Map<String, List<Object>> dataMap) {
|
||||
for (Map.Entry<String, List<Object>> entry : dataMap.entrySet()) {
|
||||
List<String> values = new ArrayList<>();
|
||||
@@ -50,8 +59,8 @@ public class HaeFileContent {
|
||||
}
|
||||
}
|
||||
|
||||
public void setHttpMap(Map<String, Map<String, Object>> httpMap) {
|
||||
for (Map.Entry<String, Map<String, Object>> entry : httpMap.entrySet()) {
|
||||
public void setUrlMap(Map<String, Map<String, Object>> urlMap) {
|
||||
for (Map.Entry<String, Map<String, Object>> entry : urlMap.entrySet()) {
|
||||
Map<String, String> newValues = new HashMap<>();
|
||||
Map<String, Object> values = entry.getValue();
|
||||
for (String key : values.keySet()) {
|
||||
@@ -61,7 +70,7 @@ public class HaeFileContent {
|
||||
newValues.put(key, values.get(key).toString());
|
||||
}
|
||||
}
|
||||
this.httpMap.put(entry.getKey(), newValues);
|
||||
this.urlMap.put(entry.getKey(), newValues);
|
||||
}
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user