diff --git a/.gitignore b/.gitignore
index ca03307..892281c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -9,4 +9,6 @@ out/
 .project/
 /build/
 /.gradle/
-libs/montoya-api-2025.3.6.jar
\ No newline at end of file
+libs/montoya-api-2025.3.6.jar
+#other
+.DS_Store
diff --git a/src/main/java/burp/shadow/repeater/ShadowRepeaterExtension.java b/src/main/java/burp/shadow/repeater/ShadowRepeaterExtension.java
index 2e89b5b..a14b1fe 100644
--- a/src/main/java/burp/shadow/repeater/ShadowRepeaterExtension.java
+++ b/src/main/java/burp/shadow/repeater/ShadowRepeaterExtension.java
@@ -13,7 +13,9 @@
 import burp.api.montoya.ui.settings.SettingsPanelSetting;
 import burp.api.montoya.ui.settings.SettingsPanelWithData;
 import burp.shadow.repeater.ai.AI;
+import burp.shadow.repeater.ai.AIProviderType;
 import burp.shadow.repeater.ai.VariationAnalyser;
+import burp.shadow.repeater.ai.executor.OpenAIExecutor;
 import burp.shadow.repeater.utils.Utils;
 import burp.api.montoya.BurpExtension;
 import burp.api.montoya.EnhancedCapability;
@@ -91,6 +93,10 @@ public void initialize(MontoyaApi montoyaApi) {
                 Reduce vectors - Should Shadow Repeater reduce the vectors?
                 Maximum variation amount - Maximum number of variations to create
                 Excluded headers - Comma separated list of headers to to exclude from analysis
+                AI provider - The AI provider to use
+                OpenAI API key - The API key to use for OpenAI
+                OpenAI endpoint - The endpoint to use for OpenAI
+                OpenAI model - The model to use for OpenAI
                 """)
                 .withKeywords("Repeater", "Shadow")
                 .withSettings(
@@ -100,7 +106,11 @@ public void initialize(MontoyaApi montoyaApi) {
                     SettingsPanelSetting.booleanSetting("Debug AI", false),
                     SettingsPanelSetting.booleanSetting("Reduce vectors", false),
                     SettingsPanelSetting.integerSetting("Maximum variation amount", 10),
-                    SettingsPanelSetting.stringSetting("Excluded headers", "Authorization,Cookie,Content-Length,Connection")
+                    SettingsPanelSetting.stringSetting("Excluded headers", "Authorization,Cookie,Content-Length,Connection"),
+                    SettingsPanelSetting.listSetting("AI provider", java.util.Arrays.stream(AIProviderType.values()).map(AIProviderType::value).collect(java.util.stream.Collectors.toList()), AIProviderType.BurpAI.value()),
+                    SettingsPanelSetting.stringSetting("OpenAI API key"),
+                    SettingsPanelSetting.stringSetting("OpenAI endpoint", OpenAIExecutor.DEFAULT_ENDPOINT),
+                    SettingsPanelSetting.stringSetting("OpenAI model", OpenAIExecutor.DEFAULT_MODEL)
                 )
                 .build();
         api.userInterface().registerSettingsPanel(settings);
diff --git a/src/main/java/burp/shadow/repeater/ai/AI.java b/src/main/java/burp/shadow/repeater/ai/AI.java
index 4e5a536..fb48ecd 100644
--- a/src/main/java/burp/shadow/repeater/ai/AI.java
+++ b/src/main/java/burp/shadow/repeater/ai/AI.java
@@ -1,7 +1,5 @@
 package burp.shadow.repeater.ai;
 
-import burp.api.montoya.ai.chat.Message;
-import burp.api.montoya.ai.chat.PromptOptions;
 import burp.api.montoya.ai.chat.PromptResponse;
 
 import java.security.MessageDigest;
@@ -64,7 +62,7 @@ public String execute() {
             api.logging().logToOutput("System Prompt:" + this.systemMessage + "\n\n");
             api.logging().logToOutput("Prompt:" + this.prompt + "\n\n");
         }
-        PromptResponse response = api.ai().prompt().execute(PromptOptions.promptOptions().withTemperature(this.temperature), Message.systemMessage(this.systemMessage), Message.userMessage(this.prompt));
+        PromptResponse response = AIProvider.acquire().execute(this.temperature, this.systemMessage, this.prompt);
         if(debugAi) {
             api.logging().logToOutput("AI Response:" + response.content() + "\n\n");
         }
diff --git a/src/main/java/burp/shadow/repeater/ai/AIExecutor.java b/src/main/java/burp/shadow/repeater/ai/AIExecutor.java
new file mode 100644
index 0000000..92ae583
--- /dev/null
+++ b/src/main/java/burp/shadow/repeater/ai/AIExecutor.java
@@ -0,0 +1,7 @@
+package burp.shadow.repeater.ai;
+
+import burp.api.montoya.ai.chat.PromptResponse;
+
+public interface AIExecutor {
+    public PromptResponse execute(double temperature, String systemMessage, String userMessage);
+}
diff --git a/src/main/java/burp/shadow/repeater/ai/AIProvider.java b/src/main/java/burp/shadow/repeater/ai/AIProvider.java
new file mode 100644
index 0000000..207313e
--- /dev/null
+++ b/src/main/java/burp/shadow/repeater/ai/AIProvider.java
@@ -0,0 +1,22 @@
+package burp.shadow.repeater.ai;
+
+import burp.shadow.repeater.ai.executor.BurpAIExecutor;
+import burp.shadow.repeater.ai.executor.OpenAIExecutor;
+
+import static burp.shadow.repeater.ShadowRepeaterExtension.settings;
+
+public class AIProvider {
+    public static AIExecutor acquire() {
+        AIProviderType aiProvider;
+        try {
+            aiProvider = AIProviderType.valueOf(settings.getString("AI provider"));
+        } catch (Exception e) {
+            aiProvider = AIProviderType.BurpAI;
+        }
+
+        return switch (aiProvider) {
+            case AIProviderType.OpenAI -> new OpenAIExecutor();
+            default -> new BurpAIExecutor(); // Return Burp AI by default
+        };
+    };
+}
diff --git a/src/main/java/burp/shadow/repeater/ai/AIProviderType.java b/src/main/java/burp/shadow/repeater/ai/AIProviderType.java
new file mode 100644
index 0000000..4ebf3b4
--- /dev/null
+++ b/src/main/java/burp/shadow/repeater/ai/AIProviderType.java
@@ -0,0 +1,15 @@
+package burp.shadow.repeater.ai;
+
+public enum AIProviderType {
+    BurpAI("BurpAI"), OpenAI("OpenAI");
+
+    private final String value;
+
+    AIProviderType(String value) {
+        this.value = value;
+    }
+
+    public String value() {
+        return value;
+    }
+}
diff --git a/src/main/java/burp/shadow/repeater/ai/executor/BurpAIExecutor.java b/src/main/java/burp/shadow/repeater/ai/executor/BurpAIExecutor.java
new file mode 100644
index 0000000..9d420ed
--- /dev/null
+++ b/src/main/java/burp/shadow/repeater/ai/executor/BurpAIExecutor.java
@@ -0,0 +1,15 @@
+package burp.shadow.repeater.ai.executor;
+
+import burp.api.montoya.ai.chat.Message;
+import burp.api.montoya.ai.chat.PromptOptions;
+import burp.api.montoya.ai.chat.PromptResponse;
+import burp.shadow.repeater.ai.AIExecutor;
+
+import static burp.shadow.repeater.ShadowRepeaterExtension.api;
+
+public class BurpAIExecutor implements AIExecutor {
+    @Override
+    public PromptResponse execute(double temperature, String systemMessage, String userMessage) {
+        return api.ai().prompt().execute(PromptOptions.promptOptions().withTemperature(temperature), Message.systemMessage(systemMessage), Message.userMessage(userMessage));
+    }
+}
diff --git a/src/main/java/burp/shadow/repeater/ai/executor/OpenAIExecutor.java b/src/main/java/burp/shadow/repeater/ai/executor/OpenAIExecutor.java
new file mode 100644
index 0000000..d35e4d2
--- /dev/null
+++ b/src/main/java/burp/shadow/repeater/ai/executor/OpenAIExecutor.java
@@ -0,0 +1,98 @@
+package burp.shadow.repeater.ai.executor;
+
+import burp.api.montoya.ai.chat.PromptResponse;
+import burp.shadow.repeater.ai.AIExecutor;
+
+import org.json.JSONArray;
+import org.json.JSONObject;
+
+import static burp.shadow.repeater.ShadowRepeaterExtension.api;
+import static burp.shadow.repeater.ShadowRepeaterExtension.settings;
+
+class OpenAIResponse implements PromptResponse {
+    private String content;
+
+    OpenAIResponse(String content) {
+        this.content = content;
+    }
+
+    public String content() {
+        return content;
+    }
+}
+
+class Payload extends JSONObject {
+    Payload(double temperature, String systemMessage, String userMessage, String model) {
+        put("model", (model != null && model.length() > 0) ? model : OpenAIExecutor.DEFAULT_MODEL);
+        put("temperature", temperature);
+
+        JSONArray messages = new JSONArray();
+
+        JSONObject message = new JSONObject();
+        message.put("role", "user");
+        message.put("content", userMessage);
+        messages.put(message);
+
+        message = new JSONObject();
+        message.put("role", "system");
+        message.put("content", systemMessage);
+        messages.put(message);
+
+        put("messages", messages);
+    }
+}
+
+public class OpenAIExecutor implements AIExecutor {
+    public static final String DEFAULT_ENDPOINT = "https://api.openai.com/v1/chat/completions";
+    public static final String DEFAULT_MODEL = "o4-mini";
+
+    @Override
+    public PromptResponse execute(double temperature, String systemMessage, String userMessage) {
+        try {
+            String apiKey = settings.getString("OpenAI API key");
+            String endpoint = settings.getString("OpenAI endpoint");
+            String model = settings.getString("OpenAI model");
+
+            Payload payload = new Payload(temperature, systemMessage, userMessage, model);
+
+            java.net.http.HttpClient client = java.net.http.HttpClient.newHttpClient();
+            java.net.http.HttpRequest request;
+            java.net.http.HttpResponse<String> response;
+
+            request = java.net.http.HttpRequest.newBuilder()
+                    .uri(java.net.URI.create(endpoint != null && endpoint.length() > 0 ? endpoint : DEFAULT_ENDPOINT))
+                    .header("Content-Type", "application/json")
+                    .header("Authorization", "Bearer " + apiKey)
+                    .POST(java.net.http.HttpRequest.BodyPublishers.ofString(payload.toString()))
+                    .build();
+
+            response = client.send(request, java.net.http.HttpResponse.BodyHandlers.ofString());
+
+            int status = response.statusCode();
+            String body = response.body();
+
+            if (status != 200) {
+                throw new Exception("Received response: " + status + " body: " + body);
+            }
+
+            // Parse the assistant content
+            JSONObject jsonResponse = new JSONObject(body);
+            JSONArray choices = jsonResponse.getJSONArray("choices");
+            String result = null;
+            for (int i = 0; i < choices.length(); i++) {
+                JSONObject choice = choices.getJSONObject(i);
+                JSONObject messageObj = choice.getJSONObject("message");
+                if ("assistant".equals(messageObj.optString("role"))) {
+                    result = messageObj.optString("content");
+                    break;
+                }
+            }
+
+            return new OpenAIResponse(result);
+        } catch (Exception e) {
+            api.logging().logToOutput("An error occurred while processing the request. See `Errors` for details");
+            api.logging().logToError(e.getMessage());
+            return new OpenAIResponse("");
+        }
+    }
+}
diff --git a/src/main/java/burp/shadow/repeater/utils/Utils.java b/src/main/java/burp/shadow/repeater/utils/Utils.java
index 29897be..fa38cb8 100644
--- a/src/main/java/burp/shadow/repeater/utils/Utils.java
+++ b/src/main/java/burp/shadow/repeater/utils/Utils.java
@@ -18,7 +18,6 @@
 import java.util.stream.Collectors;
 
 import static burp.shadow.repeater.ShadowRepeaterExtension.*;
-import static burp.shadow.repeater.ShadowRepeaterExtension.responseHistory;
 
 public class Utils {
     public static HttpRequest modifyRequest(HttpRequest req, String type, String name, String value) {
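Reviewer note (not part of the change set): with this patch, AI.execute() obtains an AIExecutor from AIProvider.acquire() based on the "AI provider" setting, and the OpenAI path serialises the prompt into a Chat Completions style body via the package-private Payload class before POSTing it with java.net.http. The sketch below is a minimal, standalone illustration of that request body; the PayloadPreview class name, the temperature value and the prompt strings are invented for the example, and it only assumes org.json on the classpath, which the executor already requires.

package burp.shadow.repeater.ai.executor;

// Illustrative sketch only: prints the JSON body that Payload builds,
// i.e. what OpenAIExecutor would POST to the configured endpoint.
public class PayloadPreview {
    public static void main(String[] args) {
        // Values below are made up for the example; a null or empty model
        // falls back to OpenAIExecutor.DEFAULT_MODEL inside Payload.
        Payload payload = new Payload(
                0.7,
                "You generate HTTP request variations.",
                "Suggest variations of the 'q' parameter.",
                null
        );
        System.out.println(payload.toString(2)); // pretty-print with 2-space indent
    }
}

The printed object contains the model, temperature and a messages array, the same Chat Completions shape whose response the executor later parses by scanning choices[] for the assistant message.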