diff --git a/CHANGELOG.md b/CHANGELOG.md
index 71781f8b..628ecb9c 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,6 +7,17 @@ As of v0.2-alpha, this project is attempting to adhere to [Semantic Versioning](
While alpha, however, any version may include breaking changes that may not be specifically noted as such,
and breaking changes will not necessarily result in changes to the main version number.
+## [v1.6.15-alpha](https://github.com/Lexpedite/blawx/releases/tag/v1.6.15-alpha) 2023-05-23
+
+### Added
+* If you provide an API access key for an OpenAI account, Blawx will use ChatGPT (the `gpt-3.5-turbo` model) to summarize its explanations and display those summaries in the scenario editor.
+
+### Changed
+* A disclaimer has been added to the GCWeb-styled version of the scenario editor.
+
+### TODO
+* Update the documentation for the scenario editor.
+
## [v1.6.14-alpha](https://github.com/Lexpedite/blawx/releases/tag/v1.6.14-alpha) 2023-05-12
### Added
diff --git a/INSTALL.md b/INSTALL.md
index 698cf0a7..286f1fc1 100644
--- a/INSTALL.md
+++ b/INSTALL.md
@@ -21,6 +21,9 @@ cd blawx
./update.sh
```
+Note that the `./update.sh` script runs the Blawx server in the foreground of your terminal, for development purposes, so that you can see debug information.
+If you want to run the Docker container in the background instead, add the `-d` flag to the `docker run` command in that script.
+
This command will take several minutes to run the first time.
The Blawx server will now be available at [http://127.0.0.1:8000](http://127.0.0.1:8000),
@@ -36,6 +39,16 @@ of running the `./update.sh` script.
A demo account with username "demo" and password "blawx2022" is also created,
and should be deleted in the admin interface if you want to restrict access to your server.
+## Configure ChatGPT Integration
+
+If you wish to run Blawx with ChatGPT integration, which allows AI-generated summaries of explanations to be displayed
+to the user in the scenario editor, do not use the `./update.sh` command; instead, enter these two commands:
+
+```
+docker build -t blawx .
+docker run -it -p 8000:8000 -e OPENAI_API_KEY="your_key_goes_here" blawx
+```
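+
+These commands run the server in the foreground, as `./update.sh` does. If you would rather run the container in the
+background, one option (reusing the image name and port mapping from the commands above) is to swap `-it` for `-d`:
+
+```
+docker run -d -p 8000:8000 -e OPENAI_API_KEY="your_key_goes_here" blawx
+```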
+
## Updating Blawx
Blawx is under active development. Currently, updates are being sent to GitHub only, there is no published
diff --git a/blawx/fixtures/docs/components/scenario_editor.yaml b/blawx/fixtures/docs/components/scenario_editor.yaml
index 8c67600a..9e6c9cc5 100644
--- a/blawx/fixtures/docs/components/scenario_editor.yaml
+++ b/blawx/fixtures/docs/components/scenario_editor.yaml
@@ -110,6 +110,13 @@
attributes, all those contingent answers will be included, also. You can see which answers are contingent
by looking to see whether there are parts of the explanations that indicate the reason was assumed.
+ ### ChatGPT-Generated Summaries
+
+  If you followed the instructions for providing Blawx with your OpenAI API key, the Scenario Editor will attempt
+  to obtain an AI-generated summary of the details inside each explanation for an answer, display that summary
+  at the top of the explanation, and provide the standard tree-structured explanation in a collapsible area beneath
+  the summary. The summary will be prefaced with a warning that it was automatically generated and may not be accurate.
+
## View
The View tab of the scenario editor gives you the ability to customize the Facts tab by hiding various elements
diff --git a/blawx/fixtures/docs/features/answers.yaml b/blawx/fixtures/docs/features/answers.yaml
index f55ed61b..1bd2ea7f 100644
--- a/blawx/fixtures/docs/features/answers.yaml
+++ b/blawx/fixtures/docs/features/answers.yaml
@@ -108,6 +108,14 @@
It is possible that age is a factor that can exclude you, but cannot include you. So you
would not really be getting the answer to your question unless you ran both queries.
+ ## ChatGPT Summaries of Explanations
+
+  If you provide Blawx with an OpenAI API key when running the server (see `INSTALL.md` for details),
+  your tree-structured explanations in the scenario editor will be prefaced with an AI-generated
+  plain-language summary. The summary is prefaced with a warning that it should not be relied upon for
+  understanding how the reasoner reached the conclusion, and the actual tree-structured explanation on
+  which it is based is still made available.
+
diff --git a/blawx/requirements.txt b/blawx/requirements.txt
index a50aea31..a19c4beb 100644
--- a/blawx/requirements.txt
+++ b/blawx/requirements.txt
@@ -5,4 +5,5 @@ pyyaml
cobalt
clean-law >=0.0.4
django-guardian
-django-preferences
\ No newline at end of file
+django-preferences
+openai
\ No newline at end of file
diff --git a/blawx/settings.py b/blawx/settings.py
index 6110a888..86ce9785 100644
--- a/blawx/settings.py
+++ b/blawx/settings.py
@@ -13,7 +13,7 @@
from pathlib import Path
# For adding a version identifier
-BLAWX_VERSION = "v1.6.14-alpha"
+BLAWX_VERSION = "v1.6.15-alpha"
# Build paths inside the project like this: BASE_DIR / 'subdir'.
diff --git a/blawx/simplifier.py b/blawx/simplifier.py
new file mode 100644
index 00000000..a40447a0
--- /dev/null
+++ b/blawx/simplifier.py
@@ -0,0 +1,27 @@
+from django.http import Http404, HttpResponseNotFound, HttpResponseForbidden
+
+from rest_framework.decorators import api_view, permission_classes, authentication_classes
+from rest_framework.response import Response
+# from rest_framework.permissions import AllowAny
+from rest_framework.authentication import SessionAuthentication, BasicAuthentication
+from rest_framework.permissions import IsAuthenticated, DjangoObjectPermissions, IsAuthenticatedOrReadOnly, AllowAny
+
+import openai
+import os
+
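+# Instructions prepended to the raw tree-structured explanation before it is sent to the model.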
+prompt_preamble = """
+What follows is an automatically generated explanation. Restate it in plain language without restating mathematical calculations and
+without further justifying conclusions for which there is only an absence of evidence in support.
+
+
+"""
+
+@api_view(['POST'])
+@authentication_classes([SessionAuthentication])
+@permission_classes([IsAuthenticated])
+def simplify(request):
+    # The openai package (pre-1.0 API, which provides openai.ChatCompletion) reads
+    # OPENAI_API_KEY from the environment on import, so no further key setup is needed here.
+    if "OPENAI_API_KEY" in os.environ:
+        completion = openai.ChatCompletion.create(
+            model="gpt-3.5-turbo",
+            messages=[{"role": "user", "content": prompt_preamble + request.data['explanation']}],
+        )
+        return Response(completion.choices[0].message.content)
+    else:
+        # No key configured: return an empty summary so the scenario editor falls back to
+        # showing only the tree-structured explanation.
+        return Response("")
\ No newline at end of file
diff --git a/blawx/templates/blawx/scenario_editor.html b/blawx/templates/blawx/scenario_editor.html
index 00e7cd9c..d396155a 100644
--- a/blawx/templates/blawx/scenario_editor.html
+++ b/blawx/templates/blawx/scenario_editor.html
@@ -1510,14 +1510,25 @@
output_content += '';
answer_element.innerHTML = output_content;
+ // Now we summarize them
+ // NOTE: `let` (rather than `var`) gives each loop iteration its own bindings, so the
+ // asynchronous onload callback below updates the summary element for its own answer/model.
+ for (let a = 0; a < parsed_test_response['Answers'].length; a++) {
+ for (let e = 0; e < parsed_test_response['Answers'][a]['Models'].length; e++) {
+ let target_explanation = document.getElementById("answer_" + (a+1) + "_model_" + (e+1) + "_collapse");
+ let target_summary = document.getElementById("answer_" + (a+1) + "_model_" + (e+1) + "_collapse_simplified");
+ let detail_header_name = "answer_" + (a+1) + "_model_" + (e+1) + "_collapse_detail_header";
+ let detail_content_name = "answer_" + (a+1) + "_model_" + (e+1) + "_collapse_detail_content";
+ let text_explanation = get_text_of_explanation(target_explanation);
+ let simplify_request = new XMLHttpRequest();
+ // Warning banner shown above the AI-generated summary (markup reconstructed; exact classes may differ).
+ let warning_text = '<div class="alert alert-warning" role="alert">';
+ warning_text += '<p>The following summarization was automatically generated, and may not be accurate. ';
+ warning_text += 'The details below show the actual reasoning.</p>';
+ warning_text += '</div>';
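+ // When the response arrives, replace the "Getting AI Summary..." placeholder with the warning
+ // banner and the summary. An empty response means no API key was configured, so the detailed
+ // explanation is promoted into the summary slot and the collapsible details are removed.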
+ simplify_request.onload = function () {
+ if (this.responseText != '""') {
+ target_summary.innerHTML = warning_text + "Summary: " + this.responseText;
+ } else {
+ // There was no answer, so there is no summary. The contents of the details should be moved to the explanation part,
+ // and the details parts removed.
+
+ var detail_header_target = document.getElementById(detail_header_name);
+ var detail_content_target = document.getElementById(detail_content_name);
+
+ // Get the details content innerHTML
+ var explanation = detail_content_target.innerHTML;
+ // Set it to the value of the target.
+ target_summary.innerHTML = explanation;
+ // remove the header and the content elements.
+ detail_header_target.remove();
+ detail_content_target.remove();
+ }
+ }
+ simplify_request.open("POST", "{% url 'simplify' %}");
+ simplify_request.setRequestHeader("Content-Type", "application/json");
+ target_summary.innerHTML = "Getting AI Summary...";
+ console.log("Sending simplify request");
+ simplify_request.setRequestHeader('X-CSRFToken', csrftoken);
+ simplify_request.send(JSON.stringify({"explanation": text_explanation}));
+ }
+ }
+
$('#nav-answers-tab').tab('show');
draw_facts(); // So that new relevance information will be displayed in the interface.
} else {
@@ -1588,6 +1641,19 @@ Response
testrun_request.setRequestHeader('X-CSRFToken', csrftoken);
testrun_request.send(JSON.stringify(new_fact_data));
}
+
+ // Recursively collect the plain text of the explanation tree, one leaf per line.
+ function get_text_of_explanation(element) {
+ var output = "";
+ if (element.hasChildNodes()) {
+ for (var c = 0; c < element.childNodes.length; c++) {
+ output += get_text_of_explanation(element.childNodes[c]);
+ }
+ } else {
+ // Leaf text nodes carry their text in `data`; childless element nodes contribute nothing.
+ output += "\n" + (element.data || "");
+ }
+ return output;
+ }
+
var view_form_element = document.getElementById('viewform');
function toggle_view_hidden(input) {
index = hidden_by_view.indexOf(input);
diff --git a/blawx/templates/blawx/scenario_editor_gcweb.html b/blawx/templates/blawx/scenario_editor_gcweb.html
index ede4a2f1..12c79ea5 100644
--- a/blawx/templates/blawx/scenario_editor_gcweb.html
+++ b/blawx/templates/blawx/scenario_editor_gcweb.html
@@ -19,6 +19,11 @@
';
@@ -1572,6 +1586,47 @@ Response
output_content += '';
answer_element.innerHTML = output_content;
+ // Now we summarize them
+ // NOTE: `let` (rather than `var`) gives each loop iteration its own bindings, so the
+ // asynchronous onload callback below updates the summary element for its own answer/model.
+ for (let a = 0; a < parsed_test_response['Answers'].length; a++) {
+ for (let e = 0; e < parsed_test_response['Answers'][a]['Models'].length; e++) {
+ let target_explanation = document.getElementById("answer_" + (a+1) + "_model_" + (e+1) + "_collapse");
+ let target_summary = document.getElementById("answer_" + (a+1) + "_model_" + (e+1) + "_collapse_simplified");
+ let detail_header_name = "answer_" + (a+1) + "_model_" + (e+1) + "_collapse_detail_header";
+ let detail_content_name = "answer_" + (a+1) + "_model_" + (e+1) + "_collapse_detail_content";
+ let text_explanation = get_text_of_explanation(target_explanation);
+ let simplify_request = new XMLHttpRequest();
+ // Warning banner shown above the AI-generated summary (markup reconstructed; exact classes may differ).
+ let warning_text = '<div class="alert alert-warning" role="alert">';
+ warning_text += '<p>The following summarization was automatically generated, and may not be accurate. ';
+ warning_text += 'The details below show the actual reasoning.</p>';
+ warning_text += '</div>';
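+ // When the response arrives, replace the "Getting AI Summary..." placeholder with the warning
+ // banner and the summary. An empty response means no API key was configured, so the detailed
+ // explanation is promoted into the summary slot and the collapsible details are removed.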
+ simplify_request.onload = function () {
+ if (this.responseText != '""') {
+ target_summary.innerHTML = warning_text + "Summary: " + this.responseText;
+ } else {
+ // There was no answer, so there is no summary. The contents of the details should be moved to the explanation part,
+ // and the details parts removed.
+
+ var detail_header_target = document.getElementById(detail_header_name);
+ var detail_content_target = document.getElementById(detail_content_name);
+
+ // Get the details content innerHTML
+ var explanation = detail_content_target.innerHTML;
+ // Set it to the value of the target.
+ target_summary.innerHTML = explanation;
+ // remove the header and the content elements.
+ detail_header_target.remove();
+ detail_content_target.remove();
+ }
+ }
+ simplify_request.open("POST", "{% url 'simplify' %}");
+ simplify_request.setRequestHeader("Content-Type", "application/json");
+ target_summary.innerHTML = "Getting AI Summary...";
+ console.log("Sending simplify request");
+ simplify_request.setRequestHeader('X-CSRFToken', csrftoken);
+ simplify_request.send(JSON.stringify({"explanation": text_explanation}));
+ }
+ }
+
+
$('#nav-answers-tab').tab('show');
draw_facts(); // So that new relevance information will be displayed in the interface.
} else {
@@ -1592,6 +1647,20 @@ Response
testrun_request.setRequestHeader('X-CSRFToken', csrftoken);
testrun_request.send(JSON.stringify(new_fact_data));
}
+
+
+ // Recursively collect the plain text of the explanation tree, one leaf per line.
+ function get_text_of_explanation(element) {
+ var output = "";
+ if (element.hasChildNodes()) {
+ for (var c = 0; c < element.childNodes.length; c++) {
+ output += get_text_of_explanation(element.childNodes[c]);
+ }
+ } else {
+ // Leaf text nodes carry their text in `data`; childless element nodes contribute nothing.
+ output += "\n" + (element.data || "");
+ }
+ return output;
+ }
+
var view_form_element = document.getElementById('viewform');
function toggle_view_hidden(input) {
index = hidden_by_view.indexOf(input);
@@ -1875,7 +1944,7 @@ Response
}
if (!new_fact_data['facts'][f]['from_ontology']) {
var delete_target
- output_html += '';
+ output_html += '';
} else {
output_html += '