diff --git a/ClinicalCoach.php b/ClinicalCoach.php
index 149bc5a..16ef81d 100644
--- a/ClinicalCoach.php
+++ b/ClinicalCoach.php
@@ -92,39 +92,88 @@ public function redcap_module_ajax($action, $payload, $project_id, $record, $ins
             case "callAI":
                 $messages = $payload;
 
-                // Retrieve the system context reflection from project settings
-                $reflection_context = $this->getProjectSetting("system_context_reflection_1");
+                // Retrieve the main system context reflection from project settings
+                $main_system_context = $this->getProjectSetting("system_context_summarize");
+
+                // Reflection contexts (could be null, so we check them before appending)
+                $reflection_contexts = [
+                    $this->getProjectSetting("system_context_reflection_1"),
+                    $this->getProjectSetting("system_context_reflection_2"),
+                    $this->getProjectSetting("system_context_reflection_3"),
+                    $this->getProjectSetting("system_context_reflection_4"),
+                    $this->getProjectSetting("system_context_reflection_5"),
+                    $this->getProjectSetting("system_context_reflection_6")
+                ];
+
+                // Array to hold results from all API calls
+                $allResults = [];
+
+                // TODO NEED TO ENGINEER PROMPT TO GIVE RELATIVE SCORE?
+                function assignScore($responseContent) {
+                    // Example logic: scoring based on the length of the content
+                    $length = strlen($responseContent);
+                    if ($length < 100) {
+                        return 1; // Bad response
+                    } elseif ($length < 300) {
+                        return 2; // Average response
+                    } else {
+                        return 3; // Good response
+                    }
+                }
 
-                // Use the appendSystemContext function to handle system context
-                $messages = $this->appendSystemContext($messages, $reflection_context);
+                // Loop through each reflection context
+                foreach ($reflection_contexts as $index => $reflection_context) {
+                    if (!empty($reflection_context)) {
+                        // Append main system context and current reflection context
+                        $currentMessages = $this->appendSystemContext($messages, $main_system_context);
+                        $currentMessages = $this->appendSystemContext($currentMessages, $reflection_context);
 
-                $this->emDebug("chatml Messages array to API", $messages);
+                        $this->emDebug("chatml Messages array to API for reflection context " . ($index + 1), $currentMessages);
 
-                // CALL API ENDPOINT WITH AUGMENTED CHATML
-                $model = "gpt-4o";
-                $params = array("messages" => $messages);
+                        // Prepare parameters for the API call
+                        $model = "gpt-4o";
+                        $params = array("messages" => $currentMessages);
 
-                if ($this->getProjectSetting("gpt-temperature")) {
-                    $params["temperature"] = floatval($this->getProjectSetting("gpt-temperature"));
-                }
-                if ($this->getProjectSetting("gpt-top-p")) {
-                    $params["top_p"] = floatval($this->getProjectSetting("gpt-top-p"));
-                }
-                if ($this->getProjectSetting("gpt-frequency-penalty")) {
-                    $params["frequency_penalty"] = floatval($this->getProjectSetting("gpt-frequency-penalty"));
-                }
-                if ($this->getProjectSetting("presence_penalty")) {
-                    $params["presence_penalty"] = floatval($this->getProjectSetting("presence_penalty"));
-                }
-                if ($this->getProjectSetting("gpt-max-tokens")) {
-                    $params["max_tokens"] = intval($this->getProjectSetting("gpt-max-tokens"));
+                        if ($this->getProjectSetting("gpt-temperature")) {
+                            $params["temperature"] = floatval($this->getProjectSetting("gpt-temperature"));
+                        }
+                        if ($this->getProjectSetting("gpt-top-p")) {
+                            $params["top_p"] = floatval($this->getProjectSetting("gpt-top-p"));
+                        }
+                        if ($this->getProjectSetting("gpt-frequency-penalty")) {
+                            $params["frequency_penalty"] = floatval($this->getProjectSetting("gpt-frequency-penalty"));
+                        }
+                        if ($this->getProjectSetting("presence_penalty")) {
+                            $params["presence_penalty"] = floatval($this->getProjectSetting("presence_penalty"));
+                        }
+                        if ($this->getProjectSetting("gpt-max-tokens")) {
+                            $params["max_tokens"] = intval($this->getProjectSetting("gpt-max-tokens"));
+                        }
+
+                        // Make the API call for the current context
+                        $response = $this->getSecureChatInstance()->callAI($model, $params, PROJECT_ID);
+                        $result = $this->formatResponse($response);
+
+                        // Extract the response content for scoring
+                        $responseContent = $result['response']['content'] ?? '';
+                        $score = assignScore($responseContent);
+
+                        // Add the result to the allResults array, including the score
+                        $allResults[] = [
+                            "reflection_context" => "Reflection " . ($index + 1),
+                            "response" => $result,
+                            "score" => $score
+                        ];
+
+                        $this->emDebug("API result for reflection context " . ($index + 1), $result);
+                    }
                 }
 
-                $response = $this->getSecureChatInstance()->callAI($model, $params, PROJECT_ID);
-                $result = $this->formatResponse($response);
+                // Return all results as a JSON array
+                $this->emDebug("All API results", $allResults);
+                return json_encode($allResults);
+
-                $this->emDebug("calling SecureChatAI.callAI()", $result);
-                return json_encode($result);
 
             case "transcribeAudio":
                 $messages = $payload;
diff --git a/MVP/src/assets/mvp.css b/MVP/src/assets/mvp.css
index 668e7d1..7bdd1f0 100644
--- a/MVP/src/assets/mvp.css
+++ b/MVP/src/assets/mvp.css
@@ -272,3 +272,24 @@ button:hover {
     font-style: italic;
 }
 
+.status-indicator {
+    display: flex;
+    justify-content: space-between;
+    align-items: center;
+    padding: 10px 0;
+}
+
+.status-bar {
+    flex: 1;
+    min-width: 60px;
+    height: 10px;
+    margin-bottom: 5px;
+    margin-right: 1px;
+}
+
+.status-label {
+    width: 100%;
+    font-size: 65%;
+    color: black;
+    text-align: center;
+}
diff --git a/MVP/src/components/Home.jsx b/MVP/src/components/Home.jsx
index f3721d3..c0a287a 100644
--- a/MVP/src/components/Home.jsx
+++ b/MVP/src/components/Home.jsx
@@ -1,4 +1,5 @@
 import React from 'react';
+import StatusIndicator from './StatusIndicator';
 import { useStudents } from '../contexts/Students';
 import { FaUserCircle } from 'react-icons/fa';
 
@@ -16,31 +17,58 @@ function Home() {
             { role: "user", content: selectedStudent.transcription }
         ];
 
-        console.log("chatmlPayload", chatmlPayload);
-
+        // Call AI function and handle the callback
         callAI(chatmlPayload, (aiContent) => {
-            console.log("aiContent callback", aiContent);
-            if (aiContent) {
-                updateAIResponse(aiContent); // Store AI response in context
+            if (aiContent && Array.isArray(aiContent)) {
+                const updatedStudent = {
+                    ...selectedStudent,
+                    aiResponse: aiContent // Store the AI response in the student data
+                };
+
+                console.log("HOME handleSubmitToAI Updating student with AI response:", updatedStudent);
+
+                // Instead of using updateStudentData, use updateAIResponse
+                updateAIResponse(selectedStudent.id, aiContent);
             } else {
-                console.error("Failed to get a response from the AI.");
+                console.error("HOME handleSubmitToAI No content received from AI or invalid format");
             }
         });
     };
+
     const callAI = (chatmlPayload, callback) => {
-        window.clicnical_coach_jsmo_module.callAI(chatmlPayload, (res) => {
-            if (res) {
-                if (callback) callback(res);
-            } else {
-                console.log("Unexpected AI response format:", res);
+        window.clicnical_coach_jsmo_module.callAI(
+            chatmlPayload,
+            (res) => {
+                if (!res) {
+                    console.error("HOME callAI Response is null or undefined");
+                    callback(undefined); // Pass undefined to indicate failure
+                    return;
+                }
+
+                try {
+                    const parsedRes = Array.isArray(res) ? res : typeof res === 'object' ? res : JSON.parse(res);
+
+                    if (Array.isArray(parsedRes)) {
+                        console.log("HOME callAI Parsed response before callback:", parsedRes);
+                        callback(parsedRes);
+                    } else {
+                        console.error("HOME callAI Unexpected response format:", parsedRes);
+                        callback(undefined); // Call with undefined for failure
+                    }
+                } catch (error) {
+                    console.error("HOME callAI Error parsing response:", error);
+                    callback(undefined); // Call with undefined for failure
+                }
+            },
+            (err) => {
+                console.error("HOME callAI AI call failed:", err);
+                callback(undefined); // Ensure callback is called on error
            }
-        }, (err) => {
-            console.log("callAI error:", err);
-            if (callback) callback();
-        });
+        );
     };
+
     return (
{selectedStudent.transcription}+
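
Note on the scoring helper added in the ClinicalCoach.php hunk: a named function declared inside the case "callAI" block is registered in PHP's global function table the first time that branch runs, so if the branch were ever executed twice in one request PHP would raise a "cannot redeclare assignScore()" fatal error. Below is a minimal, hypothetical sketch of a closure-based alternative; it keeps the same placeholder length thresholds from the diff (the relative-score prompt flagged in the TODO is still to be engineered), and the $assignScore variable name is an assumption, not part of the committed code.

<?php
// Sketch only (assumed refactor, not in the diff): bind the scorer to a closure
// so nothing is registered in the global function table.
$assignScore = function (string $responseContent): int {
    $length = strlen($responseContent);
    if ($length < 100) {
        return 1; // short response, treated as low quality
    }
    if ($length < 300) {
        return 2; // average response
    }
    return 3;     // long response, treated as higher quality
};

// Usage inside the reflection loop, in place of the global helper call:
// $score = $assignScore($responseContent);

A private method on the module class would work equally well; either way the scoring stays request-local and the length heuristic remains a placeholder until the prompt produces a real relative score.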