From f546fabb1e91fe81dd7b6d21a5da6d25f9a2b6a5 Mon Sep 17 00:00:00 2001
From: Daniele Rosetti
Date: Mon, 29 Jan 2024 18:38:30 +0100
Subject: [PATCH 01/58] setup websocket

---
 api_app/websocket.py                          | 44 +++++++++++++++++++
 configuration/nginx/django_server.conf        |  3 ++
 configuration/nginx/locations.conf            |  5 ++-
 configuration/nginx/websocket.conf            | 10 +++++
 docker/Dockerfile                             |  1 +
 docker/default.yml                            | 17 +++++++
 docker/entrypoints/daphne.sh                  |  8 ++++
 docker/test.override.yml                      |  8 ++++
 .../src/components/jobs/result/JobResult.jsx  | 18 ++++++++
 intel_owl/asgi.py                             | 31 +++++++++++++
 intel_owl/settings/__init__.py                |  2 +
 intel_owl/settings/django.py                  |  1 +
 requirements/project-requirements.txt         |  2 +
 13 files changed, 149 insertions(+), 1 deletion(-)
 create mode 100644 api_app/websocket.py
 create mode 100644 configuration/nginx/websocket.conf
 create mode 100755 docker/entrypoints/daphne.sh
 create mode 100644 intel_owl/asgi.py

diff --git a/api_app/websocket.py b/api_app/websocket.py
new file mode 100644
index 0000000000..b62060e2c8
--- /dev/null
+++ b/api_app/websocket.py
@@ -0,0 +1,44 @@
+import json
+import logging
+
+from channels.generic.websocket import WebsocketConsumer
+
+logger = logging.getLogger(__name__)
+
+
+class JobConsumer(WebsocketConsumer):
+    def connect(self):
+        logger.debug("websocket connect!")
+        job_id = self.scope["url_route"]["kwargs"]["job_id"]
+        user = self.scope["user"]
+        logger.debug(f"this is the job id: {job_id}")
+        logger.debug(f"user: {user}")
+        self.accept()
+
+    def disconnect(self):
+        logger.debug("websocket disconnect!")
+        # self.send(bytes_data="disconnect request received")
+        self.close()
+
+    # Receive message from WebSocket
+    def receive(self):
+        logger.debug("websocket receive!")
+        self.send(text_data=json.dumps({"message": "it's working"}))
+        # TODO: not sure whether at least one message is always sent.
+        # in any case, when someone opens an old job the websocket is used
+        # (the frontend cannot know whether it is running or not)
+        # and we will need to handle that case by reporting the job right away.
+        # if the analysis has just started and the job is running, that must be said:
+        # we check the db and return either the status, if it is running, or the whole job
+
+
+# this method must be used in other parts of the code (at the end of the job, I'd say)
+# to reach the right websocket on which to send the result:
+# channel_layer = get_channel_layer()
+# await channel_layer.send("channel_name", {
+#     "type": "chat.message",
+#     "text": "Hello there!",
+# })
+
+# in the docs example the available channels (the open websockets) are saved in the db:
+# https://channels.readthedocs.io/en/latest/topics/channel_layers.html?highlight=get_channel_layer#single-channels
diff --git a/configuration/nginx/django_server.conf b/configuration/nginx/django_server.conf
index 1b56aca4c4..8a799489ca 100644
--- a/configuration/nginx/django_server.conf
+++ b/configuration/nginx/django_server.conf
@@ -21,6 +21,9 @@ server {
         client_max_body_size 100m;
     }
 
+    # Websocket
+    include websocket.conf;
+
     # Error pages
     include errors.conf;
 }
diff --git a/configuration/nginx/locations.conf b/configuration/nginx/locations.conf
index 369897fb52..0ce1815107 100644
--- a/configuration/nginx/locations.conf
+++ b/configuration/nginx/locations.conf
@@ -29,4 +29,7 @@ location ^~/admin {
     uwsgi_read_timeout 600;
     include uwsgi_params;
     client_max_body_size 100m;
-}
\ No newline at end of file
+}
+
+# Websocket
+include websocket.conf;
diff --git a/configuration/nginx/websocket.conf b/configuration/nginx/websocket.conf
new file mode 100644
index 0000000000..dae2db2569
--- /dev/null
+++ b/configuration/nginx/websocket.conf
@@ -0,0 +1,10 @@
+location /ws {
+    proxy_pass http://daphne:8011;
+    proxy_http_version 1.1;
+    proxy_set_header Upgrade $http_upgrade;
+    proxy_set_header Connection "Upgrade";
+    proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+    proxy_connect_timeout 86400;
+    proxy_read_timeout 86400;
+    proxy_send_timeout 86400;
+}
\ No newline at end of file
diff --git a/docker/Dockerfile b/docker/Dockerfile
index a190b6e4dd..bdad016663 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -28,6 +28,7 @@ ARG PYCTI_VERSION=5.10.0
 RUN mkdir -p ${LOG_PATH} \
     ${LOG_PATH}/django \
     ${LOG_PATH}/uwsgi \
+    ${LOG_PATH}/asgi \
     /opt/deploy/files_required /opt/deploy/files_required/yara /opt/deploy/configuration

# install required packages.
some notes about:o diff --git a/docker/default.yml b/docker/default.yml index b27c373316..56287fa594 100644 --- a/docker/default.yml +++ b/docker/default.yml @@ -23,12 +23,29 @@ services: - env_file_app - .env + daphne: + image: intelowlproject/intelowl:${REACT_APP_INTELOWL_VERSION} + container_name: intelowl_daphne + restart: unless-stopped + volumes: + - ../configuration:/opt/deploy/intel_owl/configuration + - generic_logs:/var/log/intel_owl + - shared_files:/opt/deploy/files_required + entrypoint: + - ./docker/entrypoints/daphne.sh + expose: + - "8011" + env_file: + - env_file_app + - .env + nginx: image: intelowlproject/intelowl_nginx:${REACT_APP_INTELOWL_VERSION} container_name: intelowl_nginx restart: unless-stopped hostname: nginx volumes: + - ../configuration/nginx/websocket.conf:/etc/nginx/websocket.conf - ../configuration/nginx/errors.conf:/etc/nginx/errors.conf - ../configuration/nginx/http.conf:/etc/nginx/conf.d/default.conf - ../configuration/nginx/locations.conf:/etc/nginx/locations.conf diff --git a/docker/entrypoints/daphne.sh b/docker/entrypoints/daphne.sh new file mode 100755 index 0000000000..91fb811e0f --- /dev/null +++ b/docker/entrypoints/daphne.sh @@ -0,0 +1,8 @@ +#!/bin/bash + +until cd /opt/deploy/intel_owl +do + echo "Waiting for server volume..." +done + +/usr/local/bin/daphne --proxy-headers --access-log /var/log/intel_owl/asgi/daphne.log -p 8011 -b 0.0.0.0 --no-server-name --application-close-timeout 60 --ping-interval 30 --ping-timeout 35 intel_owl.asgi:application diff --git a/docker/test.override.yml b/docker/test.override.yml index 02a84a33c0..31a3bab329 100644 --- a/docker/test.override.yml +++ b/docker/test.override.yml @@ -18,6 +18,14 @@ services: - DJANGO_TEST_SERVER=True - DJANGO_WATCHMAN_TIMEOUT=20 + daphne: + build: + context: .. + dockerfile: docker/Dockerfile + image: intelowlproject/intelowl:test + volumes: + - ../:/opt/deploy/intel_owl + nginx: build: context: .. diff --git a/frontend/src/components/jobs/result/JobResult.jsx b/frontend/src/components/jobs/result/JobResult.jsx index 1f2cb5f467..cd4d327b39 100644 --- a/frontend/src/components/jobs/result/JobResult.jsx +++ b/frontend/src/components/jobs/result/JobResult.jsx @@ -13,6 +13,24 @@ import { setNotificationFavicon, } from "../notifications"; +const wsClient = new WebSocket(`ws://127.0.0.1/ws/jobs/1`); +wsClient.onopen = (data) => { + console.debug("ws opened!, received: "); + console.debug(data); +}; +wsClient.onclose = (data) => { + console.debug("ws closed!, received: "); + console.debug(data); +}; +wsClient.onmessage = (data) => { + console.debug("ws received a message!, received: "); + console.debug(data); +}; +wsClient.onerror = (data) => { + console.debug("ws error!, received: "); + console.debug(data); +}; + export default function JobResult() { console.debug("JobResult rendered!"); diff --git a/intel_owl/asgi.py b/intel_owl/asgi.py new file mode 100644 index 0000000000..8929a51980 --- /dev/null +++ b/intel_owl/asgi.py @@ -0,0 +1,31 @@ +import os + +from channels.auth import AuthMiddlewareStack +from channels.routing import ProtocolTypeRouter, URLRouter +from channels.security.websocket import AllowedHostsOriginValidator +from django.core.asgi import get_asgi_application +from django.urls import path + +from api_app.websocket import JobConsumer + +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "intel_owl.settings") + +# Initialize Django ASGI application early to ensure the AppRegistry +# is populated before importing code that may import ORM models. 
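+# The returned ASGI callable is deliberately unused for HTTP here: uwsgi keeps
+# serving the REST API, daphne only receives the "websocket" scopes that
+# nginx's /ws location routes to it.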
+get_asgi_application() + + +application = ProtocolTypeRouter( + { + # WebSocket chat handler + "websocket": AllowedHostsOriginValidator( + AuthMiddlewareStack( + URLRouter( + [ + path("ws/jobs/", JobConsumer.as_asgi()), + ] + ) + ) + ), + } +) diff --git a/intel_owl/settings/__init__.py b/intel_owl/settings/__init__.py index 3de33e8427..9f8ee000cb 100644 --- a/intel_owl/settings/__init__.py +++ b/intel_owl/settings/__init__.py @@ -47,6 +47,8 @@ "silk", # celery "django_celery_beat", + # websocket + "channels", ] from .auth import * # lgtm [py/polluting-import] diff --git a/intel_owl/settings/django.py b/intel_owl/settings/django.py index 02cc29dcab..d133126a95 100644 --- a/intel_owl/settings/django.py +++ b/intel_owl/settings/django.py @@ -42,6 +42,7 @@ ] WSGI_APPLICATION = "intel_owl.wsgi.application" +ASGI_APPLICATION = "intel_owl.asgi.application" # Internationalization diff --git a/requirements/project-requirements.txt b/requirements/project-requirements.txt index 7a03841f14..c5b08926b8 100644 --- a/requirements/project-requirements.txt +++ b/requirements/project-requirements.txt @@ -74,3 +74,5 @@ pyxlsb2==0.0.8 xlrd2==1.3.4 defusedxml==0.7.1 dateparser==1.2.0 +daphne==4.0.0 +channels==4.0.0 From bb1ed9c59fd25cd0563bff259fd41ca1f5bb922a Mon Sep 17 00:00:00 2001 From: Daniele Rosetti Date: Thu, 1 Feb 2024 18:17:19 +0100 Subject: [PATCH 02/58] fixed frontend --- api_app/serializers.py | 3 +- api_app/websocket.py | 39 ++--- .../components/jobs/result/JobOverview.jsx | 21 +-- .../src/components/jobs/result/JobResult.jsx | 145 ++++++++---------- .../jobs/result/pluginReportTables.jsx | 26 ++-- frontend/src/constants/apiURLs.js | 6 + intel_owl/asgi.py | 4 +- 7 files changed, 118 insertions(+), 126 deletions(-) diff --git a/api_app/serializers.py b/api_app/serializers.py index 1ff9f0938d..83e7940436 100644 --- a/api_app/serializers.py +++ b/api_app/serializers.py @@ -408,7 +408,8 @@ class Meta: permissions = rfs.SerializerMethodField() def get_pivots_to_execute(self, obj: Job): - return obj.pivots_to_execute.all().values_list("name", flat=True) + # this cast is required or serializer doesn't work with websocket + return list(obj.pivots_to_execute.all().values_list("name", flat=True)) def get_fields(self): # this method override is required for a cyclic import diff --git a/api_app/websocket.py b/api_app/websocket.py index b62060e2c8..3c90683434 100644 --- a/api_app/websocket.py +++ b/api_app/websocket.py @@ -1,35 +1,36 @@ -import json import logging -from channels.generic.websocket import WebsocketConsumer +from channels.generic.websocket import JsonWebsocketConsumer + +from api_app.models import Job +from api_app.serializers import JobSerializer logger = logging.getLogger(__name__) -class JobConsumer(WebsocketConsumer): +class JobConsumer(JsonWebsocketConsumer): def connect(self): - logger.debug("websocket connect!") - job_id = self.scope["url_route"]["kwargs"]["job_id"] user = self.scope["user"] - logger.debug(f"this is the job id: {job_id}") - logger.debug(f"user: {user}") + job_id = self.scope["url_route"]["kwargs"]["job_id"] + logger.info(f"user: {user} requested the analysis for the job {job_id}") self.accept() + job = Job.objects.get(id=job_id) + job_serializer = JobSerializer(job) + job_data = job_serializer.data + logger.debug(f"job data: {job_data}") + self.send_json(content=job_data) def disconnect(self): - logger.debug("websocket disconnect!") - # self.send(bytes_data="disconnect request received") + user = self.scope["user"] + logger.debug(f"user {user} disconnected!") 
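+        # calling close() from disconnect() is defensive only: Channels is
+        # already tearing the socket down when this handler runs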
        self.close()
 
-    # Receive message from WebSocket
-    def receive(self):
-        logger.debug("websocket receive!")
-        self.send(text_data=json.dumps({"message": "it's working"}))
-        # TODO: not sure whether at least one message is always sent.
-        # in any case, when someone opens an old job the websocket is used
-        # (the frontend cannot know whether it is running or not)
-        # and we will need to handle that case by reporting the job right away.
-        # if the analysis has just started and the job is running, that must be said:
-        # we check the db and return either the status, if it is running, or the whole job
+    def receive_json(self, content):
+        logger.info("websocket receive!")
+        user = self.scope["user"]
+        logger.warning(
+            f"user {user} send {content} to the websocket, this shouldn't happen"
+        )
 
 
 # this method must be used in other parts of the code (at the end of the job, I'd say)
diff --git a/frontend/src/components/jobs/result/JobOverview.jsx b/frontend/src/components/jobs/result/JobOverview.jsx
index 477d57f4b9..f06db01b2e 100644
--- a/frontend/src/components/jobs/result/JobOverview.jsx
+++ b/frontend/src/components/jobs/result/JobOverview.jsx
@@ -51,14 +51,8 @@ in case we use empty param for this page we fall in an infinite redirect loop.
 const LOADING_VISUALIZER_UI_ELEMENT_CODE = "loading";
 const NO_VISUALIZER_UI_ELEMENT_CODE = "no-visualizer";
 
-export function JobOverview({
-  isRunningJob,
-  job,
-  refetch,
-  section,
-  subSection,
-}) {
-  console.debug("JobOverview rendered");
+export function JobOverview({ isRunningJob, job, section, subSection }) {
+  console.debug(`JobOverview rendered: ${JSON.stringify(job)}`);
   console.debug(`section: ${section}, subSection: ${subSection}`);
 
   const isSelectedUI = section === JobResultSections.VISUALIZER;
@@ -80,7 +74,7 @@ export function JobOverview({
           />
         </Loader>
       ),
-      report: <AnalyzersReportTable job={job} refetch={refetch} />,
+      report: <AnalyzersReportTable job={job} />,
     },
     {
       id: "connector",
@@ -97,7 +91,7 @@ export function JobOverview({
           />
         </Loader>
       ),
-      report: <ConnectorsReportTable job={job} refetch={refetch} />,
+      report: <ConnectorsReportTable job={job} />,
     },
     {
       id: "pivot",
@@ -114,7 +108,7 @@ export function JobOverview({
           />
         </Loader>
       ),
-      report: <PivotsReportTable job={job} refetch={refetch} />,
+      report: <PivotsReportTable job={job} />,
     },
     {
       id: "visualizer",
@@ -135,7 +129,7 @@ export function JobOverview({
           />
         </Loader>
       ),
-      report: <VisualizersReportTable job={job} refetch={refetch} />,
+      report: <VisualizersReportTable job={job} />,
     },
     {
       id: "full",
@@ -158,7 +152,7 @@ export function JobOverview({
         ),
       },
     ],
-    [job, refetch],
+    [job],
   );
 
   // state
@@ -399,7 +393,6 @@ export function JobOverview({
 JobOverview.propTypes = {
   isRunningJob: PropTypes.bool.isRequired,
   job: PropTypes.object.isRequired,
-  refetch: PropTypes.func.isRequired,
   section: PropTypes.string.isRequired,
   subSection: PropTypes.string.isRequired,
 };
diff --git a/frontend/src/components/jobs/result/JobResult.jsx b/frontend/src/components/jobs/result/JobResult.jsx
index cd4d327b39..b46e88eea9 100644
--- a/frontend/src/components/jobs/result/JobResult.jsx
+++ b/frontend/src/components/jobs/result/JobResult.jsx
@@ -1,11 +1,9 @@
-import React from "react";
-import useAxios from "axios-hooks";
+import React, { useEffect } from "react";
 import useTitle from "react-use/lib/useTitle";
-import useInterval from "react-use/lib/useInterval";
 import { useParams } from "react-router-dom";
 
 import { Loader } from "@certego/certego-ui";
 
-import { JOB_BASE_URI } from "../../../constants/apiURLs";
+import { WEBSOCKET_JOBS_URI } from "../../../constants/apiURLs";
 import { JobOverview } from "./JobOverview";
 
 import {
@@ -13,30 +11,11 @@ import {
   setNotificationFavicon,
 } from "../notifications";
 
-const wsClient = new WebSocket(`ws://127.0.0.1/ws/jobs/1`);
-wsClient.onopen = (data) => {
-  console.debug("ws opened!, received: ");
-  console.debug(data);
-};
-wsClient.onclose = (data) => {
-
console.debug("ws closed!, received: "); - console.debug(data); -}; -wsClient.onmessage = (data) => { - console.debug("ws received a message!, received: "); - console.debug(data); -}; -wsClient.onerror = (data) => { - console.debug("ws error!, received: "); - console.debug(data); -}; - export default function JobResult() { console.debug("JobResult rendered!"); - // local state const [initialLoading, setInitialLoading] = React.useState(true); - const [isRunning, setIsRunning] = React.useState(false); + const [job, setJob] = React.useState(undefined); // this state var is used to check if we notified the user, in this way we avoid to notify more than once const [notified, setNotified] = React.useState(false); // this state var is used to check if the user changed page, in case he waited the result on the page we avoid the notification @@ -48,64 +27,87 @@ export default function JobResult() { const { section } = params; const { subSection } = params; - // API to download the job data - const [{ data: job, loading, error }, refetch] = useAxios({ - url: `${JOB_BASE_URI}/${jobId}`, - }); + // setup ws (we need to use useref to avoid to create a ws each render) + const jobWebsocket = React.useRef(); + if (!jobWebsocket.current) { + const websocketUrl = `${ + window.location.protocol === "https:" ? "wss" : "ws" + }://${window.location.hostname}/${WEBSOCKET_JOBS_URI}/${jobId}`; + console.debug(`connect to websocket API: ${websocketUrl}`); + jobWebsocket.current = new WebSocket(websocketUrl); + jobWebsocket.current.onopen = (data) => { + console.debug("ws opened:"); + console.debug(data); + }; + jobWebsocket.current.onclose = (data) => { + console.debug("ws closed:"); + console.debug(data); + }; + jobWebsocket.current.onmessage = (data) => { + console.debug("ws received:"); + console.debug(data); + const jobData = JSON.parse(data.data); + console.debug(jobData); + setJob(jobData); + }; + jobWebsocket.current.onerror = (data) => { + console.debug("ws error:"); + console.debug(data); + }; + } - // in case the job is not running and started (the job is not undefined) it means it terminated. - const jobTerminated = job !== undefined && !isRunning; + useEffect(() => { + /* this is required because the first loading we don't have job data + and this is a problem for JobOverview that needs the UI sections names + so the first time the page has a spinner, after the first request + the spinner will be moved in the sections. + */ + if (job) setInitialLoading(false); + }, [job]); - console.debug( - `JobResult - initialLoading: ${initialLoading}, isRunning: ${isRunning}, ` + - `notified: ${notified}, toNotify: ${toNotify}, jobTerminated: ${jobTerminated}`, + // page title + useTitle( + `IntelOwl | Job (#${jobId}, ${ + // eslint-disable-next-line no-nested-ternary + job ? (job.is_sample ? job.file_name : job.observable_name) : "" + })`, + { restoreOnUnmount: true }, ); - // HTTP polling only in case the job is running - useInterval( - refetch, - isRunning ? 5 * 1000 : null, // 5 seconds - ); + // in case the job is not running and started (the job is not undefined) it means it terminated. 
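+  // (this list mirrors the backend's non-final job statuses; anything else,
+  // e.g. "reported_without_fails", "killed" or "failed", counts as terminal)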
+ const jobIsRunning = + job === undefined || + [ + "pending", + "running", + "analyzers_running", + "connectors_running", + "pivots_running", + "visualizers_running", + "analyzers_completed", + "connectors_completed", + "pivots_completed", + "visualizers_completed", + ].includes(job.status); - // every time the job data are downloaded we check if it terminated or not - React.useEffect( - () => - setIsRunning( - job === undefined || - [ - "pending", - "running", - "analyzers_running", - "connectors_running", - "pivots_running", - "visualizers_running", - "analyzers_completed", - "connectors_completed", - "pivots_completed", - "visualizers_completed", - ].includes(job.status), - ), - [job], + console.debug( + `JobResult - initialLoading: ${initialLoading}, jobIsRunning: ${jobIsRunning}, ` + + `notified: ${notified}, toNotify: ${toNotify}`, ); // In case the job terminated and it's not to notify, it means the user waited the result, notification is not needed. React.useEffect(() => { - if (jobTerminated && !toNotify) { + if (!jobIsRunning && !toNotify) { setNotified(true); } - }, [isRunning, jobTerminated, toNotify]); + }, [jobIsRunning, toNotify]); // notify the user when the job ends, he left the web page and we didn't notified the user before. - if (jobTerminated && toNotify && !notified) { + if (!jobIsRunning && toNotify && !notified) { generateJobNotification(job.observable_name, job.id); setNotified(true); } - // initial loading (spinner) - React.useEffect(() => { - if (!loading) setInitialLoading(false); - }, [loading]); - /* add a focus listener: when gain focus set it has been notified and reset the favicon when lost focus (blur) we set we can notify the user @@ -119,24 +121,13 @@ export default function JobResult() { // eslint-disable-next-line react-hooks/exhaustive-deps }, []); - // page title - useTitle( - `IntelOwl | Job (#${jobId}, ${ - // eslint-disable-next-line no-nested-ternary - job ? (job.is_sample ? job.file_name : job.observable_name) : "" - })`, - { restoreOnUnmount: true }, - ); - return ( ( diff --git a/frontend/src/components/jobs/result/pluginReportTables.jsx b/frontend/src/components/jobs/result/pluginReportTables.jsx index 99c5eeda67..aecc2b1c12 100644 --- a/frontend/src/components/jobs/result/pluginReportTables.jsx +++ b/frontend/src/components/jobs/result/pluginReportTables.jsx @@ -24,14 +24,14 @@ const tableProps = { accessor: (pluginReport) => pluginReport, maxWidth: 60, disableSortBy: true, - Cell: ({ value: plugin, customProps: { job, refetch } }) => ( + Cell: ({ value: plugin, customProps: { job } }) => (
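+      // no refetch chained here any more: once kill/retry completes, the
+      // updated job is expected to arrive over the websocket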
{job.permissions?.plugin_actions === true && ["running", "pending"].includes(plugin.status.toLowerCase()) && ( killPlugin(job.id, plugin).then(refetch)} + onClick={() => killPlugin(job.id, plugin)} color="accent" size="xs" title={`Kill ${plugin.type} run`} @@ -44,7 +44,7 @@ const tableProps = { retryPlugin(job.id, plugin).then(refetch)} + onClick={() => retryPlugin(job.id, plugin)} color="light" size="xs" title={`Retry ${plugin.type} run`} @@ -118,50 +118,50 @@ const tableProps = { ), }; -export function AnalyzersReportTable({ job, refetch }) { +export function AnalyzersReportTable({ job }) { console.debug("AnalyzersReportTable rendered"); return (
); } -export function ConnectorsReportTable({ job, refetch }) { +export function ConnectorsReportTable({ job }) { console.debug("ConnectorsReportTable rendered"); return (
); } -export function PivotsReportTable({ job, refetch }) { - console.debug("ConnectorsReportTable rendered"); +export function PivotsReportTable({ job }) { + console.debug("PivotsReportTable rendered"); return (
); } -export function VisualizersReportTable({ job, refetch }) { - console.debug("AnalyzersReportTable rendered"); +export function VisualizersReportTable({ job }) { + console.debug("VisualizersReportTable rendered"); return (
diff --git a/frontend/src/constants/apiURLs.js b/frontend/src/constants/apiURLs.js index 442b37738f..147031b43d 100644 --- a/frontend/src/constants/apiURLs.js +++ b/frontend/src/constants/apiURLs.js @@ -1,3 +1,4 @@ +// HTTP(S) // api/ auth export const API_BASE_URI = "/api"; @@ -44,3 +45,8 @@ export const NOTIFICATION_BASE_URI = `${API_BASE_URI}/notification`; export const AUTH_BASE_URI = `${API_BASE_URI}/auth`; export const SESSIONS_BASE_URI = `${AUTH_BASE_URI}/sessions`; export const APIACCESS_BASE_URI = `${AUTH_BASE_URI}/apiaccess`; + +// WEBSOCKETS +export const WEBSOCKET_BASE_URI = "ws"; + +export const WEBSOCKET_JOBS_URI = `${WEBSOCKET_BASE_URI}/jobs`; diff --git a/intel_owl/asgi.py b/intel_owl/asgi.py index 8929a51980..2794362571 100644 --- a/intel_owl/asgi.py +++ b/intel_owl/asgi.py @@ -6,14 +6,14 @@ from django.core.asgi import get_asgi_application from django.urls import path -from api_app.websocket import JobConsumer - os.environ.setdefault("DJANGO_SETTINGS_MODULE", "intel_owl.settings") # Initialize Django ASGI application early to ensure the AppRegistry # is populated before importing code that may import ORM models. get_asgi_application() +# pylint: disable=wrong-import-position +from api_app.websocket import JobConsumer # noqa: E402 application = ProtocolTypeRouter( { From 8425ff9a1ddd2411e5159998e94a482a2eb5dd98 Mon Sep 17 00:00:00 2001 From: Daniele Rosetti Date: Tue, 6 Feb 2024 15:05:30 +0100 Subject: [PATCH 03/58] added channel layer and used ws from different part of job workflow --- api_app/websocket.py | 31 ++++++++++++++++++++++----- docker/default.yml | 13 +++++++++++ docker/test.override.yml | 3 +++ intel_owl/settings/__init__.py | 1 + intel_owl/settings/django.py | 1 - intel_owl/settings/websocket.py | 9 ++++++++ intel_owl/tasks.py | 23 +++++++++++++++++++- requirements/project-requirements.txt | 1 + 8 files changed, 75 insertions(+), 7 deletions(-) create mode 100644 intel_owl/settings/websocket.py diff --git a/api_app/websocket.py b/api_app/websocket.py index 3c90683434..dba33669b6 100644 --- a/api_app/websocket.py +++ b/api_app/websocket.py @@ -1,5 +1,6 @@ import logging +from asgiref.sync import async_to_sync from channels.generic.websocket import JsonWebsocketConsumer from api_app.models import Job @@ -17,21 +18,41 @@ def connect(self): job = Job.objects.get(id=job_id) job_serializer = JobSerializer(job) job_data = job_serializer.data - logger.debug(f"job data: {job_data}") - self.send_json(content=job_data) + async_to_sync(self.channel_layer.group_add)( + JobConsumer.generate_group_name(job_id), self.channel_name + ) + # send data + async_to_sync(self.channel_layer.group_send)( + JobConsumer.generate_group_name(job_id), + {"type": "send.job", "job": job_data}, + ) - def disconnect(self): + def disconnect(self, close_code): user = self.scope["user"] - logger.debug(f"user {user} disconnected!") + job_id = self.scope["url_route"]["kwargs"]["job_id"] + async_to_sync(self.channel_layer.group_discard)( + JobConsumer.generate_group_name(job_id), self.channel_name + ) + logger.info( + f"user: {user} disconnected for the job: {job_id}. 
Close code: {close_code}" + ) self.close() def receive_json(self, content): - logger.info("websocket receive!") user = self.scope["user"] logger.warning( f"user {user} send {content} to the websocket, this shouldn't happen" ) + def send_job(self, event): + job_data = event["job"] + logger.debug(f"job data: {job_data}") + self.send_json(content=job_data) + + @classmethod + def generate_group_name(cls, job_id: int): + return f"job-{job_id}" + # sto metodo deve essere usato in altre parti del codice (direi alla fine del job) # per accedere alla websocket giusta su cui inviare il risultato: diff --git a/docker/default.yml b/docker/default.yml index 56287fa594..dd10df8246 100644 --- a/docker/default.yml +++ b/docker/default.yml @@ -39,6 +39,19 @@ services: - env_file_app - .env + redis: + image: intelowlproject/intelowl:${REACT_APP_INTELOWL_VERSION} + container_name: intelowl_redis + restart: unless-stopped + volumes: + - ../configuration:/opt/deploy/intel_owl/configuration + - generic_logs:/var/log/intel_owl + - shared_files:/opt/deploy/files_required + expose: + - "6379" + env_file: + - env_file_app + nginx: image: intelowlproject/intelowl_nginx:${REACT_APP_INTELOWL_VERSION} container_name: intelowl_nginx diff --git a/docker/test.override.yml b/docker/test.override.yml index 31a3bab329..32f1538f00 100644 --- a/docker/test.override.yml +++ b/docker/test.override.yml @@ -18,6 +18,9 @@ services: - DJANGO_TEST_SERVER=True - DJANGO_WATCHMAN_TIMEOUT=20 + redis: + image: library/redis:6.2.7-alpine + daphne: build: context: .. diff --git a/intel_owl/settings/__init__.py b/intel_owl/settings/__init__.py index 9f8ee000cb..7e5524113b 100644 --- a/intel_owl/settings/__init__.py +++ b/intel_owl/settings/__init__.py @@ -67,3 +67,4 @@ from .rest import * # lgtm [py/polluting-import] from .security import * # lgtm [py/polluting-import] from .storage import * # lgtm [py/polluting-import] +from .websocket import * # lgtm [py/polluting-import] diff --git a/intel_owl/settings/django.py b/intel_owl/settings/django.py index d133126a95..02cc29dcab 100644 --- a/intel_owl/settings/django.py +++ b/intel_owl/settings/django.py @@ -42,7 +42,6 @@ ] WSGI_APPLICATION = "intel_owl.wsgi.application" -ASGI_APPLICATION = "intel_owl.asgi.application" # Internationalization diff --git a/intel_owl/settings/websocket.py b/intel_owl/settings/websocket.py new file mode 100644 index 0000000000..4d2dd4572b --- /dev/null +++ b/intel_owl/settings/websocket.py @@ -0,0 +1,9 @@ +ASGI_APPLICATION = "intel_owl.asgi.application" +CHANNEL_LAYERS = { + "default": { + "BACKEND": "channels_redis.core.RedisChannelLayer", + "CONFIG": { + "hosts": ["redis://redis:6379"], + }, + }, +} diff --git a/intel_owl/tasks.py b/intel_owl/tasks.py index 10fdfd5a36..38f95035ab 100644 --- a/intel_owl/tasks.py +++ b/intel_owl/tasks.py @@ -9,10 +9,12 @@ import typing import uuid +from asgiref.sync import async_to_sync from celery import Task, shared_task, signals from celery.worker.consumer import Consumer from celery.worker.control import control_command from celery.worker.request import Request +from channels.layers import get_channel_layer from django.conf import settings from django.db.models import Q from django.utils.timezone import now @@ -209,10 +211,19 @@ def update_notifications_with_releases(): @app.task(name="job_set_final_status", soft_time_limit=30) def job_set_final_status(job_id: int): from api_app.models import Job + from api_app.serializers import JobSerializer + from api_app.websocket import JobConsumer job = Job.objects.get(pk=job_id) # execute 
some callbacks job.set_final_status() + channel_layer = get_channel_layer() + job_serializer = JobSerializer(job) + job_data = job_serializer.data + logger.debug(f"job: {job_id} set to final status: {job.status}") + async_to_sync(channel_layer.group_send)( + JobConsumer.generate_group_name(job_id), {"type": "send.job", "job": job_data} + ) @shared_task(base=FailureLoggedTask, name="job_set_pipeline_status", soft_time_limit=30) @@ -257,7 +268,9 @@ def run_plugin( task_id: int, ): from api_app.classes import Plugin - from api_app.models import PythonModule + from api_app.models import Job, PythonModule + from api_app.serializers import JobSerializer + from api_app.websocket import JobConsumer logger.info( f"Configuring plugin {plugin_config_pk} for job {job_id} with task {task_id}" @@ -283,6 +296,14 @@ def run_plugin( config.reports.filter(job__pk=job_id).update( status=plugin.report_model.Status.FAILED.value ) + job = Job.objects.get(pk=job_id) + channel_layer = get_channel_layer() + job_serializer = JobSerializer(job) + job_data = job_serializer.data + logger.debug(f"job: {job_id} set to final status: {job.status}") + async_to_sync(channel_layer.group_send)( + JobConsumer.generate_group_name(job_id), {"type": "send.job", "job": job_data} + ) @shared_task(base=FailureLoggedTask, name="create_caches", soft_time_limit=200) diff --git a/requirements/project-requirements.txt b/requirements/project-requirements.txt index c5b08926b8..d86b7bb890 100644 --- a/requirements/project-requirements.txt +++ b/requirements/project-requirements.txt @@ -76,3 +76,4 @@ defusedxml==0.7.1 dateparser==1.2.0 daphne==4.0.0 channels==4.0.0 +channels-redis==4.2.0 From 08a1b51b17397d6c16c7492dda32f9d98f944f48 Mon Sep 17 00:00:00 2001 From: Daniele Rosetti Date: Tue, 6 Feb 2024 17:37:13 +0100 Subject: [PATCH 04/58] added request to retry plugin --- .../components/jobs/result/JobOverview.jsx | 14 ++++++++---- .../jobs/result/pluginReportTables.jsx | 22 +++++++++---------- 2 files changed, 21 insertions(+), 15 deletions(-) diff --git a/frontend/src/components/jobs/result/JobOverview.jsx b/frontend/src/components/jobs/result/JobOverview.jsx index f06db01b2e..8e8a034e50 100644 --- a/frontend/src/components/jobs/result/JobOverview.jsx +++ b/frontend/src/components/jobs/result/JobOverview.jsx @@ -19,6 +19,7 @@ import { GoBackButton, Loader } from "@certego/certego-ui"; import { JSONTree } from "react-json-tree"; import { useNavigate, useLocation } from "react-router-dom"; +import axios from "axios"; import { AnalyzersReportTable, ConnectorsReportTable, @@ -39,6 +40,7 @@ import { JobResultSections } from "../../../constants/miscConst"; import { JobInfoCard } from "./JobInfoCard"; import { JobIsRunningAlert } from "./JobIsRunningAlert"; import { JobActionsBar } from "./bar/JobActionBar"; +import { JOB_BASE_URI } from "../../../constants/apiURLs"; /* THESE IDS CANNOT BE EMPTY! We perform a redirect in case the user landed in the visualzier page without a visualizer, @@ -57,6 +59,10 @@ export function JobOverview({ isRunningJob, job, section, subSection }) { const isSelectedUI = section === JobResultSections.VISUALIZER; + const refetch = () => { + axios.get(`${JOB_BASE_URI}/${job.id}`); + }; + const rawElements = React.useMemo( () => [ { @@ -74,7 +80,7 @@ export function JobOverview({ isRunningJob, job, section, subSection }) { />
), - report: , + report: , }, { id: "connector", @@ -91,7 +97,7 @@ export function JobOverview({ isRunningJob, job, section, subSection }) { /> ), - report: , + report: , }, { id: "pivot", @@ -108,7 +114,7 @@ export function JobOverview({ isRunningJob, job, section, subSection }) { /> ), - report: , + report: , }, { id: "visualizer", @@ -129,7 +135,7 @@ export function JobOverview({ isRunningJob, job, section, subSection }) { /> ), - report: , + report: , }, { id: "full", diff --git a/frontend/src/components/jobs/result/pluginReportTables.jsx b/frontend/src/components/jobs/result/pluginReportTables.jsx index aecc2b1c12..8067197fff 100644 --- a/frontend/src/components/jobs/result/pluginReportTables.jsx +++ b/frontend/src/components/jobs/result/pluginReportTables.jsx @@ -24,14 +24,14 @@ const tableProps = { accessor: (pluginReport) => pluginReport, maxWidth: 60, disableSortBy: true, - Cell: ({ value: plugin, customProps: { job } }) => ( + Cell: ({ value: plugin, customProps: { job, refetch } }) => (
{job.permissions?.plugin_actions === true && ["running", "pending"].includes(plugin.status.toLowerCase()) && ( killPlugin(job.id, plugin)} + onClick={() => killPlugin(job.id, plugin).then(refetch)} color="accent" size="xs" title={`Kill ${plugin.type} run`} @@ -44,7 +44,7 @@ const tableProps = { retryPlugin(job.id, plugin)} + onClick={() => retryPlugin(job.id, plugin).then(refetch)} color="light" size="xs" title={`Retry ${plugin.type} run`} @@ -118,50 +118,50 @@ const tableProps = { ), }; -export function AnalyzersReportTable({ job }) { +export function AnalyzersReportTable({ job, refetch }) { console.debug("AnalyzersReportTable rendered"); return (
); } -export function ConnectorsReportTable({ job }) { +export function ConnectorsReportTable({ job, refetch }) { console.debug("ConnectorsReportTable rendered"); return (
); } -export function PivotsReportTable({ job }) { +export function PivotsReportTable({ job, refetch }) { console.debug("PivotsReportTable rendered"); return (
); } -export function VisualizersReportTable({ job }) { +export function VisualizersReportTable({ job, refetch }) { console.debug("VisualizersReportTable rendered"); return (
From 5640007bf86bbed7af1050fac673f7121573ea72 Mon Sep 17 00:00:00 2001 From: Daniele Rosetti Date: Wed, 7 Feb 2024 12:54:30 +0100 Subject: [PATCH 05/58] first request with http then open ws --- api_app/websocket.py | 12 -- .../src/components/jobs/result/JobResult.jsx | 124 ++++++++++-------- intel_owl/tasks.py | 2 - 3 files changed, 68 insertions(+), 70 deletions(-) diff --git a/api_app/websocket.py b/api_app/websocket.py index dba33669b6..b5973f4cff 100644 --- a/api_app/websocket.py +++ b/api_app/websocket.py @@ -52,15 +52,3 @@ def send_job(self, event): @classmethod def generate_group_name(cls, job_id: int): return f"job-{job_id}" - - -# sto metodo deve essere usato in altre parti del codice (direi alla fine del job) -# per accedere alla websocket giusta su cui inviare il risultato: -# channel_layer = get_channel_layer() -# await channel_layer.send("channel_name", { -# "type": "chat.message", -# "text": "Hello there!", -# }) - -# nell'esempio della doc si salvano i canali disponibili (le ws aperte) sul db: -# https://channels.readthedocs.io/en/latest/topics/channel_layers.html?highlight=get_channel_layer#single-channels diff --git a/frontend/src/components/jobs/result/JobResult.jsx b/frontend/src/components/jobs/result/JobResult.jsx index b46e88eea9..895434a0f8 100644 --- a/frontend/src/components/jobs/result/JobResult.jsx +++ b/frontend/src/components/jobs/result/JobResult.jsx @@ -3,7 +3,8 @@ import useTitle from "react-use/lib/useTitle"; import { useParams } from "react-router-dom"; import { Loader } from "@certego/certego-ui"; -import { WEBSOCKET_JOBS_URI } from "../../../constants/apiURLs"; +import axios from "axios"; +import { WEBSOCKET_JOBS_URI, JOB_BASE_URI } from "../../../constants/apiURLs"; import { JobOverview } from "./JobOverview"; import { @@ -15,6 +16,7 @@ export default function JobResult() { console.debug("JobResult rendered!"); const [initialLoading, setInitialLoading] = React.useState(true); + const [initialError, setInitialError] = React.useState(""); const [job, setJob] = React.useState(undefined); // this state var is used to check if we notified the user, in this way we avoid to notify more than once const [notified, setNotified] = React.useState(false); @@ -27,9 +29,71 @@ export default function JobResult() { const { section } = params; const { subSection } = params; - // setup ws (we need to use useref to avoid to create a ws each render) + const jobIsRunning = + job === undefined || + [ + "pending", + "running", + "analyzers_running", + "connectors_running", + "pivots_running", + "visualizers_running", + "analyzers_completed", + "connectors_completed", + "pivots_completed", + "visualizers_completed", + ].includes(job.status); + + console.debug( + `JobResult - initialLoading: ${initialLoading}, jobIsRunning: ${jobIsRunning}, ` + + `notified: ${notified}, toNotify: ${toNotify}`, + ); + + const getJob = () => axios.get(`${JOB_BASE_URI}/${jobId}`); + useEffect(() => { + /* INITIAL SETUP: + - add a focus listener: + * when gain focus set it has been notified and reset the favicon + * when lost focus (blur) we set we can notify the user + - first request with HTTP(S): we avoid to create a ws if not need (ex: old completed jobs) + */ + window.addEventListener("focus", () => { + setNotificationFavicon(false); + setToNotify(false); + }); + window.addEventListener("blur", () => setToNotify(true)); + getJob() + .then((response) => setJob(response.data)) + .catch((err) => setInitialError(err)); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []); + + // page 
title + useTitle( + `IntelOwl | Job (#${jobId}, ${ + // eslint-disable-next-line no-nested-ternary + job ? (job.is_sample ? job.file_name : job.observable_name) : "" + })`, + { restoreOnUnmount: true }, + ); + + useEffect(() => { + /* this is required because the first loading we don't have job data + and this is a problem for JobOverview that needs the UI sections names + so the first time the page has a spinner, after the first request + the spinner will be moved in the sections. + */ + if (job) setInitialLoading(false); + }, [job]); + + /* SETUP WS: + only in case the first request didn't get the job in a final status. + use ws with useRef to avoid to create a ws each render AND create the ws. + only in the last page (section and subSection) or we will create 3 ws, one for each redirect: + jobs/1 -> jobs/1/visualizer -> jobs/1/visualizer/loading + */ const jobWebsocket = React.useRef(); - if (!jobWebsocket.current) { + if (job && jobIsRunning && section && subSection && !jobWebsocket.current) { const websocketUrl = `${ window.location.protocol === "https:" ? "wss" : "ws" }://${window.location.hostname}/${WEBSOCKET_JOBS_URI}/${jobId}`; @@ -47,7 +111,6 @@ export default function JobResult() { console.debug("ws received:"); console.debug(data); const jobData = JSON.parse(data.data); - console.debug(jobData); setJob(jobData); }; jobWebsocket.current.onerror = (data) => { @@ -56,45 +119,6 @@ export default function JobResult() { }; } - useEffect(() => { - /* this is required because the first loading we don't have job data - and this is a problem for JobOverview that needs the UI sections names - so the first time the page has a spinner, after the first request - the spinner will be moved in the sections. - */ - if (job) setInitialLoading(false); - }, [job]); - - // page title - useTitle( - `IntelOwl | Job (#${jobId}, ${ - // eslint-disable-next-line no-nested-ternary - job ? (job.is_sample ? job.file_name : job.observable_name) : "" - })`, - { restoreOnUnmount: true }, - ); - - // in case the job is not running and started (the job is not undefined) it means it terminated. - const jobIsRunning = - job === undefined || - [ - "pending", - "running", - "analyzers_running", - "connectors_running", - "pivots_running", - "visualizers_running", - "analyzers_completed", - "connectors_completed", - "pivots_completed", - "visualizers_completed", - ].includes(job.status); - - console.debug( - `JobResult - initialLoading: ${initialLoading}, jobIsRunning: ${jobIsRunning}, ` + - `notified: ${notified}, toNotify: ${toNotify}`, - ); - // In case the job terminated and it's not to notify, it means the user waited the result, notification is not needed. 
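+  // (toNotify flips on window blur and clears on focus; notified guards
+  // against firing a notification twice for the same job)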
React.useEffect(() => { if (!jobIsRunning && !toNotify) { @@ -108,22 +132,10 @@ export default function JobResult() { setNotified(true); } - /* add a focus listener: - when gain focus set it has been notified and reset the favicon - when lost focus (blur) we set we can notify the user - */ - React.useEffect(() => { - window.addEventListener("focus", () => { - setNotificationFavicon(false); - setToNotify(false); - }); - window.addEventListener("blur", () => setToNotify(true)); - // eslint-disable-next-line react-hooks/exhaustive-deps - }, []); - return ( ( Date: Wed, 7 Feb 2024 15:56:58 +0100 Subject: [PATCH 06/58] close ws when job is reported --- frontend/src/components/jobs/result/JobResult.jsx | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/frontend/src/components/jobs/result/JobResult.jsx b/frontend/src/components/jobs/result/JobResult.jsx index 895434a0f8..3d3b0b8d78 100644 --- a/frontend/src/components/jobs/result/JobResult.jsx +++ b/frontend/src/components/jobs/result/JobResult.jsx @@ -12,6 +12,8 @@ import { setNotificationFavicon, } from "../notifications"; +import { JobFinalStatuses } from "../../../constants/jobConst"; + export default function JobResult() { console.debug("JobResult rendered!"); @@ -29,6 +31,8 @@ export default function JobResult() { const { section } = params; const { subSection } = params; + const jobWebsocket = React.useRef(); + const jobIsRunning = job === undefined || [ @@ -92,7 +96,6 @@ export default function JobResult() { only in the last page (section and subSection) or we will create 3 ws, one for each redirect: jobs/1 -> jobs/1/visualizer -> jobs/1/visualizer/loading */ - const jobWebsocket = React.useRef(); if (job && jobIsRunning && section && subSection && !jobWebsocket.current) { const websocketUrl = `${ window.location.protocol === "https:" ? "wss" : "ws" @@ -111,6 +114,9 @@ export default function JobResult() { console.debug("ws received:"); console.debug(data); const jobData = JSON.parse(data.data); + if (Object.values(JobFinalStatuses).includes(jobData.status)) { + jobWebsocket.current.close(1000); + } setJob(jobData); }; jobWebsocket.current.onerror = (data) => { From 2cbdb3545c58d2bbb8a503dab9c1b5cfbc9a8241 Mon Sep 17 00:00:00 2001 From: Daniele Rosetti Date: Wed, 7 Feb 2024 15:58:05 +0100 Subject: [PATCH 07/58] fix --- .../src/components/jobs/result/JobOverview.jsx | 14 +++++++------- frontend/src/components/jobs/result/JobResult.jsx | 1 + 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/frontend/src/components/jobs/result/JobOverview.jsx b/frontend/src/components/jobs/result/JobOverview.jsx index 8e8a034e50..7ad8b7d268 100644 --- a/frontend/src/components/jobs/result/JobOverview.jsx +++ b/frontend/src/components/jobs/result/JobOverview.jsx @@ -19,7 +19,6 @@ import { GoBackButton, Loader } from "@certego/certego-ui"; import { JSONTree } from "react-json-tree"; import { useNavigate, useLocation } from "react-router-dom"; -import axios from "axios"; import { AnalyzersReportTable, ConnectorsReportTable, @@ -40,7 +39,6 @@ import { JobResultSections } from "../../../constants/miscConst"; import { JobInfoCard } from "./JobInfoCard"; import { JobIsRunningAlert } from "./JobIsRunningAlert"; import { JobActionsBar } from "./bar/JobActionBar"; -import { JOB_BASE_URI } from "../../../constants/apiURLs"; /* THESE IDS CANNOT BE EMPTY! 
We perform a redirect in case the user landed in the visualizer page without a visualizer,
in case we use empty param for this page we fall in an infinite redirect loop.
*/
 
 const LOADING_VISUALIZER_UI_ELEMENT_CODE = "loading";
 const NO_VISUALIZER_UI_ELEMENT_CODE = "no-visualizer";
 
-export function JobOverview({ isRunningJob, job, section, subSection }) {
+export function JobOverview({
+  isRunningJob,
+  job,
+  refetch,
+  section,
+  subSection,
+}) {
   console.debug(`JobOverview rendered: ${JSON.stringify(job)}`);
   console.debug(`section: ${section}, subSection: ${subSection}`);
 
   const isSelectedUI = section === JobResultSections.VISUALIZER;
 
-  const refetch = () => {
-    axios.get(`${JOB_BASE_URI}/${job.id}`);
-  };
-
   const rawElements = React.useMemo(
diff --git a/frontend/src/components/jobs/result/JobResult.jsx b/frontend/src/components/jobs/result/JobResult.jsx
index 3d3b0b8d78..bf9f1ec26f 100644
--- a/frontend/src/components/jobs/result/JobResult.jsx
+++ b/frontend/src/components/jobs/result/JobResult.jsx
@@ -146,6 +146,7 @@ export default function JobResult() {
       <JobOverview
         isRunningJob={jobIsRunning}
         job={job}
+        refetch={getJob}
         section={section}
         subSection={subSection}

From 8784c6a7e7407d1b6460a915957828a588729fe6 Mon Sep 17 00:00:00 2001
From: Daniele Rosetti
Date: Wed, 7 Feb 2024 15:58:51 +0100
Subject: [PATCH 08/58] updated props

---
 frontend/src/components/jobs/result/JobOverview.jsx | 1 +
 1 file changed, 1 insertion(+)

diff --git a/frontend/src/components/jobs/result/JobOverview.jsx b/frontend/src/components/jobs/result/JobOverview.jsx
index 7ad8b7d268..8877a8c97e 100644
--- a/frontend/src/components/jobs/result/JobOverview.jsx
+++ b/frontend/src/components/jobs/result/JobOverview.jsx
@@ -399,6 +399,7 @@ export function JobOverview({
 JobOverview.propTypes = {
   isRunningJob: PropTypes.bool.isRequired,
   job: PropTypes.object.isRequired,
+  refetch: PropTypes.func.isRequired,
   section: PropTypes.string.isRequired,
   subSection: PropTypes.string.isRequired,
 };

From 0de008b150493d859ebe3b4d1c42476cbeb15581 Mon Sep 17 00:00:00 2001
From: Daniele Rosetti
Date: Fri, 9 Feb 2024 18:26:21 +0100
Subject: [PATCH 09/58] removed polling message

---
 frontend/src/components/jobs/result/JobIsRunningAlert.jsx | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/frontend/src/components/jobs/result/JobIsRunningAlert.jsx b/frontend/src/components/jobs/result/JobIsRunningAlert.jsx
index 58b9ec0ce9..5221160214 100644
--- a/frontend/src/components/jobs/result/JobIsRunningAlert.jsx
+++ b/frontend/src/components/jobs/result/JobIsRunningAlert.jsx
@@ -108,10 +108,6 @@ export function JobIsRunningAlert({ job }) {
           className="mt-2"
         />
       )}
- The page will auto-refresh once the analysis completes. You can either - wait here or come back later and check. -
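+      {/* the old auto-refresh notice is gone: progress now streams in over
+          the websocket while this alert is on screen */}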
); From 435671375d360514ecff4885a91898396124f6ac Mon Sep 17 00:00:00 2001 From: 0ssigeno Date: Wed, 14 Feb 2024 15:30:59 +0100 Subject: [PATCH 10/58] Redis as main broker Signed-off-by: 0ssigeno --- docker/default.yml | 18 +++++++++++------- docker/env_file_app_template | 2 -- docker/rabbitmq.override.yml | 8 +++++++- docker/test.override.yml | 6 ------ intel_owl/settings/cache.py | 2 ++ intel_owl/settings/celery.py | 9 ++++++--- intel_owl/settings/websocket.py | 2 +- requirements/project-requirements.txt | 2 +- start.py | 6 +++--- 9 files changed, 31 insertions(+), 24 deletions(-) diff --git a/docker/default.yml b/docker/default.yml index dd10df8246..227addc499 100644 --- a/docker/default.yml +++ b/docker/default.yml @@ -22,13 +22,16 @@ services: env_file: - env_file_app - .env + depends_on: + redis: + condition: service_healthy + daphne: image: intelowlproject/intelowl:${REACT_APP_INTELOWL_VERSION} container_name: intelowl_daphne restart: unless-stopped volumes: - - ../configuration:/opt/deploy/intel_owl/configuration - generic_logs:/var/log/intel_owl - shared_files:/opt/deploy/files_required entrypoint: @@ -37,20 +40,21 @@ services: - "8011" env_file: - env_file_app - - .env + depends_on: + redis: + condition: service_healthy redis: - image: intelowlproject/intelowl:${REACT_APP_INTELOWL_VERSION} + image: library/redis:6.2.7-alpine container_name: intelowl_redis + hostname: redis restart: unless-stopped volumes: - - ../configuration:/opt/deploy/intel_owl/configuration - generic_logs:/var/log/intel_owl - - shared_files:/opt/deploy/files_required expose: - "6379" - env_file: - - env_file_app + healthcheck: + test: test $$(redis-cli -h '127.0.0.1' ping) = 'PONG' nginx: image: intelowlproject/intelowl_nginx:${REACT_APP_INTELOWL_VERSION} diff --git a/docker/env_file_app_template b/docker/env_file_app_template index 2616627ea6..945f7d35fe 100644 --- a/docker/env_file_app_template +++ b/docker/env_file_app_template @@ -83,8 +83,6 @@ PUBLIC_DEPLOYMENT=False # broker configuration BROKER_URL=amqp://guest:guest@rabbitmq:5672 BROKER_URL_API=http://guest:guest@rabbitmq:15672/api/ -RABBITMQ_DEFAULT_USER=guest -RABBITMQ_DEFAULT_PASS=guest FLOWER_USER=flower FLOWER_PWD=flower diff --git a/docker/rabbitmq.override.yml b/docker/rabbitmq.override.yml index 43885a0570..7f4d06429d 100644 --- a/docker/rabbitmq.override.yml +++ b/docker/rabbitmq.override.yml @@ -7,4 +7,10 @@ services: volumes: - ../configuration/rabbitmq.conf:/etc/rabbitmq/rabbitmq.conf logging: - driver: none \ No newline at end of file + driver: none + + uwsgi: + environment: + - BROKER_URL="amqp://guest:guest@rabbitmq:5672" + depends_on: + - rabbitmq \ No newline at end of file diff --git a/docker/test.override.yml b/docker/test.override.yml index 32f1538f00..4402dae2d9 100644 --- a/docker/test.override.yml +++ b/docker/test.override.yml @@ -18,13 +18,7 @@ services: - DJANGO_TEST_SERVER=True - DJANGO_WATCHMAN_TIMEOUT=20 - redis: - image: library/redis:6.2.7-alpine - daphne: - build: - context: .. 
-      dockerfile: docker/Dockerfile
     image: intelowlproject/intelowl:test
     volumes:
       - ../:/opt/deploy/intel_owl

diff --git a/intel_owl/settings/cache.py b/intel_owl/settings/cache.py
index 21b487774d..62ff2ac218 100644
--- a/intel_owl/settings/cache.py
+++ b/intel_owl/settings/cache.py
@@ -35,6 +35,8 @@ def get_where(self, starts_with: str, version=None) -> Dict[str, Any]:
         return self.get_many([row[0] for row in rows], version=version)
 
 
+DJANGO_REDIS_IGNORE_EXCEPTIONS = True
+
 CACHES = {
     "default": {
         "BACKEND": "intel_owl.settings.cache.DatabaseCacheExtended",
diff --git a/intel_owl/settings/celery.py b/intel_owl/settings/celery.py
index ebb01bd0a6..d24fd29a0f 100644
--- a/intel_owl/settings/celery.py
+++ b/intel_owl/settings/celery.py
@@ -8,9 +8,12 @@
 from .aws import AWS_SQS, AWS_USER_NUMBER
 
 RESULT_BACKEND = "django-db"
-BROKER_URL = get_secret(
-    "BROKER_URL", "sqs://" if AWS_SQS else "amqp://guest:guest@rabbitmq:5672"
-)
+BROKER_URL = get_secret("BROKER_URL", None)
+if not BROKER_URL:
+    # do not override an externally provided broker (e.g. the rabbitmq override);
+    # redis db 1 is used here because db 0 is used by channels
+    BROKER_URL = "sqs://" if AWS_SQS else "redis://redis:6379/1"
+
 DEFAULT_QUEUE = "default"
 BROADCAST_QUEUE = "broadcast"
 CONFIG_QUEUE = "config"
diff --git a/intel_owl/settings/websocket.py b/intel_owl/settings/websocket.py
index 4d2dd4572b..82790a0082 100644
--- a/intel_owl/settings/websocket.py
+++ b/intel_owl/settings/websocket.py
@@ -3,7 +3,7 @@
     "default": {
         "BACKEND": "channels_redis.core.RedisChannelLayer",
         "CONFIG": {
-            "hosts": ["redis://redis:6379"],
+            "hosts": ["redis://redis:6379/0"],
         },
     },
 }
diff --git a/requirements/project-requirements.txt b/requirements/project-requirements.txt
index d86b7bb890..0a090ddeba 100644
--- a/requirements/project-requirements.txt
+++ b/requirements/project-requirements.txt
@@ -27,7 +27,7 @@ whitenoise==6.6.0
 
 # infra
 boto3==1.26.143
-celery[sqs]==5.3.0
+celery[sqs,redis]==5.3.0
 dataclasses==0.6
 # https://github.com/advisories/GHSA-q4qm-xhf9-4p8f
 # unpatched CVE: noproblem, we just use this for debugging purposes
diff --git a/start.py b/start.py
index fe4f776eb2..a5b0d81234 100644
--- a/start.py
+++ b/start.py
@@ -156,10 +156,10 @@ def start():
         help="Do not use postgres.override.yml compose file",
     )
     parser.add_argument(
-        "--use-external-broker",
+        "--rabbitmq",
         required=False,
         action="store_true",
-        help="Do not use rabbitmq.override.yml compose file",
+        help="Use rabbitmq.override.yml compose file",
     )
     parser.add_argument(
         "--flower",
@@ -254,7 +254,7 @@ def start():
     if not args.__dict__["use_external_database"]:
         compose_files.append(PATH_MAPPING["postgres"])
     # RabbitMQ
-    if not args.__dict__["use_external_broker"]:
+    if args.__dict__["rabbitmq"]:
         compose_files.append(PATH_MAPPING["rabbitmq"])
     # mode
     if is_test:

From 8909d76eed64cbb7df25ad9750fbc3adca0b10c9 Mon Sep 17 00:00:00 2001
From: 0ssigeno
Date: Wed, 14 Feb 2024 15:32:21 +0100
Subject: [PATCH 11/58] Added change for rabbit in start

Signed-off-by: 0ssigeno
---
 start | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/start b/start
index f1ec54b073..0fc3eea9a4 100755
--- a/start
+++ b/start
@@ -34,7 +34,7 @@ print_help () {
   echo "  --nfs                      Uses the nfs.override.yml compose file."
   echo "  --traefik                  Uses the traefik.override.yml compose file."
   echo "  --use-external-database    Do not use postgres.override.yml compose file."
-  echo "  --use-external-broker      Do not use rabbitmq.override.yml compose file."
+  echo "  --rabbitmq                 Uses the rabbitmq.override.yml compose file."
   echo "  --flower                   Uses the flower.override.yml compose file."
echo " --custom Uses custom.override.yml to leverage your" echo " customized configuration." @@ -172,8 +172,8 @@ while [[ $# -gt 0 ]]; do params["use_external_database"]=true shift 1 ;; - --use-external-broker) - params["use_external_broker"]=true + --rabbitmq) + params["rabbitmq"]=true shift 1 ;; --flower) @@ -254,7 +254,7 @@ compose_files=("${path_mapping["default"]}") if ! [ "${params["use_external_database"]}" ]; then compose_files+=("${path_mapping["postgres"]}") fi -if ! [ "${params["use_external_broker"]}" ]; then +if [ "${params["rabbitmq"]}" ]; then compose_files+=("${path_mapping["rabbitmq"]}") fi if $is_test; then From f28459d0bc20829913d5a29b844552282b5059e8 Mon Sep 17 00:00:00 2001 From: Daniele Rosetti Date: Wed, 14 Feb 2024 18:34:18 +0100 Subject: [PATCH 12/58] rework auth form durin (token) to drf cookie --- api_app/apps.py | 5 -- api_app/serializers.py | 7 ++- authentication/admin.py | 47 ------------------ authentication/urls.py | 4 -- authentication/views.py | 66 ++++++-------------------- frontend/package-lock.json | 26 +++++++--- frontend/package.json | 1 + frontend/src/components/auth/Login.jsx | 8 ---- frontend/src/stores/useAuthStore.jsx | 23 ++++----- frontend/src/utils/initAxios.js | 17 +------ intel_owl/settings/__init__.py | 1 + intel_owl/settings/django.py | 5 ++ intel_owl/settings/rest.py | 6 ++- intel_owl/settings/security.py | 7 ++- 14 files changed, 70 insertions(+), 153 deletions(-) diff --git a/api_app/apps.py b/api_app/apps.py index 89e0e26e60..2817469e6b 100644 --- a/api_app/apps.py +++ b/api_app/apps.py @@ -9,8 +9,3 @@ class ApiAppConfig(AppConfig): name = "api_app" - - def ready(self): # skipcq: PYL-R0201 - from authentication.views import DurinAuthenticationScheme # noqa - - from . import signals # noqa diff --git a/api_app/serializers.py b/api_app/serializers.py index 83e7940436..3477667f9d 100644 --- a/api_app/serializers.py +++ b/api_app/serializers.py @@ -17,7 +17,6 @@ from django.http import QueryDict from django.utils.timezone import now from django_celery_beat.models import CrontabSchedule, PeriodicTask -from durin.serializers import UserSerializer from rest_framework import serializers as rfs from rest_framework.exceptions import ValidationError from rest_framework.fields import Field, SerializerMethodField, empty @@ -53,6 +52,12 @@ logger = logging.getLogger(__name__) +class UserSerializer(rfs.ModelSerializer): + class Meta: + model = User + fields = ("username",) + + class TagSerializer(rfs.ModelSerializer): class Meta: model = Tag diff --git a/authentication/admin.py b/authentication/admin.py index 73add611fb..deb8bd84f8 100644 --- a/authentication/admin.py +++ b/authentication/admin.py @@ -7,8 +7,6 @@ from django.contrib import admin, messages from django.db.models import Q from django.utils.translation import ngettext -from durin.admin import AuthTokenAdmin -from durin.models import AuthToken, Client from certego_saas.apps.user.admin import AbstractUserAdmin from certego_saas.apps.user.models import User @@ -131,48 +129,3 @@ def user_is_active(self, obj: UserProfile) -> bool: @admin.display(boolean=True) def user_is_approved(self, obj: UserProfile) -> Optional[bool]: return obj.user.approved - - -# durin app (AuthToken model) customization - - -class CustomAuthTokenAdmin(AuthTokenAdmin): - """ - Custom admin view for AuthToken model - """ - - exclude = [] - raw_id_fields = ("user",) - readonly_fields = ("token", "expiry", "created", "expires_in") - - def get_fieldsets(self, request, obj=None): - if not obj: - return [ - ( - "Create token 
for PyIntelOwl",
-                {
-                    "fields": ("user",),
-                    "description": """
-                <h3>Token will be auto-generated on save.</h3>
-                <h3>This token will be valid for 10 years.</h3>
- """, - }, - ), - ] - return super().get_fieldsets(request, obj) - - @staticmethod - def has_change_permission(*args, **kwargs): - return False - - def save_model(self, request, obj, form, change): - obj.client = Client.objects.get( - name=settings.REST_DURIN["API_ACCESS_CLIENT_NAME"] - ) - super().save_model(request, obj, form, change) - - -# Unregister the default admin view for AuthToken -admin.site.unregister(AuthToken) -# Register our custom admin view for AuthToken -admin.site.register(AuthToken, CustomAuthTokenAdmin) diff --git a/authentication/urls.py b/authentication/urls.py index 1b33b4c9c5..c2b7a1207a 100644 --- a/authentication/urls.py +++ b/authentication/urls.py @@ -5,7 +5,6 @@ from rest_framework import routers from .views import ( - APIAccessTokenView, ChangePasswordView, EmailVerificationView, GoogleLoginCallbackView, @@ -15,13 +14,11 @@ PasswordResetView, RegistrationView, ResendVerificationView, - TokenSessionsViewSet, checkConfiguration, google_login, ) router = routers.DefaultRouter(trailing_slash=False) -router.register(r"sessions", TokenSessionsViewSet, basename="auth_tokensessions") urlpatterns = [ # django-rest-email-auth @@ -50,7 +47,6 @@ path("login", LoginView.as_view(), name="auth_login"), path("logout", LogoutView.as_view(), name="auth_logout"), path("changepassword", ChangePasswordView.as_view(), name="auth_changepassword"), - path("apiaccess", APIAccessTokenView.as_view(), name="auth_apiaccess"), path("google", google_login, name="oauth_google"), path( "google-callback", diff --git a/authentication/views.py b/authentication/views.py index 46c3615a52..8cbe703c71 100644 --- a/authentication/views.py +++ b/authentication/views.py @@ -11,11 +11,7 @@ from django.contrib.auth import get_user_model, login, logout from django.contrib.auth.hashers import check_password from django.shortcuts import redirect -from django_user_agents.utils import get_user_agent -from drf_spectacular.extensions import OpenApiAuthenticationExtension from drf_spectacular.utils import extend_schema as add_docs -from durin import views as durin_views -from durin.models import Client from rest_framework import status from rest_framework.decorators import api_view, permission_classes from rest_framework.exceptions import AuthenticationFailed @@ -82,37 +78,28 @@ class ResendVerificationView( throttle_classes: List = [POSTUserRateThrottle] -class LoginView(durin_views.LoginView, RecaptchaV2Mixin): +class LoginView(RecaptchaV2Mixin): + + authentication_classes: List = [] + permission_classes: List = [] + throttle_classes: List = [POSTUserRateThrottle] + @staticmethod def validate_and_return_user(request): serializer = LoginSerializer(data=request.data) serializer.is_valid(raise_exception=True) return serializer.validated_data["user"] - @staticmethod - def get_client_obj(request) -> Client: - user_agent = get_user_agent(request) - client_name = str(user_agent) - client, _ = Client.objects.get_or_create(name=client_name) - return client - def post(self, request, *args, **kwargs): try: self.get_serializer() # for RecaptchaV2Mixin except AssertionError: # it will raise this bcz `serializer_class` is not defined pass - response = super().post(request, *args, **kwargs) - uname = request.user.username - logger.info(f"LoginView: received request from '{uname}'.") - if request.user.is_superuser: - try: - # pass admin user's session - login(request, request.user) - logger.info(f"administrator:'{uname}' was logged in.") - except Exception: - logger.exception(f"administrator:'{uname}' login failed.") - 
return response + user = self.validate_and_return_user(request=request) + logger.info(f"perform_login received request from '{user.username}''.") + login(request, user) + return Response({}) class ChangePasswordView(APIView): @@ -142,35 +129,12 @@ def post(request: Request) -> Response: return Response({"message": "Password changed successfully"}) -class LogoutView(durin_views.LogoutView): +class LogoutView(APIView): def post(self, request, *args, **kwargs): - uname = request.user.username - logger.info(f"perform_logout received request from '{uname}''.") - if request.user.is_superuser: - try: - logout(request) - logger.info(f"administrator: '{uname}' was logged out.") - except Exception: - logger.exception(f"administrator: '{uname}' session logout failed.") - return super().post(request, format=None) - - -APIAccessTokenView = durin_views.APIAccessTokenView -TokenSessionsViewSet = durin_views.TokenSessionsViewSet - - -class DurinAuthenticationScheme(OpenApiAuthenticationExtension): - target_class = "durin.auth.CachedTokenAuthentication" - name = "durinAuth" - - @staticmethod - def get_security_definition(auto_schema): - return { - "type": "apiKey", - "in": "header", - "name": "Authorization", - "description": "Token-based authentication with required prefix: Token", - } + user = request.user + logger.info(f"perform_logout received request from '{user.username}''.") + logout(request) + return Response({}) @add_docs( diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 6494a65511..e8ce0652c5 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -15,6 +15,7 @@ "classnames": "^2.3.1", "flag-icons": "^6.11.0", "formik": "^2.4.3", + "js-cookie": "^3.0.5", "md5": "^2.3.0", "prop-types": "^15.8.1", "react": "^17.0.2", @@ -13830,9 +13831,12 @@ } }, "node_modules/js-cookie": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/js-cookie/-/js-cookie-2.2.1.tgz", - "integrity": "sha512-HvdH2LzI/EAZcUwA8+0nKNtWHqS+ZmijLA30RwZA0bo7ToCckjK5MkGhjED9KoRcXO6BaGI3I9UIzSA1FKFPOQ==" + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/js-cookie/-/js-cookie-3.0.5.tgz", + "integrity": "sha512-cEiJEAEoIbWfCZYKWhVwFuvPX1gETRYPw6LlaTKoxD3s2AkXzkCjnp6h0V77ozyqj0jakteJ4YqDJT830+lVGw==", + "engines": { + "node": ">=14" + } }, "node_modules/js-tokens": { "version": "4.0.0", @@ -19739,6 +19743,11 @@ "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0" } }, + "node_modules/react-use/node_modules/js-cookie": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/js-cookie/-/js-cookie-2.2.1.tgz", + "integrity": "sha512-HvdH2LzI/EAZcUwA8+0nKNtWHqS+ZmijLA30RwZA0bo7ToCckjK5MkGhjED9KoRcXO6BaGI3I9UIzSA1FKFPOQ==" + }, "node_modules/react-use/node_modules/throttle-debounce": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/throttle-debounce/-/throttle-debounce-3.0.1.tgz", @@ -33716,9 +33725,9 @@ "integrity": "sha512-oVhqoRDaBXf7sjkll95LHVS6Myyyb1zaunVwk4Z0+WPSW4gjS0pl01zYKHScTuyEhQsFxV5L4DR5r+YqSyqyyg==" }, "js-cookie": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/js-cookie/-/js-cookie-2.2.1.tgz", - "integrity": "sha512-HvdH2LzI/EAZcUwA8+0nKNtWHqS+ZmijLA30RwZA0bo7ToCckjK5MkGhjED9KoRcXO6BaGI3I9UIzSA1FKFPOQ==" + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/js-cookie/-/js-cookie-3.0.5.tgz", + "integrity": "sha512-cEiJEAEoIbWfCZYKWhVwFuvPX1gETRYPw6LlaTKoxD3s2AkXzkCjnp6h0V77ozyqj0jakteJ4YqDJT830+lVGw==" }, "js-tokens": { "version": "4.0.0", @@ -37811,6 +37820,11 @@ "tslib": "^2.1.0" }, "dependencies": { + 
"js-cookie": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/js-cookie/-/js-cookie-2.2.1.tgz", + "integrity": "sha512-HvdH2LzI/EAZcUwA8+0nKNtWHqS+ZmijLA30RwZA0bo7ToCckjK5MkGhjED9KoRcXO6BaGI3I9UIzSA1FKFPOQ==" + }, "throttle-debounce": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/throttle-debounce/-/throttle-debounce-3.0.1.tgz", diff --git a/frontend/package.json b/frontend/package.json index b151b5f0d6..e83f2d49e2 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -11,6 +11,7 @@ "classnames": "^2.3.1", "flag-icons": "^6.11.0", "formik": "^2.4.3", + "js-cookie": "^3.0.5", "md5": "^2.3.0", "prop-types": "^15.8.1", "react": "^17.0.2", diff --git a/frontend/src/components/auth/Login.jsx b/frontend/src/components/auth/Login.jsx index 36a53fd329..f3846c6818 100644 --- a/frontend/src/components/auth/Login.jsx +++ b/frontend/src/components/auth/Login.jsx @@ -1,7 +1,6 @@ import axios from "axios"; import React from "react"; import { AiOutlineInfoCircle } from "react-icons/ai"; -import { useSearchParams } from "react-router-dom"; import { FormGroup, Label, @@ -84,10 +83,6 @@ export default function Login() { React.useCallback((state) => state.service.loginUser, []), ); - const updateToken = useAuthStore( - React.useCallback((state) => state.updateToken, []), - ); - // callbacks const onSubmit = React.useCallback( async (values, _formik) => { @@ -100,9 +95,6 @@ export default function Login() { [loginUser], ); - const [searchParams] = useSearchParams(); - if (searchParams.get("token")) updateToken(searchParams.get("token")); - return ( {showConfigurationModal && ( diff --git a/frontend/src/stores/useAuthStore.jsx b/frontend/src/stores/useAuthStore.jsx index d677ab506f..5bdb2aac69 100644 --- a/frontend/src/stores/useAuthStore.jsx +++ b/frontend/src/stores/useAuthStore.jsx @@ -3,15 +3,16 @@ import { create } from "zustand"; import { addToast } from "@certego/certego-ui"; +import Cookies from "js-cookie"; import { USERACCESS_URI, AUTH_BASE_URI } from "../constants/apiURLs"; // constants -const TOKEN_STORAGE_KEY = "INTELOWL_AUTH_TOKEN"; +const CSRF_TOKEN = "csrftoken"; // hook/ store see: https://github.com/pmndrs/zustand export const useAuthStore = create((set, get) => ({ loading: false, - token: localStorage.getItem(TOKEN_STORAGE_KEY) || null, + CSRFToken: Cookies.get(CSRF_TOKEN) || "", user: { username: "", full_name: "", @@ -21,15 +22,9 @@ export const useAuthStore = create((set, get) => ({ is_staff: false, }, access: null, - isAuthenticated: () => !!get().token, - updateToken: (newValue) => { - localStorage.setItem(TOKEN_STORAGE_KEY, newValue.toString()); - set({ token: newValue }); - }, - deleteToken: () => { - localStorage.removeItem(TOKEN_STORAGE_KEY); - set({ token: null }); - }, + isAuthenticated: () => !!get().CSRFToken, + updateToken: () => set({ CSRFToken: Cookies.get(CSRF_TOKEN)}), + deleteToken: () => set({ CSRFToken: "" }), service: { fetchUserAccess: async () => { try { @@ -54,9 +49,7 @@ export const useAuthStore = create((set, get) => ({ const resp = await axios.post(`${AUTH_BASE_URI}/login`, body, { certegoUIenableProgressBar: false, }); - get().updateToken(resp.data.token, { - expires: new Date(resp.data.expiry), - }); + get().updateToken(); addToast("You've been logged in!", null, "success"); return Promise.resolve(resp); } catch (err) { @@ -70,6 +63,8 @@ export const useAuthStore = create((set, get) => ({ set({ loading: true }); const onLogoutCb = () => { get().deleteToken(); + // rmeove from the browser or it will persist next time 
we open a tab + Cookies.remove(CSRF_TOKEN); set({ loading: false }); addToast("Logged out!", null, "info"); };
diff --git a/frontend/src/utils/initAxios.js b/frontend/src/utils/initAxios.js index 1874f88248..66c5647174 100644 --- a/frontend/src/utils/initAxios.js +++ b/frontend/src/utils/initAxios.js @@ -3,16 +3,6 @@ import axios from "axios"; import { isObject, objToString } from "@certego/certego-ui"; import { useAuthStore } from "../stores/useAuthStore"; -const shouldInjectToken = (url) => { - if (url === "/api/auth/login" || url === "/api/auth/register") { - return false; - } - if (url.startsWith("/api/")) { - return true; - } - return false; -}; - export default function initAxios() { // base config axios.defaults.headers.common["Content-Type"] = "application/json"; @@ -20,11 +10,8 @@ export default function initAxios() { axios.defaults.certegoUIenableProgressBar = true; // request interceptor axios.interceptors.request.use((req) => { - // filter requests deciding whether to inject token or not - const { token } = useAuthStore.getState(); - if (token && shouldInjectToken(req.url)) { - req.headers.Authorization = `Token ${token}`; - } + const { CSRFToken } = useAuthStore.getState(); + req.headers["X-CSRFToken"] = CSRFToken; return req; }); // response interceptor
diff --git a/intel_owl/settings/__init__.py b/intel_owl/settings/__init__.py index 7e5524113b..4eae380393 100644 --- a/intel_owl/settings/__init__.py +++ b/intel_owl/settings/__init__.py @@ -24,6 +24,7 @@ # rest framework libs "rest_framework", "rest_framework_filters", + "rest_framework.authtoken", "drf_spectacular", # certego libs "durin",
diff --git a/intel_owl/settings/django.py b/intel_owl/settings/django.py index 02cc29dcab..6d6cc9635f 100644 --- a/intel_owl/settings/django.py +++ b/intel_owl/settings/django.py @@ -17,6 +17,11 @@ "django.middleware.clickjacking.XFrameOptionsMiddleware", "certego_saas.ext.middlewares.LogMiddleware", # custom ] + +SESSION_ENGINE = "django.contrib.sessions.backends.signed_cookies" +SESSION_COOKIE_HTTPONLY = True +SESSION_COOKIE_AGE = 60 * 60 * 24 * 90 # seconds * minutes * hours * days + if DEBUG: MIDDLEWARE.append("silk.middleware.SilkyMiddleware")
diff --git a/intel_owl/settings/rest.py b/intel_owl/settings/rest.py index 138b6da3e1..904e3205e7 100644 --- a/intel_owl/settings/rest.py +++ b/intel_owl/settings/rest.py @@ -12,7 +12,11 @@ REST_FRAMEWORK = { "DEFAULT_RENDERER_CLASSES": ["rest_framework.renderers.JSONRenderer"], # Auth - "DEFAULT_AUTHENTICATION_CLASSES": ["durin.auth.CachedTokenAuthentication"], + "DEFAULT_AUTHENTICATION_CLASSES": [ + "rest_framework.authentication.SessionAuthentication", + "rest_framework.authentication.TokenAuthentication", + "rest_framework.authentication.BasicAuthentication", + ], # Pagination "DEFAULT_PAGINATION_CLASS": "certego_saas.ext.pagination.CustomPageNumberPagination", "PAGE_SIZE": 10,
diff --git a/intel_owl/settings/security.py b/intel_owl/settings/security.py index 5f9e844436..431cbe2d83 100644 --- a/intel_owl/settings/security.py +++ b/intel_owl/settings/security.py @@ -19,7 +19,12 @@ WEB_CLIENT_URL = f"http://{WEB_CLIENT_DOMAIN}" CSRF_COOKIE_SAMESITE = "Strict" -CSRF_COOKIE_HTTPONLY = True +# TODO: change this +CSRF_TRUSTED_ORIGINS = [ + "http://localhost", "http://localhost/", + "http://localhost:80", "http://localhost:80/", + "http://localhost:3001", "http://localhost:3001/" +] ALLOWED_HOSTS = ["*"] # https://docs.djangoproject.com/en/4.2/ref/settings/#data-upload-max-memory-size
From 5a0657a6a505feabc15d6bc1cae102419cafcc27 Mon Sep 17 00:00:00
2001 From: Daniele Rosetti Date: Thu, 15 Feb 2024 11:19:46 +0100 Subject: [PATCH 13/58] close ws server side --- api_app/websocket.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/api_app/websocket.py b/api_app/websocket.py index b5973f4cff..d9874caf11 100644 --- a/api_app/websocket.py +++ b/api_app/websocket.py @@ -3,6 +3,7 @@ from asgiref.sync import async_to_sync from channels.generic.websocket import JsonWebsocketConsumer +from api_app.choices import Status from api_app.models import Job from api_app.serializers import JobSerializer @@ -48,6 +49,9 @@ def send_job(self, event): job_data = event["job"] logger.debug(f"job data: {job_data}") self.send_json(content=job_data) + if job_data["status"] in Status.final_statuses(): + logger.debug("job sent to the client and terminated, close ws") + self.close() @classmethod def generate_group_name(cls, job_id: int): From ecf848c82c18e3960e799e65a7038fb57df254f1 Mon Sep 17 00:00:00 2001 From: 0ssigeno Date: Thu, 15 Feb 2024 11:44:44 +0100 Subject: [PATCH 14/58] Fixes for docker mgmt Signed-off-by: 0ssigeno --- docker/default.yml | 18 -- docker/env_file_app_template | 5 +- docker/flower.override.yml | 4 + docker/rabbitmq.override.yml | 2 +- docker/redis.override.yml | 27 ++ docker/sqs.override.yml | 6 + docs/source/Advanced-Configuration.md | 29 ++- intel_owl/settings/websocket.py | 7 +- start | 15 ++ start.py | 353 -------------------------- 10 files changed, 87 insertions(+), 379 deletions(-) create mode 100644 docker/redis.override.yml create mode 100644 docker/sqs.override.yml delete mode 100644 start.py diff --git a/docker/default.yml b/docker/default.yml index 227addc499..75cb1d3559 100644 --- a/docker/default.yml +++ b/docker/default.yml @@ -22,9 +22,6 @@ services: env_file: - env_file_app - .env - depends_on: - redis: - condition: service_healthy daphne: @@ -40,21 +37,6 @@ services: - "8011" env_file: - env_file_app - depends_on: - redis: - condition: service_healthy - - redis: - image: library/redis:6.2.7-alpine - container_name: intelowl_redis - hostname: redis - restart: unless-stopped - volumes: - - generic_logs:/var/log/intel_owl - expose: - - "6379" - healthcheck: - test: test $$(redis-cli -h '127.0.0.1' ping) = 'PONG' nginx: image: intelowlproject/intelowl_nginx:${REACT_APP_INTELOWL_VERSION} diff --git a/docker/env_file_app_template b/docker/env_file_app_template index 945f7d35fe..bbb76ba38c 100644 --- a/docker/env_file_app_template +++ b/docker/env_file_app_template @@ -81,8 +81,9 @@ RADIUS_AUTH_ENABLED=False # True for public deployment, False for internal deployment PUBLIC_DEPLOYMENT=False # broker configuration -BROKER_URL=amqp://guest:guest@rabbitmq:5672 -BROKER_URL_API=http://guest:guest@rabbitmq:15672/api/ +BROKER_URL=redis://redis:6379 +WEBSOCKETS_URL=redis://redis:6379 + FLOWER_USER=flower FLOWER_PWD=flower diff --git a/docker/flower.override.yml b/docker/flower.override.yml index 1c7188d901..dee8ebddb3 100644 --- a/docker/flower.override.yml +++ b/docker/flower.override.yml @@ -1,6 +1,10 @@ version: "3.8" services: + uwsgi: + environment: + BROKER_URL_API: "http://guest:guest@rabbitmq:15672/api/" + rabbitmq: image: library/rabbitmq:3.11-management-alpine container_name: intelowl_rabbitmq diff --git a/docker/rabbitmq.override.yml b/docker/rabbitmq.override.yml index 7f4d06429d..14b246b64d 100644 --- a/docker/rabbitmq.override.yml +++ b/docker/rabbitmq.override.yml @@ -11,6 +11,6 @@ services: uwsgi: environment: - - BROKER_URL="amqp://guest:guest@rabbitmq:5672" + BROKER_URL: "amqp://guest:guest@rabbitmq:5672" 
depends_on: - rabbitmq \ No newline at end of file
diff --git a/docker/redis.override.yml b/docker/redis.override.yml new file mode 100644 index 0000000000..8421125ee0 --- /dev/null +++ b/docker/redis.override.yml @@ -0,0 +1,27 @@ +version: "3.8" +services: + uwsgi: + depends_on: + redis: + condition: service_healthy + environment: + BROKER_URL: "redis://redis:6379" + WEBSOCKETS_URL: "redis://redis:6379" + + daphne: + depends_on: + redis: + condition: service_healthy + + + redis: + image: library/redis:6.2.7-alpine + container_name: intelowl_redis + hostname: redis + restart: unless-stopped + volumes: + - generic_logs:/var/log/intel_owl + expose: + - "6379" + healthcheck: + test: test $$(redis-cli -h '127.0.0.1' ping) = 'PONG' \ No newline at end of file
diff --git a/docker/sqs.override.yml b/docker/sqs.override.yml new file mode 100644 index 0000000000..847417e9a1 --- /dev/null +++ b/docker/sqs.override.yml @@ -0,0 +1,6 @@ +version: "3.8" +services: + uwsgi: + environment: + AWS_SQS: True + BROKER_URL: "sqs://" \ No newline at end of file
diff --git a/docs/source/Advanced-Configuration.md b/docs/source/Advanced-Configuration.md index 78ef302ad3..045b231f4c 100644 --- a/docs/source/Advanced-Configuration.md +++ b/docs/source/Advanced-Configuration.md @@ -174,15 +174,36 @@ First, you need to configure the environment variable `LOCAL_STORAGE` to `False` Then you need to configure permission access to the chosen S3 bucket. -#### SQS +#### Message Broker -If you like, you could use AWS SQS instead of Rabbit-MQ to manage your queues. +At the moment, IntelOwl supports three different message brokers: +- RabbitMQ +- AWS SQS +- Redis + +The default broker, if nothing is specified, is `Redis`. + +To use `RabbitMQ`, you must use the option `--rabbitmq` when launching IntelOwl with the `./start` script. + +To use `AWS SQS`, you must use the option `--sqs` when launching IntelOwl with the `./start` script. In that case, you should create new SQS queues in AWS called `intelowl--` and give your instances on AWS the proper permissions to access them. +Moreover, you must populate the `AWS_USER_NUMBER` environment variable. This is required to connect correctly to the selected SQS queues. Only FIFO queues are supported. -Also, you need to set the environment variable `AWS_SQS` to `True` and populate the `AWS_USER_NUMBER`. This is required to connect in the right way to the selected SQS queues. +If you want to use a remote message broker (like an `ElastiCache` or `AmazonMQ` instance), you must use the option `--external-broker` when launching IntelOwl with the `./start` script. +Moreover, you must populate the `BROKER_URL` environment variable. + + +#### Websockets + +`Redis` is used for two different functions: +- message broker +- websockets -Ultimately, to avoid to run RabbitMQ locally, you would need to use the option `--use-external-broker` when launching IntelOwl with the `./start` script. +For this reason, a `Redis` instance is **mandatory**.
+You can personalize IntelOwl in two different ways: +- with a local `Redis` instance. This is the default behaviour. +- with a remote `Redis` instance. You must use the option `--use-external-redis` when launching IntelOwl with the `./start` script. Moreover, you need to populate the `WEBSOCKETS_URL` environment variable.
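For reference, the resulting launch commands could look like the following sketch, which assumes the usual `./start <mode> <docker_command>` invocation (e.g. `prod up`); adapt mode, command and options to your deployment:

```bash
# default: a local Redis instance serves both as message broker and websocket backend
./start prod up

# RabbitMQ as message broker (a local Redis is still spawned for websockets)
./start prod up --rabbitmq

# AWS SQS as message broker (AWS_USER_NUMBER must be populated)
./start prod up --sqs

# remote Redis instance: nothing is spawned locally,
# WEBSOCKETS_URL (and BROKER_URL, if Redis is also the broker) must point to it
./start prod up --use-external-redis
```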
#### RDS diff --git a/intel_owl/settings/websocket.py b/intel_owl/settings/websocket.py index 82790a0082..992c91c728 100644 --- a/intel_owl/settings/websocket.py +++ b/intel_owl/settings/websocket.py @@ -1,9 +1,14 @@ +from intel_owl import secrets + +websockets_url = secrets.get_secret("WEBSOCKETS_URL", None) +if not websockets_url: + raise RuntimeError("Unable to configure websockets") ASGI_APPLICATION = "intel_owl.asgi.application" CHANNEL_LAYERS = { "default": { "BACKEND": "channels_redis.core.RedisChannelLayer", "CONFIG": { - "hosts": ["redis://redis:6379/0"], + "hosts": [f"{websockets_url}/0"], }, }, } diff --git a/start b/start index 0fc3eea9a4..83b33c16dd 100755 --- a/start +++ b/start @@ -172,10 +172,18 @@ while [[ $# -gt 0 ]]; do params["use_external_database"]=true shift 1 ;; + --use-external-redis) + params["use_external_redis"]=true + shift 1 + ;; --rabbitmq) params["rabbitmq"]=true shift 1 ;; + --sqs) + params["sqs"]=true + shift 1 + ;; --flower) params["flower"]=true shift 1 @@ -254,9 +262,16 @@ compose_files=("${path_mapping["default"]}") if ! [ "${params["use_external_database"]}" ]; then compose_files+=("${path_mapping["postgres"]}") fi +if ! [ "${params["use_external_redis"]}" ]; then + compose_files+=("${path_mapping["redis"]}") +fi + if [ "${params["rabbitmq"]}" ]; then compose_files+=("${path_mapping["rabbitmq"]}") +elif [ "${params["sqs"]}" ]; then + compose_files+=("${path_mapping["sqs"]}") fi + if $is_test; then compose_files+=("${path_mapping["$env_argument"]}") fi diff --git a/start.py b/start.py deleted file mode 100644 index a5b0d81234..0000000000 --- a/start.py +++ /dev/null @@ -1,353 +0,0 @@ -# This file is a part of IntelOwl https://github.com/intelowlproject/IntelOwl -# See the file 'LICENSE' for copying permission. - -# DEPRECATION NOTICE!!!! -# THIS SCRIPT HAS BEEN DEPRECATED AND WILL BE REMOVED IN THE NEXT MAJOR VERSION -# PLEASE USE the bash `start` script instead - -import argparse -import os -import re -import subprocess -import sys -from shlex import join, split - -try: - from dotenv import load_dotenv - from git import Repo -except ImportError: - print( - "you must install the Python requirements." 
- " See: https://intelowl.readthedocs.io/en/latest/Installation.html" - ) - sys.exit(2) - - -load_dotenv("docker/.env") -CURRENT_VERSION = os.getenv("REACT_APP_INTELOWL_VERSION", "").replace("v", "") -PYELASTIC_DEFAULT_VERSION = "7.4.1" -PYCTI_DEFAULT_VERSION = "5.10.0" - -DOCKER_ANALYZERS = [ - "tor_analyzers", - "malware_tools_analyzers", - "cyberchef", - "pcap_analyzers", -] - -PATH_MAPPING = { - "default": "docker/default.yml", - "postgres": "docker/postgres.override.yml", - "rabbitmq": "docker/rabbitmq.override.yml", - "test": "docker/test.override.yml", - "ci": "docker/ci.override.yml", - "custom": "docker/custom.override.yml", - "traefik": "docker/traefik.override.yml", - "multi_queue": "docker/multi-queue.override.yml", - "test_multi_queue": "docker/test.multi-queue.override.yml", - "flower": "docker/flower.override.yml", - "test_flower": "docker/test.flower.override.yml", - "elastic": "docker/elasticsearch.override.yml", - "https": "docker/https.override.yml", - "nfs": "docker/nfs.override.yml", -} -# to fix the box-js folder name -PATH_MAPPING.update( - {name: f"integrations/{name}/compose.yml" for name in DOCKER_ANALYZERS} -) -PATH_MAPPING.update( - { - name + ".test": f"integrations/{name}/compose-tests.yml" - for name in DOCKER_ANALYZERS - } -) -PATH_MAPPING["all_analyzers"] = [PATH_MAPPING[key] for key in DOCKER_ANALYZERS] -PATH_MAPPING["all_analyzers.test"] = [ - PATH_MAPPING[key + ".test"] for key in DOCKER_ANALYZERS -] - - -def version_regex(arg_value, pat=re.compile(r"^[3-9]\.[0-9]{1,2}.[0-9]{1,2}$")): - if not pat.match(arg_value): - print(f"type error for version {arg_value}") - raise argparse.ArgumentTypeError - return arg_value - - -def generic_version_regex( - arg_value, pat=re.compile(r"^[0-9]{1,2}\.[0-9]{1,2}.[0-9]{1,2}$") -): - if not pat.match(arg_value): - print(f"type error for version {arg_value}") - raise argparse.ArgumentTypeError - return arg_value - - -def start(): - parser = argparse.ArgumentParser() - # mandatory arguments - parser.add_argument("mode", type=str, choices=["prod", "test", "ci"]) - parser.add_argument( - "docker_command", - type=str, - choices=[ - "build", - "up", - "start", - "restart", - "down", - "stop", - "kill", - "logs", - "ps", - ], - ) - - # integrations - parser.add_argument( - "--project_name", required=False, help="project name", default="intel_owl" - ) - parser.add_argument( - "--version", - required=False, - type=version_regex, - default=CURRENT_VERSION, - help="choose the version you would like to install (>=3.0.0)." - " Works only in 'prod' mode. 
Default version is the most recently released.", - ) - # integrations - parser.add_argument( - "--all_analyzers", - required=False, - action="store_true", - help="Uses every integration", - ) - for integration in DOCKER_ANALYZERS: - parser.add_argument( - f"--{integration}", - required=False, - action="store_true", - help=f"Uses the integrations/{integration}/compose.yml file", - ) - - # possible upgrades - parser.add_argument( - "--multi_queue", - required=False, - action="store_true", - help="Uses the multiqueue.override.yml compose file", - ) - parser.add_argument( - "--nfs", - required=False, - action="store_true", - help="Uses the nfs.override.yml compose file", - ) - parser.add_argument( - "--traefik", - required=False, - action="store_true", - help="Uses the traefik.override.yml compose file", - ) - parser.add_argument( - "--use-external-database", - required=False, - action="store_true", - help="Do not use postgres.override.yml compose file", - ) - parser.add_argument( - "--rabbitmq", - required=False, - action="store_true", - help="Use rabbitmq.override.yml compose file", - ) - parser.add_argument( - "--flower", - required=False, - action="store_true", - help="Uses the flower.override.yml compose file", - ) - parser.add_argument( - "--custom", - required=False, - action="store_true", - help="Uses custom.override.yml to leverage your customized configuration", - ) - parser.add_argument( - "--debug-build", - required=False, - action="store_true", - help="see more verbose output from the build, for debug purposes", - ) - parser.add_argument( - "--elastic", - required=False, - action="store_true", - help="This spins up Elasticsearch" - "and Kibana on your machine (might need >=16GB of RAM)", - ) - parser.add_argument( - "--pyelastic-version", - required=False, - type=generic_version_regex, - default=PYELASTIC_DEFAULT_VERSION, - help="The py-elasticsearch version to choose." - " This must match the server version" - " you are connecting to." - f" Default is {PYELASTIC_DEFAULT_VERSION}", - ) - parser.add_argument( - "--pycti-version", - required=False, - type=generic_version_regex, - default=PYCTI_DEFAULT_VERSION, - help="The pycti version to choose. This must match the OpenCTI server version" - " you are connecting to." - f" Default is {PYCTI_DEFAULT_VERSION}", - ) - parser.add_argument( - "--https", - required=False, - action="store_true", - help="This leverage the https.override.yml file that can be used " - "to host IntelOwl with HTTPS and your own certificate", - ) - - args, unknown = parser.parse_known_args() - # logic - test_appendix = "" - is_test = False - if args.mode in ["test", "ci"]: - is_test = True - test_appendix = ".test" - - os.environ["PYELASTIC_VERSION"] = args.pyelastic_version - os.environ["PYCTI_VERSION"] = args.pycti_version - if (not args.mode == "test" or args.docker_command not in ["up", "build"]) and ( - args.pyelastic_version != PYELASTIC_DEFAULT_VERSION - or args.pycti_version != PYCTI_DEFAULT_VERSION - ): - print( - "pycti_version and pyelastic_version options are valid only while" - " running in 'test' mode" - " and while building a new image." - " This is because they can change the version of those library only during" - " the build of a new Docker Image." 
- ) - sys.exit(11) - - # load relevant .env file - load_dotenv("docker/.env.start" + test_appendix) - - docker_flags = [ - args.__dict__[docker_analyzer] for docker_analyzer in DOCKER_ANALYZERS - ] - if args.all_analyzers and any(docker_flags): - parser.error( - "It is not possible to select both " - "`all_analyzers` and another docker container" - ) - return - # default file - compose_files = [PATH_MAPPING["default"]] - # PostreSQL - if not args.__dict__["use_external_database"]: - compose_files.append(PATH_MAPPING["postgres"]) - # RabbitMQ - if args.__dict__["rabbitmq"]: - compose_files.append(PATH_MAPPING["rabbitmq"]) - # mode - if is_test: - compose_files.append(PATH_MAPPING[args.mode]) - # upgrades - for key in [ - "elastic", - "https", - "nfs", - "traefik", - "multi_queue", - "custom", - "flower", - ]: - if args.__dict__[key]: - compose_files.append(PATH_MAPPING[key]) - # additional compose files for tests - if args.mode == "test": - for key in ["multi_queue", "flower"]: - if args.__dict__[key]: - compose_files.append(PATH_MAPPING["test_" + key]) - # additional integrations - for key in DOCKER_ANALYZERS: - if args.__dict__[key]: - compose_files.append(PATH_MAPPING[key]) - if is_test: - compose_files.append(PATH_MAPPING[key + test_appendix]) - if args.all_analyzers: - compose_files.extend(list(PATH_MAPPING["all_analyzers"])) - if is_test: - compose_files.extend(list(PATH_MAPPING[f"all_analyzers{test_appendix}"])) - - if args.mode == "prod" and args.version != CURRENT_VERSION: - print( - f"Requested version {args.version} is different " - f"from current version {CURRENT_VERSION}" - ) - current_dir = os.getcwd() - repo = Repo(current_dir) - git = repo.git - git.config("--global", "--add", "safe.directory", current_dir) - git.checkout(f"tags/v{args.version}") - - # construct final command - cmd = split("docker compose version") - try: - ps = subprocess.run( - cmd, - shell=False, - check=True, - stdout=subprocess.PIPE, - stderr=subprocess.DEVNULL, - ) - except subprocess.CalledProcessError: - print("Failed to run docker compose") - sys.exit(127) - - output = ps.stdout - if output: - base_command = [ - "docker", - "compose", - "-p", - args.project_name, - "--project-directory", - "docker", - ] - else: - print("Failed to retrieve docker compose version") - sys.exit(126) - - for compose_file in compose_files: - base_command.append("-f") - base_command.append(compose_file) - # we use try/catch to mimick docker-compose's behaviour of handling CTRL+C event - try: - command = split(join(base_command + [args.docker_command] + unknown)) - env = os.environ.copy() - env["DOCKER_BUILDKIT"] = "1" - if args.debug_build: - env["BUILDKIT_PROGRESS"] = "plain" - subprocess.run(command, env=env, check=True) - except KeyboardInterrupt: - print( - "---- removing the containers, please wait... 
", - "(press Ctrl+C again to force) ----", - ) - try: - subprocess.run(split(join(base_command + ["down"])), check=True) - except KeyboardInterrupt: - # just need to catch it - pass - - -if __name__ == "__main__": - start() From f4e32f01b01430c25ce883dfe8f456cca59bf73a Mon Sep 17 00:00:00 2001 From: 0ssigeno Date: Thu, 15 Feb 2024 11:50:04 +0100 Subject: [PATCH 15/58] Docs Signed-off-by: 0ssigeno --- docs/source/Advanced-Configuration.md | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/docs/source/Advanced-Configuration.md b/docs/source/Advanced-Configuration.md index 045b231f4c..e232f99ec9 100644 --- a/docs/source/Advanced-Configuration.md +++ b/docs/source/Advanced-Configuration.md @@ -190,8 +190,7 @@ In that case, you should create new SQS queues in AWS called `intelowl- Date: Thu, 15 Feb 2024 12:19:44 +0100 Subject: [PATCH 16/58] Fixes Signed-off-by: 0ssigeno --- docker/default.yml | 4 ++++ docker/flower.override.yml | 4 +++- docker/multi-queue.override.yml | 3 +++ docker/postgres.override.yml | 1 + docker/rabbitmq.override.yml | 2 +- docker/redis.override.yml | 8 +++++--- docker/sqs.override.yml | 4 ++-- intel_owl/settings/websocket.py | 21 ++++++++++++--------- 8 files changed, 31 insertions(+), 16 deletions(-) diff --git a/docker/default.yml b/docker/default.yml index 75cb1d3559..4bd2b331e2 100644 --- a/docker/default.yml +++ b/docker/default.yml @@ -8,6 +8,7 @@ services: uwsgi: image: intelowlproject/intelowl:${REACT_APP_INTELOWL_VERSION} container_name: intelowl_uwsgi + hostname: uwsgi volumes: - ../configuration/intel_owl.ini:/etc/uwsgi/sites/intel_owl.ini # uwsgi configuration file - ../configuration:/opt/deploy/intel_owl/configuration @@ -27,6 +28,7 @@ services: daphne: image: intelowlproject/intelowl:${REACT_APP_INTELOWL_VERSION} container_name: intelowl_daphne + hostname: daphne restart: unless-stopped volumes: - generic_logs:/var/log/intel_owl @@ -59,6 +61,7 @@ services: celery_beat: image: intelowlproject/intelowl:${REACT_APP_INTELOWL_VERSION} container_name: intelowl_celery_beat + hostname: celery_beat restart: unless-stopped volumes: - ../configuration:/opt/deploy/intel_owl/configuration @@ -77,6 +80,7 @@ services: celery_worker_default: image: intelowlproject/intelowl:${REACT_APP_INTELOWL_VERSION} container_name: intelowl_celery_worker_default + hostname: celery_worker_default restart: unless-stopped stop_grace_period: 3m volumes: diff --git a/docker/flower.override.yml b/docker/flower.override.yml index dee8ebddb3..575f77321e 100644 --- a/docker/flower.override.yml +++ b/docker/flower.override.yml @@ -3,10 +3,11 @@ version: "3.8" services: uwsgi: environment: - BROKER_URL_API: "http://guest:guest@rabbitmq:15672/api/" + - BROKER_URL_API=http://guest:guest@rabbitmq:15672/api/ rabbitmq: image: library/rabbitmq:3.11-management-alpine + hostname: rabbitmq container_name: intelowl_rabbitmq logging: driver: none @@ -16,6 +17,7 @@ services: flower: image: intelowlproject/intelowl:${REACT_APP_INTELOWL_VERSION} container_name: intelowl_flower + hostname: flower restart: unless-stopped stop_grace_period: 3m volumes: diff --git a/docker/multi-queue.override.yml b/docker/multi-queue.override.yml index 03d941c3fb..a34525d2ea 100644 --- a/docker/multi-queue.override.yml +++ b/docker/multi-queue.override.yml @@ -13,6 +13,7 @@ services: image: intelowlproject/intelowl:${REACT_APP_INTELOWL_VERSION} container_name: intelowl_celery_worker_local restart: unless-stopped + hostname: celery_worker_local stop_grace_period: 3m volumes: - 
../configuration:/opt/deploy/intel_owl/configuration @@ -29,6 +30,7 @@ services: celery_worker_long: image: intelowlproject/intelowl:${REACT_APP_INTELOWL_VERSION} container_name: intelowl_celery_worker_long + hostname: celery_worker_long restart: unless-stopped stop_grace_period: 3m volumes: @@ -46,6 +48,7 @@ services: celery_worker_ingestor: image: intelowlproject/intelowl:${REACT_APP_INTELOWL_VERSION} container_name: intelowl_celery_worker_ingestor + hostname: celery_worker_ingestor restart: unless-stopped stop_grace_period: 3m volumes: diff --git a/docker/postgres.override.yml b/docker/postgres.override.yml index 5693c676a9..fd21718fab 100644 --- a/docker/postgres.override.yml +++ b/docker/postgres.override.yml @@ -5,6 +5,7 @@ services: postgres: image: library/postgres:12-alpine container_name: intelowl_postgres + hostname: postgres volumes: - postgres_data:/var/lib/postgresql/data/ env_file: diff --git a/docker/rabbitmq.override.yml b/docker/rabbitmq.override.yml index 14b246b64d..7f4d06429d 100644 --- a/docker/rabbitmq.override.yml +++ b/docker/rabbitmq.override.yml @@ -11,6 +11,6 @@ services: uwsgi: environment: - BROKER_URL: "amqp://guest:guest@rabbitmq:5672" + - BROKER_URL="amqp://guest:guest@rabbitmq:5672" depends_on: - rabbitmq \ No newline at end of file diff --git a/docker/redis.override.yml b/docker/redis.override.yml index 8421125ee0..22f7de8163 100644 --- a/docker/redis.override.yml +++ b/docker/redis.override.yml @@ -5,14 +5,16 @@ services: redis: condition: service_healthy environment: - BROKER_URL: "redis://redis:6379" - WEBSOCKETS_URL: "redis://redis:6379" + - BROKER_URL=redis://redis:6379 + - WEBSOCKETS_URL=redis://redis:6379 daphne: depends_on: redis: condition: service_healthy - + environment: + - BROKER_URL=redis://redis:6379 + - WEBSOCKETS_URL=redis://redis:6379 redis: image: library/redis:6.2.7-alpine diff --git a/docker/sqs.override.yml b/docker/sqs.override.yml index 847417e9a1..4fce1efd89 100644 --- a/docker/sqs.override.yml +++ b/docker/sqs.override.yml @@ -2,5 +2,5 @@ version: "3.8" services: uwsgi: environment: - AWS_SQS: True - BROKER_URL: "sqs://" \ No newline at end of file + - AWS_SQS=True + - BROKER_URL=sqs:// \ No newline at end of file diff --git a/intel_owl/settings/websocket.py b/intel_owl/settings/websocket.py index 992c91c728..f5b74fd84f 100644 --- a/intel_owl/settings/websocket.py +++ b/intel_owl/settings/websocket.py @@ -1,14 +1,17 @@ from intel_owl import secrets +import socket websockets_url = secrets.get_secret("WEBSOCKETS_URL", None) if not websockets_url: - raise RuntimeError("Unable to configure websockets") -ASGI_APPLICATION = "intel_owl.asgi.application" -CHANNEL_LAYERS = { - "default": { - "BACKEND": "channels_redis.core.RedisChannelLayer", - "CONFIG": { - "hosts": [f"{websockets_url}/0"], + if socket.gethostname() in ["uwsgi", "daphne"]: + raise RuntimeError("Unable to configure websockets") +else: + ASGI_APPLICATION = "intel_owl.asgi.application" + CHANNEL_LAYERS = { + "default": { + "BACKEND": "channels_redis.core.RedisChannelLayer", + "CONFIG": { + "hosts": [f"{websockets_url}/0"], + }, }, - }, -} + } From e84145bd26ce123e2116d446318de4a43bbc615d Mon Sep 17 00:00:00 2001 From: Daniele Rosetti Date: Tue, 20 Feb 2024 12:36:33 +0100 Subject: [PATCH 17/58] added auth layer in ws --- api_app/websocket.py | 1 + intel_owl/asgi.py | 11 +++++++---- intel_owl/settings/django.py | 1 - intel_owl/settings/middleware.py | 10 ++++++++++ 4 files changed, 18 insertions(+), 5 deletions(-) create mode 100644 intel_owl/settings/middleware.py diff --git 
a/api_app/websocket.py b/api_app/websocket.py index d9874caf11..f0ee3c96ab 100644 --- a/api_app/websocket.py +++ b/api_app/websocket.py @@ -12,6 +12,7 @@ class JobConsumer(JsonWebsocketConsumer): def connect(self): + logger.debug(f"{self.scope=}") user = self.scope["user"] job_id = self.scope["url_route"]["kwargs"]["job_id"] logger.info(f"user: {user} requested the analysis for the job {job_id}") diff --git a/intel_owl/asgi.py b/intel_owl/asgi.py index 2794362571..b43ed6eee4 100644 --- a/intel_owl/asgi.py +++ b/intel_owl/asgi.py @@ -14,16 +14,19 @@ # pylint: disable=wrong-import-position from api_app.websocket import JobConsumer # noqa: E402 +from intel_owl.settings.middleware import WSAuthMiddleware # noqa: E402 application = ProtocolTypeRouter( { # WebSocket chat handler "websocket": AllowedHostsOriginValidator( AuthMiddlewareStack( - URLRouter( - [ - path("ws/jobs/", JobConsumer.as_asgi()), - ] + WSAuthMiddleware( + URLRouter( + [ + path("ws/jobs/", JobConsumer.as_asgi()), + ] + ) ) ) ), diff --git a/intel_owl/settings/django.py b/intel_owl/settings/django.py index 6d6cc9635f..46182deadc 100644 --- a/intel_owl/settings/django.py +++ b/intel_owl/settings/django.py @@ -18,7 +18,6 @@ "certego_saas.ext.middlewares.LogMiddleware", # custom ] -SESSION_ENGINE = "django.contrib.sessions.backends.signed_cookies" SESSION_COOKIE_HTTPONLY = True SESSION_COOKIE_AGE = 60 * 60 * 24 * 90 # seconds * minutes * hours * days diff --git a/intel_owl/settings/middleware.py b/intel_owl/settings/middleware.py new file mode 100644 index 0000000000..dfcb9af897 --- /dev/null +++ b/intel_owl/settings/middleware.py @@ -0,0 +1,10 @@ +class WSAuthMiddleware: + def __init__(self, app): + self.app = app + + async def __call__(self, scope, receive, send): + user = scope["user"] + if user.is_anonymous: + return await send({"type": "websocket.close", "code": 1008}) + + return await self.app(scope, receive, send) From 317f5a8f377b98a340e878f644c5fa0c2b04278c Mon Sep 17 00:00:00 2001 From: Daniele Rosetti Date: Tue, 20 Feb 2024 15:04:19 +0100 Subject: [PATCH 18/58] fix redis --- start | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/start b/start index 83b33c16dd..1a3935e50a 100755 --- a/start +++ b/start @@ -33,7 +33,8 @@ print_help () { echo " --multi_queue Uses the multiqueue.override.yml compose file." echo " --nfs Uses the nfs.override.yml compose file." echo " --traefik Uses the traefik.override.yml compose file." - echo " --use-external-database Do not use postgres.override.yml compose file." + echo " --use-external-database Do NOT use postgres.override.yml compose file." + echo " --use-external-redis Do NOT use redis.override.yml compose file." echo " --rabbitmq Uses the rabbitmq.override.yml compose file." echo " --flower Uses the flower.override.yml compose file." 
echo " --custom Uses custom.override.yml to leverage your" @@ -107,7 +108,7 @@ current_version=${REACT_APP_INTELOWL_VERSION/"v"/""} docker_analyzers=("tor_analyzers" "malware_tools_analyzers" "cyberchef" "pcap_analyzers") -declare -A path_mapping=(["default"]="docker/default.yml" ["postgres"]="docker/postgres.override.yml" ["rabbitmq"]="docker/rabbitmq.override.yml" ["test"]="docker/test.override.yml" ["ci"]="docker/ci.override.yml" ["custom"]="docker/custom.override.yml" ["traefik"]="docker/traefik.override.yml" ["multi_queue"]="docker/multi-queue.override.yml" ["test_multi_queue"]="docker/test.multi-queue.override.yml" ["flower"]="docker/flower.override.yml" ["test_flower"]="docker/test.flower.override.yml" ["elastic"]="docker/elasticsearch.override.yml" ["https"]="docker/https.override.yml" ["nfs"]="docker/nfs.override.yml") +declare -A path_mapping=(["default"]="docker/default.yml" ["postgres"]="docker/postgres.override.yml" ["rabbitmq"]="docker/rabbitmq.override.yml" ["test"]="docker/test.override.yml" ["ci"]="docker/ci.override.yml" ["custom"]="docker/custom.override.yml" ["traefik"]="docker/traefik.override.yml" ["multi_queue"]="docker/multi-queue.override.yml" ["test_multi_queue"]="docker/test.multi-queue.override.yml" ["flower"]="docker/flower.override.yml" ["test_flower"]="docker/test.flower.override.yml" ["elastic"]="docker/elasticsearch.override.yml" ["https"]="docker/https.override.yml" ["nfs"]="docker/nfs.override.yml" ["redis"]="docker/redis.override.yml") for value in "${docker_analyzers[@]}"; do path_mapping["${value}"]+="integrations/${value}/compose.yml" path_mapping["${value}.test"]+="integrations/${value}/compose-tests.yml" From 96e23b7b45aa3258187a60e288ce4de0b4162df0 Mon Sep 17 00:00:00 2001 From: Daniele Rosetti Date: Wed, 21 Feb 2024 11:25:45 +0100 Subject: [PATCH 19/58] fixed csrf token in frontend dev env --- authentication/views.py | 5 ++--- intel_owl/settings/security.py | 12 +++++------- 2 files changed, 7 insertions(+), 10 deletions(-) diff --git a/authentication/views.py b/authentication/views.py index 8cbe703c71..d09bf20338 100644 --- a/authentication/views.py +++ b/authentication/views.py @@ -79,7 +79,6 @@ class ResendVerificationView( class LoginView(RecaptchaV2Mixin): - authentication_classes: List = [] permission_classes: List = [] throttle_classes: List = [POSTUserRateThrottle] @@ -99,7 +98,7 @@ def post(self, request, *args, **kwargs): user = self.validate_and_return_user(request=request) logger.info(f"perform_login received request from '{user.username}''.") login(request, user) - return Response({}) + return Response() class ChangePasswordView(APIView): @@ -134,7 +133,7 @@ def post(self, request, *args, **kwargs): user = request.user logger.info(f"perform_logout received request from '{user.username}''.") logout(request) - return Response({}) + return Response() @add_docs( diff --git a/intel_owl/settings/security.py b/intel_owl/settings/security.py index 431cbe2d83..e739d463c2 100644 --- a/intel_owl/settings/security.py +++ b/intel_owl/settings/security.py @@ -5,7 +5,7 @@ from django.core.management.utils import get_random_secret_key from ._util import get_secret -from .commons import WEB_CLIENT_DOMAIN +from .commons import STAGE_LOCAL, WEB_CLIENT_DOMAIN # SECURITY WARNING: keep the secret key used in production secret! 
SECRET_KEY = get_secret("DJANGO_SECRET", None) or get_random_secret_key() @@ -19,12 +19,10 @@ WEB_CLIENT_URL = f"http://{WEB_CLIENT_DOMAIN}" CSRF_COOKIE_SAMESITE = "Strict" -# TODO: change this -CSRF_TRUSTED_ORIGINS = [ - "http://localhost", "http://localhost/", - "http://localhost:80", "http://localhost:80/", - "http://localhost:3001", "http://localhost:3001/" -] + +if STAGE_LOCAL: + # required to allow requests from port 3001 (frontend development) + CSRF_TRUSTED_ORIGINS = [f"{WEB_CLIENT_URL}:80/"] ALLOWED_HOSTS = ["*"] # https://docs.djangoproject.com/en/4.2/ref/settings/#data-upload-max-memory-size From 1e10aca7112be2b47a15346d8658a16b66629fce Mon Sep 17 00:00:00 2001 From: Daniele Rosetti Date: Wed, 21 Feb 2024 16:32:10 +0100 Subject: [PATCH 20/58] moved middleware + improved UX job loading --- frontend/src/components/Routes.jsx | 9 +- .../components/jobs/result/JobOverview.jsx | 136 ++++++++++-------- .../src/components/jobs/result/JobResult.jsx | 12 +- intel_owl/asgi.py | 2 +- intel_owl/{settings => }/middleware.py | 0 5 files changed, 83 insertions(+), 76 deletions(-) rename intel_owl/{settings => }/middleware.py (100%) diff --git a/frontend/src/components/Routes.jsx b/frontend/src/components/Routes.jsx index d666e477ff..f000fdd5f6 100644 --- a/frontend/src/components/Routes.jsx +++ b/frontend/src/components/Routes.jsx @@ -4,6 +4,7 @@ import { Navigate, useParams } from "react-router-dom"; import AuthGuard from "../wrappers/AuthGuard"; import IfAuthRedirectGuard from "../wrappers/IfAuthRedirectGuard"; +import { JobResultSections } from "../constants/miscConst"; const Home = React.lazy(() => import("./home/Home")); const Login = React.lazy(() => import("./auth/Login")); @@ -15,8 +16,8 @@ const Organization = React.lazy(() => import("./organization/Organization")); const Sessions = React.lazy(() => import("./user/sessions/Sessions")); const JobsTable = React.lazy(() => import("./jobs/table/JobsTable")); const JobResult = React.lazy(() => import("./jobs/result/JobResult")); -const CommentResult = React.lazy( - () => import("./jobs/result/bar/comment/CommentResult"), +const CommentResult = React.lazy(() => + import("./jobs/result/bar/comment/CommentResult"), ); const PluginsContainer = React.lazy(() => import("./plugins/PluginsContainer")); const Dashboard = React.lazy(() => import("./dashboard/Dashboard")); @@ -30,7 +31,9 @@ lazy imports to enable code splitting function JobRedirect() { const params = useParams(); const { id } = params; - return ; + return ( + + ); } // public components diff --git a/frontend/src/components/jobs/result/JobOverview.jsx b/frontend/src/components/jobs/result/JobOverview.jsx index 8877a8c97e..e01d6bcac1 100644 --- a/frontend/src/components/jobs/result/JobOverview.jsx +++ b/frontend/src/components/jobs/result/JobOverview.jsx @@ -66,7 +66,7 @@ export function JobOverview({ const rawElements = React.useMemo( () => [ { - id: "analyzer", + name: "analyzer", nav: (
Analyzers Report @@ -83,7 +83,7 @@ export function JobOverview({ report: , }, { - id: "connector", + name: "connector", nav: (
Connectors Report @@ -100,7 +100,7 @@ export function JobOverview({ report: , }, { - id: "pivot", + name: "pivot", nav: (
Pivots Report @@ -117,7 +117,7 @@ export function JobOverview({ report: , }, { - id: "visualizer", + name: "visualizer", nav: (
Visualizers Report @@ -138,7 +138,7 @@ export function JobOverview({ report: , }, { - id: "full", + name: "full", nav: (
Full Report @@ -171,6 +171,65 @@ )}`, ); + useEffect(() => { + console.debug("JobOverview - check to set default visualizer"); + let visualizerSections = []; + if (Object.values(JobFinalStatuses).includes(job.status)) { + const pageList = job.visualizer_reports.map((report) => report.name); + if (pageList.length > 0) { + visualizerSections = pageList; + } else { + visualizerSections = [NO_VISUALIZER_UI_ELEMENT_CODE]; + } + } else { + visualizerSections = [LOADING_VISUALIZER_UI_ELEMENT_CODE]; + } + console.debug(`visualizerSections: ${JSON.stringify(visualizerSections)}`); + + // check visualizers have been loaded and user didn't change page + if (visualizerSections.length !== 0 && !location.state?.userChanged) { + console.debug("updated visualizers"); + if (!subSection) { + console.debug( + `[AUTO REDIRECT] navigate to visualizer: ${ + visualizerSections[0] + }, encoded: ${encodeURIComponent(visualizerSections[0])}`, + ); + // in case no section is selected (ex: from start scan) redirect to a visualizer + navigate( + `/jobs/${job.id}/${JobResultSections.VISUALIZER}/${encodeURIComponent( + visualizerSections[0], + )}`, + { replace: true }, + ); + } else if ( + subSection === LOADING_VISUALIZER_UI_ELEMENT_CODE && + visualizerSections[0] !== LOADING_VISUALIZER_UI_ELEMENT_CODE + ) { + console.debug( + `[AUTO REDIRECT] navigate to visualizer: ${ + visualizerSections[0] + }, encoded: ${encodeURIComponent(visualizerSections[0])}`, + ); + // in case we are on the loading page and the visualizers have been updated, change page (if they differ from loading) + navigate( + `/jobs/${job.id}/${JobResultSections.VISUALIZER}/${encodeURIComponent( + visualizerSections[0], + )}`, + { replace: true }, + ); + } else if (subSection === NO_VISUALIZER_UI_ELEMENT_CODE) { + console.debug("[AUTO REDIRECT] navigate to raw data - analyzer"); + // in case there is no visualizer redirect to raw data + navigate( + `/jobs/${job.id}/${JobResultSections.RAW}/${rawElements[0].name}`, + { replace: true }, + ); + } + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [job]); useEffect(() => { // this store the ui elements when the frontend download them console.debug("JobOverview - create/update visualizer components"); @@ -183,7 +242,7 @@ job.visualizers_to_execute.length > 0 ) { newUIElements = job.visualizer_reports.map((visualizerReport) => ({ - id: visualizerReport.name, + name: visualizerReport.name, nav: (
{visualizerReport.name} @@ -202,7 +261,7 @@ export function JobOverview({ job.visualizers_to_execute.length > 0 ) { newUIElements.push({ - id: LOADING_VISUALIZER_UI_ELEMENT_CODE, + name: LOADING_VISUALIZER_UI_ELEMENT_CODE, nav: null, report: (
@@ -233,53 +292,6 @@ export function JobOverview({ // eslint-disable-next-line react-hooks/exhaustive-deps }, [job]); - useEffect(() => { - console.debug("JobOverview - check to set default visualizer"); - // check visualizers have been loaded and user didn't changed page - console.debug(`Ui elements number: ${UIElements.length}`); - if (UIElements.length !== 0 && !location.state?.userChanged) { - console.debug("updated visualizers"); - if (!subSection) { - console.debug( - `[AUTO REDIRECT] navigate to visualizer: ${ - UIElements[0].id - }, encoded: ${encodeURIComponent(UIElements[0].id)}`, - ); - // in case no section is selected (ex: from start scan) redirect to a visualizer - navigate( - `/jobs/${job.id}/${JobResultSections.VISUALIZER}/${encodeURIComponent( - UIElements[0].id, - )}`, - { replace: true }, - ); - } else if ( - subSection === LOADING_VISUALIZER_UI_ELEMENT_CODE && - UIElements[0].id !== LOADING_VISUALIZER_UI_ELEMENT_CODE - ) { - console.debug( - `[AUTO REDIRECT] navigate to visualizer: ${ - UIElements[0].id - }, encoded: ${encodeURIComponent(UIElements[0].id)}`, - ); - // in case we are in the loading page and we update the visualizer change page (if they are different from loading) - navigate( - `/jobs/${job.id}/${JobResultSections.VISUALIZER}/${encodeURIComponent( - UIElements[0].id, - )}`, - { replace: true }, - ); - } else if (subSection === NO_VISUALIZER_UI_ELEMENT_CODE) { - console.debug("[AUTO REDIRECT] navigate to raw data - analyzer"); - // in case there is no visualizer redirect to raw data - navigate( - `/jobs/${job.id}/${JobResultSections.RAW}/${rawElements[0].id}`, - { replace: true }, - ); - } - } - // eslint-disable-next-line react-hooks/exhaustive-deps - }, [UIElements]); - const elementsToShow = isSelectedUI ? UIElements : rawElements; return ( @@ -322,7 +334,7 @@ export function JobOverview({ navigate( `/jobs/${job.id}/${ JobResultSections.VISUALIZER - }/${encodeURIComponent(UIElements[0].id)}`, + }/${encodeURIComponent(UIElements[0].name)}`, { state: { userChanged: true } }, ) } @@ -335,7 +347,7 @@ export function JobOverview({ color={!isSelectedUI ? "primary" : "tertiary"} onClick={() => navigate( - `/jobs/${job.id}/${JobResultSections.RAW}/${rawElements[0].id}`, + `/jobs/${job.id}/${JobResultSections.RAW}/${rawElements[0].name}`, { state: { userChanged: true } }, ) } @@ -354,8 +366,8 @@ export function JobOverview({ {elementsToShow.sort().map((componentsObject) => ( {componentsObject.report} diff --git a/frontend/src/components/jobs/result/JobResult.jsx b/frontend/src/components/jobs/result/JobResult.jsx index bf9f1ec26f..48efd1b086 100644 --- a/frontend/src/components/jobs/result/JobResult.jsx +++ b/frontend/src/components/jobs/result/JobResult.jsx @@ -68,7 +68,8 @@ export default function JobResult() { window.addEventListener("blur", () => setToNotify(true)); getJob() .then((response) => setJob(response.data)) - .catch((err) => setInitialError(err)); + .catch((err) => setInitialError(err)) + .finally((_) => setInitialLoading(false)); // eslint-disable-next-line react-hooks/exhaustive-deps }, []); @@ -81,15 +82,6 @@ export default function JobResult() { { restoreOnUnmount: true }, ); - useEffect(() => { - /* this is required because the first loading we don't have job data - and this is a problem for JobOverview that needs the UI sections names - so the first time the page has a spinner, after the first request - the spinner will be moved in the sections. 
- */ - if (job) setInitialLoading(false); - }, [job]); - /* SETUP WS: only in case the first request didn't get the job in a final status. use ws with useRef to avoid to create a ws each render AND create the ws. diff --git a/intel_owl/asgi.py b/intel_owl/asgi.py index b43ed6eee4..0690db5586 100644 --- a/intel_owl/asgi.py +++ b/intel_owl/asgi.py @@ -14,7 +14,7 @@ # pylint: disable=wrong-import-position from api_app.websocket import JobConsumer # noqa: E402 -from intel_owl.settings.middleware import WSAuthMiddleware # noqa: E402 +from intel_owl.middleware import WSAuthMiddleware # noqa: E402 application = ProtocolTypeRouter( { diff --git a/intel_owl/settings/middleware.py b/intel_owl/middleware.py similarity index 100% rename from intel_owl/settings/middleware.py rename to intel_owl/middleware.py From 3be3403b0e2f9ca7ed759a4c7d374c4e481e9e4a Mon Sep 17 00:00:00 2001 From: Daniele Rosetti Date: Thu, 22 Feb 2024 11:35:44 +0100 Subject: [PATCH 21/58] moved token view from durin to drf --- authentication/serializers.py | 25 ++++ authentication/urls.py | 2 + authentication/views.py | 44 ++++++- frontend/src/components/Routes.jsx | 8 +- .../user/{sessions => api}/APIAccess.jsx | 18 +-- .../Sessions.jsx => api/APIPage.jsx} | 12 +- .../sessionApi.js => api/APIPageApi.js} | 26 +--- .../components/user/sessions/SessionsList.jsx | 112 ------------------ frontend/src/constants/apiURLs.js | 1 - frontend/src/layouts/widgets/UserMenu.jsx | 4 +- 10 files changed, 85 insertions(+), 167 deletions(-) rename frontend/src/components/user/{sessions => api}/APIAccess.jsx (90%) rename frontend/src/components/user/{sessions/Sessions.jsx => api/APIPage.jsx} (77%) rename frontend/src/components/user/{sessions/sessionApi.js => api/APIPageApi.js} (56%) delete mode 100644 frontend/src/components/user/sessions/SessionsList.jsx diff --git a/authentication/serializers.py b/authentication/serializers.py index 54e58f24d4..bb9629506d 100644 --- a/authentication/serializers.py +++ b/authentication/serializers.py @@ -9,6 +9,7 @@ from django.core.exceptions import ValidationError from django.db import DatabaseError, transaction from rest_framework import serializers as rfs +from rest_framework.authtoken.models import Token from rest_framework.authtoken.serializers import AuthTokenSerializer from slack_sdk.errors import SlackApiError @@ -229,3 +230,27 @@ def validate(self, attrs): ) # else raise exc + + +class TokenSerializer(rfs.ModelSerializer): + class Meta: + model = Token + fields = [ + "key", + "created", + ] + read_only_fields = [ + "key", + "created", + ] + + def create(self, validated_data): + """ + :meta private: + """ + user = self.context["request"].user + if Token.objects.filter(user=user).exists(): + raise rfs.ValidationError("An API token was already issued to you.") + + validated_data["user"] = user + return super().create(validated_data) diff --git a/authentication/urls.py b/authentication/urls.py index c2b7a1207a..a2d269a1e7 100644 --- a/authentication/urls.py +++ b/authentication/urls.py @@ -5,6 +5,7 @@ from rest_framework import routers from .views import ( + APIAccessTokenView, ChangePasswordView, EmailVerificationView, GoogleLoginCallbackView, @@ -47,6 +48,7 @@ path("login", LoginView.as_view(), name="auth_login"), path("logout", LogoutView.as_view(), name="auth_logout"), path("changepassword", ChangePasswordView.as_view(), name="auth_changepassword"), + path("apiaccess", APIAccessTokenView.as_view(), name="auth_apiaccess"), path("google", google_login, name="oauth_google"), path( "google-callback", diff 
--git a/authentication/views.py b/authentication/views.py index d09bf20338..10e693fba2 100644 --- a/authentication/views.py +++ b/authentication/views.py @@ -13,8 +13,9 @@ from django.shortcuts import redirect from drf_spectacular.utils import extend_schema as add_docs from rest_framework import status +from rest_framework.authtoken.models import Token from rest_framework.decorators import api_view, permission_classes -from rest_framework.exceptions import AuthenticationFailed +from rest_framework.exceptions import AuthenticationFailed, NotFound from rest_framework.permissions import AllowAny, IsAuthenticated from rest_framework.request import Request from rest_framework.response import Response @@ -30,6 +31,7 @@ EmailVerificationSerializer, LoginSerializer, RegistrationSerializer, + TokenSerializer, ) logger = logging.getLogger(__name__) @@ -129,6 +131,8 @@ def post(request: Request) -> Response: class LogoutView(APIView): + permission_classes = [IsAuthenticated] + def post(self, request, *args, **kwargs): user = request.user logger.info(f"perform_logout received request from '{user.username}''.") @@ -238,3 +242,41 @@ def checkConfiguration(request): return Response( status=status.HTTP_200_OK, data={"errors": errors} if errors else {} ) + + +class APIAccessTokenView(APIView): + """ + - ``GET`` -> get API token info + - ``POST`` -> create and get API token info + - ``DELETE`` -> delete existing API access token + """ + + permission_classes = [IsAuthenticated] + + def get_object(self): + try: + instance = Token.objects.get(user__pk=self.request.user.pk) + except Token.DoesNotExist: + raise NotFound() + + return instance + + def get(self, request, *args, **kwargs): + instance = self.get_object() + logger.info(f"user {request.user} requested the API token") + serializer = TokenSerializer(instance) + return Response(serializer.data) + + def post(self, request): + username = request.user.username + logger.info(f"user {username} sent a request to create the API token") + serializer = TokenSerializer(data={}, context={"request": request}) + serializer.is_valid(raise_exception=True) + serializer.save() + return Response(serializer.data, status=status.HTTP_201_CREATED) + + def delete(self, request): + logger.info(f"user {request.user} sent a request to delete the API token") + instance = self.get_object() + instance.delete() + return Response(status=status.HTTP_204_NO_CONTENT)
diff --git a/frontend/src/components/Routes.jsx b/frontend/src/components/Routes.jsx index f000fdd5f6..8930e0897e 100644 --- a/frontend/src/components/Routes.jsx +++ b/frontend/src/components/Routes.jsx @@ -13,7 +13,7 @@ const Register = React.lazy(() => import("./auth/Register")); const EmailVerification = React.lazy(() => import("./auth/EmailVerification")); const ResetPassword = React.lazy(() => import("./auth/ResetPassword")); const Organization = React.lazy(() => import("./organization/Organization")); -const Sessions = React.lazy(() => import("./user/sessions/Sessions")); +const APIPage = React.lazy(() => import("./user/api/APIPage")); const JobsTable = React.lazy(() => import("./jobs/table/JobsTable")); const JobResult = React.lazy(() => import("./jobs/result/JobResult")); const CommentResult = React.lazy(() => @@ -116,12 +116,12 @@ const authRoutesLazy = [ ), }, - /* API Access/Sessions Management */ + /* API Access */ { - path: "/me/sessions", + path: "/me/api", element: ( }> - + ), },
diff --git a/frontend/src/components/user/sessions/APIAccess.jsx b/frontend/src/components/user/api/APIAccess.jsx
similarity index 90% rename from frontend/src/components/user/sessions/APIAccess.jsx rename to frontend/src/components/user/api/APIAccess.jsx index 988c6e3319..a1d1716115 100644 --- a/frontend/src/components/user/sessions/APIAccess.jsx +++ b/frontend/src/components/user/api/APIAccess.jsx @@ -15,7 +15,7 @@ import { confirm, } from "@certego/certego-ui"; -import { APIACCESS_BASE_URI, createNewToken, deleteToken } from "./sessionApi"; +import { APIACCESS_BASE_URI, createNewToken, deleteToken } from "./APIPageApi"; function GenerateIcon() { return ( @@ -87,17 +87,7 @@ export default function APIAccess() { id="apikey__created" value={respData?.created} format="hh:mm a MMM do, yyyy" - title="Session create date" - showAgo - /> - - - Expires - @@ -109,10 +99,10 @@ export default function APIAccess() { {tokenVisible ? ( - {respData?.token} + {respData?.key} ) : (
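
A note on the flow this patch establishes: the frontend component above and the Python/Go clients all converge on the single `auth/apiaccess` endpoint plus the standard DRF `TokenAuthentication` header. A minimal sketch of that flow from a script, assuming a local instance at `http://localhost` and an already-working session login (host, credentials, and CSRF handling are assumptions, not part of this patch):

    import requests

    AUTH_BASE = "http://localhost/api/auth"  # assumed deployment URL

    session = requests.Session()
    # bootstrap with session auth (CSRF header handling omitted for brevity)
    session.post(f"{AUTH_BASE}/login", json={"username": "user", "password": "pass"})

    # POST creates the token, GET returns it, DELETE revokes it
    key = session.post(f"{AUTH_BASE}/apiaccess").json()["key"]

    # later, any client can authenticate with the DRF token header alone
    resp = requests.get(
        f"{AUTH_BASE}/apiaccess",
        headers={"Authorization": f"Token {key}"},
    )
    print(resp.json()["created"])
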
diff --git a/frontend/src/components/user/sessions/Sessions.jsx b/frontend/src/components/user/api/APIPage.jsx similarity index 77% rename from frontend/src/components/user/sessions/Sessions.jsx rename to frontend/src/components/user/api/APIPage.jsx index b7a696f588..70107e43e1 100644 --- a/frontend/src/components/user/sessions/Sessions.jsx +++ b/frontend/src/components/user/api/APIPage.jsx @@ -4,15 +4,14 @@ import useTitle from "react-use/lib/useTitle"; import { ContentSection } from "@certego/certego-ui"; -import SessionsList from "./SessionsList"; import APIAccess from "./APIAccess"; import { PYINTELOWL_GH_URL } from "../../../constants/environment"; -export default function Sessions() { - console.debug("Sessions rendered!"); +export default function APIPage() { + console.debug("APIPage rendered!"); // page title - useTitle("IntelOwl | Sessions", { + useTitle("IntelOwl | API", { restoreOnUnmount: true, }); @@ -41,11 +40,6 @@ export default function Sessions() { - {/* Sessions List */} -
Browser Sessions
- - - ); } diff --git a/frontend/src/components/user/sessions/sessionApi.js b/frontend/src/components/user/api/APIPageApi.js similarity index 56% rename from frontend/src/components/user/sessions/sessionApi.js rename to frontend/src/components/user/api/APIPageApi.js index e4e06f1703..184e833681 100644 --- a/frontend/src/components/user/sessions/sessionApi.js +++ b/frontend/src/components/user/api/APIPageApi.js @@ -2,10 +2,7 @@ import axios from "axios"; import { addToast } from "@certego/certego-ui"; -import { - APIACCESS_BASE_URI, - SESSIONS_BASE_URI, -} from "../../../constants/apiURLs"; +import { APIACCESS_BASE_URI } from "../../../constants/apiURLs"; // API Access @@ -31,23 +28,4 @@ async function deleteToken() { } } -// Sessions - -async function deleteTokenById(id, clientName) { - try { - const resp = await axios.delete(`${SESSIONS_BASE_URI}/${id}`); - addToast(`Revoked Session (${clientName}).`, null, "success", true, 6000); - return resp; - } catch (error) { - addToast("Failed!", error.parsedMsg.toString(), "danger", true); - return Promise.reject(error); - } -} - -export { - APIACCESS_BASE_URI, - SESSIONS_BASE_URI, - createNewToken, - deleteToken, - deleteTokenById, -}; +export { APIACCESS_BASE_URI, createNewToken, deleteToken }; diff --git a/frontend/src/components/user/sessions/SessionsList.jsx b/frontend/src/components/user/sessions/SessionsList.jsx deleted file mode 100644 index 80a3c7dac7..0000000000 --- a/frontend/src/components/user/sessions/SessionsList.jsx +++ /dev/null @@ -1,112 +0,0 @@ -import React from "react"; -import { Row, Col, Badge } from "reactstrap"; -import { VscDebugDisconnect } from "react-icons/vsc"; - -import { - IconButton, - DateHoverable, - useAxiosComponentLoader, -} from "@certego/certego-ui"; - -import { SESSIONS_BASE_URI, deleteTokenById } from "./sessionApi"; - -export default function SessionsList() { - console.debug("SessionsList rendered!"); - - // API - const [tokenSessions, Loader, refetch] = useAxiosComponentLoader( - { - url: SESSIONS_BASE_URI, - }, - (respData) => - respData.sort( - (currentSession, nextSession) => - !currentSession.is_current || - currentSession.created - nextSession.created, - ), - ); - - // callbacks - const revokeSessionCb = React.useCallback( - async (id, clientName) => { - try { - await deleteTokenById(id, clientName); - // reload after 500ms - setTimeout(refetch, 500); - } catch (error) { - // handled inside deleteTokenById - } - }, - [refetch], - ); - - return ( - ( -
    - {tokenSessions.map( - ({ - id, - client, - created, - expiry, - has_expired: hasExpired, - is_current: isCurrent, - }) => ( -
- - - Device -   - {client} - - - Created - - - - Expires - - {hasExpired && ( - - expired - - )} - - {/* Actions */} - - {!isCurrent ? ( - revokeSessionCb(id, client)} - /> - ) : ( - current - )} - - -
- ), )} -
- )}
- />
- );
-}
diff --git a/frontend/src/constants/apiURLs.js b/frontend/src/constants/apiURLs.js
index 147031b43d..016d01a8b6 100644
--- a/frontend/src/constants/apiURLs.js
+++ b/frontend/src/constants/apiURLs.js
@@ -43,7 +43,6 @@ export const NOTIFICATION_BASE_URI = `${API_BASE_URI}/notification`;
 
 // auth
 export const AUTH_BASE_URI = `${API_BASE_URI}/auth`;
-export const SESSIONS_BASE_URI = `${AUTH_BASE_URI}/sessions`;
 export const APIACCESS_BASE_URI = `${AUTH_BASE_URI}/apiaccess`;
 
 // WEBSOCKETS
diff --git a/frontend/src/layouts/widgets/UserMenu.jsx b/frontend/src/layouts/widgets/UserMenu.jsx
index 1cef27f638..f089c3e185 100644
--- a/frontend/src/layouts/widgets/UserMenu.jsx
+++ b/frontend/src/layouts/widgets/UserMenu.jsx
@@ -76,8 +76,8 @@
       )}
       {/* API Access/Sessions */}
-
-        API Access/ Sessions
+
+        API Access
       {/* Change Password */}

From 50e79f73db93ce2b86d2ee72223b0d261ed3fa66 Mon Sep 17 00:00:00 2001
From: Daniele Rosetti
Date: Thu, 22 Feb 2024 11:45:34 +0100
Subject: [PATCH 22/58] fix

---
 authentication/serializers.py | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/authentication/serializers.py b/authentication/serializers.py
index bb9629506d..a24b398ccc 100644
--- a/authentication/serializers.py
+++ b/authentication/serializers.py
@@ -245,9 +245,6 @@ class Meta:
         ]
 
     def create(self, validated_data):
-        """
-        :meta private:
-        """
         user = self.context["request"].user
         if Token.objects.filter(user=user).exists():
             raise rfs.ValidationError("An API token was already issued to you.")
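
The next patch ports the existing durin tokens with a data migration. For readers unfamiliar with the idiom it relies on, here is a minimal standalone sketch of the RunPython plus historical-model pattern (the function body is illustrative only, not the real migration that follows):

    from django.db import migrations


    def forwards(apps, schema_editor):
        # apps.get_model returns the *historical* model state for this point in
        # the migration graph, so the code stays valid even if the model evolves
        Token = apps.get_model("authtoken", "Token")
        # ... create Token rows here ...


    class Migration(migrations.Migration):
        dependencies = [("authentication", "0001_initial")]

        operations = [
            # the second argument is the reverse step; noop keeps the migration
            # formally reversible without trying to resurrect the deleted rows
            migrations.RunPython(forwards, migrations.RunPython.noop),
        ]
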
From c9ad009ab8f3c0465796f77f83dfe0c18ca1e05a Mon Sep 17 00:00:00 2001
From: Daniele Rosetti
Date: Thu, 22 Feb 2024 16:23:54 +0100
Subject: [PATCH 23/58] migrate token from durin to drf

---
 .../migrations/0002_migrate_from_durin.py | 35 +++++++++++++++++++
 1 file changed, 35 insertions(+)
 create mode 100644 authentication/migrations/0002_migrate_from_durin.py

diff --git a/authentication/migrations/0002_migrate_from_durin.py b/authentication/migrations/0002_migrate_from_durin.py
new file mode 100644
index 0000000000..d3e315de3f
--- /dev/null
+++ b/authentication/migrations/0002_migrate_from_durin.py
@@ -0,0 +1,35 @@
+# This file is a part of IntelOwl https://github.com/intelowlproject/IntelOwl
+# See the file 'LICENSE' for copying permission.
+
+from django.conf import settings
+from django.db import migrations
+
+
+def move_token_from_durin(apps, schema_editor):
+    if "durin" in settings.INSTALLED_APPS:
+        AuthToken = apps.get_model("durin", "AuthToken")
+        Client = apps.get_model("durin", "Client")
+        Token = apps.get_model("authtoken", "Token")
+
+        for durin_token in AuthToken.objects.all():
+            # export only CLI token (client name PyIntelOwl)
+            # only in case user didnìt have a rest framework token
+            if durin_token.client.name == "PyIntelOwl":
+                if not Token.objects.filter(user_id=durin_token.user.id).exists():
+                    Token.objects.create(
+                        key=durin_token.token, user_id=durin_token.user.pk
+                    )
+
+        # delete durin db data
+        AuthToken.objects.all().delete()
+        Client.objects.all().delete()
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("authentication", "0001_initial"),
+    ]
+
+    operations = [
+        migrations.RunPython(move_token_from_durin, migrations.RunPython.noop),
+    ]

From c2df6a7202b350a56f80a9564ea0f3cfd04a64fc Mon Sep 17 00:00:00 2001
From: Daniele Rosetti
Date: Thu, 22 Feb 2024 17:09:00 +0100
Subject: [PATCH 24/58] frontend refactor

---
 frontend/src/components/Routes.jsx | 4 ++--
 .../src/components/jobs/result/JobOverview.jsx | 2 +-
 .../{api/APIAccess.jsx => token/TokenAccess.jsx} | 4 ++--
 .../user/{api/APIPage.jsx => token/TokenPage.jsx} | 6 +++---
 .../user/{api/APIPageApi.js => token/tokenApi.js} | 0
 frontend/tests/components/auth/Login.test.jsx | 14 +-------------
 6 files changed, 9 insertions(+), 21 deletions(-)
 rename frontend/src/components/user/{api/APIAccess.jsx => token/TokenAccess.jsx} (98%)
 rename frontend/src/components/user/{api/APIPage.jsx => token/TokenPage.jsx} (91%)
 rename frontend/src/components/user/{api/APIPageApi.js => token/tokenApi.js} (100%)

diff --git a/frontend/src/components/Routes.jsx b/frontend/src/components/Routes.jsx
index 8930e0897e..af37346930 100644
--- a/frontend/src/components/Routes.jsx
+++ b/frontend/src/components/Routes.jsx
@@ -13,7 +13,7 @@ const Register = React.lazy(() => import("./auth/Register"));
 const EmailVerification = React.lazy(() => import("./auth/EmailVerification"));
 const ResetPassword = React.lazy(() => import("./auth/ResetPassword"));
 const Organization = React.lazy(() => import("./organization/Organization"));
-const APIPage = React.lazy(() => import("./user/api/APIPage"));
+const TokenPage = React.lazy(() => import("./user/token/TokenPage"));
 const JobsTable = React.lazy(() => import("./jobs/table/JobsTable"));
 const JobResult = React.lazy(() => import("./jobs/result/JobResult"));
 const CommentResult = React.lazy(() =>
@@ -121,7 +121,7 @@
     path: "/me/api",
     element: (
       }>
-        <APIPage />
+        <TokenPage />
     ),
   },
diff --git a/frontend/src/components/jobs/result/JobOverview.jsx b/frontend/src/components/jobs/result/JobOverview.jsx
index e01d6bcac1..cb848aa3f3 100644
--- a/frontend/src/components/jobs/result/JobOverview.jsx
+++ b/frontend/src/components/jobs/result/JobOverview.jsx
@@ -58,7 +58,7 @@ export function JobOverview({
   section,
   subSection,
 }) {
-  console.debug(`JobOverview rendered: ${JSON.stringify(job)}`);
+  console.debug("JobOverview rendered");
   console.debug(`section: ${section}, subSection: ${subSection}`);
 
   const isSelectedUI = section === JobResultSections.VISUALIZER;
diff --git a/frontend/src/components/user/api/APIAccess.jsx b/frontend/src/components/user/token/TokenAccess.jsx
similarity index 98%
rename from frontend/src/components/user/api/APIAccess.jsx
rename to frontend/src/components/user/token/TokenAccess.jsx
index a1d1716115..f1e70f1714 100644
--- a/frontend/src/components/user/api/APIAccess.jsx +++ b/frontend/src/components/user/token/TokenAccess.jsx @@ -15,7 +15,7 @@ import { confirm, } from "@certego/certego-ui"; -import { APIACCESS_BASE_URI, createNewToken, deleteToken } from "./APIPageApi"; +import { APIACCESS_BASE_URI, createNewToken, deleteToken } from "./tokenApi"; function GenerateIcon() { return ( @@ -26,7 +26,7 @@ function GenerateIcon() { ); } -export default function APIAccess() { +export default function TokenAccess() { console.debug("APIAccess rendered!"); const [{ data: respData, loading, error }, refetch] = useAxios( diff --git a/frontend/src/components/user/api/APIPage.jsx b/frontend/src/components/user/token/TokenPage.jsx similarity index 91% rename from frontend/src/components/user/api/APIPage.jsx rename to frontend/src/components/user/token/TokenPage.jsx index 70107e43e1..2cd18439e9 100644 --- a/frontend/src/components/user/api/APIPage.jsx +++ b/frontend/src/components/user/token/TokenPage.jsx @@ -4,10 +4,10 @@ import useTitle from "react-use/lib/useTitle"; import { ContentSection } from "@certego/certego-ui"; -import APIAccess from "./APIAccess"; +import TokenAccess from "./TokenAccess"; import { PYINTELOWL_GH_URL } from "../../../constants/environment"; -export default function APIPage() { +export default function TokenPage() { console.debug("APIPage rendered!"); // page title @@ -38,7 +38,7 @@ export default function APIPage() { {/* API Access */}
API Access
- + ); diff --git a/frontend/src/components/user/api/APIPageApi.js b/frontend/src/components/user/token/tokenApi.js similarity index 100% rename from frontend/src/components/user/api/APIPageApi.js rename to frontend/src/components/user/token/tokenApi.js diff --git a/frontend/tests/components/auth/Login.test.jsx b/frontend/tests/components/auth/Login.test.jsx index 5860bea0be..4c4fd967f5 100644 --- a/frontend/tests/components/auth/Login.test.jsx +++ b/frontend/tests/components/auth/Login.test.jsx @@ -14,19 +14,7 @@ jest.mock("../../../src/constants/environment", () => ({ describe("Login component", () => { // mock login request - axios.post.mockImplementation({ - data: { - expiry: "2023-02-09T14:52:04.877168Z", - token: "12345a6680364c7bf58b54b8c3d59b93", - user: { - email: "test@test.com", - first_name: "test", - full_name: "test user", - last_name: "user", - username: "test_user", - }, - }, - }); + axios.post.mockImplementation({}); test("User login", async () => { // mock user interaction: reccomanded to put this at the start of the test From b2819bc84b1d5a989e82ac16ac5b5208059c0bd7 Mon Sep 17 00:00:00 2001 From: Daniele Rosetti Date: Fri, 23 Feb 2024 11:30:50 +0100 Subject: [PATCH 25/58] frontend test user token page --- .../src/components/user/token/TokenAccess.jsx | 3 + .../src/components/user/token/TokenPage.jsx | 4 +- .../user/token/TokenAccess.test.jsx | 148 ++++++++++++++++++ .../components/user/token/TokenPage.test.jsx | 27 ++++ 4 files changed, 180 insertions(+), 2 deletions(-) create mode 100644 frontend/tests/components/user/token/TokenAccess.test.jsx create mode 100644 frontend/tests/components/user/token/TokenPage.test.jsx diff --git a/frontend/src/components/user/token/TokenAccess.jsx b/frontend/src/components/user/token/TokenAccess.jsx index f1e70f1714..c7533178e9 100644 --- a/frontend/src/components/user/token/TokenAccess.jsx +++ b/frontend/src/components/user/token/TokenAccess.jsx @@ -36,6 +36,9 @@ export default function TokenAccess() { { useCache: false }, ); + console.debug(`TokenAccess - respData: ${JSON.stringify(respData)}`); + console.debug(`TokenAccess - error: ${JSON.stringify(error)}`); + // local state const [tokenVisible, setTokenVisible] = React.useState(false); diff --git a/frontend/src/components/user/token/TokenPage.jsx b/frontend/src/components/user/token/TokenPage.jsx index 2cd18439e9..a535fe3cc6 100644 --- a/frontend/src/components/user/token/TokenPage.jsx +++ b/frontend/src/components/user/token/TokenPage.jsx @@ -21,8 +21,8 @@ export default function TokenPage() { - You can generate an API key to access IntelOwl's RESTful - API.  Take a look to the available Python and Go clients: + You can generate an API key to access IntelOwl's RESTful API. 
+ Take a look to the available Python and Go clients: { + beforeEach(() => { + jest.clearAllMocks(); + + axios.post.mockImplementation(() => Promise.resolve({ + data: { + key: "987654321", + created: "2024-02-22T18:48:18.257944", + } + })); + axios.delete.mockImplementation(() => Promise.resolve({ + data: {} + })); + }); + + test("render with token", async () => { + useAxios.mockImplementation(() => [{ + data: { + key: "123456789", + created: "2024-02-22T15:48:18.257944", + }, + loading: false, + error: "", + }]); + + const user = userEvent.setup(); + + const result = render( + + ); + expect(screen.getByText("Created")).toBeInTheDocument(); + expect(screen.getByText("03:48 PM Feb 22nd, 2024")).toBeInTheDocument(); + + // test user interaction + const showButton = result.container.querySelector('#toggle-show-apikey-btn'); + expect(showButton).toBeInTheDocument(); + await user.click(showButton); + expect(screen.getByText("123456789")).toBeInTheDocument(); + }) + + test("render without token", () => { + useAxios.mockImplementation(() => [{ + data: undefined, + loading: false, + error: {response: {status: 404}, "errors":{"detail":"Not found."}}, + }]); + + render( + + ); + + expect(screen.getByText("No active API key")).toBeInTheDocument(); + }) + + test("delete token", async () => { + useAxios.mockImplementation(() => [{ + data: { + key: "987654321", + created: "2024-02-22T18:48:18.257944", + }, + loading: false, + error: "", + }]).mockImplementationOnce(() => [{ + data: { + key: "123456789", + created: "2024-02-22T15:48:18.257944", + }, + loading: false, + error: "", + }]); + + const user = userEvent.setup(); + + const result = render( + + ); + expect(screen.getByText("Created")).toBeInTheDocument(); + expect(screen.getByText("03:48 PM Feb 22nd, 2024")).toBeInTheDocument(); + + const deleteButton = result.container.querySelector('#delete-apikey-btn'); + expect(deleteButton).toBeInTheDocument(); + await user.click(deleteButton); + const deletionConfirmButton = screen.getByRole("button", { name: /Yes/i }); + expect(deletionConfirmButton).toBeInTheDocument(); + await user.click(deletionConfirmButton); + await waitFor(() => { + expect(axios.delete).toHaveBeenCalledWith( + `${APIACCESS_BASE_URI}`, + ); + }); + await waitFor(() => { + expect(useAxios).toHaveBeenCalledWith( + {"url": `${APIACCESS_BASE_URI}`}, {"useCache": false} + ); + }) + result.rerender() + expect(screen.getByText("06:48 PM Feb 22nd, 2024")).toBeInTheDocument(); + }) + + test("create token", async () => { + useAxios.mockImplementation(() => [{ + data: { + key: "987654321", + created: "2024-02-22T18:48:18.257944", + }, + loading: false, + error: "", + }]).mockImplementationOnce(() => [{ + data: undefined, + loading: false, + error: {response: {status: 404}, "errors":{"detail":"Not found."}}, + }]); + + const user = userEvent.setup(); + + const result = render( + + ); + + expect(screen.getByText("No active API key")).toBeInTheDocument(); + const createButton = result.container.querySelector('#create-apikey-btn'); + expect(createButton).toBeInTheDocument(); + await user.click(createButton); + await waitFor(() => { + expect(axios.post).toHaveBeenCalledWith( + `${APIACCESS_BASE_URI}`, + ); + }); + + result.rerender() + expect(screen.getByText("Created")).toBeInTheDocument(); + expect(screen.getByText("06:48 PM Feb 22nd, 2024")).toBeInTheDocument(); + }) +}); diff --git a/frontend/tests/components/user/token/TokenPage.test.jsx b/frontend/tests/components/user/token/TokenPage.test.jsx new file mode 100644 index 0000000000..3e5145ca34 
--- /dev/null +++ b/frontend/tests/components/user/token/TokenPage.test.jsx @@ -0,0 +1,27 @@ +import React from "react"; +import "@testing-library/jest-dom"; +import { render, screen } from "@testing-library/react"; + +import useAxios from 'axios-hooks' + +import TokenPage from "../../../../src/components/user/token/TokenPage"; + +jest.mock('axios-hooks') + +useAxios.mockReturnValue([{ + data: { + key: "123456789", + created: "2024-02-22T15:48:18.257944Z", + }, +}]); + +describe("test TokenPage", () => { + test("render", () => { + render( + + ); + expect(screen.getByText("API Access")).toBeInTheDocument(); + expect(screen.getByText("Created")).toBeInTheDocument(); + expect(screen.getByText("04:48 PM Feb 22nd, 2024")).toBeInTheDocument(); + }) +}); From 37d306bcb0eb1c556c4d3f397bd51c2bf1392ca5 Mon Sep 17 00:00:00 2001 From: Daniele Rosetti Date: Tue, 27 Feb 2024 17:41:50 +0100 Subject: [PATCH 26/58] fixed auth tests --- .../migrations/0002_migrate_from_durin.py | 4 +- authentication/views.py | 22 ++++------ docker/ci.override.yml | 5 +++ intel_owl/settings/rest.py | 6 +-- tests/api_app/test_views.py | 2 +- tests/auth/test_auth.py | 40 +++++-------------- tests/auth/test_oauth.py | 19 +++++---- 7 files changed, 42 insertions(+), 56 deletions(-) diff --git a/authentication/migrations/0002_migrate_from_durin.py b/authentication/migrations/0002_migrate_from_durin.py index d3e315de3f..b3d75d91fb 100644 --- a/authentication/migrations/0002_migrate_from_durin.py +++ b/authentication/migrations/0002_migrate_from_durin.py @@ -13,7 +13,7 @@ def move_token_from_durin(apps, schema_editor): for durin_token in AuthToken.objects.all(): # export only CLI token (client name PyIntelOwl) - # only in case user didnìt have a rest framework token + # only in case user didn't have a rest framework token if durin_token.client.name == "PyIntelOwl": if not Token.objects.filter(user_id=durin_token.user.id).exists(): Token.objects.create( @@ -28,6 +28,8 @@ def move_token_from_durin(apps, schema_editor): class Migration(migrations.Migration): dependencies = [ ("authentication", "0001_initial"), + ("authtoken", "0003_tokenproxy"), + ("api_app", "0054_job_jobbisearch"), ] operations = [ diff --git a/authentication/views.py b/authentication/views.py index 10e693fba2..7674e359e1 100644 --- a/authentication/views.py +++ b/authentication/views.py @@ -183,22 +183,16 @@ def validate_and_return_user(request): email=user_email, username=user_name, password=None ) - def get(self, *args, **kwargs): - return self.post(*args, **kwargs) + def get(self, request, *args, **kwargs): + return self.post(request, *args, **kwargs) - def post(self, *args, **kwargs): - response = super().post(*args, **kwargs) - token = response.data["token"] + def post(self, request, *args, **kwargs): + user = self.validate_and_return_user(request=request) + logger.info(f"perform_login received request from '{user.username}''.") + login(request, user) # Uncomment this for local testing - # return redirect(f"http://localhost:3001/login?token={token}") - return redirect(self.request.build_absolute_uri(f"/login?token={token}")) - - @staticmethod - def get_post_response_data(request, token_obj) -> dict: - data = { - "token": token_obj.token, - } - return data + # return redirect("http://localhost/login") + return redirect(self.request.build_absolute_uri("/login")) @api_view(["get"]) diff --git a/docker/ci.override.yml b/docker/ci.override.yml index 20b7b18431..37b00f95da 100644 --- a/docker/ci.override.yml +++ b/docker/ci.override.yml @@ -14,6 +14,11 @@ services: 
image: intelowlproject/intelowl:ci env_file: - env_file_app_ci + + daphne: + image: intelowlproject/intelowl:ci + env_file: + - env_file_app_ci nginx: build: diff --git a/intel_owl/settings/rest.py b/intel_owl/settings/rest.py index 904e3205e7..949fedc6ff 100644 --- a/intel_owl/settings/rest.py +++ b/intel_owl/settings/rest.py @@ -13,9 +13,9 @@ "DEFAULT_RENDERER_CLASSES": ["rest_framework.renderers.JSONRenderer"], # Auth "DEFAULT_AUTHENTICATION_CLASSES": [ - 'rest_framework.authentication.SessionAuthentication', - 'rest_framework.authentication.TokenAuthentication', - 'rest_framework.authentication.BasicAuthentication', + "rest_framework.authentication.TokenAuthentication", + "rest_framework.authentication.SessionAuthentication", + "rest_framework.authentication.BasicAuthentication", ], # Pagination "DEFAULT_PAGINATION_CLASS": "certego_saas.ext.pagination.CustomPageNumberPagination", diff --git a/tests/api_app/test_views.py b/tests/api_app/test_views.py index 1941773d5b..9a4508cff0 100644 --- a/tests/api_app/test_views.py +++ b/tests/api_app/test_views.py @@ -36,7 +36,7 @@ def test_get(self): # logged out self.client.logout() response = self.client.get(self.URL, {}, format="json") - self.assertEqual(response.status_code, 401) + self.assertEqual(response.status_code, 401, response.json()) param = Parameter.objects.create( is_secret=True, diff --git a/tests/auth/test_auth.py b/tests/auth/test_auth.py index 62f15adbde..48c33fa203 100644 --- a/tests/auth/test_auth.py +++ b/tests/auth/test_auth.py @@ -2,10 +2,10 @@ # See the file 'LICENSE' for copying permission. from django.contrib.auth import get_user_model +from django.contrib.sessions.models import Session from django.core import mail from django.core.cache import cache from django.test import tag -from durin.models import AuthToken, Client from rest_email_auth.models import EmailConfirmation, PasswordResetToken from rest_framework.reverse import reverse @@ -44,45 +44,27 @@ def setUp(self): def tearDown(self): # skipcq: PYL-R0201 # cache clear (for throttling) cache.clear() - # db clean - AuthToken.objects.all().delete() - Client.objects.all().delete() def test_login_200(self): - self.assertEqual(AuthToken.objects.count(), 0) + self.assertEqual(Session.objects.count(), 0) body = { **self.creds, "recaptcha": "testkey", } response = self.client.post(login_uri, body) - content = response.json() - msg = (response, content) - - self.assertEqual(response.status_code, 200, msg=msg) - self.assertIn("token", response.data, msg=msg) - self.assertIn("expiry", response.data, msg=msg) - self.assertIn("user", response.data, msg=msg) - self.assertIn(self.user.USERNAME_FIELD, response.data["user"], msg=msg) - - self.assertEqual(AuthToken.objects.count(), 1) + self.assertEqual(response.status_code, 200) + self.assertEqual(Session.objects.count(), 1) + session = Session.objects.all().first() + session_data = session.get_decoded() + self.assertIsNotNone(session_data) + self.assertIn("_auth_user_id", session_data.keys()) + self.assertEqual(str(self.user.pk), session_data["_auth_user_id"]) def test_logout_204(self): - self.assertEqual(AuthToken.objects.count(), 0) - - token = AuthToken.objects.create( - user=self.user, - client=Client.objects.create(name="test_logout_deletes_keys"), - ) - self.assertEqual(AuthToken.objects.count(), 1) - - self.client.credentials(HTTP_AUTHORIZATION=("Token %s" % token.token)) + self.client.force_authenticate(user=self.user) response = self.client.post(logout_uri) - - self.assertEqual(response.status_code, 204, msg=(response)) - 
self.assertEqual( - AuthToken.objects.count(), 0, "other tokens should remain after logout" - ) + self.assertEqual(response.status_code, 200) def test_register_username_taken_400(self): current_users = User.objects.count() diff --git a/tests/auth/test_oauth.py b/tests/auth/test_oauth.py index fdc36809a8..d668e45eee 100644 --- a/tests/auth/test_oauth.py +++ b/tests/auth/test_oauth.py @@ -5,8 +5,8 @@ from urllib.parse import parse_qs, urlparse from django.contrib.auth import get_user_model +from django.contrib.sessions.models import Session from django.test import tag -from durin.models import AuthToken from rest_framework import status from rest_framework.reverse import reverse @@ -37,6 +37,8 @@ def test_google_disabled(self): oauth._registry = prev_registry def test_google_enabled(self): + # IMPORTANT! Without GOOGLE_CLIENT_ID and GOOGLE_CLIENT_SECRET configured + # this test will fail! response = self.client.get(self.google_auth_uri, follow=False) self.assertEqual(response.status_code, 302) msg = response.url @@ -60,15 +62,16 @@ def test_google_enabled(self): @patch("authentication.views.GoogleLoginCallbackView.validate_and_return_user") def test_google_callback(self, mock_validate_and_return_user: Mock): + self.assertEqual(Session.objects.count(), 0) mock_validate_and_return_user.return_value = self.user response = self.client.get(self.google_auth_callback_uri, follow=False) msg = response.url self.assertEqual(response.status_code, 302, msg) response_redirect = urlparse(response.url) - response_redirect_query = parse_qs(response_redirect.query) - self.assertTrue( - AuthToken.objects.filter( - token=response_redirect_query.get("token")[0], user=self.user - ).exists(), - msg=msg, - ) + print(response_redirect) + self.assertEqual(Session.objects.count(), 1) + session = Session.objects.all().first() + session_data = session.get_decoded() + self.assertIsNotNone(session_data) + self.assertIn("_auth_user_id", session_data.keys()) + self.assertEqual(str(self.user.pk), session_data["_auth_user_id"]) From a929d997e31b7e7852a4e8990b4400babdfd5a22 Mon Sep 17 00:00:00 2001 From: Daniele Rosetti Date: Wed, 28 Feb 2024 16:07:57 +0100 Subject: [PATCH 27/58] test api to manage api token --- tests/auth/test_api.py | 111 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 111 insertions(+) create mode 100644 tests/auth/test_api.py diff --git a/tests/auth/test_api.py b/tests/auth/test_api.py new file mode 100644 index 0000000000..7028860dd4 --- /dev/null +++ b/tests/auth/test_api.py @@ -0,0 +1,111 @@ +# This file is a part of IntelOwl https://github.com/intelowlproject/IntelOwl +# See the file 'LICENSE' for copying permission. + +from django.contrib.auth import get_user_model +from django.test import tag +from rest_framework.authtoken.models import Token +from rest_framework.reverse import reverse + +from . 
import CustomOAuthTestCase + +User = get_user_model() + +api_uri = reverse("auth_apiaccess") + + +@tag("api", "user") +class TestUserAuth(CustomOAuthTestCase): + # def setUp(self): + # # test data + # self.testregisteruser = { + # "email": "testregisteruser@test.com", + # "username": "testregisteruser", + # "first_name": "testregisteruser", + # "last_name": "testregisteruser", + # "password": "testregisteruser", + # "profile": { + # "company_name": "companytest", + # "company_role": "intelowl test", + # "twitter_handle": "@fake", + # "discover_from": "other", + # }, + # } + # mail.outbox = [] + + # def tearDown(self): # skipcq: PYL-R0201 + # # cache clear (for throttling) + # cache.clear() + + def test_get_token_unauthorized(self): + response = self.client.get(api_uri) + self.assertEqual(response.status_code, 401) + self.assertEqual( + response.json(), {"detail": "Authentication credentials were not provided."} + ) + + def test_get_token_no_token_available(self): + self.assertEqual(Token.objects.count(), 0) + self.client.force_authenticate(self.user) + response = self.client.get(api_uri) + self.assertEqual(response.status_code, 404) + + def test_get_token_available(self): + token, _ = Token.objects.get_or_create(user=self.user) + self.client.force_authenticate(self.user) + response = self.client.get(api_uri) + self.assertEqual(response.status_code, 200) + response_data = response.json() + self.assertEqual(response_data["key"], token.key) + self.assertEqual( + response_data["created"], token.created.strftime("%Y-%m-%dT%H:%M:%S.%fZ") + ) + + def test_create_token_unauthorized(self): + response = self.client.post(api_uri) + self.assertEqual(response.status_code, 401) + self.assertEqual( + response.json(), {"detail": "Authentication credentials were not provided."} + ) + + def test_create_token_already_exist(self): + Token.objects.get_or_create(user=self.user) + self.client.force_authenticate(self.user) + response = self.client.post(api_uri) + self.assertEqual(response.status_code, 400) + response_data = response.json() + self.assertCountEqual( + response_data, {"errors": ["An API token was already issued to you."]} + ) + + def test_create_token(self): + self.client.force_authenticate(self.user) + response = self.client.post(api_uri) + self.assertEqual(response.status_code, 201) + response_data = response.json() + token = Token.objects.get(user=self.user) + self.assertEqual(response_data["key"], token.key) + self.assertEqual( + response_data["created"], token.created.strftime("%Y-%m-%dT%H:%M:%S.%fZ") + ) + + def test_delete_token_unauthorized(self): + Token.objects.get_or_create(user=self.user) + response = self.client.delete(api_uri) + self.assertEqual(response.status_code, 401) + self.assertEqual( + response.json(), {"detail": "Authentication credentials were not provided."} + ) + self.assertEqual(Token.objects.count(), 1) + + def test_delete_token_unavailable(self): + self.assertEqual(Token.objects.count(), 0) + self.client.force_authenticate(self.user) + response = self.client.delete(api_uri) + self.assertEqual(response.status_code, 404) + + def test_delete_token(self): + Token.objects.get_or_create(user=self.user) + self.client.force_authenticate(self.user) + response = self.client.delete(api_uri) + self.assertEqual(response.status_code, 204) + self.assertEqual(Token.objects.count(), 0) From b8f35024eca78354ce064a2abe60b4cdeb0ab387 Mon Sep 17 00:00:00 2001 From: Daniele Rosetti Date: Wed, 28 Feb 2024 16:20:06 +0100 Subject: [PATCH 28/58] removed comment --- tests/auth/test_api.py | 21 
--------------------- 1 file changed, 21 deletions(-) diff --git a/tests/auth/test_api.py b/tests/auth/test_api.py index 7028860dd4..2c8c3e1f6c 100644 --- a/tests/auth/test_api.py +++ b/tests/auth/test_api.py @@ -15,27 +15,6 @@ @tag("api", "user") class TestUserAuth(CustomOAuthTestCase): - # def setUp(self): - # # test data - # self.testregisteruser = { - # "email": "testregisteruser@test.com", - # "username": "testregisteruser", - # "first_name": "testregisteruser", - # "last_name": "testregisteruser", - # "password": "testregisteruser", - # "profile": { - # "company_name": "companytest", - # "company_role": "intelowl test", - # "twitter_handle": "@fake", - # "discover_from": "other", - # }, - # } - # mail.outbox = [] - - # def tearDown(self): # skipcq: PYL-R0201 - # # cache clear (for throttling) - # cache.clear() - def test_get_token_unauthorized(self): response = self.client.get(api_uri) self.assertEqual(response.status_code, 401) From e7be4224404c3f73f8949df0a8407aa8e012bdef Mon Sep 17 00:00:00 2001 From: Daniele Rosetti Date: Tue, 5 Mar 2024 10:06:28 +0100 Subject: [PATCH 29/58] unittest ws --- api_app/websocket.py | 31 +++-- tests/api_app/test_websocket.py | 202 ++++++++++++++++++++++++++++++++ 2 files changed, 220 insertions(+), 13 deletions(-) create mode 100644 tests/api_app/test_websocket.py diff --git a/api_app/websocket.py b/api_app/websocket.py index f0ee3c96ab..8429a1eee7 100644 --- a/api_app/websocket.py +++ b/api_app/websocket.py @@ -16,18 +16,23 @@ def connect(self): user = self.scope["user"] job_id = self.scope["url_route"]["kwargs"]["job_id"] logger.info(f"user: {user} requested the analysis for the job {job_id}") - self.accept() - job = Job.objects.get(id=job_id) - job_serializer = JobSerializer(job) - job_data = job_serializer.data - async_to_sync(self.channel_layer.group_add)( - JobConsumer.generate_group_name(job_id), self.channel_name - ) - # send data - async_to_sync(self.channel_layer.group_send)( - JobConsumer.generate_group_name(job_id), - {"type": "send.job", "job": job_data}, - ) + try: + job = Job.objects.get(id=job_id) + except Job.DoesNotExist: + logger.error(f"user: {user} request the non-existing job: {job_id}") + self.close(code=4040) + else: + self.accept() + job_serializer = JobSerializer(job) + job_data = job_serializer.data + async_to_sync(self.channel_layer.group_add)( + JobConsumer.generate_group_name(job_id), self.channel_name + ) + # send data + async_to_sync(self.channel_layer.group_send)( + JobConsumer.generate_group_name(job_id), + {"type": "send.job", "job": job_data}, + ) def disconnect(self, close_code): user = self.scope["user"] @@ -38,7 +43,7 @@ def disconnect(self, close_code): logger.info( f"user: {user} disconnected for the job: {job_id}. 
Close code: {close_code}" ) - self.close() + self.close(code=close_code) def receive_json(self, content): user = self.scope["user"] diff --git a/tests/api_app/test_websocket.py b/tests/api_app/test_websocket.py new file mode 100644 index 0000000000..62113b0534 --- /dev/null +++ b/tests/api_app/test_websocket.py @@ -0,0 +1,202 @@ +import abc +import datetime +import time +from contextlib import asynccontextmanager + +from asgiref.sync import sync_to_async +from channels.layers import channel_layers +from channels.testing import WebsocketCommunicator +from django.contrib.auth import get_user_model +from django.test import TransactionTestCase + +from api_app.analyzers_manager.constants import ObservableTypes, TypeChoices +from api_app.analyzers_manager.models import AnalyzerConfig +from api_app.choices import ParamTypes +from api_app.models import Job, Parameter, PluginConfig, PythonModule +from intel_owl.asgi import application +from intel_owl.tasks import job_set_final_status, run_plugin + +User = get_user_model() + + +class WebsocketTestCase(TransactionTestCase, metaclass=abc.ABCMeta): + """Class with utilities function for testing websockets""" + + @asynccontextmanager + async def connect_communicator(self, job_id: int, user: User = None): + """Connects a websocket communicator to this testcase application, + forcing given user to be added to its scope. + + To be used as context manager (disconnects on exit) + + :param job_id: id of the job to retrieve data + :type job_id: int + :param user: user to connect to websocket + :type user: auth.user + :yield: communicator and flag indicationg connection success + :rtype: tuple(WebsocketCommunicator, bool, int) + """ + communicator = WebsocketCommunicator(application, f"ws/jobs/{job_id}") + if user: + communicator.scope["user"] = user + connected, subprotocol = await communicator.connect() + try: + yield communicator, connected, subprotocol + finally: + await communicator.disconnect() + + def _pre_setup(self): + super()._pre_setup() + # force channel layers backend reset, this may avoid some RuntimeError + channel_layers.backends = {} + + +class JobConsumerTestCase(WebsocketTestCase): + def setUp(self) -> None: + self.user = User.objects.create(username="websocket_test") + self.job = Job.objects.create( + id=1027, + user=self.user, + status=Job.Status.REPORTED_WITHOUT_FAILS.value, + observable_name="8.8.8.8", + observable_classification=ObservableTypes.IP, + received_request_time=datetime.datetime.now(), + ) + + async def test_job_unauthorized(self): + self.assertEqual(await sync_to_async(Job.objects.filter(id=1027).count)(), 1) + async with self.connect_communicator(1027) as (_, connected, subprotocol): + self.assertFalse(connected) + self.assertEqual(subprotocol, 1008) + + async def test_job_not_exist(self): + self.assertEqual(await sync_to_async(Job.objects.filter(id=1028).count)(), 0) + async with self.connect_communicator(1028, self.user) as ( + _, + connected, + subprotocol, + ): + self.assertFalse(connected) + self.assertEqual(subprotocol, 4040) + + async def test_job_terminated(self): + self.assertEqual(await sync_to_async(Job.objects.filter(id=1027).count)(), 1) + async with self.connect_communicator(1027, self.user) as ( + communicator, + connected, + _, + ): + self.assertTrue(connected) + job_report = await communicator.receive_json_from() + self.assertEqual(job_report["id"], 1027) + self.assertEqual(job_report["observable_name"], "8.8.8.8") + self.assertEqual( + job_report["status"], Job.Status.REPORTED_WITHOUT_FAILS.value + ) + + 
async def test_job_running(self): + # Note: Sometimes reading from ws (receive_json_from) is too fast: + # it happens before other part of code send data. + # The test will be blocked waiting a response from ws that already happened. + # we need a sleep to wait. + # in this test happens for the functions: run_plugin set_final_status. + job = await sync_to_async(Job.objects.create)( + id=1029, + user=self.user, + status=Job.Status.PENDING.value, + observable_name="test.com", + observable_classification=ObservableTypes.DOMAIN, + received_request_time=datetime.datetime.now(), + ) + class_dns_python_module, _ = await sync_to_async( + PythonModule.objects.get_or_create + )( + base_path="api_app.analyzers_manager.observable_analyzers", + module="dns.dns_resolvers.classic_dns_resolver.ClassicDNSResolver", + ) + classic_dns_analyzer_config, _ = await sync_to_async( + AnalyzerConfig.objects.get_or_create + )( + name="Classic_DNS", + python_module=class_dns_python_module, + type=TypeChoices.OBSERVABLE.value, + observable_supported=[ + ObservableTypes.IP.value, + ObservableTypes.DOMAIN.value, + ObservableTypes.URL.value, + ], + ) + analyzer_list = [classic_dns_analyzer_config] + await sync_to_async(job.analyzers_requested.set)(analyzer_list) + await sync_to_async(job.analyzers_to_execute.set)(analyzer_list) + query_type_param = Parameter.objects.filter( + name="query_type", + description="Query type against the chosen DNS resolver.", + python_module=class_dns_python_module, + ) + if await sync_to_async(query_type_param.count)(): + query_type_param = await sync_to_async(query_type_param.first)() + else: + query_type_param = Parameter( + name="query_type", + description="Query type against the chosen DNS resolver.", + python_module=class_dns_python_module, + type=ParamTypes.STR, + is_secret=False, + required=True, + ) + await sync_to_async(query_type_param.save)() + plugin_config = PluginConfig( + owner=self.user, + for_organization=False, + value="A", + analyzer_config=classic_dns_analyzer_config, + parameter=query_type_param, + parameter_id=query_type_param.id, + ) + await sync_to_async(plugin_config.save)() + async with self.connect_communicator(1029, self.user) as ( + communicator, + connected, + _, + ): + self.assertTrue(connected) + job_report_running = await communicator.receive_json_from() + self.assertEqual(job_report_running["id"], 1029) + self.assertEqual(job_report_running["observable_name"], "test.com") + self.assertEqual(job_report_running["status"], Job.Status.PENDING.value) + self.assertEqual(job_report_running["analyzer_reports"], []) + self.assertIsNone(job_report_running["finished_analysis_time"]) + time.sleep(5) + # run plugin + await sync_to_async(run_plugin)( + job_id=1029, + python_module_pk=class_dns_python_module.pk, + plugin_config_pk=classic_dns_analyzer_config.pk, + runtime_configuration={}, + task_id=10, + ) + time.sleep(5) + job_analyzer_terminated = await communicator.receive_json_from() + time.sleep(5) + self.assertEqual(job_analyzer_terminated["id"], 1029) + self.assertEqual(job_analyzer_terminated["observable_name"], "test.com") + self.assertEqual( + job_analyzer_terminated["status"], Job.Status.PENDING.value + ) + self.assertIsNotNone(job_analyzer_terminated["analyzer_reports"]) + self.assertIsNone(job_analyzer_terminated["finished_analysis_time"]) + # terminate job (force status) + job.status = Job.Status.REPORTED_WITHOUT_FAILS + await sync_to_async(job.save)() + await sync_to_async(job_set_final_status)(1029) + time.sleep(5) + job_report_terminated = await 
communicator.receive_json_from() + time.sleep(5) + self.assertEqual(job_report_terminated["id"], 1029) + self.assertEqual(job_report_terminated["observable_name"], "test.com") + self.assertEqual( + job_report_terminated["status"], Job.Status.REPORTED_WITHOUT_FAILS.value + ) + self.assertIsNotNone(job_report_terminated["analyzer_reports"]) + self.assertIsNotNone(job_report_terminated["finished_analysis_time"]) From e562685f4c39a8fe4ff37b2592a6c41a201e2101 Mon Sep 17 00:00:00 2001 From: Daniele Rosetti Date: Tue, 5 Mar 2024 11:04:59 +0100 Subject: [PATCH 30/58] added daphne healthcheck --- docker/default.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/docker/default.yml b/docker/default.yml index 4bd2b331e2..5bcd18df49 100644 --- a/docker/default.yml +++ b/docker/default.yml @@ -39,6 +39,12 @@ services: - "8011" env_file: - env_file_app + healthcheck: + test: ["CMD-SHELL", "nc -z localhost 8011 || exit 1"] + interval: 10s + timeout: 2s + start_period: 90s + retries: 5 nginx: image: intelowlproject/intelowl_nginx:${REACT_APP_INTELOWL_VERSION} From 753386de84bdba5666e7664980e0cecb48dd0f73 Mon Sep 17 00:00:00 2001 From: Daniele Rosetti Date: Tue, 5 Mar 2024 15:32:33 +0100 Subject: [PATCH 31/58] refactor --- api_app/websocket.py | 34 ++++++++++++++++++++-------------- intel_owl/tasks.py | 18 ++---------------- 2 files changed, 22 insertions(+), 30 deletions(-) diff --git a/api_app/websocket.py b/api_app/websocket.py index 8429a1eee7..0083780412 100644 --- a/api_app/websocket.py +++ b/api_app/websocket.py @@ -2,6 +2,7 @@ from asgiref.sync import async_to_sync from channels.generic.websocket import JsonWebsocketConsumer +from channels.layers import get_channel_layer from api_app.choices import Status from api_app.models import Job @@ -11,7 +12,7 @@ class JobConsumer(JsonWebsocketConsumer): - def connect(self): + def connect(self) -> None: logger.debug(f"{self.scope=}") user = self.scope["user"] job_id = self.scope["url_route"]["kwargs"]["job_id"] @@ -23,35 +24,29 @@ def connect(self): self.close(code=4040) else: self.accept() - job_serializer = JobSerializer(job) - job_data = job_serializer.data async_to_sync(self.channel_layer.group_add)( - JobConsumer.generate_group_name(job_id), self.channel_name - ) - # send data - async_to_sync(self.channel_layer.group_send)( - JobConsumer.generate_group_name(job_id), - {"type": "send.job", "job": job_data}, + JobConsumer._generate_group_name(job_id), self.channel_name ) + JobConsumer.serialize_and_send_job(job) - def disconnect(self, close_code): + def disconnect(self, close_code) -> None: user = self.scope["user"] job_id = self.scope["url_route"]["kwargs"]["job_id"] async_to_sync(self.channel_layer.group_discard)( - JobConsumer.generate_group_name(job_id), self.channel_name + JobConsumer._generate_group_name(job_id), self.channel_name ) logger.info( f"user: {user} disconnected for the job: {job_id}. 
Close code: {close_code}" ) self.close(code=close_code) - def receive_json(self, content): + def receive_json(self, content) -> None: user = self.scope["user"] logger.warning( f"user {user} send {content} to the websocket, this shouldn't happen" ) - def send_job(self, event): + def send_job(self, event) -> None: job_data = event["job"] logger.debug(f"job data: {job_data}") self.send_json(content=job_data) @@ -60,5 +55,16 @@ def send_job(self, event): self.close() @classmethod - def generate_group_name(cls, job_id: int): + def _generate_group_name(self, job_id: int) -> str: return f"job-{job_id}" + + @classmethod + def serialize_and_send_job(cls, job: Job) -> None: + job_serializer = JobSerializer(job) + job_data = job_serializer.data + # send data + channel_layer = get_channel_layer() + async_to_sync(channel_layer.group_send)( + cls._generate_group_name(job.id), + {"type": "send.job", "job": job_data}, + ) diff --git a/intel_owl/tasks.py b/intel_owl/tasks.py index 7b1404fe1b..ed6a87a2c0 100644 --- a/intel_owl/tasks.py +++ b/intel_owl/tasks.py @@ -9,12 +9,10 @@ import typing import uuid -from asgiref.sync import async_to_sync from celery import Task, shared_task, signals from celery.worker.consumer import Consumer from celery.worker.control import control_command from celery.worker.request import Request -from channels.layers import get_channel_layer from django.conf import settings from django.db.models import Q from django.utils.timezone import now @@ -211,18 +209,12 @@ def update_notifications_with_releases(): @app.task(name="job_set_final_status", soft_time_limit=30) def job_set_final_status(job_id: int): from api_app.models import Job - from api_app.serializers import JobSerializer from api_app.websocket import JobConsumer job = Job.objects.get(pk=job_id) # execute some callbacks job.set_final_status() - channel_layer = get_channel_layer() - job_serializer = JobSerializer(job) - job_data = job_serializer.data - async_to_sync(channel_layer.group_send)( - JobConsumer.generate_group_name(job_id), {"type": "send.job", "job": job_data} - ) + JobConsumer.serialize_and_send_job(job) @shared_task(base=FailureLoggedTask, name="job_set_pipeline_status", soft_time_limit=30) @@ -268,7 +260,6 @@ def run_plugin( ): from api_app.classes import Plugin from api_app.models import Job, PythonModule - from api_app.serializers import JobSerializer from api_app.websocket import JobConsumer logger.info( @@ -296,12 +287,7 @@ def run_plugin( status=plugin.report_model.Status.FAILED.value ) job = Job.objects.get(pk=job_id) - channel_layer = get_channel_layer() - job_serializer = JobSerializer(job) - job_data = job_serializer.data - async_to_sync(channel_layer.group_send)( - JobConsumer.generate_group_name(job_id), {"type": "send.job", "job": job_data} - ) + JobConsumer.serialize_and_send_job(job) @shared_task(base=FailureLoggedTask, name="create_caches", soft_time_limit=200) From e2a6e841cf8d5f74542fcb4d7aea20d422e76db6 Mon Sep 17 00:00:00 2001 From: Daniele Rosetti Date: Tue, 5 Mar 2024 16:18:27 +0100 Subject: [PATCH 32/58] send data via ws when the job or a plugin is killed --- api_app/models.py | 2 ++ api_app/views.py | 2 ++ 2 files changed, 4 insertions(+) diff --git a/api_app/models.py b/api_app/models.py index 70fa493b2a..dd6104b641 100644 --- a/api_app/models.py +++ b/api_app/models.py @@ -474,6 +474,7 @@ def kill_if_ongoing(self): from api_app.analyzers_manager.models import AnalyzerConfig from api_app.connectors_manager.models import ConnectorConfig from api_app.visualizers_manager.models import 
VisualizerConfig + from api_app.websocket import JobConsumer from intel_owl.celery import app as celery_app for config in [AnalyzerConfig, ConnectorConfig, VisualizerConfig]: @@ -493,6 +494,7 @@ def kill_if_ongoing(self): self.status = self.Status.KILLED self.save(update_fields=["status"]) + JobConsumer.serialize_and_send_job(self) def _get_signatures(self, queryset: PythonConfigQuerySet) -> Signature: config_class: PythonConfig = queryset.model diff --git a/api_app/views.py b/api_app/views.py index c2dc7e4cfd..21a6ce701b 100644 --- a/api_app/views.py +++ b/api_app/views.py @@ -21,6 +21,7 @@ from rest_framework.response import Response from rest_framework.viewsets import ModelViewSet +from api_app.websocket import JobConsumer from certego_saas.apps.organization.permissions import IsObjectOwnerOrSameOrgPermission from certego_saas.apps.organization.permissions import ( IsObjectOwnerPermission as IsObjectUserPermission, @@ -727,6 +728,7 @@ def perform_kill(report: AbstractReport): job = Job.objects.get(pk=report.job.pk) job.set_final_status() + JobConsumer.serialize_and_send_job(job) @staticmethod def perform_retry(report: AbstractReport): From a3031b09f143c207e5745d53376a4986e8e225aa Mon Sep 17 00:00:00 2001 From: Daniele Rosetti Date: Wed, 6 Mar 2024 12:38:22 +0100 Subject: [PATCH 33/58] refactor ws + permission to kill job --- api_app/migrations/0055_jobchannel.py | 38 ++++++++++++ api_app/models.py | 2 +- api_app/serializers.py | 34 +++++++++-- api_app/views.py | 8 +-- api_app/weboscket/__init__.py | 0 .../{websocket.py => weboscket/consumer.py} | 29 +++++---- api_app/weboscket/models.py | 21 +++++++ intel_owl/asgi.py | 2 +- intel_owl/tasks.py | 4 +- tests/api_app/test_serializers.py | 6 +- tests/api_app/websocket/__init__.py | 0 .../api_app/{ => websocket}/test_websocket.py | 61 +++++++++++++++++-- 12 files changed, 171 insertions(+), 34 deletions(-) create mode 100644 api_app/migrations/0055_jobchannel.py create mode 100644 api_app/weboscket/__init__.py rename api_app/{websocket.py => weboscket/consumer.py} (69%) create mode 100644 api_app/weboscket/models.py create mode 100644 tests/api_app/websocket/__init__.py rename tests/api_app/{ => websocket}/test_websocket.py (77%) diff --git a/api_app/migrations/0055_jobchannel.py b/api_app/migrations/0055_jobchannel.py new file mode 100644 index 0000000000..e3f44db2a8 --- /dev/null +++ b/api_app/migrations/0055_jobchannel.py @@ -0,0 +1,38 @@ +# Generated by Django 4.2.8 on 2024-03-05 17:45 + +import django.db.models.deletion +from django.conf import settings +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ("api_app", "0054_job_jobbisearch"), + ] + + operations = [ + migrations.CreateModel( + name="JobChannel", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("job_id", models.PositiveIntegerField()), + ("channel_name", models.CharField()), + ( + "user", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to=settings.AUTH_USER_MODEL, + ), + ), + ], + ), + ] diff --git a/api_app/models.py b/api_app/models.py index dd6104b641..41fafd3d8a 100644 --- a/api_app/models.py +++ b/api_app/models.py @@ -474,7 +474,7 @@ def kill_if_ongoing(self): from api_app.analyzers_manager.models import AnalyzerConfig from api_app.connectors_manager.models import ConnectorConfig from api_app.visualizers_manager.models import VisualizerConfig - 
from api_app.websocket import JobConsumer + from api_app.weboscket.consumer import JobConsumer from intel_owl.celery import app as celery_app for config in [AnalyzerConfig, ConnectorConfig, VisualizerConfig]: diff --git a/api_app/serializers.py b/api_app/serializers.py index 3477667f9d..178d17de87 100644 --- a/api_app/serializers.py +++ b/api_app/serializers.py @@ -434,19 +434,41 @@ def get_fields(self): ) return super().get_fields() + +class RestJobSerializer(JobSerializer): def get_permissions(self, obj: Job) -> Dict[str, bool]: request = self.context.get("request", None) view = self.context.get("view", None) + has_perm = False if request and view: has_perm = IsObjectOwnerOrSameOrgPermission().has_object_permission( request, view, obj ) - return { - "kill": has_perm, - "delete": has_perm, - "plugin_actions": has_perm, - } - return {} + return { + "kill": has_perm, + "delete": has_perm, + "plugin_actions": has_perm, + } + + +class WsJobSerializer(JobSerializer): + def get_permissions(self, obj: Job) -> Dict[str, bool]: + from .weboscket.consumer import JobChannel + + has_perm = False + channel: JobChannel = self.context.get("channel", None) + if channel: + # channel user has the perm in case is the job owner or is in the same org + # same logic of IsObjectOwnerOrSameOrgPermission defined in certego_saas + has_perm = ( + channel.user == obj.user + or obj.user.membership.organization.user_has_membership(channel.user) + ) + return { + "kill": has_perm, + "delete": has_perm, + "plugin_actions": has_perm, + } class MultipleFileAnalysisSerializer(rfs.ListSerializer): diff --git a/api_app/views.py b/api_app/views.py index 21a6ce701b..0bbeed981b 100644 --- a/api_app/views.py +++ b/api_app/views.py @@ -21,7 +21,7 @@ from rest_framework.response import Response from rest_framework.viewsets import ModelViewSet -from api_app.websocket import JobConsumer +from api_app.weboscket.consumer import JobConsumer from certego_saas.apps.organization.permissions import IsObjectOwnerOrSameOrgPermission from certego_saas.apps.organization.permissions import ( IsObjectOwnerPermission as IsObjectUserPermission, @@ -54,10 +54,10 @@ JobListSerializer, JobRecentScanSerializer, JobResponseSerializer, - JobSerializer, ObservableAnalysisSerializer, PluginConfigSerializer, PythonConfigSerializer, + RestJobSerializer, TagSerializer, ) @@ -283,9 +283,9 @@ class JobViewSet(ReadAndDeleteOnlyViewSet, SerializerActionMixin): queryset = ( Job.objects.prefetch_related("tags").order_by("-received_request_time").all() ) - serializer_class = JobSerializer + serializer_class = RestJobSerializer serializer_action_classes = { - "retrieve": JobSerializer, + "retrieve": RestJobSerializer, "list": JobListSerializer, } filterset_class = JobFilter diff --git a/api_app/weboscket/__init__.py b/api_app/weboscket/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api_app/websocket.py b/api_app/weboscket/consumer.py similarity index 69% rename from api_app/websocket.py rename to api_app/weboscket/consumer.py index 0083780412..f305bf3194 100644 --- a/api_app/websocket.py +++ b/api_app/weboscket/consumer.py @@ -6,7 +6,8 @@ from api_app.choices import Status from api_app.models import Job -from api_app.serializers import JobSerializer +from api_app.serializers import WsJobSerializer +from api_app.weboscket.models import JobChannel logger = logging.getLogger(__name__) @@ -24,17 +25,15 @@ def connect(self) -> None: self.close(code=4040) else: self.accept() - async_to_sync(self.channel_layer.group_add)( - 
JobConsumer._generate_group_name(job_id), self.channel_name + JobChannel.objects.create( + job_id=job_id, user=user, channel_name=self.channel_name ) JobConsumer.serialize_and_send_job(job) def disconnect(self, close_code) -> None: user = self.scope["user"] job_id = self.scope["url_route"]["kwargs"]["job_id"] - async_to_sync(self.channel_layer.group_discard)( - JobConsumer._generate_group_name(job_id), self.channel_name - ) + JobChannel.objects.filter(channel_name=self.channel_name).delete() logger.info( f"user: {user} disconnected for the job: {job_id}. Close code: {close_code}" ) @@ -60,11 +59,17 @@ def _generate_group_name(self, job_id: int) -> str: @classmethod def serialize_and_send_job(cls, job: Job) -> None: - job_serializer = JobSerializer(job) - job_data = job_serializer.data # send data channel_layer = get_channel_layer() - async_to_sync(channel_layer.group_send)( - cls._generate_group_name(job.id), - {"type": "send.job", "job": job_data}, - ) + for channel in JobChannel.objects.filter(job_id=job.id): + logger.debug( + f"send data for the job: {job.id} " + f"to the user: {channel.user.username} " + f"over the channel: {channel.channel_name}" + ) + job_serializer = WsJobSerializer(job, context={"channel": channel}) + job_data = job_serializer.data + async_to_sync(channel_layer.send)( + channel.channel_name, + {"type": "send.job", "job": job_data}, + ) diff --git a/api_app/weboscket/models.py b/api_app/weboscket/models.py new file mode 100644 index 0000000000..8b08ed1fc6 --- /dev/null +++ b/api_app/weboscket/models.py @@ -0,0 +1,21 @@ +"""In this file are available the models used to store the data about a channel""" + +from django.conf import settings +from django.db import models + + +class JobChannel(models.Model): + """Data stored about a job scan. 
+ + * job_id is used to send data to all the channels waiting for the job: + multiple users waiting for the same job + * user is used to get the permission to kill or delete the job + """ + + job_id = models.PositiveIntegerField(null=False) + user = models.ForeignKey( + settings.AUTH_USER_MODEL, + on_delete=models.CASCADE, + null=False, + ) + channel_name = models.CharField() diff --git a/intel_owl/asgi.py b/intel_owl/asgi.py index 0690db5586..aef4b250ab 100644 --- a/intel_owl/asgi.py +++ b/intel_owl/asgi.py @@ -13,7 +13,7 @@ get_asgi_application() # pylint: disable=wrong-import-position -from api_app.websocket import JobConsumer # noqa: E402 +from api_app.weboscket.consumer import JobConsumer # noqa: E402 from intel_owl.middleware import WSAuthMiddleware # noqa: E402 application = ProtocolTypeRouter( diff --git a/intel_owl/tasks.py b/intel_owl/tasks.py index ed6a87a2c0..16d1e54b28 100644 --- a/intel_owl/tasks.py +++ b/intel_owl/tasks.py @@ -209,7 +209,7 @@ def update_notifications_with_releases(): @app.task(name="job_set_final_status", soft_time_limit=30) def job_set_final_status(job_id: int): from api_app.models import Job - from api_app.websocket import JobConsumer + from api_app.weboscket.consumer import JobConsumer job = Job.objects.get(pk=job_id) # execute some callbacks @@ -260,7 +260,7 @@ def run_plugin( ): from api_app.classes import Plugin from api_app.models import Job, PythonModule - from api_app.websocket import JobConsumer + from api_app.weboscket.consumer import JobConsumer logger.info( f"Configuring plugin {plugin_config_pk} for job {job_id} with task {task_id}" diff --git a/tests/api_app/test_serializers.py b/tests/api_app/test_serializers.py index 8bd787562d..bf51cd9ac0 100644 --- a/tests/api_app/test_serializers.py +++ b/tests/api_app/test_serializers.py @@ -17,10 +17,10 @@ FileAnalysisSerializer, JobRecentScanSerializer, JobResponseSerializer, - JobSerializer, ObservableAnalysisSerializer, PluginConfigSerializer, PythonListConfigSerializer, + RestJobSerializer, _AbstractJobCreateSerializer, ) from api_app.visualizers_manager.models import VisualizerConfig @@ -179,14 +179,14 @@ def test_validate(self): org.delete() -class JobSerializerTestCase(CustomTestCase): +class RestJobSerializerTestCase(CustomTestCase): def test_validate(self): job = Job.objects.create( observable_name="test.com", observable_classification="domain", user=self.user, ) - js = JobSerializer(job) + js = RestJobSerializer(job) self.assertIn("analyzer_reports", js.data) self.assertIn("connector_reports", js.data) self.assertIn("visualizer_reports", js.data) diff --git a/tests/api_app/websocket/__init__.py b/tests/api_app/websocket/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/api_app/test_websocket.py b/tests/api_app/websocket/test_websocket.py similarity index 77% rename from tests/api_app/test_websocket.py rename to tests/api_app/websocket/test_websocket.py index 62113b0534..72288b8482 100644 --- a/tests/api_app/test_websocket.py +++ b/tests/api_app/websocket/test_websocket.py @@ -13,6 +13,7 @@ from api_app.analyzers_manager.models import AnalyzerConfig from api_app.choices import ParamTypes from api_app.models import Job, Parameter, PluginConfig, PythonModule +from api_app.weboscket.models import JobChannel from intel_owl.asgi import application from intel_owl.tasks import job_set_final_status, run_plugin @@ -64,13 +65,16 @@ def setUp(self) -> None: ) async def test_job_unauthorized(self): + self.assertEqual(await sync_to_async(JobChannel.objects.all().count)(), 0) 
self.assertEqual(await sync_to_async(Job.objects.filter(id=1027).count)(), 1) async with self.connect_communicator(1027) as (_, connected, subprotocol): self.assertFalse(connected) self.assertEqual(subprotocol, 1008) + self.assertEqual(await sync_to_async(JobChannel.objects.all().count)(), 0) async def test_job_not_exist(self): self.assertEqual(await sync_to_async(Job.objects.filter(id=1028).count)(), 0) + self.assertEqual(await sync_to_async(JobChannel.objects.all().count)(), 0) async with self.connect_communicator(1028, self.user) as ( _, connected, @@ -78,8 +82,10 @@ async def test_job_not_exist(self): ): self.assertFalse(connected) self.assertEqual(subprotocol, 4040) + self.assertEqual(await sync_to_async(JobChannel.objects.all().count)(), 0) async def test_job_terminated(self): + self.assertEqual(await sync_to_async(JobChannel.objects.all().count)(), 0) self.assertEqual(await sync_to_async(Job.objects.filter(id=1027).count)(), 1) async with self.connect_communicator(1027, self.user) as ( communicator, @@ -93,6 +99,7 @@ async def test_job_terminated(self): self.assertEqual( job_report["status"], Job.Status.REPORTED_WITHOUT_FAILS.value ) + self.assertEqual(await sync_to_async(JobChannel.objects.all().count)(), 0) async def test_job_running(self): # Note: Sometimes reading from ws (receive_json_from) is too fast: @@ -100,6 +107,8 @@ async def test_job_running(self): # The test will be blocked waiting a response from ws that already happened. # we need a sleep to wait. # in this test happens for the functions: run_plugin set_final_status. + + # setup db job = await sync_to_async(Job.objects.create)( id=1029, user=self.user, @@ -155,19 +164,23 @@ async def test_job_running(self): parameter_id=query_type_param.id, ) await sync_to_async(plugin_config.save)() + + self.assertEqual(await sync_to_async(JobChannel.objects.all().count)(), 0) async with self.connect_communicator(1029, self.user) as ( communicator, connected, _, ): self.assertTrue(connected) + time.sleep(1) + self.assertEqual(await sync_to_async(JobChannel.objects.all().count)(), 1) job_report_running = await communicator.receive_json_from() self.assertEqual(job_report_running["id"], 1029) self.assertEqual(job_report_running["observable_name"], "test.com") self.assertEqual(job_report_running["status"], Job.Status.PENDING.value) self.assertEqual(job_report_running["analyzer_reports"], []) self.assertIsNone(job_report_running["finished_analysis_time"]) - time.sleep(5) + time.sleep(1) # run plugin await sync_to_async(run_plugin)( job_id=1029, @@ -176,9 +189,9 @@ async def test_job_running(self): runtime_configuration={}, task_id=10, ) - time.sleep(5) + time.sleep(1) job_analyzer_terminated = await communicator.receive_json_from() - time.sleep(5) + time.sleep(1) self.assertEqual(job_analyzer_terminated["id"], 1029) self.assertEqual(job_analyzer_terminated["observable_name"], "test.com") self.assertEqual( @@ -190,9 +203,9 @@ async def test_job_running(self): job.status = Job.Status.REPORTED_WITHOUT_FAILS await sync_to_async(job.save)() await sync_to_async(job_set_final_status)(1029) - time.sleep(5) + time.sleep(1) job_report_terminated = await communicator.receive_json_from() - time.sleep(5) + time.sleep(1) self.assertEqual(job_report_terminated["id"], 1029) self.assertEqual(job_report_terminated["observable_name"], "test.com") self.assertEqual( @@ -200,3 +213,41 @@ async def test_job_running(self): ) self.assertIsNotNone(job_report_terminated["analyzer_reports"]) self.assertIsNotNone(job_report_terminated["finished_analysis_time"]) + 
self.assertEqual(await sync_to_async(JobChannel.objects.all().count)(), 0) + + async def test_job_killed(self): + await sync_to_async(Job.objects.create)( + id=1030, + user=self.user, + status=Job.Status.RUNNING.value, + observable_name="test.com", + observable_classification=ObservableTypes.DOMAIN, + received_request_time=datetime.datetime.now(), + ) + + await sync_to_async(self.client.force_login)(self.user) + + self.assertEqual(await sync_to_async(JobChannel.objects.all().count)(), 0) + time.sleep(1) + async with self.connect_communicator(1030, self.user) as ( + communicator, + connected, + _, + ): + self.assertTrue(connected) + time.sleep(1) + job_running = await communicator.receive_json_from() + self.assertEqual(job_running["id"], 1030) + self.assertEqual(job_running["observable_name"], "test.com") + self.assertEqual(job_running["status"], Job.Status.RUNNING.value) + + time.sleep(1) + await sync_to_async(self.client.patch)("/api/jobs/1030/kill") + + time.sleep(1) + job_killed = await communicator.receive_json_from() + self.assertEqual(job_killed["id"], 1030) + self.assertEqual(job_killed["observable_name"], "test.com") + self.assertEqual(job_killed["status"], Job.Status.KILLED.value) + + self.assertEqual(await sync_to_async(JobChannel.objects.all().count)(), 0) From e12ad88fe3c227c8e7b6d8742634b655ef351a3b Mon Sep 17 00:00:00 2001 From: Daniele Rosetti Date: Wed, 6 Mar 2024 16:53:13 +0100 Subject: [PATCH 34/58] linter --- ...{0055_jobchannel.py => 0062_jobchannel.py} | 2 +- api_app/weboscket/consumer.py | 2 +- .../migrations/0002_migrate_from_durin.py | 2 +- frontend/src/stores/useAuthStore.jsx | 4 +- .../user/token/TokenAccess.test.jsx | 244 +++++++++--------- .../components/user/token/TokenPage.test.jsx | 30 +-- 6 files changed, 149 insertions(+), 135 deletions(-) rename api_app/migrations/{0055_jobchannel.py => 0062_jobchannel.py} (95%) diff --git a/api_app/migrations/0055_jobchannel.py b/api_app/migrations/0062_jobchannel.py similarity index 95% rename from api_app/migrations/0055_jobchannel.py rename to api_app/migrations/0062_jobchannel.py index e3f44db2a8..6e02be324c 100644 --- a/api_app/migrations/0055_jobchannel.py +++ b/api_app/migrations/0062_jobchannel.py @@ -8,7 +8,7 @@ class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ("api_app", "0054_job_jobbisearch"), + ("api_app", "0061_job_depth_analysis"), ] operations = [ diff --git a/api_app/weboscket/consumer.py b/api_app/weboscket/consumer.py index f305bf3194..d493aa8936 100644 --- a/api_app/weboscket/consumer.py +++ b/api_app/weboscket/consumer.py @@ -6,7 +6,7 @@ from api_app.choices import Status from api_app.models import Job -from api_app.serializers import WsJobSerializer +from api_app.serializers.job import WsJobSerializer from api_app.weboscket.models import JobChannel logger = logging.getLogger(__name__) diff --git a/authentication/migrations/0002_migrate_from_durin.py b/authentication/migrations/0002_migrate_from_durin.py index b3d75d91fb..eacc65fcce 100644 --- a/authentication/migrations/0002_migrate_from_durin.py +++ b/authentication/migrations/0002_migrate_from_durin.py @@ -29,7 +29,7 @@ class Migration(migrations.Migration): dependencies = [ ("authentication", "0001_initial"), ("authtoken", "0003_tokenproxy"), - ("api_app", "0054_job_jobbisearch"), + ("api_app", "0061_job_depth_analysis"), ] operations = [ diff --git a/frontend/src/stores/useAuthStore.jsx b/frontend/src/stores/useAuthStore.jsx index 5bdb2aac69..42b51a2cce 100644 --- 
a/frontend/src/stores/useAuthStore.jsx +++ b/frontend/src/stores/useAuthStore.jsx @@ -23,7 +23,7 @@ export const useAuthStore = create((set, get) => ({ }, access: null, isAuthenticated: () => !!get().CSRFToken, - updateToken: () => set({ CSRFToken: Cookies.get(CSRF_TOKEN)}), + updateToken: () => set({ CSRFToken: Cookies.get(CSRF_TOKEN) }), deleteToken: () => set({ CSRFToken: "" }), service: { fetchUserAccess: async () => { @@ -64,7 +64,7 @@ export const useAuthStore = create((set, get) => ({ const onLogoutCb = () => { get().deleteToken(); // rmeove from the browser or it will persist next time we open a tab - Cookies.remove(CSRF_TOKEN) + Cookies.remove(CSRF_TOKEN); set({ loading: false }); addToast("Logged out!", null, "info"); }; diff --git a/frontend/tests/components/user/token/TokenAccess.test.jsx b/frontend/tests/components/user/token/TokenAccess.test.jsx index e8ecb00006..51fc4fda01 100644 --- a/frontend/tests/components/user/token/TokenAccess.test.jsx +++ b/frontend/tests/components/user/token/TokenAccess.test.jsx @@ -2,147 +2,161 @@ import React from "react"; import "@testing-library/jest-dom"; import { render, screen, waitFor } from "@testing-library/react"; -import useAxios from 'axios-hooks' +import useAxios from "axios-hooks"; import axios from "axios"; import userEvent from "@testing-library/user-event"; import TokenAccess from "../../../../src/components/user/token/TokenAccess"; import { APIACCESS_BASE_URI } from "../../../../src/constants/apiURLs"; -jest.mock('axios') -jest.mock('axios-hooks') +jest.mock("axios"); +jest.mock("axios-hooks"); describe("test TokenAccess", () => { - beforeEach(() => { - jest.clearAllMocks(); - - axios.post.mockImplementation(() => Promise.resolve({ - data: { - key: "987654321", - created: "2024-02-22T18:48:18.257944", - } - })); - axios.delete.mockImplementation(() => Promise.resolve({ - data: {} - })); - }); + beforeEach(() => { + jest.clearAllMocks(); - test("render with token", async () => { - useAxios.mockImplementation(() => [{ - data: { - key: "123456789", - created: "2024-02-22T15:48:18.257944", - }, - loading: false, - error: "", - }]); - - const user = userEvent.setup(); - - const result = render( - - ); - expect(screen.getByText("Created")).toBeInTheDocument(); - expect(screen.getByText("03:48 PM Feb 22nd, 2024")).toBeInTheDocument(); - - // test user interaction - const showButton = result.container.querySelector('#toggle-show-apikey-btn'); - expect(showButton).toBeInTheDocument(); - await user.click(showButton); - expect(screen.getByText("123456789")).toBeInTheDocument(); - }) - - test("render without token", () => { - useAxios.mockImplementation(() => [{ - data: undefined, - loading: false, - error: {response: {status: 404}, "errors":{"detail":"Not found."}}, - }]); - - render( - - ); - - expect(screen.getByText("No active API key")).toBeInTheDocument(); - }) - - test("delete token", async () => { - useAxios.mockImplementation(() => [{ - data: { - key: "987654321", - created: "2024-02-22T18:48:18.257944", - }, - loading: false, - error: "", - }]).mockImplementationOnce(() => [{ - data: { - key: "123456789", - created: "2024-02-22T15:48:18.257944", - }, - loading: false, - error: "", - }]); - - const user = userEvent.setup(); - - const result = render( - - ); - expect(screen.getByText("Created")).toBeInTheDocument(); - expect(screen.getByText("03:48 PM Feb 22nd, 2024")).toBeInTheDocument(); - - const deleteButton = result.container.querySelector('#delete-apikey-btn'); - expect(deleteButton).toBeInTheDocument(); - await 
user.click(deleteButton); - const deletionConfirmButton = screen.getByRole("button", { name: /Yes/i }); - expect(deletionConfirmButton).toBeInTheDocument(); - await user.click(deletionConfirmButton); - await waitFor(() => { - expect(axios.delete).toHaveBeenCalledWith( - `${APIACCESS_BASE_URI}`, - ); - }); - await waitFor(() => { - expect(useAxios).toHaveBeenCalledWith( - {"url": `${APIACCESS_BASE_URI}`}, {"useCache": false} - ); - }) - result.rerender() - expect(screen.getByText("06:48 PM Feb 22nd, 2024")).toBeInTheDocument(); - }) + axios.post.mockImplementation(() => + Promise.resolve({ + data: { + key: "987654321", + created: "2024-02-22T18:48:18.257944", + }, + }), + ); + axios.delete.mockImplementation(() => + Promise.resolve({ + data: {}, + }), + ); + }); - test("create token", async () => { - useAxios.mockImplementation(() => [{ + test("render with token", async () => { + useAxios.mockImplementation(() => [ + { data: { - key: "987654321", - created: "2024-02-22T18:48:18.257944", + key: "123456789", + created: "2024-02-22T15:48:18.257944", }, loading: false, error: "", - }]).mockImplementationOnce(() => [{ + }, + ]); + + const user = userEvent.setup(); + + const result = render(); + expect(screen.getByText("Created")).toBeInTheDocument(); + expect(screen.getByText("03:48 PM Feb 22nd, 2024")).toBeInTheDocument(); + + // test user interaction + const showButton = result.container.querySelector( + "#toggle-show-apikey-btn", + ); + expect(showButton).toBeInTheDocument(); + await user.click(showButton); + expect(screen.getByText("123456789")).toBeInTheDocument(); + }); + + test("render without token", () => { + useAxios.mockImplementation(() => [ + { data: undefined, loading: false, - error: {response: {status: 404}, "errors":{"detail":"Not found."}}, - }]); + error: { response: { status: 404 }, errors: { detail: "Not found." 
} }, + }, + ]); + + render(); + + expect(screen.getByText("No active API key")).toBeInTheDocument(); + }); + + test("delete token", async () => { + useAxios + .mockImplementation(() => [ + { + data: { + key: "987654321", + created: "2024-02-22T18:48:18.257944", + }, + loading: false, + error: "", + }, + ]) + .mockImplementationOnce(() => [ + { + data: { + key: "123456789", + created: "2024-02-22T15:48:18.257944", + }, + loading: false, + error: "", + }, + ]); const user = userEvent.setup(); - const result = render( - - ); + const result = render(); + expect(screen.getByText("Created")).toBeInTheDocument(); + expect(screen.getByText("03:48 PM Feb 22nd, 2024")).toBeInTheDocument(); + + const deleteButton = result.container.querySelector("#delete-apikey-btn"); + expect(deleteButton).toBeInTheDocument(); + await user.click(deleteButton); + const deletionConfirmButton = screen.getByRole("button", { name: /Yes/i }); + expect(deletionConfirmButton).toBeInTheDocument(); + await user.click(deletionConfirmButton); + await waitFor(() => { + expect(axios.delete).toHaveBeenCalledWith(`${APIACCESS_BASE_URI}`); + }); + await waitFor(() => { + expect(useAxios).toHaveBeenCalledWith( + { url: `${APIACCESS_BASE_URI}` }, + { useCache: false }, + ); + }); + result.rerender(); + expect(screen.getByText("06:48 PM Feb 22nd, 2024")).toBeInTheDocument(); + }); + + test("create token", async () => { + useAxios + .mockImplementation(() => [ + { + data: { + key: "987654321", + created: "2024-02-22T18:48:18.257944", + }, + loading: false, + error: "", + }, + ]) + .mockImplementationOnce(() => [ + { + data: undefined, + loading: false, + error: { + response: { status: 404 }, + errors: { detail: "Not found." }, + }, + }, + ]); + + const user = userEvent.setup(); + + const result = render(); expect(screen.getByText("No active API key")).toBeInTheDocument(); - const createButton = result.container.querySelector('#create-apikey-btn'); + const createButton = result.container.querySelector("#create-apikey-btn"); expect(createButton).toBeInTheDocument(); await user.click(createButton); await waitFor(() => { - expect(axios.post).toHaveBeenCalledWith( - `${APIACCESS_BASE_URI}`, - ); + expect(axios.post).toHaveBeenCalledWith(`${APIACCESS_BASE_URI}`); }); - result.rerender() + result.rerender(); expect(screen.getByText("Created")).toBeInTheDocument(); expect(screen.getByText("06:48 PM Feb 22nd, 2024")).toBeInTheDocument(); - }) + }); }); diff --git a/frontend/tests/components/user/token/TokenPage.test.jsx b/frontend/tests/components/user/token/TokenPage.test.jsx index 3e5145ca34..9894297c76 100644 --- a/frontend/tests/components/user/token/TokenPage.test.jsx +++ b/frontend/tests/components/user/token/TokenPage.test.jsx @@ -2,26 +2,26 @@ import React from "react"; import "@testing-library/jest-dom"; import { render, screen } from "@testing-library/react"; -import useAxios from 'axios-hooks' +import useAxios from "axios-hooks"; import TokenPage from "../../../../src/components/user/token/TokenPage"; -jest.mock('axios-hooks') +jest.mock("axios-hooks"); -useAxios.mockReturnValue([{ - data: { - key: "123456789", - created: "2024-02-22T15:48:18.257944Z", - }, -}]); +useAxios.mockReturnValue([ + { + data: { + key: "123456789", + created: "2024-02-22T15:48:18.257944Z", + }, + }, +]); describe("test TokenPage", () => { test("render", () => { - render( - - ); - expect(screen.getByText("API Access")).toBeInTheDocument(); - expect(screen.getByText("Created")).toBeInTheDocument(); - expect(screen.getByText("04:48 PM Feb 22nd, 
2024")).toBeInTheDocument(); - }) + render(); + expect(screen.getByText("API Access")).toBeInTheDocument(); + expect(screen.getByText("Created")).toBeInTheDocument(); + expect(screen.getByText("04:48 PM Feb 22nd, 2024")).toBeInTheDocument(); + }); }); From ba43dd9bc2a344c82c1a69b45767d4780bfc7195 Mon Sep 17 00:00:00 2001 From: Daniele Rosetti Date: Wed, 6 Mar 2024 17:34:44 +0100 Subject: [PATCH 35/58] suggestion --- api_app/weboscket/consumer.py | 2 +- api_app/weboscket/models.py | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/api_app/weboscket/consumer.py b/api_app/weboscket/consumer.py index d493aa8936..fc211db15a 100644 --- a/api_app/weboscket/consumer.py +++ b/api_app/weboscket/consumer.py @@ -54,7 +54,7 @@ def send_job(self, event) -> None: self.close() @classmethod - def _generate_group_name(self, job_id: int) -> str: + def _generate_group_name(cls, job_id: int) -> str: return f"job-{job_id}" @classmethod diff --git a/api_app/weboscket/models.py b/api_app/weboscket/models.py index 8b08ed1fc6..0a73739eb2 100644 --- a/api_app/weboscket/models.py +++ b/api_app/weboscket/models.py @@ -12,10 +12,9 @@ class JobChannel(models.Model): * user is used to get the permission to kill or delete the job """ - job_id = models.PositiveIntegerField(null=False) + job_id = models.PositiveIntegerField() user = models.ForeignKey( settings.AUTH_USER_MODEL, on_delete=models.CASCADE, - null=False, ) channel_name = models.CharField() From 2339b53185c9f2303ceb3ae000a854a826687401 Mon Sep 17 00:00:00 2001 From: Daniele Rosetti Date: Wed, 6 Mar 2024 17:42:06 +0100 Subject: [PATCH 36/58] prettier --- frontend/src/components/Routes.jsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/src/components/Routes.jsx b/frontend/src/components/Routes.jsx index af37346930..b80abeb502 100644 --- a/frontend/src/components/Routes.jsx +++ b/frontend/src/components/Routes.jsx @@ -16,8 +16,8 @@ const Organization = React.lazy(() => import("./organization/Organization")); const TokenPage = React.lazy(() => import("./user/token/TokenPage")); const JobsTable = React.lazy(() => import("./jobs/table/JobsTable")); const JobResult = React.lazy(() => import("./jobs/result/JobResult")); -const CommentResult = React.lazy(() => - import("./jobs/result/bar/comment/CommentResult"), +const CommentResult = React.lazy( + () => import("./jobs/result/bar/comment/CommentResult"), ); const PluginsContainer = React.lazy(() => import("./plugins/PluginsContainer")); const Dashboard = React.lazy(() => import("./dashboard/Dashboard")); From 16d348fc595fb7626741df57a50abee051c11ea5 Mon Sep 17 00:00:00 2001 From: Daniele Rosetti Date: Wed, 6 Mar 2024 17:44:21 +0100 Subject: [PATCH 37/58] black formatter --- tests/api_app/analyses_manager/test_models.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/api_app/analyses_manager/test_models.py b/tests/api_app/analyses_manager/test_models.py index dd09c71696..8ebe0a84fc 100644 --- a/tests/api_app/analyses_manager/test_models.py +++ b/tests/api_app/analyses_manager/test_models.py @@ -17,7 +17,7 @@ def test_set_correct_status_running(self): observable_name="test.com", observable_classification="domain", user=self.user, - status=Job.Status.REPORTED_WITH_FAILS + status=Job.Status.REPORTED_WITH_FAILS, ) an: Analysis = Analysis.objects.create(name="Test", owner=self.user) an.jobs.add(job) @@ -28,7 +28,7 @@ def test_set_correct_status_running(self): observable_name="test.com", observable_classification="domain", user=self.user, - 
status=Job.Status.PENDING + status=Job.Status.PENDING, ) an.refresh_from_db() an.set_correct_status() From c02ef97e8746378c9560ccdf395f682f857dbd2d Mon Sep 17 00:00:00 2001 From: Daniele Rosetti Date: Wed, 6 Mar 2024 17:46:24 +0100 Subject: [PATCH 38/58] isort --- intel_owl/settings/websocket.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/intel_owl/settings/websocket.py b/intel_owl/settings/websocket.py index f5b74fd84f..4e268d7beb 100644 --- a/intel_owl/settings/websocket.py +++ b/intel_owl/settings/websocket.py @@ -1,6 +1,7 @@ -from intel_owl import secrets import socket +from intel_owl import secrets + websockets_url = secrets.get_secret("WEBSOCKETS_URL", None) if not websockets_url: if socket.gethostname() in ["uwsgi", "daphne"]: From 1c6fceefc8a5d9c1e90a905ecb5ec3dab40ca92b Mon Sep 17 00:00:00 2001 From: Daniele Rosetti Date: Wed, 6 Mar 2024 17:52:01 +0100 Subject: [PATCH 39/58] fix test --- frontend/tests/components/user/token/TokenPage.test.jsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/tests/components/user/token/TokenPage.test.jsx b/frontend/tests/components/user/token/TokenPage.test.jsx index 9894297c76..96584e2c22 100644 --- a/frontend/tests/components/user/token/TokenPage.test.jsx +++ b/frontend/tests/components/user/token/TokenPage.test.jsx @@ -12,7 +12,7 @@ useAxios.mockReturnValue([ { data: { key: "123456789", - created: "2024-02-22T15:48:18.257944Z", + created: "2024-02-22T15:48:18.257944", }, }, ]); @@ -22,6 +22,6 @@ describe("test TokenPage", () => { render(); expect(screen.getByText("API Access")).toBeInTheDocument(); expect(screen.getByText("Created")).toBeInTheDocument(); - expect(screen.getByText("04:48 PM Feb 22nd, 2024")).toBeInTheDocument(); + expect(screen.getByText("03:48 PM Feb 22nd, 2024")).toBeInTheDocument(); }); }); From 5f7c74c17ada6b48b23c18488eaa43959140df99 Mon Sep 17 00:00:00 2001 From: Daniele Rosetti Date: Thu, 7 Mar 2024 11:13:04 +0100 Subject: [PATCH 40/58] fixes --- api_app/serializers/job.py | 2 +- api_app/signals.py | 33 +++++++++---------- .../migrations/0002_migrate_from_durin.py | 9 +++-- authentication/views.py | 4 +-- tests/api_app/websocket/test_websocket.py | 3 -- 5 files changed, 23 insertions(+), 28 deletions(-) diff --git a/api_app/serializers/job.py b/api_app/serializers/job.py index ce4c50d9ab..7680c1889c 100644 --- a/api_app/serializers/job.py +++ b/api_app/serializers/job.py @@ -404,7 +404,7 @@ class Meta: read_only=True, slug_field="name", many=True ) - def get_pivots_to_execute(self, obj: Job): + def get_pivots_to_execute(self, obj: Job): # skipcq: PYL-R0201 return obj.pivots_to_execute.all().values_list("name", flat=True) diff --git a/api_app/signals.py b/api_app/signals.py index 9404fb754f..5ee37f6e75 100644 --- a/api_app/signals.py +++ b/api_app/signals.py @@ -55,28 +55,27 @@ def post_delete_job(sender, instance: Job, **kwargs): instance.analysis.delete() -@receiver(models.signals.post_migrate) +@receiver(models.signals.post_migrate, sender=BeatConfig) def post_migrate_beat( sender, app_config, verbosity, interactive, stdout, using, plan, apps, **kwargs ): from django_celery_beat.models import PeriodicTask - if isinstance(sender, BeatConfig): - from intel_owl.tasks import update - - for module in PythonModule.objects.filter(health_check_schedule__isnull=False): - for config in module.configs.filter(health_check_task__isnull=True): - config.generate_health_check_periodic_task() - for module in PythonModule.objects.filter( - update_schedule__isnull=False, 
update_task__isnull=True - ): - module.generate_update_periodic_task() - - for task in PeriodicTask.objects.filter( - enabled=True, task=f"{update.__module__}.{update.__name__}" - ): - task.enabled &= settings.REPO_DOWNLOADER_ENABLED - task.save() + from intel_owl.tasks import update + + for module in PythonModule.objects.filter(health_check_schedule__isnull=False): + for config in module.configs.filter(health_check_task__isnull=True): + config.generate_health_check_periodic_task() + for module in PythonModule.objects.filter( + update_schedule__isnull=False, update_task__isnull=True + ): + module.generate_update_periodic_task() + + for task in PeriodicTask.objects.filter( + enabled=True, task=f"{update.__module__}.{update.__name__}" + ): + task.enabled &= settings.REPO_DOWNLOADER_ENABLED + task.save() @receiver(models.signals.post_save, sender=PluginConfig) diff --git a/authentication/migrations/0002_migrate_from_durin.py b/authentication/migrations/0002_migrate_from_durin.py index eacc65fcce..7112dbffa3 100644 --- a/authentication/migrations/0002_migrate_from_durin.py +++ b/authentication/migrations/0002_migrate_from_durin.py @@ -14,11 +14,10 @@ def move_token_from_durin(apps, schema_editor): for durin_token in AuthToken.objects.all(): # export only CLI token (client name PyIntelOwl) # only in case user didn't have a rest framework token - if durin_token.client.name == "PyIntelOwl": - if not Token.objects.filter(user_id=durin_token.user.id).exists(): - Token.objects.create( - key=durin_token.token, user_id=durin_token.user.pk - ) + if durin_token.client.name == "PyIntelOwl" and not Token.objects.filter(user_id=durin_token.user.id).exists(): + Token.objects.create( + key=durin_token.token, user_id=durin_token.user.pk + ) # delete durin db data AuthToken.objects.all().delete() diff --git a/authentication/views.py b/authentication/views.py index 7674e359e1..2963577ef9 100644 --- a/authentication/views.py +++ b/authentication/views.py @@ -133,7 +133,7 @@ def post(request: Request) -> Response: class LogoutView(APIView): permission_classes = [IsAuthenticated] - def post(self, request, *args, **kwargs): + def post(self, request, *args, **kwargs): # skipcq: PYL-R0201 user = request.user logger.info(f"perform_logout received request from '{user.username}''.") logout(request) @@ -261,7 +261,7 @@ def get(self, request, *args, **kwargs): serializer = TokenSerializer(instance) return Response(serializer.data) - def post(self, request): + def post(self, request): # skipcq: PYL-R0201 username = request.user.username logger.info(f"user {username} send a request to create the API token") serializer = TokenSerializer(data={}, context={"request": request}) diff --git a/tests/api_app/websocket/test_websocket.py b/tests/api_app/websocket/test_websocket.py index 72288b8482..40724e3aae 100644 --- a/tests/api_app/websocket/test_websocket.py +++ b/tests/api_app/websocket/test_websocket.py @@ -61,7 +61,6 @@ def setUp(self) -> None: status=Job.Status.REPORTED_WITHOUT_FAILS.value, observable_name="8.8.8.8", observable_classification=ObservableTypes.IP, - received_request_time=datetime.datetime.now(), ) async def test_job_unauthorized(self): @@ -115,7 +114,6 @@ async def test_job_running(self): status=Job.Status.PENDING.value, observable_name="test.com", observable_classification=ObservableTypes.DOMAIN, - received_request_time=datetime.datetime.now(), ) class_dns_python_module, _ = await sync_to_async( PythonModule.objects.get_or_create @@ -222,7 +220,6 @@ async def test_job_killed(self): 
status=Job.Status.RUNNING.value, observable_name="test.com", observable_classification=ObservableTypes.DOMAIN, - received_request_time=datetime.datetime.now(), ) await sync_to_async(self.client.force_login)(self.user) From ebe088418afd022f1f29078272bc3ee736b81d23 Mon Sep 17 00:00:00 2001 From: Daniele Rosetti Date: Thu, 7 Mar 2024 11:15:21 +0100 Subject: [PATCH 41/58] fix --- tests/api_app/websocket/test_websocket.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/api_app/websocket/test_websocket.py b/tests/api_app/websocket/test_websocket.py index 40724e3aae..4cdcb8974b 100644 --- a/tests/api_app/websocket/test_websocket.py +++ b/tests/api_app/websocket/test_websocket.py @@ -1,5 +1,4 @@ import abc -import datetime import time from contextlib import asynccontextmanager From 1b39ab30ae87c56d20af9e8605ac85b28b7d7555 Mon Sep 17 00:00:00 2001 From: Daniele Rosetti Date: Thu, 7 Mar 2024 11:32:36 +0100 Subject: [PATCH 42/58] fix --- api_app/serializers/job.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api_app/serializers/job.py b/api_app/serializers/job.py index 7680c1889c..01627d910a 100644 --- a/api_app/serializers/job.py +++ b/api_app/serializers/job.py @@ -460,7 +460,7 @@ class Meta: playbook_to_execute = rfs.SlugRelatedField(read_only=True, slug_field="name") permissions = rfs.SerializerMethodField() - def get_pivots_to_execute(self, obj: Job): + def get_pivots_to_execute(self, obj: Job): # skipcq: PYL-R0201 # this cast is required or serializer doesn't work with websocket return list(obj.pivots_to_execute.all().values_list("name", flat=True)) From 036d9ef468313f8ae695be24ed5161c7c94af7ec Mon Sep 17 00:00:00 2001 From: Daniele Rosetti Date: Thu, 7 Mar 2024 15:43:30 +0100 Subject: [PATCH 43/58] renamed package es --- api_app/models.py | 2 +- api_app/serializers/job.py | 2 +- api_app/views.py | 2 +- api_app/{weboscket => websocket}/__init__.py | 0 api_app/{weboscket => websocket}/consumer.py | 2 +- api_app/{weboscket => websocket}/models.py | 0 intel_owl/asgi.py | 2 +- intel_owl/tasks.py | 4 ++-- tests/api_app/websocket/test_websocket.py | 2 +- 9 files changed, 8 insertions(+), 8 deletions(-) rename api_app/{weboscket => websocket}/__init__.py (100%) rename api_app/{weboscket => websocket}/consumer.py (98%) rename api_app/{weboscket => websocket}/models.py (100%) diff --git a/api_app/models.py b/api_app/models.py index 977175896f..6eab0e3962 100644 --- a/api_app/models.py +++ b/api_app/models.py @@ -492,7 +492,7 @@ def kill_if_ongoing(self): from api_app.analyzers_manager.models import AnalyzerConfig from api_app.connectors_manager.models import ConnectorConfig from api_app.visualizers_manager.models import VisualizerConfig - from api_app.weboscket.consumer import JobConsumer + from api_app.websocket.consumer import JobConsumer from intel_owl.celery import app as celery_app for config in [AnalyzerConfig, ConnectorConfig, VisualizerConfig]: diff --git a/api_app/serializers/job.py b/api_app/serializers/job.py index 01627d910a..23624fff70 100644 --- a/api_app/serializers/job.py +++ b/api_app/serializers/job.py @@ -501,7 +501,7 @@ def get_permissions(self, obj: Job) -> Dict[str, bool]: class WsJobSerializer(JobSerializer): def get_permissions(self, obj: Job) -> Dict[str, bool]: - from api_app.weboscket.models import JobChannel + from api_app.websocket.models import JobChannel has_perm = False channel: JobChannel = self.context.get("channel", None) diff --git a/api_app/views.py b/api_app/views.py index db8d91ceee..766848c3d3 100644 --- a/api_app/views.py +++ 
b/api_app/views.py @@ -20,7 +20,7 @@ from rest_framework.response import Response from rest_framework.viewsets import ModelViewSet -from api_app.weboscket.consumer import JobConsumer +from api_app.websocket.consumer import JobConsumer from certego_saas.apps.organization.permissions import ( IsObjectOwnerOrSameOrgPermission as IsObjectUserOrSameOrgPermission, ) diff --git a/api_app/weboscket/__init__.py b/api_app/websocket/__init__.py similarity index 100% rename from api_app/weboscket/__init__.py rename to api_app/websocket/__init__.py diff --git a/api_app/weboscket/consumer.py b/api_app/websocket/consumer.py similarity index 98% rename from api_app/weboscket/consumer.py rename to api_app/websocket/consumer.py index fc211db15a..dd9d05c5f6 100644 --- a/api_app/weboscket/consumer.py +++ b/api_app/websocket/consumer.py @@ -7,7 +7,7 @@ from api_app.choices import Status from api_app.models import Job from api_app.serializers.job import WsJobSerializer -from api_app.weboscket.models import JobChannel +from api_app.websocket.models import JobChannel logger = logging.getLogger(__name__) diff --git a/api_app/weboscket/models.py b/api_app/websocket/models.py similarity index 100% rename from api_app/weboscket/models.py rename to api_app/websocket/models.py diff --git a/intel_owl/asgi.py b/intel_owl/asgi.py index aef4b250ab..e8a57b7b88 100644 --- a/intel_owl/asgi.py +++ b/intel_owl/asgi.py @@ -13,7 +13,7 @@ get_asgi_application() # pylint: disable=wrong-import-position -from api_app.weboscket.consumer import JobConsumer # noqa: E402 +from api_app.websocket.consumer import JobConsumer # noqa: E402 from intel_owl.middleware import WSAuthMiddleware # noqa: E402 application = ProtocolTypeRouter( diff --git a/intel_owl/tasks.py b/intel_owl/tasks.py index ef1dbec899..ea090916dc 100644 --- a/intel_owl/tasks.py +++ b/intel_owl/tasks.py @@ -204,7 +204,7 @@ def update_notifications_with_releases(): @app.task(name="job_set_final_status", soft_time_limit=30) def job_set_final_status(job_id: int): from api_app.models import Job - from api_app.weboscket.consumer import JobConsumer + from api_app.websocket.consumer import JobConsumer job = Job.objects.get(pk=job_id) # execute some callbacks @@ -255,7 +255,7 @@ def run_plugin( ): from api_app.classes import Plugin from api_app.models import Job, PythonModule - from api_app.weboscket.consumer import JobConsumer + from api_app.websocket.consumer import JobConsumer logger.info( f"Configuring plugin {plugin_config_pk} for job {job_id} with task {task_id}" diff --git a/tests/api_app/websocket/test_websocket.py b/tests/api_app/websocket/test_websocket.py index 4cdcb8974b..3efffea9f8 100644 --- a/tests/api_app/websocket/test_websocket.py +++ b/tests/api_app/websocket/test_websocket.py @@ -12,7 +12,7 @@ from api_app.analyzers_manager.models import AnalyzerConfig from api_app.choices import ParamTypes from api_app.models import Job, Parameter, PluginConfig, PythonModule -from api_app.weboscket.models import JobChannel +from api_app.websocket.models import JobChannel from intel_owl.asgi import application from intel_owl.tasks import job_set_final_status, run_plugin From ad6d9fbc95c9fd4e0b3c52d7d60712055dbb9509 Mon Sep 17 00:00:00 2001 From: Daniele Rosetti Date: Fri, 8 Mar 2024 09:53:13 +0100 Subject: [PATCH 44/58] removed custom collection for job channels --- api_app/migrations/0062_jobchannel.py | 38 ------- api_app/models.py | 2 +- api_app/serializers/job.py | 12 +-- api_app/views.py | 2 +- api_app/websocket.py | 122 ++++++++++++++++++++++ api_app/websocket/__init__.py | 
0 api_app/websocket/consumer.py | 75 ------------- api_app/websocket/models.py | 20 ---- intel_owl/asgi.py | 2 +- intel_owl/tasks.py | 4 +- tests/api_app/websocket/test_websocket.py | 13 --- 11 files changed, 128 insertions(+), 162 deletions(-) delete mode 100644 api_app/migrations/0062_jobchannel.py create mode 100644 api_app/websocket.py delete mode 100644 api_app/websocket/__init__.py delete mode 100644 api_app/websocket/consumer.py delete mode 100644 api_app/websocket/models.py diff --git a/api_app/migrations/0062_jobchannel.py b/api_app/migrations/0062_jobchannel.py deleted file mode 100644 index 6e02be324c..0000000000 --- a/api_app/migrations/0062_jobchannel.py +++ /dev/null @@ -1,38 +0,0 @@ -# Generated by Django 4.2.8 on 2024-03-05 17:45 - -import django.db.models.deletion -from django.conf import settings -from django.db import migrations, models - - -class Migration(migrations.Migration): - dependencies = [ - migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ("api_app", "0061_job_depth_analysis"), - ] - - operations = [ - migrations.CreateModel( - name="JobChannel", - fields=[ - ( - "id", - models.BigAutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ("job_id", models.PositiveIntegerField()), - ("channel_name", models.CharField()), - ( - "user", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL, - ), - ), - ], - ), - ] diff --git a/api_app/models.py b/api_app/models.py index 6eab0e3962..5cf0e2262d 100644 --- a/api_app/models.py +++ b/api_app/models.py @@ -492,7 +492,7 @@ def kill_if_ongoing(self): from api_app.analyzers_manager.models import AnalyzerConfig from api_app.connectors_manager.models import ConnectorConfig from api_app.visualizers_manager.models import VisualizerConfig - from api_app.websocket.consumer import JobConsumer + from api_app.websocket import JobConsumer from intel_owl.celery import app as celery_app for config in [AnalyzerConfig, ConnectorConfig, VisualizerConfig]: diff --git a/api_app/serializers/job.py b/api_app/serializers/job.py index 23624fff70..008c478c4b 100644 --- a/api_app/serializers/job.py +++ b/api_app/serializers/job.py @@ -501,17 +501,7 @@ def get_permissions(self, obj: Job) -> Dict[str, bool]: class WsJobSerializer(JobSerializer): def get_permissions(self, obj: Job) -> Dict[str, bool]: - from api_app.websocket.models import JobChannel - - has_perm = False - channel: JobChannel = self.context.get("channel", None) - if channel: - # channel user has the perm in case is the job owner or is in the same org - # same logic of IsObjectOwnerOrSameOrgPermission defined in certego_saas - has_perm = ( - channel.user == obj.user - or obj.user.membership.organization.user_has_membership(channel.user) - ) + has_perm = self.context.get("permissions", False) return { "kill": has_perm, "delete": has_perm, diff --git a/api_app/views.py b/api_app/views.py index 766848c3d3..775d16b959 100644 --- a/api_app/views.py +++ b/api_app/views.py @@ -20,7 +20,7 @@ from rest_framework.response import Response from rest_framework.viewsets import ModelViewSet -from api_app.websocket.consumer import JobConsumer +from api_app.websocket import JobConsumer from certego_saas.apps.organization.permissions import ( IsObjectOwnerOrSameOrgPermission as IsObjectUserOrSameOrgPermission, ) diff --git a/api_app/websocket.py b/api_app/websocket.py new file mode 100644 index 0000000000..53bfbf09c6 --- /dev/null +++ b/api_app/websocket.py @@ -0,0 +1,122 @@ +import logging +from 
typing import List
+
+from asgiref.sync import async_to_sync
+from channels.generic.websocket import JsonWebsocketConsumer
+from channels.layers import get_channel_layer
+from django.contrib.auth import get_user_model
+from django.utils.functional import cached_property
+
+from api_app.choices import Status
+from api_app.models import Job
+from api_app.serializers.job import WsJobSerializer
+
+User = get_user_model()
+
+
+logger = logging.getLogger(__name__)
+
+
+class JobConsumer(JsonWebsocketConsumer):
+    class JobChannelGroups:
+        def __init__(self, job: Job) -> None:
+            self._job = job
+
+        @cached_property
+        def job_group_name(self) -> str:
+            return f"job-{self._job.id}"
+
+        @cached_property
+        def job_group_perm_name(self) -> str:
+            return f"{self.job_group_name}-perm"
+
+        @cached_property
+        def group_list(self) -> List[str]:
+            return [self.job_group_name, self.job_group_perm_name]
+
+        def get_group_for_user(self, user: User) -> str:
+            return (
+                self.job_group_perm_name
+                if (
+                    self._job.user == user
+                    or self._job.user.membership.organization.user_has_membership(user)
+                )
+                else self.job_group_name
+            )
+
+    def connect(self) -> None:
+        logger.debug(f"{self.scope=}")
+        user: User = self.scope["user"]
+        job_id = self.scope["url_route"]["kwargs"]["job_id"]
+        logger.info(f"user: {user} requested the analysis for the job {job_id}")
+        try:
+            job = Job.objects.get(id=job_id)
+        except Job.DoesNotExist:
+            logger.error(f"user: {user} requested the non-existing job: {job_id}")
+            self.close(code=4040)
+        else:
+            self.accept()
+            subscribed_group = self.JobChannelGroups(job).get_group_for_user(user)
+            async_to_sync(self.channel_layer.group_add)(
+                subscribed_group,
+                self.channel_name,
+            )
+            logger.debug(f"user: {user} added to the group: {subscribed_group}")
+            JobConsumer.serialize_and_send_job(job)
+
+    def disconnect(self, close_code) -> None:
+        user: User = self.scope["user"]
+        job_id = self.scope["url_route"]["kwargs"]["job_id"]
+        try:
+            job = Job.objects.get(id=job_id)
+        except Job.DoesNotExist:
+            logger.warning(
+                f"ws closed by the user: {user} for a non-existing job. "
+                "This happens when a user tries to open a connection to a non-existing job"
+            )
+            subscribed_group = ""
+        else:
+            subscribed_group = self.JobChannelGroups(job).get_group_for_user(user)
+            async_to_sync(self.channel_layer.group_discard)(
+                subscribed_group,
+                self.channel_name,
+            )
+        logger.info(
+            f"user: {user} disconnected from the group: {subscribed_group}. 
" + f"Close code: {close_code}" + ) + self.close(code=close_code) + + def receive_json(self, content) -> None: + user: User = self.scope["user"] + logger.warning( + f"user {user} send {content} to the websocket, this shouldn't happen" + ) + + def send_job(self, event) -> None: + job_data = event["job"] + logger.debug(f"job data: {job_data}") + self.send_json(content=job_data) + if job_data["status"] in Status.final_statuses(): + logger.debug("job sent to the client and terminated, close ws") + self.close() + + @classmethod + def serialize_and_send_job(cls, job: Job) -> None: + # send data + groups = cls.JobChannelGroups(job) + groups_list = groups.group_list + channel_layer = get_channel_layer() + logger.debug( + f"send data for the job: {job.id} " f"to the groups: {groups_list}" + ) + for group in groups_list: + logger.debug(f"send data to the group: {group}") + job_serializer = WsJobSerializer( + job, context={"permissions": "perm" in group} + ) + job_data = job_serializer.data + async_to_sync(channel_layer.group_send)( + group, + {"type": "send.job", "job": job_data}, + ) diff --git a/api_app/websocket/__init__.py b/api_app/websocket/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/api_app/websocket/consumer.py b/api_app/websocket/consumer.py deleted file mode 100644 index dd9d05c5f6..0000000000 --- a/api_app/websocket/consumer.py +++ /dev/null @@ -1,75 +0,0 @@ -import logging - -from asgiref.sync import async_to_sync -from channels.generic.websocket import JsonWebsocketConsumer -from channels.layers import get_channel_layer - -from api_app.choices import Status -from api_app.models import Job -from api_app.serializers.job import WsJobSerializer -from api_app.websocket.models import JobChannel - -logger = logging.getLogger(__name__) - - -class JobConsumer(JsonWebsocketConsumer): - def connect(self) -> None: - logger.debug(f"{self.scope=}") - user = self.scope["user"] - job_id = self.scope["url_route"]["kwargs"]["job_id"] - logger.info(f"user: {user} requested the analysis for the job {job_id}") - try: - job = Job.objects.get(id=job_id) - except Job.DoesNotExist: - logger.error(f"user: {user} request the non-existing job: {job_id}") - self.close(code=4040) - else: - self.accept() - JobChannel.objects.create( - job_id=job_id, user=user, channel_name=self.channel_name - ) - JobConsumer.serialize_and_send_job(job) - - def disconnect(self, close_code) -> None: - user = self.scope["user"] - job_id = self.scope["url_route"]["kwargs"]["job_id"] - JobChannel.objects.filter(channel_name=self.channel_name).delete() - logger.info( - f"user: {user} disconnected for the job: {job_id}. 
Close code: {close_code}" - ) - self.close(code=close_code) - - def receive_json(self, content) -> None: - user = self.scope["user"] - logger.warning( - f"user {user} send {content} to the websocket, this shouldn't happen" - ) - - def send_job(self, event) -> None: - job_data = event["job"] - logger.debug(f"job data: {job_data}") - self.send_json(content=job_data) - if job_data["status"] in Status.final_statuses(): - logger.debug("job sent to the client and terminated, close ws") - self.close() - - @classmethod - def _generate_group_name(cls, job_id: int) -> str: - return f"job-{job_id}" - - @classmethod - def serialize_and_send_job(cls, job: Job) -> None: - # send data - channel_layer = get_channel_layer() - for channel in JobChannel.objects.filter(job_id=job.id): - logger.debug( - f"send data for the job: {job.id} " - f"to the user: {channel.user.username} " - f"over the channel: {channel.channel_name}" - ) - job_serializer = WsJobSerializer(job, context={"channel": channel}) - job_data = job_serializer.data - async_to_sync(channel_layer.send)( - channel.channel_name, - {"type": "send.job", "job": job_data}, - ) diff --git a/api_app/websocket/models.py b/api_app/websocket/models.py deleted file mode 100644 index 0a73739eb2..0000000000 --- a/api_app/websocket/models.py +++ /dev/null @@ -1,20 +0,0 @@ -"""In this file are available the models used to store the data about a channel""" - -from django.conf import settings -from django.db import models - - -class JobChannel(models.Model): - """Data stored about a job scan. - - * job_id is used to send data to all the channels waiting for the job: - multiple users waiting for the same job - * user is used to get the permission to kill or delete the job - """ - - job_id = models.PositiveIntegerField() - user = models.ForeignKey( - settings.AUTH_USER_MODEL, - on_delete=models.CASCADE, - ) - channel_name = models.CharField() diff --git a/intel_owl/asgi.py b/intel_owl/asgi.py index e8a57b7b88..0690db5586 100644 --- a/intel_owl/asgi.py +++ b/intel_owl/asgi.py @@ -13,7 +13,7 @@ get_asgi_application() # pylint: disable=wrong-import-position -from api_app.websocket.consumer import JobConsumer # noqa: E402 +from api_app.websocket import JobConsumer # noqa: E402 from intel_owl.middleware import WSAuthMiddleware # noqa: E402 application = ProtocolTypeRouter( diff --git a/intel_owl/tasks.py b/intel_owl/tasks.py index ea090916dc..11cbeab179 100644 --- a/intel_owl/tasks.py +++ b/intel_owl/tasks.py @@ -204,7 +204,7 @@ def update_notifications_with_releases(): @app.task(name="job_set_final_status", soft_time_limit=30) def job_set_final_status(job_id: int): from api_app.models import Job - from api_app.websocket.consumer import JobConsumer + from api_app.websocket import JobConsumer job = Job.objects.get(pk=job_id) # execute some callbacks @@ -255,7 +255,7 @@ def run_plugin( ): from api_app.classes import Plugin from api_app.models import Job, PythonModule - from api_app.websocket.consumer import JobConsumer + from api_app.websocket import JobConsumer logger.info( f"Configuring plugin {plugin_config_pk} for job {job_id} with task {task_id}" diff --git a/tests/api_app/websocket/test_websocket.py b/tests/api_app/websocket/test_websocket.py index 3efffea9f8..32bff1040b 100644 --- a/tests/api_app/websocket/test_websocket.py +++ b/tests/api_app/websocket/test_websocket.py @@ -12,7 +12,6 @@ from api_app.analyzers_manager.models import AnalyzerConfig from api_app.choices import ParamTypes from api_app.models import Job, Parameter, PluginConfig, PythonModule -from 
api_app.websocket.models import JobChannel from intel_owl.asgi import application from intel_owl.tasks import job_set_final_status, run_plugin @@ -63,16 +62,13 @@ def setUp(self) -> None: ) async def test_job_unauthorized(self): - self.assertEqual(await sync_to_async(JobChannel.objects.all().count)(), 0) self.assertEqual(await sync_to_async(Job.objects.filter(id=1027).count)(), 1) async with self.connect_communicator(1027) as (_, connected, subprotocol): self.assertFalse(connected) self.assertEqual(subprotocol, 1008) - self.assertEqual(await sync_to_async(JobChannel.objects.all().count)(), 0) async def test_job_not_exist(self): self.assertEqual(await sync_to_async(Job.objects.filter(id=1028).count)(), 0) - self.assertEqual(await sync_to_async(JobChannel.objects.all().count)(), 0) async with self.connect_communicator(1028, self.user) as ( _, connected, @@ -80,10 +76,8 @@ async def test_job_not_exist(self): ): self.assertFalse(connected) self.assertEqual(subprotocol, 4040) - self.assertEqual(await sync_to_async(JobChannel.objects.all().count)(), 0) async def test_job_terminated(self): - self.assertEqual(await sync_to_async(JobChannel.objects.all().count)(), 0) self.assertEqual(await sync_to_async(Job.objects.filter(id=1027).count)(), 1) async with self.connect_communicator(1027, self.user) as ( communicator, @@ -97,7 +91,6 @@ async def test_job_terminated(self): self.assertEqual( job_report["status"], Job.Status.REPORTED_WITHOUT_FAILS.value ) - self.assertEqual(await sync_to_async(JobChannel.objects.all().count)(), 0) async def test_job_running(self): # Note: Sometimes reading from ws (receive_json_from) is too fast: @@ -162,7 +155,6 @@ async def test_job_running(self): ) await sync_to_async(plugin_config.save)() - self.assertEqual(await sync_to_async(JobChannel.objects.all().count)(), 0) async with self.connect_communicator(1029, self.user) as ( communicator, connected, @@ -170,7 +162,6 @@ async def test_job_running(self): ): self.assertTrue(connected) time.sleep(1) - self.assertEqual(await sync_to_async(JobChannel.objects.all().count)(), 1) job_report_running = await communicator.receive_json_from() self.assertEqual(job_report_running["id"], 1029) self.assertEqual(job_report_running["observable_name"], "test.com") @@ -210,7 +201,6 @@ async def test_job_running(self): ) self.assertIsNotNone(job_report_terminated["analyzer_reports"]) self.assertIsNotNone(job_report_terminated["finished_analysis_time"]) - self.assertEqual(await sync_to_async(JobChannel.objects.all().count)(), 0) async def test_job_killed(self): await sync_to_async(Job.objects.create)( @@ -223,7 +213,6 @@ async def test_job_killed(self): await sync_to_async(self.client.force_login)(self.user) - self.assertEqual(await sync_to_async(JobChannel.objects.all().count)(), 0) time.sleep(1) async with self.connect_communicator(1030, self.user) as ( communicator, @@ -245,5 +234,3 @@ async def test_job_killed(self): self.assertEqual(job_killed["id"], 1030) self.assertEqual(job_killed["observable_name"], "test.com") self.assertEqual(job_killed["status"], Job.Status.KILLED.value) - - self.assertEqual(await sync_to_async(JobChannel.objects.all().count)(), 0) From 9e71337359c7bb257afb7c02b2fbebd034f65a07 Mon Sep 17 00:00:00 2001 From: Simone Berni Date: Fri, 8 Mar 2024 11:39:29 +0100 Subject: [PATCH 45/58] Update intel_owl/settings/celery.py Co-authored-by: Matteo Lodi <30625432+mlodic@users.noreply.github.com> --- intel_owl/settings/celery.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git 
a/intel_owl/settings/celery.py b/intel_owl/settings/celery.py index d24fd29a0f..bf8fa185d6 100644 --- a/intel_owl/settings/celery.py +++ b/intel_owl/settings/celery.py @@ -9,10 +9,11 @@ RESULT_BACKEND = "django-db" BROKER_URL = get_secret("BROKER_URL", None) -if not BROKER_URL and AWS_SQS: - BROKER_URL = "sqs://" -else: - BROKER_URL = "redis://redis:6379/1" # 0 is used by channels +if not BROKER_URL: + if AWS_SQS: + BROKER_URL = "sqs://" + else: + BROKER_URL = "redis://redis:6379/1" # 0 is used by channels DEFAULT_QUEUE = "default" BROADCAST_QUEUE = "broadcast" From 33c8ef943942d98bb585b1064698cb37c0c5f9c6 Mon Sep 17 00:00:00 2001 From: Daniele Rosetti Date: Fri, 8 Mar 2024 15:36:44 +0100 Subject: [PATCH 46/58] fixes --- api_app/apps.py | 3 ++ api_app/signals.py | 47 +++++++++++-------- api_app/websocket.py | 8 +--- authentication/serializers.py | 9 ++-- authentication/views.py | 2 +- intel_owl/settings/django.py | 2 +- intel_owl/settings/rest.py | 1 - .../api_app/{websocket => }/test_websocket.py | 10 ++-- tests/api_app/websocket/__init__.py | 0 9 files changed, 43 insertions(+), 39 deletions(-) rename tests/api_app/{websocket => }/test_websocket.py (97%) delete mode 100644 tests/api_app/websocket/__init__.py diff --git a/api_app/apps.py b/api_app/apps.py index 2817469e6b..e976ff00f0 100644 --- a/api_app/apps.py +++ b/api_app/apps.py @@ -9,3 +9,6 @@ class ApiAppConfig(AppConfig): name = "api_app" + + def ready(self): # skipcq: PYL-R0201 + from . import signals # noqa diff --git a/api_app/signals.py b/api_app/signals.py index 5ee37f6e75..bed512ce25 100644 --- a/api_app/signals.py +++ b/api_app/signals.py @@ -55,27 +55,36 @@ def post_delete_job(sender, instance: Job, **kwargs): instance.analysis.delete() -@receiver(models.signals.post_migrate, sender=BeatConfig) +@receiver(models.signals.post_migrate) def post_migrate_beat( - sender, app_config, verbosity, interactive, stdout, using, plan, apps, **kwargs + sender, + app_config, + verbosity, + interactive, + stdout=None, + using=None, + plan=None, + apps=None, + **kwargs, ): - from django_celery_beat.models import PeriodicTask - - from intel_owl.tasks import update - - for module in PythonModule.objects.filter(health_check_schedule__isnull=False): - for config in module.configs.filter(health_check_task__isnull=True): - config.generate_health_check_periodic_task() - for module in PythonModule.objects.filter( - update_schedule__isnull=False, update_task__isnull=True - ): - module.generate_update_periodic_task() - - for task in PeriodicTask.objects.filter( - enabled=True, task=f"{update.__module__}.{update.__name__}" - ): - task.enabled &= settings.REPO_DOWNLOADER_ENABLED - task.save() + if isinstance(sender, BeatConfig): + from django_celery_beat.models import PeriodicTask + + from intel_owl.tasks import update + + for module in PythonModule.objects.filter(health_check_schedule__isnull=False): + for config in module.configs.filter(health_check_task__isnull=True): + config.generate_health_check_periodic_task() + for module in PythonModule.objects.filter( + update_schedule__isnull=False, update_task__isnull=True + ): + module.generate_update_periodic_task() + + for task in PeriodicTask.objects.filter( + enabled=True, task=f"{update.__module__}.{update.__name__}" + ): + task.enabled &= settings.REPO_DOWNLOADER_ENABLED + task.save() @receiver(models.signals.post_save, sender=PluginConfig) diff --git a/api_app/websocket.py b/api_app/websocket.py index 53bfbf09c6..fdf9d2c8bd 100644 --- a/api_app/websocket.py +++ b/api_app/websocket.py @@ -81,18 
+81,12 @@ def disconnect(self, close_code) -> None: subscribed_group, self.channel_name, ) - logger.info( + logger.debug( f"user: {user} disconnected from the group: {subscribed_group}. " f"Close code: {close_code}" ) self.close(code=close_code) - def receive_json(self, content) -> None: - user: User = self.scope["user"] - logger.warning( - f"user {user} send {content} to the websocket, this shouldn't happen" - ) - def send_job(self, event) -> None: job_data = event["job"] logger.debug(f"job data: {job_data}") diff --git a/authentication/serializers.py b/authentication/serializers.py index a24b398ccc..815d490f83 100644 --- a/authentication/serializers.py +++ b/authentication/serializers.py @@ -244,10 +244,9 @@ class Meta: "created", ] - def create(self, validated_data): - user = self.context["request"].user + def validate(self, data): + user = self.context["user"] if Token.objects.filter(user=user).exists(): raise rfs.ValidationError("An API token was already issued to you.") - - validated_data["user"] = user - return super().create(validated_data) + data["user"] = user + return data diff --git a/authentication/views.py b/authentication/views.py index 2963577ef9..797f22c100 100644 --- a/authentication/views.py +++ b/authentication/views.py @@ -264,7 +264,7 @@ def get(self, request, *args, **kwargs): def post(self, request): # skipcq: PYL-R0201 username = request.user.username logger.info(f"user {username} send a request to create the API token") - serializer = TokenSerializer(data={}, context={"request": request}) + serializer = TokenSerializer(data={}, context={"user": request.user}) serializer.is_valid(raise_exception=True) serializer.save() return Response(serializer.data, status=status.HTTP_201_CREATED) diff --git a/intel_owl/settings/django.py b/intel_owl/settings/django.py index 46182deadc..9e30b0c70b 100644 --- a/intel_owl/settings/django.py +++ b/intel_owl/settings/django.py @@ -19,7 +19,7 @@ ] SESSION_COOKIE_HTTPONLY = True -SESSION_COOKIE_AGE = 60 * 60 * 24 * 90 # seconds * minutes * hours * days +SESSION_COOKIE_AGE = 60 * 60 * 24 * 14 # seconds * minutes * hours * days if DEBUG: MIDDLEWARE.append("silk.middleware.SilkyMiddleware") diff --git a/intel_owl/settings/rest.py b/intel_owl/settings/rest.py index 949fedc6ff..0df114beda 100644 --- a/intel_owl/settings/rest.py +++ b/intel_owl/settings/rest.py @@ -15,7 +15,6 @@ "DEFAULT_AUTHENTICATION_CLASSES": [ "rest_framework.authentication.TokenAuthentication", "rest_framework.authentication.SessionAuthentication", - "rest_framework.authentication.BasicAuthentication", ], # Pagination "DEFAULT_PAGINATION_CLASS": "certego_saas.ext.pagination.CustomPageNumberPagination", diff --git a/tests/api_app/websocket/test_websocket.py b/tests/api_app/test_websocket.py similarity index 97% rename from tests/api_app/websocket/test_websocket.py rename to tests/api_app/test_websocket.py index 32bff1040b..419fca9e54 100644 --- a/tests/api_app/websocket/test_websocket.py +++ b/tests/api_app/test_websocket.py @@ -61,13 +61,13 @@ def setUp(self) -> None: observable_classification=ObservableTypes.IP, ) - async def test_job_unauthorized(self): + async def test_job_unauthorized(self, *args, **kwargs): self.assertEqual(await sync_to_async(Job.objects.filter(id=1027).count)(), 1) async with self.connect_communicator(1027) as (_, connected, subprotocol): self.assertFalse(connected) self.assertEqual(subprotocol, 1008) - async def test_job_not_exist(self): + async def test_job_not_exist(self, *args, **kwargs): self.assertEqual(await 
sync_to_async(Job.objects.filter(id=1028).count)(), 0) async with self.connect_communicator(1028, self.user) as ( _, @@ -77,7 +77,7 @@ async def test_job_not_exist(self): self.assertFalse(connected) self.assertEqual(subprotocol, 4040) - async def test_job_terminated(self): + async def test_job_terminated(self, *args, **kwargs): self.assertEqual(await sync_to_async(Job.objects.filter(id=1027).count)(), 1) async with self.connect_communicator(1027, self.user) as ( communicator, @@ -92,7 +92,7 @@ async def test_job_terminated(self): job_report["status"], Job.Status.REPORTED_WITHOUT_FAILS.value ) - async def test_job_running(self): + async def test_job_running(self, *args, **kwargs): # Note: Sometimes reading from ws (receive_json_from) is too fast: # it happens before other part of code send data. # The test will be blocked waiting a response from ws that already happened. @@ -202,7 +202,7 @@ async def test_job_running(self): self.assertIsNotNone(job_report_terminated["analyzer_reports"]) self.assertIsNotNone(job_report_terminated["finished_analysis_time"]) - async def test_job_killed(self): + async def test_job_killed(self, *args, **kwargs): await sync_to_async(Job.objects.create)( id=1030, user=self.user, diff --git a/tests/api_app/websocket/__init__.py b/tests/api_app/websocket/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 From 07e7fad6c76db08d0bfcdf5d2cefcef54716d37f Mon Sep 17 00:00:00 2001 From: Daniele Rosetti Date: Fri, 8 Mar 2024 17:27:38 +0100 Subject: [PATCH 47/58] update comment --- intel_owl/asgi.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/intel_owl/asgi.py b/intel_owl/asgi.py index 0690db5586..8c9e8cfb99 100644 --- a/intel_owl/asgi.py +++ b/intel_owl/asgi.py @@ -18,7 +18,7 @@ application = ProtocolTypeRouter( { - # WebSocket chat handler + # websocket protocol routing "websocket": AllowedHostsOriginValidator( AuthMiddlewareStack( WSAuthMiddleware( From cc18efbeb47a67ad71c6231fb2ea1499e2ae69f7 Mon Sep 17 00:00:00 2001 From: Matteo Lodi <30625432+mlodic@users.noreply.github.com> Date: Mon, 11 Mar 2024 10:17:41 +0100 Subject: [PATCH 48/58] Update docs/source/Advanced-Configuration.md --- docs/source/Advanced-Configuration.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/Advanced-Configuration.md b/docs/source/Advanced-Configuration.md index e232f99ec9..5817559501 100644 --- a/docs/source/Advanced-Configuration.md +++ b/docs/source/Advanced-Configuration.md @@ -177,9 +177,9 @@ Then you need to configure permission access to the chosen S3 bucket. #### Message Broker IntelOwl at the moment supports 3 different message brokers: +- Redis (default) - RabbitMQ - Aws SQS -- Redis The default broker, if nothing is specified, is `Redis`. From 0640541ed4ba2664da7c43e256bfaf853f87f0b0 Mon Sep 17 00:00:00 2001 From: Matteo Lodi <30625432+mlodic@users.noreply.github.com> Date: Mon, 11 Mar 2024 10:30:34 +0100 Subject: [PATCH 49/58] updated v6 doc --- docs/source/Installation.md | 24 +++++++++++++++++++++--- 1 file changed, 21 insertions(+), 3 deletions(-) diff --git a/docs/source/Installation.md b/docs/source/Installation.md index 130ce2736b..f805c9e0ee 100644 --- a/docs/source/Installation.md +++ b/docs/source/Installation.md @@ -294,9 +294,27 @@ Below you can find the additional process required to upgrade from each major ve IntelOwl v6 introduced some major changes regarding how the project is started. 
Before upgrading, some important things should be checked by the administrator:
* Docker Compose V1 support has been dropped project-wide. If you are still using a Compose version prior to v2.3.4, please [upgrade](https://docs.docker.com/compose/migrate/) to a newer version or install Docker Compose V2.
-* IntelOwl is now started with the new Bash `start` script that has the same options as the old Python `start.py` script but is more manageable and has decreased the overall project dependencies. The `start.py` script has now been removed.
+* IntelOwl is now started with the new Bash `start` script that has the same options as the old Python `start.py` script but is more manageable and decreases the overall project dependencies. The `start.py` script has now been removed. Please use the new `start` script instead.
+* The default message broker is now Redis. We have replaced Rabbit-MQ with Redis to support websockets in the application (see the configuration sketch after this list):
+  * This change is transparent if you use our `start` script to run IntelOwl: it spawns a local Redis instance instead of a Rabbit-MQ one.
+  * If you were using an external broker like AWS SQS or a managed Rabbit-MQ, they are still supported, but we suggest moving to a Redis-based service to simplify the architecture (Redis is now mandatory for websockets).
+* We upgraded the base PostgreSQL image from version 12 to version 16. You have two choices:
+  * remove your current database and start from scratch with a new one
+  * keep your database and do not update Postgres. This could break the application at any time, because we no longer support it.
+  * if you want to keep your old DB, follow the migration procedure you can find below
+
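For the broker change, a minimal sketch of the relevant environment variables, mirroring the `docker/env_file_app_ci` values later in this series (the `redis` hostname assumes the compose service spawned by the `start` script; adapt host, port, and database index to your deployment):

```bash
# Broker/websocket environment sketch -- values mirror docker/env_file_app_ci;
# the "redis" hostname assumes the bundled compose service, adapt as needed.
BROKER_URL=redis://redis:6379       # Celery broker (settings fall back to db 1; db 0 is used by channels)
WEBSOCKETS_URL=redis://redis:6379   # backend for the Django Channels layer
```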
+<div class="admonition warning">
+<p class="admonition-title">Warning</p>
+CARE! We are providing this database migration procedure to help users migrate to a new PostgreSQL version.
+
+Upgrading PostgreSQL is outside the scope of the IntelOwl project, so we do not guarantee that everything will work as intended.
+
+In case of doubt, please check the official PostgreSQL documentation.
+
+Upgrade at your own risk.
+</div>
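At its core, the procedure below is a dump from the old server followed by a restore into the new one. A hypothetical shell sketch (the old container's name and the dump step are assumptions; only the restore commands appear verbatim in the procedure):

```bash
# Hypothetical dump/restore sketch; follow the official step list below.
# The old container name (intelowl_postgres) and pg_dumpall usage are assumptions.
docker exec intelowl_postgres pg_dumpall -U "$POSTGRES_USER" > /tmp/dump_postgres.sql
cat /tmp/dump_postgres.sql | docker exec -i intelowl_postgres_16 psql -U "$POSTGRES_USER" -d "$POSTGRES_DB"
docker rm intelowl_postgres_16
```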
+
+The database migration procedure is as follows:
- You have IntelOwl version 5.x.x up and running
- Bring down the application (you can use the start script or manually concatenate your docker compose configuration )
- Go inside the docker folder `cd docker`
@@ -308,7 +326,7 @@ The migration procedure is as follows:
- Add the data to the volume `cat /tmp/dump_postgres.sql | docker exec -i intelowl_postgres_16 psql -U $POSTGRES_USER -d $POSTGRES_DB`
- Remove the intermediary container `docker rm intelowl_postgres_16`
- Update IntelOwl to the latest version
-- Bring up the application back again (you can use the start script or manually concatenate your docker compose configuration )
+- Bring up the application back again (you can use the start script or manually concatenate your docker compose configuration)

#### Updating to >=5.0.0 from a 4.x.x version

From 9989d9ed3abcebc629db96bc24f16fc05ce39861 Mon Sep 17 00:00:00 2001
From: 0ssigeno
Date: Mon, 11 Mar 2024 10:37:01 +0100
Subject: [PATCH 50/58] Fixes

Signed-off-by: 0ssigeno
---
 docker/ci.override.yml | 5 -----
 intel_owl/settings/websocket.py | 4 ++--
 2 files changed, 2 insertions(+), 7 deletions(-)

diff --git a/docker/ci.override.yml b/docker/ci.override.yml
index 37b00f95da..5f7e5026c1 100644
--- a/docker/ci.override.yml
+++ b/docker/ci.override.yml
@@ -38,9 +38,4 @@ services:
     env_file:
       - env_file_app_ci

-volumes:
-  postgres_data:
-  nginx_logs:
-  generic_logs:
-  shared_files:
diff --git a/intel_owl/settings/websocket.py b/intel_owl/settings/websocket.py
index 4e268d7beb..1826f03562 100644
--- a/intel_owl/settings/websocket.py
+++ b/intel_owl/settings/websocket.py
@@ -2,10 +2,10 @@
 from intel_owl import secrets

-websockets_url = secrets.get_secret("WEBSOCKETS_URL", None)
+websockets_url = secrets.get_secret("WEBSOCKETS_URL", "redis://redis:6379")
 if not websockets_url:
     if socket.gethostname() in ["uwsgi", "daphne"]:
-        raise RuntimeError("Unable to configure websockets")
+        raise RuntimeError("Unable to configure websockets. 
Please set WEBSOCKETS_URL") else: ASGI_APPLICATION = "intel_owl.asgi.application" CHANNEL_LAYERS = { From a972284736f95b3ffdd6386271db6d9faf8496a3 Mon Sep 17 00:00:00 2001 From: Daniele Rosetti Date: Mon, 11 Mar 2024 10:47:38 +0100 Subject: [PATCH 51/58] removed redis cache setting --- intel_owl/settings/cache.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/intel_owl/settings/cache.py b/intel_owl/settings/cache.py index 62ff2ac218..21b487774d 100644 --- a/intel_owl/settings/cache.py +++ b/intel_owl/settings/cache.py @@ -35,8 +35,6 @@ def get_where(self, starts_with: str, version=None) -> Dict[str, Any]: return self.get_many([row[0] for row in rows], version=version) -DJANGO_REDIS_IGNORE_EXCEPTIONS = True - CACHES = { "default": { "BACKEND": "intel_owl.settings.cache.DatabaseCacheExtended", From 902810a6c9323830c394c5f6955e3aa64dcf3e2f Mon Sep 17 00:00:00 2001 From: 0ssigeno Date: Mon, 11 Mar 2024 10:50:21 +0100 Subject: [PATCH 52/58] Added daphne file Signed-off-by: 0ssigeno --- docker/Dockerfile | 1 + 1 file changed, 1 insertion(+) diff --git a/docker/Dockerfile b/docker/Dockerfile index d8eff4948c..d4716e8a88 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -65,6 +65,7 @@ RUN touch ${LOG_PATH}/django/api_app.log ${LOG_PATH}/django/api_app_errors.log \ && touch ${LOG_PATH}/django/django_auth_ldap.log ${LOG_PATH}/django/django_errors.log \ && touch ${LOG_PATH}/django/certego_saas.log ${LOG_PATH}/django/certego_saas_errors.log \ && touch ${LOG_PATH}/django/authentication.log ${LOG_PATH}/django/authentication_errors.log \ + && touch ${LOG_PATH}/asgi/daphne.log \ && chown -R www-data:www-data ${LOG_PATH} /opt/deploy/ \ # this is cause stringstifer creates this directory during the build and cause celery to crash && rm -rf /root/.local \ From de5831cf799dbf9a4c6001f5551ea4285207cbfb Mon Sep 17 00:00:00 2001 From: 0ssigeno Date: Mon, 11 Mar 2024 11:18:59 +0100 Subject: [PATCH 53/58] More logs Signed-off-by: 0ssigeno --- .github/workflows/pull_request_automation.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/pull_request_automation.yml b/.github/workflows/pull_request_automation.yml index d10815696b..a9ec000403 100644 --- a/.github/workflows/pull_request_automation.yml +++ b/.github/workflows/pull_request_automation.yml @@ -106,6 +106,7 @@ jobs: if: always() run: | docker ps -a + docker logs intelowl_daphne docker logs intelowl_uwsgi - name: Setup coverage From 034796dee2d4e95e24e18f7266a8fc1983caa99f Mon Sep 17 00:00:00 2001 From: 0ssigeno Date: Mon, 11 Mar 2024 11:51:04 +0100 Subject: [PATCH 54/58] Fixes Signed-off-by: 0ssigeno --- .github/workflows/pull_request_automation.yml | 2 +- docker/default.yml | 3 +++ docker/env_file_app_ci | 4 ++-- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/.github/workflows/pull_request_automation.yml b/.github/workflows/pull_request_automation.yml index a9ec000403..7864faa5da 100644 --- a/.github/workflows/pull_request_automation.yml +++ b/.github/workflows/pull_request_automation.yml @@ -106,8 +106,8 @@ jobs: if: always() run: | docker ps -a - docker logs intelowl_daphne docker logs intelowl_uwsgi + docker logs intelowl_daphne - name: Setup coverage run: | diff --git a/docker/default.yml b/docker/default.yml index 5bcd18df49..531666090e 100644 --- a/docker/default.yml +++ b/docker/default.yml @@ -45,6 +45,9 @@ services: timeout: 2s start_period: 90s retries: 5 + depends_on: + uwsgi: + condition: service_healthy nginx: image: intelowlproject/intelowl_nginx:${REACT_APP_INTELOWL_VERSION} diff --git 
a/docker/env_file_app_ci b/docker/env_file_app_ci index 454327fde9..0d10b6b9e4 100644 --- a/docker/env_file_app_ci +++ b/docker/env_file_app_ci @@ -56,8 +56,8 @@ DJANGO_SETTINGS_MODULE=intel_owl.settings COVERAGE_PROCESS_START=.coveragerc # broker configuration -BROKER_URL=amqp://guest:guest@rabbitmq:5672 - +BROKER_URL=redis://redis:6379 +WEBSOCKETS_URL=redis://redis:6379 # drf-recaptcha USE_RECAPTCHA=False From 86b2519013ad855090d9160eada660dbd6660ea9 Mon Sep 17 00:00:00 2001 From: Daniele Rosetti Date: Mon, 11 Mar 2024 15:57:53 +0100 Subject: [PATCH 55/58] deep source --- api_app/serializers/job.py | 6 +++--- api_app/views.py | 5 ++++- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/api_app/serializers/job.py b/api_app/serializers/job.py index 008c478c4b..f307692652 100644 --- a/api_app/serializers/job.py +++ b/api_app/serializers/job.py @@ -132,7 +132,7 @@ class Meta: required=False, ) - def validate_runtime_configuration(self, runtime_config: Dict): + def validate_runtime_configuration(self, runtime_config: Dict): # skipcq: PYL-R0201 from api_app.validators import validate_runtime_configuration if not runtime_config: @@ -144,13 +144,13 @@ def validate_runtime_configuration(self, runtime_config: Dict): raise ValidationError({"detail": "Runtime Configuration Validation Failed"}) return runtime_config - def validate_tags_labels(self, tags_labels): + def validate_tags_labels(self, tags_labels): # skipcq: PYL-R0201 for label in tags_labels: yield Tag.objects.get_or_create( label=label, defaults={"color": gen_random_colorhex()} )[0] - def validate_tlp(self, tlp: str): + def validate_tlp(self, tlp: str): # skipcq: PYL-R0201 if tlp == "WHITE": return TLP.CLEAR.value return tlp diff --git a/api_app/views.py b/api_app/views.py index 775d16b959..827153d2ca 100644 --- a/api_app/views.py +++ b/api_app/views.py @@ -631,7 +631,10 @@ def get_permissions(self): if self.action in ["destroy", "update"]: if self.request.method == "PUT": raise PermissionDenied() - permissions.append((IsObjectAdminPermission | IsObjectOwnerPermission)()) + # code quality checker marks this as error, but it works correctly + permissions.append( + (IsObjectAdminPermission | IsObjectOwnerPermission)() + ) # skipcq: PYL-E1102 return permissions From aa3e4472dffc8cebbc46e5b3cc4170cd62022778 Mon Sep 17 00:00:00 2001 From: Daniele Rosetti Date: Mon, 11 Mar 2024 16:03:05 +0100 Subject: [PATCH 56/58] fix --- api_app/views.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/api_app/views.py b/api_app/views.py index 827153d2ca..7505fb4019 100644 --- a/api_app/views.py +++ b/api_app/views.py @@ -633,8 +633,10 @@ def get_permissions(self): raise PermissionDenied() # code quality checker marks this as error, but it works correctly permissions.append( - (IsObjectAdminPermission | IsObjectOwnerPermission)() - ) # skipcq: PYL-E1102 + ( + IsObjectAdminPermission | IsObjectOwnerPermission + )() # skipcq: PYL-E1102 + ) return permissions From 27d327bb2099ca2f50f82e8e03d11f1bba23443e Mon Sep 17 00:00:00 2001 From: Daniele Rosetti Date: Mon, 11 Mar 2024 16:08:13 +0100 Subject: [PATCH 57/58] fix --- api_app/views.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/api_app/views.py b/api_app/views.py index 7505fb4019..5239630f69 100644 --- a/api_app/views.py +++ b/api_app/views.py @@ -633,9 +633,9 @@ def get_permissions(self): raise PermissionDenied() # code quality checker marks this as error, but it works correctly permissions.append( - ( + ( # skipcq: PYL-E1102 IsObjectAdminPermission | 
IsObjectOwnerPermission - )() # skipcq: PYL-E1102 + )() ) return permissions From e5a7d25ea097629e84667e05a5d62526e1a30265 Mon Sep 17 00:00:00 2001 From: Daniele Rosetti Date: Mon, 11 Mar 2024 18:12:44 +0100 Subject: [PATCH 58/58] fix --- api_app/websocket.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/api_app/websocket.py b/api_app/websocket.py index fdf9d2c8bd..a5eaf9e76f 100644 --- a/api_app/websocket.py +++ b/api_app/websocket.py @@ -10,6 +10,7 @@ from api_app.choices import Status from api_app.models import Job from api_app.serializers.job import WsJobSerializer +from certego_saas.apps.organization.membership import Membership User = get_user_model() @@ -35,12 +36,15 @@ def group_list(self) -> List[str]: return [self.job_group_name, self.job_group_perm_name] def get_group_for_user(self, user: User) -> str: + try: + is_member = self._job.user.membership.organization.user_has_membership( + user + ) + except Membership.DoesNotExist: + is_member = False return ( self.job_group_perm_name - if ( - self._job.user == user - or self._job.user.membership.organization.user_has_membership(user) - ) + if self._job.user == user or is_member else self.job_group_name )
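To close the loop on the websocket flow these patches build up: when a job's analysis terminates, a producer has to publish the serialized job to the channel-layer group that `JobConsumer` subscribed the client to, and Channels then routes the message to `send_job`. A minimal producer-side sketch, assuming a hypothetical group-name format and call site (neither is spelled out in these patches):

```python
# Hypothetical producer-side sketch: push a finished job to its websocket group.
# The group name format is an assumption; the real names come from group_list().
# Channels routes "type": "send.job" to JobConsumer.send_job(event).
from asgiref.sync import async_to_sync
from channels.layers import get_channel_layer


def publish_job_update(job_id: int, job_data: dict) -> None:
    channel_layer = get_channel_layer()
    async_to_sync(channel_layer.group_send)(
        f"job-{job_id}",
        {"type": "send.job", "job": job_data},
    )
```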