diff --git a/api_app/analyzers_manager/migrations/0071_analyzer_config_greynoise_labs.py b/api_app/analyzers_manager/migrations/0071_analyzer_config_greynoise_labs.py
new file mode 100644
index 0000000000..1399a6132a
--- /dev/null
+++ b/api_app/analyzers_manager/migrations/0071_analyzer_config_greynoise_labs.py
@@ -0,0 +1,135 @@
+from django.db import migrations
+from django.db.models.fields.related_descriptors import (
+    ForwardManyToOneDescriptor,
+    ForwardOneToOneDescriptor,
+    ManyToManyDescriptor,
+)
+
+plugin = {
+    "python_module": {
+        "health_check_schedule": None,
+        "update_schedule": {
+            "minute": "0",
+            "hour": "*/6",
+            "day_of_week": "*",
+            "day_of_month": "*",
+            "month_of_year": "*",
+        },
+        "module": "greynoise_labs.GreynoiseLabs",
+        "base_path": "api_app.analyzers_manager.observable_analyzers",
+    },
+    "name": "Greynoise_Labs",
+    "description": "scan an IP against the [Greynoise Labs API](https://www.greynoise.io/) (requires an authentication token obtained from cookies on the Greynoise website)",
+    "disabled": False,
+    "soft_time_limit": 60,
+    "routing_key": "default",
+    "health_check_status": True,
+    "type": "observable",
+    "docker_based": False,
+    "maximum_tlp": "RED",
+    "observable_supported": ["ip"],
+    "supported_filetypes": [],
+    "run_hash": False,
+    "run_hash_type": "",
+    "not_supported_filetypes": [],
+    "model": "analyzers_manager.AnalyzerConfig",
+}
+
+params = [
+    {
+        "python_module": {
+            "module": "greynoise_labs.GreynoiseLabs",
+            "base_path": "api_app.analyzers_manager.observable_analyzers",
+        },
+        "name": "auth_token",
+        "type": "str",
+        "description": "Authentication token obtained from cookies on the Greynoise website.",
+        "is_secret": True,
+        "required": True,
+    }
+]
+
+values = []
+
+
+def _get_real_obj(Model, field, value):
+    def _get_obj(Model, other_model, value):
+        if isinstance(value, dict):
+            real_vals = {}
+            for key, real_val in value.items():
+                real_vals[key] = _get_real_obj(other_model, key, real_val)
+            value = other_model.objects.get_or_create(**real_vals)[0]
+        # it is just the primary key serialized
+        else:
+            if isinstance(value, int):
+                if Model.__name__ == "PluginConfig":
+                    value = other_model.objects.get(name=plugin["name"])
+                else:
+                    value = other_model.objects.get(pk=value)
+            else:
+                value = other_model.objects.get(name=value)
+        return value
+
+    if (
+        type(getattr(Model, field))
+        in [ForwardManyToOneDescriptor, ForwardOneToOneDescriptor]
+        and value
+    ):
+        other_model = getattr(Model, field).get_queryset().model
+        value = _get_obj(Model, other_model, value)
+    elif type(getattr(Model, field)) in [ManyToManyDescriptor] and value:
+        other_model = getattr(Model, field).rel.model
+        value = [_get_obj(Model, other_model, val) for val in value]
+    return value
+
+
+def _create_object(Model, data):
+    mtm, no_mtm = {}, {}
+    for field, value in data.items():
+        value = _get_real_obj(Model, field, value)
+        if type(getattr(Model, field)) is ManyToManyDescriptor:
+            mtm[field] = value
+        else:
+            no_mtm[field] = value
+    try:
+        o = Model.objects.get(**no_mtm)
+    except Model.DoesNotExist:
+        o = Model(**no_mtm)
+        o.full_clean()
+        o.save()
+        for field, value in mtm.items():
+            attribute = getattr(o, field)
+            if value is not None:
+                attribute.set(value)
+        return False
+    return True
+
+
+def migrate(apps, schema_editor):
+    Parameter = apps.get_model("api_app", "Parameter")
+    PluginConfig = apps.get_model("api_app", "PluginConfig")
+    python_path = plugin.pop("model")
+    Model = apps.get_model(*python_path.split("."))
+    if not Model.objects.filter(name=plugin["name"]).exists():
+        exists = _create_object(Model, plugin)
+        if not exists:
+            for param in params:
+                _create_object(Parameter, param)
+            for value in values:
+                _create_object(PluginConfig, value)
+
+
+def reverse_migrate(apps, schema_editor):
+    python_path = plugin.pop("model")
+    Model = apps.get_model(*python_path.split("."))
+    Model.objects.get(name=plugin["name"]).delete()
+
+
+class Migration(migrations.Migration):
+    atomic = False
+    dependencies = [
+        ("api_app", "0061_job_depth_analysis"),
+        ("analyzers_manager", "0071_analyzer_config_tor_nodes_danmeuk"),
+    ]
+
+    operations = [migrations.RunPython(migrate, reverse_migrate)]
diff --git a/api_app/analyzers_manager/observable_analyzers/greynoise_labs.py b/api_app/analyzers_manager/observable_analyzers/greynoise_labs.py
new file mode 100644
index 0000000000..9a6660c23f
--- /dev/null
+++ b/api_app/analyzers_manager/observable_analyzers/greynoise_labs.py
@@ -0,0 +1,218 @@
+# This file is a part of IntelOwl https://github.com/intelowlproject/IntelOwl
+# See the file 'LICENSE' for copying permission.
+
+import logging
+import os
+
+import requests
+from django.conf import settings
+
+from api_app.analyzers_manager.classes import ObservableAnalyzer
+from api_app.analyzers_manager.exceptions import AnalyzerRunException
+from api_app.models import PluginConfig
+from tests.mock_utils import MockUpResponse, if_mock_connections, patch
+
+logger = logging.getLogger(__name__)
+
+url = "https://api.labs.greynoise.io/1/query"
+db_name = "topc2s_ips.txt"
+db_location = f"{settings.MEDIA_ROOT}/{db_name}"
+
+queries = {
+    "noiserank": {
+        "query_string": "query NoiseRank($ip: String) { noiseRank(ip: $ip) \
+            { queryInfo { resultsAvailable resultsLimit } ips { ip noise_score \
+            sensor_pervasiveness country_pervasiveness payload_diversity \
+            port_diversity request_rate } } }",
+        "ip_required": True,
+    },
+    "topknocks": {
+        "query_string": "query TopKnocks($ip: String) { topKnocks(ip: $ip) \
+            { queryInfo { resultsAvailable resultsLimit } knock { last_crawled \
+            last_seen source_ip knock_port title favicon_mmh3_32 \
+            favicon_mmh3_128 jarm ips emails links tor_exit headers apps } } } ",
+        "ip_required": True,
+    },
+    "topc2s": {
+        "query_string": "query TopC2s { topC2s { queryInfo \
+            { resultsAvailable resultsLimit } c2s { source_ip c2_ips \
+            c2_domains payload hits pervasiveness } } } ",
+        "ip_required": False,
+        "db_location": db_location,
+    },
+}
+
+
+class GreynoiseLabs(ObservableAnalyzer):
+    _auth_token: str
+
+    def run(self):
+        result = {}
+        headers = {
+            "Content-Type": "application/json",
+            "Authorization": f"Bearer {self._auth_token}",
+        }
+
+        try:
+            for key, value in queries.items():
+                if not value["ip_required"]:
+                    if not os.path.isfile(value["db_location"]) and not self.update():
+                        error_message = f"Failed extraction from {key} db"
+                        self.report.errors.append(error_message)
+                        self.report.save()
+                        logger.error(error_message)
+                        continue
+
+                    with open(value["db_location"], "r", encoding="utf-8") as f:
+                        db = f.read()
+
+                    db_list = db.split("\n")
+                    if self.observable_name in db_list:
+                        result[key] = {"found": True}
+                    else:
+                        result[key] = {"found": False}
+                    continue
+
+                json_body = {
+                    "query": value["query_string"],
+                    "variables": {"ip": f"{self.observable_name}"},
+                }
+                response = requests.post(headers=headers, json=json_body, url=url)
+                response.raise_for_status()
+                result[key] = response.json()
+        except requests.RequestException as e:
+            raise AnalyzerRunException(e)
+
+        return result
+
+    @classmethod
+    def _get_auth_token(cls):
+        for plugin in PluginConfig.objects.filter(
+            parameter__python_module=cls.python_module,
+            parameter__is_secret=True,
+            parameter__name="auth_token",
+        ):
+            if plugin.value:
+                return plugin.value
+        return None
+
+    @classmethod
+    def _update_db(cls, auth_token: str):
+        headers = {
+            "Content-Type": "application/json",
+            "Authorization": f"Bearer {auth_token}",
+        }
+
+        try:
+            logger.info("Fetching data from greynoise API (Greynoise_Labs).....")
+            response = requests.post(
+                headers=headers,
+                json={"query": queries["topc2s"]["query_string"]},
+                url=url,
+            )
+            response.raise_for_status()
+            topc2s_data = response.json()
+
+            with open(db_location, "w", encoding="utf-8") as f:
+                for value in topc2s_data["data"]["topC2s"]["c2s"]:
+                    ip = value["source_ip"]
+                    if ip:
+                        f.write(f"{ip}\n")
+
+            if not os.path.exists(db_location):
+                return False
+
+            logger.info("Data fetched from greynoise API (Greynoise_Labs).....")
+            return True
+        except Exception as e:
+            logger.exception(e)
+
+    @classmethod
+    def update(cls):
+        auth_token = cls._get_auth_token()
+        return cls._update_db(auth_token=auth_token)
+
+    @classmethod
+    def _monkeypatch(cls):
+        patches = [
+            if_mock_connections(
+                patch(
+                    "requests.post",
+                    side_effect=[
+                        MockUpResponse(
+                            {
+                                "data": {
+                                    "noiseRank": {
+                                        "queryInfo": {
+                                            "resultsAvailable": 1,
+                                            "resultsLimit": 1,
+                                        },
+                                        "ips": [
+                                            {
+                                                "ip": "20.235.249.22",
+                                                "noise_score": 12,
+                                                "sensor_pervasiveness": "very low",
+                                                "country_pervasiveness": "low",
+                                                "payload_diversity": "very low",
+                                                "port_diversity": "very low",
+                                                "request_rate": "low",
+                                            }
+                                        ],
+                                    }
+                                }
+                            },
+                            200,
+                        ),
+                        MockUpResponse(
+                            {
+                                "data": {
+                                    "topKnocks": {
+                                        "queryInfo": {
+                                            "resultsAvailable": 1,
+                                            "resultsLimit": 1,
+                                        },
+                                    }
+                                }
+                            },
+                            200,
+                        ),
+                        MockUpResponse(
+                            {
+                                "data": {
+                                    "topC2s": {
+                                        "queryInfo": {
+                                            "resultsAvailable": 1914,
+                                            "resultsLimit": 191,
+                                        },
+                                        "c2s": [
+                                            {
+                                                "source_ip": "91.92.247.12",
+                                                "c2_ips": ["103.245.236.120"],
+                                                "c2_domains": [],
+                                                "hits": 11608,
+                                            },
+                                            {
+                                                "source_ip": "14.225.208.190",
+                                                "c2_ips": ["14.225.213.142"],
+                                                "c2_domains": [],
+                                                "hits": 2091,
+                                                "pervasiveness": 26,
+                                            },
+                                            {
+                                                "source_ip": "157.10.53.101",
+                                                "c2_ips": ["14.225.208.190"],
+                                                "c2_domains": [],
+                                                "hits": 1193,
+                                                "pervasiveness": 23,
+                                            },
+                                        ],
+                                    },
+                                },
+                            },
+                            200,
+                        ),
+                    ],
+                )
+            )
+        ]
+        return super()._monkeypatch(patches=patches)
diff --git a/docs/source/Usage.md b/docs/source/Usage.md
index 13573cb267..ba9ae9e890 100644
--- a/docs/source/Usage.md
+++ b/docs/source/Usage.md
@@ -249,6 +249,7 @@ The following is the list of the available analyzers you can run out-of-the-box.
 * `GreedyBear`: scan an IP or a domain against the [GreedyBear](https://greedybear.honeynet.org/) API (requires API key)
 * `GreyNoise`: scan an IP against the [Greynoise](https://www.greynoise.io/) API (requires API key)
 * `GreyNoiseCommunity`: scan an IP against the [Community Greynoise API](https://www.greynoise.io/) (requires API key))
+* `Greynoise_Labs`: scan an IP against the [Greynoise Labs API](https://www.greynoise.io/) (requires an authentication token, which can be obtained from cookies on the Greynoise website after launching the playground from [here](https://api.labs.greynoise.io/))
 * `HashLookupServer_Get_Observable`: check if a md5 or sha1 is available in the database of [known file hosted by CIRCL](https://github.com/adulau/hashlookup-server)
 * `HoneyDB_Get`: [HoneyDB](https://honeydb.io/) IP lookup service
 * `HoneyDB_Scan_Twitter`: scan an IP against HoneyDB.io's Twitter Threat Feed
diff --git a/tests/test_crons.py b/tests/test_crons.py
index 8777760133..2dc9770674 100644
--- a/tests/test_crons.py
+++ b/tests/test_crons.py
@@ -10,6 +10,7 @@
 from api_app.analyzers_manager.file_analyzers import quark_engine, yara_scan
 from api_app.analyzers_manager.observable_analyzers import (
     feodo_tracker,
+    greynoise_labs,
     maxmind,
     phishing_army,
     talos,
@@ -148,3 +149,46 @@ def test_quark_updater(self):
     def test_yara_updater(self):
         yara_scan.YaraScan.update()
         self.assertTrue(len(os.listdir(settings.YARA_RULES_PATH)))
+
+    @if_mock_connections(
+        patch(
+            "requests.post",
+            return_value=MockUpResponse(
+                {
+                    "data": {
+                        "topC2s": {
+                            "queryInfo": {
+                                "resultsAvailable": 1914,
+                                "resultsLimit": 191,
+                            },
+                            "c2s": [
+                                {
+                                    "source_ip": "91.92.247.12",
+                                    "c2_ips": ["103.245.236.120"],
+                                    "c2_domains": [],
+                                    "hits": 11608,
+                                },
+                                {
+                                    "source_ip": "14.225.208.190",
+                                    "c2_ips": ["14.225.213.142"],
+                                    "c2_domains": [],
+                                    "hits": 2091,
+                                    "pervasiveness": 26,
+                                },
+                                {
+                                    "source_ip": "157.10.53.101",
+                                    "c2_ips": ["14.225.208.190"],
+                                    "c2_domains": [],
+                                    "hits": 1193,
+                                    "pervasiveness": 23,
+                                },
+                            ],
+                        },
+                    },
+                },
+                200,
+            ),
+        )
+    )
+    def test_greynoise_labs_updater(self, mock_post=None):
+        self.assertTrue(greynoise_labs.GreynoiseLabs.update())