diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
index 5c14d467f..56a1149ed 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.md
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -17,6 +17,13 @@ Steps to reproduce the behavior:
3. Scroll down to '....'
4. See error
+**errors.log**
+If there's an error logged to errors.log in your output directory, you can post it here:
+```
+Error goes here
+```
+
+
**Expected behavior**
A clear and concise description of what you expected to happen.
@@ -33,7 +40,7 @@ Master or Develop?
- Are you running slips in docker or locally? [yes/no]
- Docker version (if running slips in docker) [e.g. 20.10.22]
- Slips docker image used (if running slips in docker) [e.g. macosm1-image, macosm1-P2P-image, ubuntu-image, dependency-image]
-
+ - Commit hash: (`git rev-parse --short HEAD`)
**Additional context**
Add any other context about the problem here.
diff --git a/.github/ISSUE_TEMPLATE/task.md b/.github/ISSUE_TEMPLATE/task.md
new file mode 100644
index 000000000..2adab5a6e
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/task.md
@@ -0,0 +1,31 @@
+---
+name: Task Template
+about: "Org admins and GSoC contributors (who will be working on this task) should use this template and create tasks, otherwise go for a Bug report or a Feature request"
+
+---
+
+
+
+**Task Category**
+State what this task is for (e.g., GSoC or collaborations)
+
+**Goal**
+What is this task about?
+
+**Subtasks**
+List 3-5 subtasks:
+-
+-
+-
+
+
+**Time Estimate**
+
+How much time do you think this task will take? (in weeks, days, hours, etc.)
+
+**Related issues**
+(optional)
+
+**Size**
+
+S-M-L
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index 753631b63..8a116a986 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -1,11 +1,11 @@
version: 2
updates:
- - package-ecosystem: pip
+ - package-ecosystem: pip
directory: "/install"
target-branch: "develop"
schedule:
interval: "daily"
-
+
- package-ecosystem: "github-actions"
directory: "/"
target-branch: "develop"
diff --git a/.github/workflows/CI-production-testing.yml b/.github/workflows/CI-production-testing.yml
deleted file mode 100644
index 6db4ee3a7..000000000
--- a/.github/workflows/CI-production-testing.yml
+++ /dev/null
@@ -1,208 +0,0 @@
-name: CI-production-testing
-
-on:
- pull_request:
- branches:
- - 'master'
- - '!develop'
-
-jobs:
-
- unit_and_integration_tests:
- # runs the tests on a GH VM
- runs-on: ubuntu-22.04
- # 2 hours timeout
- timeout-minutes: 7200
-
-
- steps:
- - uses: actions/checkout@v3
- with:
- ref: 'develop'
- # Fetch all history for all tags and branches
- fetch-depth: ''
-
- - name: Install slips dependencies
- run: sudo apt-get update --fix-missing && sudo apt-get -y --no-install-recommends install python3 redis-server python3-pip python3-certifi python3-dev build-essential file lsof net-tools iproute2 iptables python3-tzlocal nfdump tshark git whois golang nodejs notify-osd yara libnotify-bin
-
- - name: Install Zeek
- run: |
- sudo echo 'deb http://download.opensuse.org/repositories/security:/zeek/xUbuntu_22.04/ /' | sudo tee /etc/apt/sources.list.d/security:zeek.list
- curl -fsSL https://download.opensuse.org/repositories/security:zeek/xUbuntu_22.04/Release.key | gpg --dearmor | sudo tee /etc/apt/trusted.gpg.d/security_zeek.gpg > /dev/null
- sudo apt update && sudo apt install -y --no-install-recommends --fix-missing zeek
- sudo ln -s /opt/zeek/bin/zeek /usr/local/bin/bro
-
- - name: confirm that zeek is installed
- run: |
- which bro
- bro -h
-
- - name: Set up Python 3.10.12
- uses: actions/setup-python@v2
- with:
- python-version: "3.10.12"
-
- - name: Install Python dependencies
- run: |
- python -m pip install --upgrade pip
- python3 -m pip install --no-cache-dir -r install/requirements.txt
-
- - name: Start redis server
- run: redis-server --daemonize yes
-
-
- - name: Run database unit tests
- run: |
- python3 -m pytest tests/test_database.py -p no:warnings -vv
-
- - name: main unit tests
- run: |
- python3 -m pytest tests/test_main.py -p no:warnings -vv
-
- - name: Flowalerts Unit Tests
- run: |
- python3 -m pytest tests/test_flowalerts.py -p no:warnings -vv
-
- - name: conn Unit Tests
- run: |
- python3 -m pytest tests/test_conn.py -p no:warnings -vv
-
- - name: downloaded file Unit Tests
- run: |
- python3 -m pytest tests/test_downloaded_file.py -p no:warnings -vv
-
- - name: SSL Unit Tests
- run: |
- python3 -m pytest tests/test_ssl.py -p no:warnings -vv
-
- - name: Tunnel Unit Tests
- run: |
- python3 -m pytest tests/test_tunnel.py -p no:warnings -vv
-
- - name: SSH Unit Tests
- run: |
- python3 -m pytest tests/test_ssh.py -p no:warnings -vv
-
-
- - name: dns Unit Tests
- run: |
- python3 -m pytest tests/test_dns.py -p no:warnings -vv
-
- - name: Notice Unit Tests
- run: |
- python3 -m pytest tests/test_notice.py -p no:warnings -vv
-
- - name: Software Unit Tests
- run: |
- python3 -m pytest tests/test_software.py -p no:warnings -vv
-
- - name: SMTP Unit Tests
- run: |
- python3 -m pytest tests/test_smtp.py -p no:warnings -vv
-
- - name: Whitelist Unit Tests
- run: |
- python3 -m pytest tests/test_whitelist.py -p no:warnings -vv
-
- - name: ARP Unit Tests
- run: |
- python3 -m pytest tests/test_arp.py -p no:warnings -vv
-
- - name: Blocking Unit Tests
- run: |
- python3 -m pytest tests/test_blocking.py -p no:warnings -vv
-
- - name: Flow handler Unit Tests
- run: |
- python3 -m pytest tests/test_flow_handler.py -p no:warnings -vv
-
- - name: Horizontal Portscans Unit Tests
- run: |
- python3 -m pytest tests/test_horizontal_portscans.py -p no:warnings -vv
-
- - name: HTTP Analyzer Unit Tests
- run: |
- python3 -m pytest tests/test_http_analyzer.py -p no:warnings -vv
-
- - name: Vertical Portscans Unit Tests
- run: |
- python3 -m pytest tests/test_vertical_portscans.py -p no:warnings -vv
-
- - name: Network Discovery Unit Tests
- run: |
- python3 -m pytest tests/test_network_discovery.py -p no:warnings -vv
-
- - name: Virustotal Unit Tests
- run: |
- python3 -m pytest tests/test_virustotal.py -p no:warnings -vv
-
- - name: Update Manager Unit tests
- run: |
- python3 -m pytest tests/test_update_file_manager.py -p no:warnings -vv
-
- - name: Threat Intelligence Unit tests
- run: |
- python3 -m pytest tests/test_threat_intelligence.py -p no:warnings -vv
-
- - name: Slips Utils Unit tests
- run: |
- python3 -m pytest tests/test_slips_utils.py -p no:warnings -vv
-
- - name: Slips.py Unit Tests
- run: |
- python3 -m pytest tests/test_slips.py -p no:warnings -vv
-
- - name: Profiler Unit Tests
- run: |
- python3 -m pytest tests/test_profiler.py -p no:warnings -vv
-
- - name: Leak Detector Unit Tests
- run: |
- python3 -m pytest tests/test_leak_detector.py -p no:warnings -vv
-
- - name: Ipinfo Unit Tests
- run: |
- python3 -m pytest tests/test_ip_info.py -p no:warnings -vv
-
- - name: evidence tests
- run: |
- python3 -m pytest -s tests/test_evidence.py -p no:warnings -vv
-
- - name: Urlhaus Unit Tests
- run: |
- python3 -m pytest tests/test_urlhaus.py -p no:warnings -vv
-
- - name: Input Unit Tests
- run: |
- python3 -m pytest tests/test_inputProc.py -p no:warnings -vv
-
- - name: set Evidence Unit Tests
- run: |
- python3 -m pytest tests/test_set_evidence.py -p no:warnings -vv
-
- - name: Clear redis cache
- run: ./slips.py -cc
-
-
-
- - name: Config file tests
- run: |
- python3 -m pytest -s tests/integration_tests/test_config_files.py -p no:warnings -vv
-
- - name: Portscan tests
- run: |
- python3 -m pytest -s tests/integration_tests/test_portscans.py -p no:warnings -vv
-
- - name: Dataset Integration tests
- run: |
- python3 -m pytest -s tests/integration_tests/test_dataset.py -p no:warnings -vv
-
-
- - name: Upload Artifacts
- # run this job whether the above jobs failed or passed
- if: success() || failure()
- uses: actions/upload-artifact@v3
- with:
- name: test_slips_locally-integration-tests-output
- path: |
- output/integration_tests
diff --git a/.github/workflows/CI-publishing-dependency-image.yml b/.github/workflows/CI-publishing-dependency-image.yml
deleted file mode 100644
index 0eb9223b8..000000000
--- a/.github/workflows/CI-publishing-dependency-image.yml
+++ /dev/null
@@ -1,31 +0,0 @@
-name: CI-production-publishing-dependency-image
-
-on:
- push:
- branches:
- - 'master'
- - '!develop'
-
-jobs:
- build_and_push_dependency_image:
- runs-on: ubuntu-latest
- steps:
- # clone slips and checkout branch
- - uses: actions/checkout@v3
- with:
- ref: 'master'
-
- - name: Login to DockerHub
- uses: docker/login-action@v2
- with:
- username: stratosphereips
- password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
-
- - name: Build and push latest dependency image
- id: docker_build_dependency_image
- uses: docker/build-push-action@v2
- with:
- context: ./
- file: ./docker/dependency-image/Dockerfile
- tags: stratosphereips/slips_dependencies:latest
- push: true
diff --git a/.github/workflows/CI-publishing-p2p-image.yml b/.github/workflows/CI-publishing-p2p-image.yml
index 25a92ef87..603f7bd45 100644
--- a/.github/workflows/CI-publishing-p2p-image.yml
+++ b/.github/workflows/CI-publishing-p2p-image.yml
@@ -21,7 +21,7 @@ jobs:
# clone slips and checkout branch
# By default it checks out only one commit
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
with:
ref: 'master'
# Fetch all history for all tags and branches
@@ -30,7 +30,7 @@ jobs:
- name: Login to DockerHub
- uses: docker/login-action@v2
+ uses: docker/login-action@v3
with:
username: stratosphereips
password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
@@ -38,11 +38,11 @@ jobs:
# Set up Docker Buildx with docker-container driver is required
# at the moment to be able to use a subdirectory with Git context
- name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v2
+ uses: docker/setup-buildx-action@v3
- name: Build and push p2p image using dockerfile
id: docker_build_p2p_for_slips
- uses: docker/build-push-action@v3
+ uses: docker/build-push-action@v6
with:
allow: network.host
context: ./
diff --git a/.github/workflows/CI-publishing-ubuntu-image.yml b/.github/workflows/CI-publishing-ubuntu-image.yml
index a804a23e8..62f2fd8c7 100644
--- a/.github/workflows/CI-publishing-ubuntu-image.yml
+++ b/.github/workflows/CI-publishing-ubuntu-image.yml
@@ -17,7 +17,7 @@ jobs:
echo "SLIPS_VERSION=$VER" >> $GITHUB_ENV
# add release tag
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- uses: rickstaa/action-create-tag@v1
with:
tag: ${{ env.SLIPS_VERSION }}
@@ -35,14 +35,14 @@ jobs:
# clone slips and checkout branch
# By default it checks out only one commit
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
with:
ref: 'master'
# Fetch all history for all tags and branches
fetch-depth: ''
- name: Login to DockerHub
- uses: docker/login-action@v2
+ uses: docker/login-action@v3
with:
username: stratosphereips
password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
@@ -50,7 +50,7 @@ jobs:
- name: Build and publish ubuntu image from Dockerfile
id: docker_build_slips
timeout-minutes: 15
- uses: docker/build-push-action@v5
+ uses: docker/build-push-action@v6
with:
debug: true
verbose-debug: true
@@ -58,6 +58,6 @@ jobs:
context: ./
file: ./docker/ubuntu-image/Dockerfile
tags: |
- stratosphereips/ubuntu_image:latest
- stratosphereips/ubuntu_image:${{ env.SLIPS_VERSION }}
+ stratosphereips/slips:latest
+ stratosphereips/slips:${{ env.SLIPS_VERSION }}
push: true
diff --git a/.github/workflows/CI-publishing-update-code-docs.yml b/.github/workflows/CI-publishing-update-code-docs.yml
index 55983cea6..465590ef1 100644
--- a/.github/workflows/CI-publishing-update-code-docs.yml
+++ b/.github/workflows/CI-publishing-update-code-docs.yml
@@ -11,7 +11,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
ref: 'code-docs-branch'
fetch-depth: 0 # otherwise, you will fail to push refs to the dest repo
diff --git a/.github/workflows/CI-staging.yml b/.github/workflows/CI-staging.yml
deleted file mode 100644
index fa2489536..000000000
--- a/.github/workflows/CI-staging.yml
+++ /dev/null
@@ -1,366 +0,0 @@
-# This workflow will install Slips dependencies and run slips tests
-name: CI-staging
-
-on:
- push:
- branches:
- # features will be added to this branch using PRs, not need to re-run the tests on push
- - '!develop'
- - '!master'
- pull_request:
- branches:
- - 'develop'
- - '!master'
-
-jobs:
- unit_tests:
- # specify the host OS
- runs-on: ubuntu-latest
- # 2 hours timeout
- timeout-minutes: 7200
- # start a container using slips dependencies image
- container:
- image: stratosphereips/slips_dependencies:latest
-
- steps:
- - uses: actions/checkout@v2
-
- - name: Start redis server
- run: redis-server --daemonize yes
-
- - name: Run database unit tests
- run: |
- coverage run --source=./ -m pytest tests/test_database.py -p no:warnings -vv
- coverage report --include="slips_files/core/database/*"
- coverage html --include="slips_files/core/database/*" -d coverage_reports/database
-
- - name: Clear redis cache
- run: ./slips.py -cc
-
- - name: Flowalerts Unit Tests
- run: |
- coverage run --source=./ -m pytest tests/test_flowalerts.py -p no:warnings -vv
- coverage report --include="modules/flowalerts/*"
- coverage html --include="modules/flowalerts/*" -d coverage_reports/flowalerts
-
- - name: conn Unit Tests
- run: |
- coverage run --source=./ -m pytest tests/test_conn.py -p no:warnings -vv
- coverage report --include="modules/flowalerts/conn.py*"
- coverage html --include="modules/flowalerts/conn.py*" -d coverage_reports/flowalerts
-
- - name: Tunnel Unit Tests
- run: |
- coverage run --source=./ -m pytest tests/test_tunnel.py -p no:warnings -vv
- coverage report --include="modules/flowalerts/tunnel.py*"
- coverage html --include="modules/flowalerts/tunnel.py*" -d coverage_reports/flowalerts
-
-
- - name: Downloaded_file Unit Tests
- run: |
- coverage run --source=./ -m pytest tests/test_downloaded_file.py -p no:warnings -vv
- coverage report --include="modules/flowalerts/downloaded_file.py*"
- coverage html --include="modules/flowalerts/downloaded_file.py*" -d coverage_reports/flowalerts
-
- - name: ssl Unit Tests
- run: |
- coverage run --source=./ -m pytest tests/test_ssl.py -p no:warnings -vv
- coverage report --include="modules/flowalerts/ssl.py*"
- coverage html --include="modules/flowalerts/ssl.py*" -d coverage_reports/flowalerts
-
- - name: SSH Unit Tests
- run: |
- coverage run --source=./ -m pytest tests/test_ssh.py -p no:warnings -vv
- coverage report --include="modules/flowalerts/ssh.py*"
- coverage html --include="modules/flowalerts/ssh.py*" -d coverage_reports/flowalerts
-
-
-
- - name: dns Unit Tests
- run: |
- coverage run --source=./ -m pytest tests/test_dns.py -p no:warnings -vv
- coverage report --include="modules/flowalerts/dns.py*"
- coverage html --include="modules/flowalerts/dns.py*" -d coverage_reports/flowalerts
-
- - name: notice Unit Tests
- run: |
- coverage run --source=./ -m pytest tests/test_notice.py -p no:warnings -vv
- coverage report --include="modules/flowalerts/notice.py*"
- coverage html --include="modules/flowalerts/notice.py*" -d coverage_reports/flowalerts
-
- - name: Software Unit Tests
- run: |
- coverage run --source=./ -m pytest tests/test_software.py -p no:warnings -vv
- coverage report --include="modules/flowalerts/software.py*"
- coverage html --include="modules/flowalerts/software.py*" -d coverage_reports/flowalerts
-
-
- - name: Smtp Unit Tests
- run: |
- coverage run --source=./ -m pytest tests/test_smtp.py -p no:warnings -vv
- coverage report --include="modules/flowalerts/smtp.py*"
- coverage html --include="modules/flowalerts/smtp.py*" -d coverage_reports/flowalerts
- - name: main Unit Tests
- run: |
- coverage run --source=./ -m pytest tests/test_main.py -p no:warnings -vv
- coverage report --include="slips/main.py*"
- coverage html --include="slips/main.py*" -d coverage_reports/main
-
- - name: Whitelist Unit Tests
- run: |
- coverage run --source=./ -m pytest tests/test_whitelist.py -p no:warnings -vv
- coverage report --include="slips_files/core/helpers/whitelist/*"
- coverage html --include="slips_files/core/helpers/whitelist/*" -d coverage_reports/whitelist
-
- - name: ARP Unit Tests
- run: |
- coverage run --source=./ -m pytest tests/test_arp.py -p no:warnings -vv
- coverage report --include="modules/arp/*"
- coverage html --include="modules/arp/*" -d coverage_reports/arp
-
- - name: Blocking Unit Tests
- run: |
- coverage run --source=./ -m pytest tests/test_blocking.py -p no:warnings -vv
- coverage report --include="modules/blocking/*"
- coverage html --include="modules/blocking/*" -d coverage_reports/blocking
-
- - name: Flowhandler Unit Test
- run: |
- coverage run --source=./ -m pytest tests/test_flow_handler.py -p no:warnings -vv
- coverage report --include="slips_files/core/helpers/flow_handler.py*"
- coverage html --include="slips_files/core/helpers/flow_handler.py*" -d coverage_reports/flowhandler
-
- - name: Horizontal Portscans Unit Tests
- run: |
- coverage run --source=./ -m pytest tests/test_horizontal_portscans.py -p no:warnings -vv
- coverage report --include="modules/network_discovery/horizontal_portscan.py*"
- coverage html --include="modules/network_discovery/horizontal_portscan.py*" -d coverage_reports/horizontal_portscan
-
- - name: HTTP Analyzer Unit Tests
- run: |
- coverage run --source=./ -m pytest tests/test_http_analyzer.py -p no:warnings -vv
- coverage report --include="modules/http_analyzer/http_analyzer.py*"
- coverage html --include="modules/http_analyzer/http_analyzer.py*" -d coverage_reports/http_analyzer
-
- - name: Vertical Portscans Unit Tests
- run: |
- coverage run --source=./ -m pytest tests/test_vertical_portscans.py -p no:warnings -vv
- coverage report --include="modules/network_discovery/vertical_portscan.py*"
- coverage html --include="modules/network_discovery/vertical_portscan.py*" -d coverage_reports/vertical_portscan
-
- - name: Network discovery Unit Tests
- run: |
- coverage run --source=./ -m pytest tests/test_network_discovery.py -p no:warnings -vv
- coverage report --include="modules/network_discovery/network_discovery.py*"
- coverage html --include="modules/network_discovery/network_discovery.py*" -d coverage_reports/network_discovery
-
- - name: evidence Unit Tests
- run: |
- coverage run --source=./ -m pytest tests/test_evidence.py -p no:warnings -vv
- coverage report --include="slips_files/core/evidence_structure/evidence.py*"
- coverage html --include="slips_files/core/evidence_structure/evidence.py*" -d coverage_reports/evidence_structure
-
- - name: Virustotal Unit Tests
- run: |
- coverage run --source=./ -m pytest tests/test_virustotal.py -p no:warnings -vv
- coverage report --include="modules/virustotal/virustotal.py*"
- coverage html --include="modules/virustotal/virustotal.py*" -d coverage_reports/virustotal
-
- - name: Update Manager Unit tests
- run: |
- coverage run --source=./ -m pytest tests/test_update_file_manager.py -p no:warnings -vv
- coverage report --include="modules/update_manager/update_manager.py*"
- coverage html --include="modules/update_manager/update_manager.py*" -d coverage_reports/updatemanager
-
- - name: Threat Intelligence Unit tests
- run: |
- coverage run --source=./ -m pytest tests/test_threat_intelligence.py -p no:warnings -vv
- coverage report --include="modules/threat_intelligence/threat_intelligence.py*"
- coverage html --include="modules/threat_intelligence/threat_intelligence.py*" -d coverage_reports/threat_intelligence
-
- - name: Slips Utils Unit tests
- run: |
- coverage run --source=./ -m pytest tests/test_slips_utils.py -p no:warnings -vv
- coverage report --include="slips_files/common/slips_utils.py*"
- coverage html --include="slips_files/common/slips_utils.py*" -d coverage_reports/slips_utils
-
- - name: Slips.py Unit Tests
- run: |
- coverage run --source=./ -m pytest tests/test_slips.py -p no:warnings -vv
- coverage report --include="slips.py*"
- coverage html --include="slips.py*" -d coverage_reports/slips
-
- - name: Profiler Unit Tests
- run: |
- coverage run --source=./ -m pytest tests/test_profiler.py -p no:warnings -vv
- coverage report --include="slips_files/core/profiler.py*"
- coverage html --include="slips_files/core/profiler.py*" -d coverage_reports/profiler
-
- - name: Leak Detector Unit Tests
- run: |
- coverage run --source=./ -m pytest tests/test_leak_detector.py -p no:warnings -vv
- coverage report --include="modules/leak_detector/leak_detector.py*"
- coverage html --include="modules/leak_detector/leak_detector.py*" -d coverage_reports/leak_detector
-
- - name: Ipinfo Unit Tests
- run: |
- coverage run --source=./ -m pytest tests/test_ip_info.py -p no:warnings -vv
- coverage report --include="modules/ip_info/ip_info.py*"
- coverage html --include="modules/ip_info/ip_info.py*" -d coverage_reports/ip_info
-
- - name: Input Unit Tests
- run: |
- coverage run --source=./ -m pytest tests/test_inputProc.py -p no:warnings -vv
- coverage report --include="slips_files/core/input.py*"
- coverage html --include="slips_files/core/input.py*" -d coverage_reports/input
-
- - name: urlhaus Unit Tests
- run: |
- coverage run --source=./ -m pytest tests/test_urlhaus.py -p no:warnings -vv
- coverage report --include="modules/threat_intelligence/urlhaus.py*"
- coverage html --include="modules/threat_intelligence/urlhaus.py*" -d coverage_reports/urlhaus
-
- - name: set evidence test
- run: |
- coverage run --source=./ -m pytest tests/test_set_evidence.py -p no:warnings -vv
- coverage report --include="modules/flowalerts/set_evidence.py*"
- coverage html --include="modules/flowalerts/set_evidence.py*" -d coverage_reports/set_evidence
-
-
-
- dataset_integration_tests:
- # specify the host OS
- runs-on: ubuntu-latest
- # 1h timeout
- timeout-minutes: 3600
- # start a container using slips dependencies image
- container:
- image: stratosphereips/slips_dependencies:latest
- steps:
- - uses: actions/checkout@v2
-
- - name: Start redis server
- run: redis-server --daemonize yes
-
- - name: Dataset Integration Tests
- run: |
- python3 -m pytest -s tests/integration_tests/test_dataset.py -p no:warnings -vv
-
- - name: Upload Artifacts
- if: failure()
- uses: actions/upload-artifact@v3
- with:
- name: dataset_integration_tests_output
- path: |
- output/integration_tests
-
- zeek_integration_tests:
- # specify the host OS
- runs-on: ubuntu-latest
- # 1h timeout
- timeout-minutes: 3600
- # start a container using slips dependencies image
- container:
- image: stratosphereips/slips_dependencies:latest
- steps:
- - uses: actions/checkout@v2
-
- - name: Start redis server
- run: redis-server --daemonize yes
-
- - name: Dataset Integration Tests
- run: |
- python3 -m pytest -s tests/integration_tests/test_pcap_dataset.py -p no:warnings -vv
- - name: Upload Artifacts
- # run this job whether the above jobs failed or passed
- if: success() || failure()
- uses: actions/upload-artifact@v3
- with:
- name: zeek_integration_tests_output
- path: |
- output/integration_tests
-
- pcap_integration_tests:
- # specify the host OS
- runs-on: ubuntu-latest
- # 1h timeout
- timeout-minutes: 3600
- # start a container using slips dependencies image
- container:
- image: stratosphereips/slips_dependencies:latest
- steps:
- - uses: actions/checkout@v2
-
- - name: Start redis server
- run: redis-server --daemonize yes
-
- - name: Dataset Integration Tests
- run: |
- python3 -m pytest -s tests/integration_tests/test_zeek_dataset.py -p no:warnings -vv
- - name: Upload Artifact
- # run this job whether the above jobs failed or passed
- if: success() || failure()
- uses: actions/upload-artifact@v3
- with:
- name: pcap_integration_tests_output
- path: |
- output/integration_tests
- coverage_reports/
-
-
- port_scans_integration_tests:
- # specify the host OS
- runs-on: ubuntu-latest
- # 1h timeout
- timeout-minutes: 3600
- # start a container using slips dependencies image
- container:
- image: stratosphereips/slips_dependencies:latest
- steps:
- - uses: actions/checkout@v2
-
- - name: Start redis server
- run: redis-server --daemonize yes
-
- - name: Dataset Integration Tests
- # python3 -m pytest -s tests/integration_tests/test_portscans.py -p no:warnings -vv
- run: |
- coverage run --source=./ -m pytest -s tests/integration_tests/test_portscans.py -p no:warnings -vv
- coverage report --include="modules/network_discovery/*"
- coverage html --include="modules/network_discovery/*" -d coverage_reports/network_discovery
-
- - name: Upload Artifacts
- # run this job whether the above jobs failed or passed
- if: success() || failure()
- uses: actions/upload-artifact@v3
- with:
- name: port_scans_integration_tests_output
- path: |
- output/integration_tests
-
- config_files_integration_tests:
- # specify the host OS
- runs-on: ubuntu-latest
- # 1h timeout
- timeout-minutes: 3600
- # start a container using slips dependencies image
- container:
- image: stratosphereips/slips_dependencies:latest
- steps:
- - uses: actions/checkout@v2
-
- - name: Start redis server
- run: redis-server --daemonize yes
-
- - name: Config File Integration Tests
- run: |
- python3 -m pytest -s tests/integration_tests/test_config_files.py -p no:warnings -vv
-
- - name: Upload Artifacts
- # run this job whether the above jobs failed or passed
- if: success() || failure()
- uses: actions/upload-artifact@v3
- with:
- name: config_files_integration_tests_output
- path: |
- output/integration_tests
diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml
new file mode 100644
index 000000000..b5705f400
--- /dev/null
+++ b/.github/workflows/integration-tests.yml
@@ -0,0 +1,60 @@
+name: integration-tests
+
+on:
+ pull_request:
+ branches:
+ - 'master'
+ - 'develop'
+
+jobs:
+ tests:
+ runs-on: ubuntu-22.04
+ timeout-minutes: 7200
+
+ strategy:
+ matrix:
+ test_file:
+ - tests/integration_tests/test_config_files.py
+ - tests/integration_tests/test_portscans.py
+ - tests/integration_tests/test_dataset.py
+
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ ref: ${{ github.ref }}
+ fetch-depth: ''
+
+ - name: Install slips dependencies
+ run: sudo apt-get update --fix-missing && sudo apt-get -y --no-install-recommends install python3 redis-server python3-pip python3-certifi python3-dev build-essential file lsof net-tools iproute2 iptables python3-tzlocal nfdump tshark git whois golang nodejs notify-osd yara libnotify-bin
+
+ - name: Install Zeek
+ run: |
+ sudo echo 'deb http://download.opensuse.org/repositories/security:/zeek/xUbuntu_22.04/ /' | sudo tee /etc/apt/sources.list.d/security:zeek.list
+ curl -fsSL https://download.opensuse.org/repositories/security:zeek/xUbuntu_22.04/Release.key | gpg --dearmor | sudo tee /etc/apt/trusted.gpg.d/security_zeek.gpg > /dev/null
+ sudo apt update && sudo apt install -y --no-install-recommends --fix-missing zeek
+ sudo ln -s /opt/zeek/bin/zeek /usr/local/bin/bro
+
+ - name: Set up Python 3.10.12
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.10.12"
+
+ - name: Install Python dependencies
+ run: |
+ python -m pip install --upgrade pip
+ python3 -m pip install --no-cache-dir -r install/requirements.txt
+ python3 -m pip install pytest-timeout
+
+ - name: Start redis server
+ run: redis-server --daemonize yes
+
+ - name: Run Integration Tests for ${{ matrix.test_file }}
+ run: python3 -m pytest ${{ matrix.test_file }} -vvv -s
+
+ - name: Upload Artifacts
+ if: success() || failure()
+ uses: actions/upload-artifact@v3
+ with:
+ name: test_slips_locally-integration-tests-output
+ path: |
+ output/integration_tests
diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml
new file mode 100644
index 000000000..554177207
--- /dev/null
+++ b/.github/workflows/unit-tests.yml
@@ -0,0 +1,105 @@
+name: unit-tests
+
+on:
+ pull_request:
+ branches:
+ - 'master'
+ - 'develop'
+
+jobs:
+ tests:
+ runs-on: ubuntu-22.04
+ timeout-minutes: 1800
+
+ strategy:
+ matrix:
+ test_file:
+ - tests/test_inputProc.py
+ - tests/test_main.py
+ - tests/test_conn.py
+ - tests/test_downloaded_file.py
+ - tests/test_ssl.py
+ - tests/test_tunnel.py
+ - tests/test_ssh.py
+ - tests/test_dns.py
+ - tests/test_notice.py
+ - tests/test_software.py
+ - tests/test_smtp.py
+ - tests/test_whitelist.py
+ - tests/test_arp.py
+ - tests/test_blocking.py
+ - tests/test_flow_handler.py
+ - tests/test_horizontal_portscans.py
+ - tests/test_http_analyzer.py
+ - tests/test_vertical_portscans.py
+ - tests/test_network_discovery.py
+ - tests/test_virustotal.py
+ - tests/test_update_file_manager.py
+ - tests/test_threat_intelligence.py
+ - tests/test_slips_utils.py
+ - tests/test_slips.py
+ - tests/test_profiler.py
+ - tests/test_leak_detector.py
+ - tests/test_ip_info.py
+ - tests/test_evidence.py
+ - tests/test_asn_info.py
+ - tests/test_urlhaus.py
+ - tests/test_markov_chain.py
+ - tests/test_progress_bar.py
+ - tests/test_daemon.py
+ - tests/test_go_director.py
+ - tests/test_notify.py
+ - tests/test_checker.py
+ - tests/test_base_model.py
+ - tests/test_set_evidence.py
+ - tests/test_trustdb.py
+ - tests/test_cesnet.py
+ - tests/test_output.py
+
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ ref: ${{ github.ref }}
+ fetch-depth: ''
+
+ - name: Enable memory overcommit (for redis)
+        run: sudo sysctl vm.overcommit_memory=1
+
+ - name: Install slips dependencies
+ run: sudo apt-get update --fix-missing && sudo apt-get -y --no-install-recommends install python3 redis-server python3-pip python3-certifi python3-dev build-essential file lsof net-tools iproute2 iptables python3-tzlocal nfdump tshark git whois golang nodejs notify-osd yara libnotify-bin
+
+ - name: Install Zeek
+ run: |
+ sudo echo 'deb http://download.opensuse.org/repositories/security:/zeek/xUbuntu_22.04/ /' | sudo tee /etc/apt/sources.list.d/security:zeek.list
+ curl -fsSL https://download.opensuse.org/repositories/security:zeek/xUbuntu_22.04/Release.key | gpg --dearmor | sudo tee /etc/apt/trusted.gpg.d/security_zeek.gpg > /dev/null
+ sudo apt update && sudo apt install -y --no-install-recommends --fix-missing zeek
+ sudo ln -s /opt/zeek/bin/zeek /usr/local/bin/bro
+
+ - name: Set up Python 3.10.12
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.10.12"
+
+ - name: Install Python dependencies
+ run: |
+ python -m pip install --upgrade pip
+ python3 -m pip install --no-cache-dir -r install/requirements.txt
+
+ - name: Start redis server
+ run: redis-server --daemonize yes
+
+ - name: Run Database Unit Tests
+ run: |
+ python3 -m pytest tests/test_database.py -p no:warnings -vv
+
+ - name: Run Unit Tests for ${{ matrix.test_file }}
+ run: |
+ python3 -m pytest ${{ matrix.test_file }} -p no:warnings -vv -s -n 5
+
+ - name: Upload Artifacts
+ if: success() || failure()
+ uses: actions/upload-artifact@v3
+ with:
+ name: test_slips_locally-integration-tests-output
+ path: |
+ output/integration_tests
diff --git a/CHANGELOG.md b/CHANGELOG.md
index a7f8edab0..e6290bd7c 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,13 @@
+- 1.1.1 (September 4th, 2024)
+- Better unit tests, thanks to @Sekhar-Kumar-Dash.
+- Fix a Zeek warning caused by one of the loaded Zeek scripts.
+- Fix the Slips installation script at install/install.sh.
+- Improve how Slips validates domains taken from TI feeds.
+- Improve whitelists.
+- Fix an issue where the flowalerts module did not analyze all given conn.log flows.
+- Update Python dependencies.
+- Better handling of problems connecting to the Redis database.
+
- 1.1 (July 2024)
- Update Python version to 3.10.12 and all python libraries used by Slips.
- Update nodejs and zeek.
diff --git a/README.md b/README.md
index dc30bd74e..63c25eeef 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,5 @@
-Slips v1.1
+Slips v1.1.1
diff --git a/VERSION b/VERSION
index 9459d4ba2..524cb5524 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-1.1
+1.1.1
diff --git a/config/slips.yaml b/config/slips.yaml
index 4469a072d..3fba2ef32 100644
--- a/config/slips.yaml
+++ b/config/slips.yaml
@@ -36,7 +36,6 @@ parameters:
# Export the strato letters used for detecting C&C by the RNN model
# to the strato_letters.tsv in the current output directory.
# these letters are used for re-training the model.
- # options are yes or no
export_strato_letters: False
# This option determines whether to analyze only what goes OUT of the local network or also what is coming IN the local network
@@ -56,19 +55,19 @@ parameters:
analysis_direction : out
- # Delete zeek log files after stopping slips. Only yes or no
+ # Delete zeek log files after stopping slips.
delete_zeek_files : False
- # Store a copy of zeek files in the output dir after the analysis is done. Only yes or no
+ # Store a copy of zeek files in the output dir after the analysis is done.
# shouldn't be set to yes if delete_zeek_files is set to yes, because if the zeek files
# are deleted after slips is done, there's no way to store a copy of them anywhere
store_a_copy_of_zeek_files : False
- # store the generated zeek files in the output dir while the slips is running. Only yes or no
+ # store the generated zeek files in the output dir while the slips is running.
store_zeek_files_in_the_output_dir : True
# Create a metadata dir output/metadata/ that has a copy of slips.yaml, whitelist file,
- # current commit and date available options are yes or no
+ # current commit and date
metadata_dir : True
# Default pcap packet filter. Used with zeek
@@ -164,7 +163,7 @@ detection:
evidence_detection_threshold : 3.46
- # Slips can show a popup/notification with every alert. Only yes or no
+ # Slips can show a popup/notification with every alert.
popup_alerts : False
#############################
@@ -333,8 +332,6 @@ exporting_alerts:
CESNET:
# Slips supports exporting and importing evidence in the IDEA format to/from warden servers.
-
- # supported values are yes or no
send_alerts : False
receive_alerts : False
@@ -423,8 +420,6 @@ web_interface:
#############################
P2P:
- # create p2p.log with additional info about peer communications? yes or no
+ # create p2p.log with additional info about peer communications?
create_p2p_logfile : False
-
- # use_p2p : yes
use_p2p : False
diff --git a/dataset/test18-malicious-ctu-sme-11-win/README.md b/dataset/test18-malicious-ctu-sme-11-win/README.md
new file mode 100644
index 000000000..17d78a6c5
--- /dev/null
+++ b/dataset/test18-malicious-ctu-sme-11-win/README.md
@@ -0,0 +1,6 @@
+# Description
+This capture is a short part of the dataset [CTU-SME-11](https://zenodo.org/records/7958259), capture Experiment-VM-Microsoft-Windows7full-3, day 2023-02-22. It consists of only the first 5000 packets.
+
+# Labels
+The labels were assigned by hand by an expert. The configuration file is `labels.config`, and the capture was labeled using the tool [netflowlabeler](https://github.com/stratosphereips/netflowlabeler).
+
diff --git a/dataset/test18-malicious-ctu-sme-11-win/labels.config b/dataset/test18-malicious-ctu-sme-11-win/labels.config
new file mode 100644
index 000000000..b3b564bc1
--- /dev/null
+++ b/dataset/test18-malicious-ctu-sme-11-win/labels.config
@@ -0,0 +1,183 @@
+Unknown:
+ - srcIP=all
+Malicious, From_malicious-To_malicious-Malware_C2-Ingress_tool_access-RemcosRAT-HTTP:
+ - dstIp=178.237.33.50
+Malicious, From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT:
+ - dstIp=66.63.168.35
+Benign, From_benign-To_benign:
+ - srcIp=192.168.1.1
+ - srcIp=104.244.43.131
+ - srcIp=104.244.42.69
+ - srcIp=146.75.120.159
+ - srcIp=77.88.55.55
+ - srcIp=5.255.255.88
+ - srcIp=87.250.251.15
+ - srcIp=239.255.255.250
+ - srcIp=192.168.1.132
+ - srcIp=224.0.0.252
+ - srcIp=192.168.1.255
+ - srcIp=192.168.1.135
+ - srcIp=255.255.255.255
+ - srcIp=192.168.1.25
+ - dstIp=192.168.1.0/24
+Benign, From_benign-To_benign-Amazon:
+Benign, From_benign-To_benign-Cloudfront:
+Benign, From_benign-To_benign-Application_spotify:
+Benign, From_benign-To_benign-Microsoft:
+Benign, From_benign-To_benign-Google:
+ - dstIp=34.104.35.123
+Benign, From_benign-To_benign-Cloudflare:
+ - dstIp=162.159.136.232
+ - dstIp=162.159.128.233
+ - dstIp=162.159.138.232
+ - dstIp=162.159.135.232
+ - dstIp=162.159.134.233
+ - dstIp=162.159.137.232
+ - dstIp=162.159.130.233
+Benign, From_benign-To_benign-Uptimerobot:
+Benign, From_benign-To_benign-Apple:
+Benign, From_benign-To_benign-Akamai:
+Benign, From_benign-To_benign-1e100:
+ - dstIp=142.251.36.131
+ - dstIp=142.251.37.106
+ - dstIp=142.251.37.110
+ - dstIp=142.251.36.99
+ - dstIp=142.251.37.99
+ - dstIp=142.251.36.106
+ - dstIp=142.251.36.138
+ - dstIp=142.251.37.100
+ - dstIp=142.251.36.97
+ - dstIp=142.251.36.98
+ - dstIp=173.194.79.102
+ - dstIp=142.251.36.110
+ - dstIp=142.251.36.118
+ - dstIp=142.251.36.100
+ - dstIp=142.251.36.142
+ - dstIp=142.251.36.150
+ - dstIp=142.251.37.97
+ - dstIp=142.250.102.188
+Benign, From_benign-To_benign-Application_twitter:
+ - dstIp=104.244.42.1
+ - dstIp=152.199.21.141
+ - dstIp=152.199.21.140
+ - dstIp=104.244.42.67
+ - dstIp=104.244.42.66
+ - dstIp=104.244.42.130
+ - dstIp=104.244.42.2
+Benign, From_benign-To_benign-Application_tiktok:
+Benign, From_benign-To_benign-Application_instagram:
+ - dstIp=157.240.30.174
+ - dstIp=157.240.30.63
+Benign, From_benign-To_benign-Facebook:
+ - dstIp=157.240.30.35
+ - dstIp=157.240.30.3
+ - dstIp=157.240.221.16
+ - dstIp=157.240.30.18
+Benign, From_benign-To_benign-Facebook_CDN:
+ - dstIp=157.240.30.27
+ - dstIp=102.132.101.10
+ - dstIp=31.13.84.4
+ - dstIp=90.244.154.209
+ - dstIp=195.13.189.81
+Benign, From_benign-To_benign-Digicert:
+Benign, From_benign-To_benign-Akaquill:
+Benign, From_benign-To_benign-Applimg:
+Benign, From_benign-To_benign-mail.me:
+Benign, From_benign-To_benign-Snssdk:
+Benign, From_benign-To_benign-Seznam.cz:
+Benign, From_benign-To_benign-Alza.cz:
+Benign, From_benign-To_benign-Centrum.cz:
+Benign, From_benign-To_benign-imedia.cz:
+Benign, From_benign-To_benign-ihned.cz:
+Benign, From_benign-To_benign-cpex.cz:
+Benign, From_benign-To_benign-Vine.co:
+ - dstIp=192.229.220.133
+Benign, From_benign-To_benign-Application_discord:
+ - dstIp=162.159.136.234
+ - dstIp=162.159.133.234
+ - dstIp=162.159.135.234
+ - dstIp=162.159.134.234
+ - dstIp=162.159.130.234
+Benign, From_benign-To_benign-Linkedin:
+Benign, From_benign-To_benign-Vimeo:
+Benign, From_benign-To_benign-Github:
+Benign, From_benign-To_benign-Application_twitch:
+Benign, From_benign-To_benign-Mapy.cz:
+Benign, From_benign-To_benign-Live-video:
+Benign, From_benign-To_benign-Justin.tv:
+Benign, From_benign-To_benign-Yahoo:
+Benign, From_benign-To_benign-Application_Chess.com:
+Benign, From_benign-To_benign-Ubuntu:
+Benign, From_benign-To_benign-Canonical:
+Benign, From_benign-To_benign-Speedtest:
+Benign, From_benign-To_benign-Application_telegram:
+Benign, From_benign-To_benign-Hotmail:
+Benign, From_benign-To_benign-Application_dropbox:
+Benign, From_benign-To_benign-Reddit:
+ - dstIp=151.101.193.140
+ - dstIp=151.101.1.140
+ - dstIp=151.101.65.140
+Benign, From_benign-To_benign-Application_jitsi:
+Benign, From_benign-To
+Benign, From_benign-To_benign-Apple:
+ - dstIP=17.0.0.0/8
+ - srcIP=17.0.0.0/8
+Benign, From_benign-To_benign-Google:
+ - srcIP=34.158.0.0/22
+ - dstIP=34.158.0.0/22
+ - srcIP=216.239.0.0/16
+ - dstIP=216.239.0.0/16
+ - dstIP=74.125.133.188
+Benign, From_benign-To_benign-Application_twitter:
+ - srcIP=209.237.201.128
+ - dstIP=209.237.201.128
+ - dstIP=34.158.0.0/22
+ - srcIP=34.158.0.0/22
+ - srcIP=216.239.34.36
+ - dstIP=216.239.34.36
+Benign, From_benign-To_benign-Fastly:
+ - dstIP=146.75.0.0/16
+ - dstIP=151.101.0.0/16
+ - dstIP=199.232.38.248
+Benign, From_benign-To_benign-Azure:
+ - dstIP=13.107.237.44
+ - srcIP=13.107.237.44
+Benign, From_benign-To_benign-Cloudflare:
+ - dstIP=1.0.0.1
+ - dstIP=104.18.0.0/16
+ - dstIP=104.26.0.0/16
+ - dstIP=104.22.0.0/16
+ - dstIP=172.67.8.174
+Benign, From_benign-To_benign-Device_CVUT:
+ - dstIP=147.32.0.0/16
+Benign, From_benign-To_benign-Application_discord:
+ - dstIP=66.22.0.0/16
+Benign, From_benign-To_benign-Inmobi:
+ - dstIP=20.157.27.0/24
+ - dstIP=20.157.127.0/24
+Benign, From_benign-To_benign-Application_vodafone:
+ - dstIP=85.205.100.141
+Benign, From_benign-To_benign-Seznam.cz:
+ - dstIP=77.75.0.0/16
+Benign, From_benign-To_benign-AFP:
+ - dstIP=10.0.0.0/16 & dstPort=7000
+Benign, From_benign-To_benign-Device_edimax_camera:
+ - dstIP=192.168.1.129
+Benign, From_benign-To_benign-DNS:
+ - dstIP=1.1.1.1
+ - dstIP=9.9.9.9
+Benign, From_benign-To_benign-DHCP:
+ - dstIP=192.168.1.1 & Proto=udp
+ - dstIP=255.255.255.255 & Proto=udp & dstPort=67
+Benign, From_benign-To_benign-ICMP:
+ - dstIP=192.168.1.1 & Proto=icmp
+ - srcIP=192.168.1.1 & Proto=icmp
+Benign, From_benign-To_benign-NTP:
+ - dstPort=123 & Proto=udp
+Benign, From_benign-To_benign-SSDP:
+ - dstIP=239.255.255.250
+Benign, From_benign-To_benign-Device_chromecast_tv_assistant:
+ - srcIP=192.168.1.135
+ - dstIP=192.168.1.135
+Benign, From_benign-To_benign-Multicast:
+ - dstIP=224.0.0.0/4
\ No newline at end of file
diff --git a/dataset/test18-malicious-ctu-sme-11-win/zeek-labeled/conn.log.labeled b/dataset/test18-malicious-ctu-sme-11-win/zeek-labeled/conn.log.labeled
new file mode 100644
index 000000000..65ef6c3bd
--- /dev/null
+++ b/dataset/test18-malicious-ctu-sme-11-win/zeek-labeled/conn.log.labeled
@@ -0,0 +1,774 @@
+#separator \x09
+#set_separator ,
+#empty_field (empty)
+#unset_field -
+#path conn
+#open 2024-08-16-09-45-01
+#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p proto service duration orig_bytes resp_bytes conn_state local_orig local_resp missed_bytes history orig_pkts orig_ip_bytes resp_pkts resp_ip_bytes tunnel_parents label detailedlabel
+#types time string addr port addr port enum string interval count count string bool bool count string count count count count set[string] string string
+1677024003.714845 C6SgKom3WB2KEL2ae 192.168.1.107 65164 66.63.168.35 5888 tcp - 0.037629 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024004.254641 CgSyI22HD7wdsSfMG4 192.168.1.107 65164 66.63.168.35 5888 tcp - 0.037353 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024005.833572 C4bODh3GKRAdFktnj 192.168.1.107 65165 66.63.168.35 5888 tcp - 0.016959 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024006.354504 CHlThCypsohI7osuh 192.168.1.107 65165 66.63.168.35 5888 tcp - 0.017011 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024007.890831 CM0PTi1kU1tOjWMJU2 192.168.1.107 65166 66.63.168.35 5888 tcp - 0.031642 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024008.423060 Cfl9zga2ugDcgnr87 192.168.1.107 65166 66.63.168.35 5888 tcp - 0.031811 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024004.792335 Cn6IKS3DvmYWE6nR4k 192.168.1.107 65164 66.63.168.35 5888 tcp - 0.037429 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024009.987787 CN1XUZ2BgyfFbrcp15 192.168.1.107 65167 66.63.168.35 5888 tcp - 0.030531 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024010.528923 C79YY8SxVgTBcE6vf 192.168.1.107 65167 66.63.168.35 5888 tcp - 0.030680 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024006.873277 CNtWcvhaQ9jsKAeH5 192.168.1.107 65165 66.63.168.35 5888 tcp - 0.017158 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024012.093436 ClglEnaIRAV2z4iGl 192.168.1.107 65168 66.63.168.35 5888 tcp - 0.033140 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024012.625123 CqC8mM2kbiORLSY098 192.168.1.107 65168 66.63.168.35 5888 tcp - 0.033320 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024008.950097 CeSdDu44REohRnhm9b 192.168.1.107 65166 66.63.168.35 5888 tcp - 0.031794 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024014.201445 CenQyR31vBBVqb03Xj 192.168.1.107 65169 66.63.168.35 5888 tcp - 0.017168 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024014.722198 CFgoW324PoNsl0Mwrg 192.168.1.107 65169 66.63.168.35 5888 tcp - 0.017067 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024011.061345 CuiDvf2DCFVz90BGa9 192.168.1.107 65167 66.63.168.35 5888 tcp - 0.030510 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024016.263723 C1Lj3WjheHwvkxw42 192.168.1.107 65170 66.63.168.35 5888 tcp - 0.025380 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024016.795332 CgFdOS25RAygPHzDJ6 192.168.1.107 65170 66.63.168.35 5888 tcp - 0.025474 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024013.167080 COjWGLKxo76vgKHod 192.168.1.107 65168 66.63.168.35 5888 tcp - 0.033305 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024018.352264 Ci9X2p1wtxfH95aRU9 192.168.1.107 65171 66.63.168.35 5888 tcp - 0.027980 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024018.884359 CbbdOC4cyWVgbFlCR 192.168.1.107 65171 66.63.168.35 5888 tcp - 0.027964 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024015.243940 Cmq4xn3hFXOE8RI9k1 192.168.1.107 65169 66.63.168.35 5888 tcp - 0.017013 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024020.446177 CUi0k12CDPJYdqTLol 192.168.1.107 65172 66.63.168.35 5888 tcp - 0.028066 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024020.976547 CQvCxv3bX0gd8XXgR3 192.168.1.107 65172 66.63.168.35 5888 tcp - 0.027997 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024017.325640 COjdUU39MDYr54oFt8 192.168.1.107 65170 66.63.168.35 5888 tcp - 0.025359 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024022.535062 CeKwJD1h3VujHTxCL8 192.168.1.107 65173 66.63.168.35 5888 tcp - 0.016854 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024023.054998 CsjRSg4SnvEYIzrnr2 192.168.1.107 65173 66.63.168.35 5888 tcp - 0.016872 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024019.416619 CISGBQbcsvC3iXtOc 192.168.1.107 65171 66.63.168.35 5888 tcp - 0.027984 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024024.595673 CFQ2L911oHmKRIh9fd 192.168.1.107 65174 66.63.168.35 5888 tcp - 0.028454 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024025.127693 CYDhno2FMXJXLdwaA2 192.168.1.107 65174 66.63.168.35 5888 tcp - 0.028601 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024021.506374 CD2K2aVWwqQviYSd7 192.168.1.107 65172 66.63.168.35 5888 tcp - 0.027964 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024026.693006 CUH8cG2tSgSoq5CYK9 192.168.1.107 65175 66.63.168.35 5888 tcp - 0.025382 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024027.224563 C0olP54O2CTUnC3Kli 192.168.1.107 65175 66.63.168.35 5888 tcp - 0.025411 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024023.576775 CZXnvd5nRwcvtwWTb 192.168.1.107 65173 66.63.168.35 5888 tcp - 0.016898 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024028.781156 Co6HfT13deG0mGRED9 192.168.1.107 65176 66.63.168.35 5888 tcp - 0.032981 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024029.316238 CEbDiv4C3Abfc3LwFe 192.168.1.107 65176 66.63.168.35 5888 tcp - 0.033058 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024025.659176 C6iLt02pjoiSrmYZI1 192.168.1.107 65174 66.63.168.35 5888 tcp - 0.028272 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024030.899523 CxCaNG2nfNUY4toiA7 192.168.1.107 65177 66.63.168.35 5888 tcp - 0.031687 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024031.431484 Chq4j61oSd5ov9EXyb 192.168.1.107 65177 66.63.168.35 5888 tcp - 0.031809 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024027.754216 Cmy4Uj3DMimPoaQPn9 192.168.1.107 65175 66.63.168.35 5888 tcp - 0.025610 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024032.995393 CWPiTP3Eym3iGw8o2k 192.168.1.107 65178 66.63.168.35 5888 tcp - 0.016794 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024033.518045 CqWqN32rmErZCCYY63 192.168.1.107 65178 66.63.168.35 5888 tcp - 0.016888 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024029.859972 Cx5ByC2fAWRl2OwrK3 192.168.1.107 65176 66.63.168.35 5888 tcp - 0.033064 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024035.049227 CseL1s1iJ2BAFzjSk6 192.168.1.107 65179 66.63.168.35 5888 tcp - 0.031715 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024035.580396 CcM1xCTjAU0b43igl 192.168.1.107 65179 66.63.168.35 5888 tcp - 0.031602 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024031.962247 CYtlZj1kr7s5gqUY3e 192.168.1.107 65177 66.63.168.35 5888 tcp - 0.031605 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024037.147352 CkujM41NrSjNcd8wI7 192.168.1.107 65180 66.63.168.35 5888 tcp - 0.037589 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024037.685696 Cr8rZ7t4CFpLB0od3 192.168.1.107 65180 66.63.168.35 5888 tcp - 0.037318 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024034.031277 C1f1x61crY85SOvSPc 192.168.1.107 65178 66.63.168.35 5888 tcp - 0.016810 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024039.263441 CVexYpEcJFVnrRbjc 192.168.1.107 65181 66.63.168.35 5888 tcp - 0.028094 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024039.793845 CGGHJJPOO4tRRj1D3 192.168.1.107 65181 66.63.168.35 5888 tcp - 0.028133 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024036.113589 CuT1wX3AMTA6FK9P59 192.168.1.107 65179 66.63.168.35 5888 tcp - 0.031867 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024041.354791 C8MJdp17QieBB1yW8 192.168.1.107 65182 66.63.168.35 5888 tcp - 0.028492 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024041.888063 CkYO3P1t0KNYBUlg4b 192.168.1.107 65182 66.63.168.35 5888 tcp - 0.028443 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024038.224226 CZJ2IUvE3Db4kvJ43 192.168.1.107 65180 66.63.168.35 5888 tcp - 0.037271 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024043.441438 CcgSy42hLlECo0kufk 192.168.1.107 65183 66.63.168.35 5888 tcp - 0.028592 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024043.972264 CxJtd32gHl400g8mNi 192.168.1.107 65183 66.63.168.35 5888 tcp - 0.028357 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024040.322639 Cl87Zxagav7IzXFil 192.168.1.107 65181 66.63.168.35 5888 tcp - 0.028339 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024045.626036 CQT90IvCGCssOlw1 192.168.1.107 65184 66.63.168.35 5888 tcp - 0.037775 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024046.167540 CZsfqB1kVHqtJE81U2 192.168.1.107 65184 66.63.168.35 5888 tcp - 0.037719 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024042.412029 Ctt7yz1DZppFwLjaX7 192.168.1.107 65182 66.63.168.35 5888 tcp - 0.028449 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024047.737791 C2zlM6xPcD4JNUHQg 192.168.1.107 65185 66.63.168.35 5888 tcp - 0.038355 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024048.273635 ChCaoxDm6mqHgmcqh 192.168.1.107 65185 66.63.168.35 5888 tcp - 0.038388 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024044.503123 CVhShJErlEYitbU0d 192.168.1.107 65183 66.63.168.35 5888 tcp - 0.028416 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024049.860952 CkBLpy4iS6I9V8gGR4 192.168.1.107 65186 66.63.168.35 5888 tcp - 0.017033 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024050.382396 CRIb4p4NmcvU9KPsyh 192.168.1.107 65186 66.63.168.35 5888 tcp - 0.016976 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024046.699038 CDE5EiNqzrOpNQFJh 192.168.1.107 65184 66.63.168.35 5888 tcp - 0.037744 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024051.923768 C0hUeY3Mirk2ENKh95 192.168.1.107 65187 66.63.168.35 5888 tcp - 0.028276 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024052.455682 CQpkXMUZA8Nsd92j 192.168.1.107 65187 66.63.168.35 5888 tcp - 0.028320 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024048.815980 C6Bpyu3qok0Ew4V713 192.168.1.107 65185 66.63.168.35 5888 tcp - 0.038358 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024054.015757 CdFH2n1ZOcDLjDMxxf 192.168.1.107 65188 66.63.168.35 5888 tcp - 0.017032 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024054.539866 CWfmxV2ZdhseoSngcd 192.168.1.107 65188 66.63.168.35 5888 tcp - 0.017077 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024050.904368 CBeX4d3DcE0tRDBVrh 192.168.1.107 65186 66.63.168.35 5888 tcp - 0.016981 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024056.077920 CLNBhr3EUD2ZxgRUT6 192.168.1.107 65189 66.63.168.35 5888 tcp - 0.017032 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024056.603481 CZc1KYjvx8qlsEqt1 192.168.1.107 65189 66.63.168.35 5888 tcp - 0.017122 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024052.986560 CtjiHq19KuCkGiumeg 192.168.1.107 65187 66.63.168.35 5888 tcp - 0.028490 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024058.152990 CGRdMltBjfTmajked 192.168.1.107 65190 66.63.168.35 5888 tcp - 0.033316 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024058.687672 CH2dClDxBe5XbAXMg 192.168.1.107 65190 66.63.168.35 5888 tcp - 0.033371 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024055.059853 Cabgal3OGuJjYE9s23 192.168.1.107 65188 66.63.168.35 5888 tcp - 0.017142 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024060.258551 C8gqp711G99oAB5cZ9 192.168.1.107 65191 66.63.168.35 5888 tcp - 0.028481 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024060.779871 C0DL2T1FzHkTenzJQe 192.168.1.107 65191 66.63.168.35 5888 tcp - 0.028299 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024057.129970 CBXopo4PHC2NsU1zp1 192.168.1.107 65189 66.63.168.35 5888 tcp - 0.016987 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024062.350938 CLKqAqtv7usa6yPb2 192.168.1.107 65192 66.63.168.35 5888 tcp - 0.038126 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024062.893193 CqpFOE3EUUr3beqLxe 192.168.1.107 65192 66.63.168.35 5888 tcp - 0.038067 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024059.225351 Clrsy01zgdudM3prv8 192.168.1.107 65190 66.63.168.35 5888 tcp - 0.033227 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024064.474119 Cij8cOJ1hJmWkvX1h 192.168.1.107 65193 66.63.168.35 5888 tcp - 0.017006 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024064.997444 CsfCLp2SSSYnnJIJne 192.168.1.107 65193 66.63.168.35 5888 tcp - 0.016901 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024061.310369 CYHDhA2vsfGFpZvLD2 192.168.1.107 65191 66.63.168.35 5888 tcp - 0.028551 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024066.540145 C5VSOx1QlAEoueZFGb 192.168.1.107 65194 66.63.168.35 5888 tcp - 0.031825 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024067.072723 CxaNny3c2SwL9p7XN2 192.168.1.107 65194 66.63.168.35 5888 tcp - 0.031954 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024063.436435 CuGOW64c1p7cg5oZI1 192.168.1.107 65192 66.63.168.35 5888 tcp - 0.037845 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024068.641468 CrYWxg1e8jPt30b9ga 192.168.1.107 65195 66.63.168.35 5888 tcp - 0.026447 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024069.164498 CgMKzCF0G2K7iGLs1 192.168.1.107 65195 66.63.168.35 5888 tcp - 0.026411 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024065.518689 CjjqVa3QXLEd1zLzZ1 192.168.1.107 65193 66.63.168.35 5888 tcp - 0.016770 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024070.725585 Cdh2PU2dtIHWZ7rjRb 192.168.1.107 65196 66.63.168.35 5888 tcp - 0.036417 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024071.263174 Cy4Vix2zyvW5QQXeng 192.168.1.107 65196 66.63.168.35 5888 tcp - 0.036438 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024067.604154 Cysf7x9QnTx4bcZvj 192.168.1.107 65194 66.63.168.35 5888 tcp - 0.031926 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024072.838682 CN06Zq4TkPRyOPJNsl 192.168.1.107 65197 66.63.168.35 5888 tcp - 0.026327 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024073.365218 CTMm4oUbDpXt4CWU 192.168.1.107 65197 66.63.168.35 5888 tcp - 0.026357 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024069.696484 C0bxI43fivvraO5KI4 192.168.1.107 65195 66.63.168.35 5888 tcp - 0.026407 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024074.920731 CUMVah2PBAB9Js90T7 192.168.1.107 65198 66.63.168.35 5888 tcp - 0.027797 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024075.449924 CP7zQ343ujWAIjBmvc 192.168.1.107 65198 66.63.168.35 5888 tcp - 0.027826 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024016.666957 CisS5z4l9PYGBr3h3f 192.168.1.135 42892 192.168.1.107 58211 udp - 0.000215 1040 0 S0 T T 0 D 2 1096 0 0 - Benign From_benign-To_benign-Device_chromecast_tv_assistant
+1677024071.801284 C8MQ9M3lBWDFsEMtqc 192.168.1.107 65196 66.63.168.35 5888 tcp - 0.036341 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024077.001098 CuIa6S3NStcVsvQxS1 192.168.1.107 65199 66.63.168.35 5888 tcp - 0.025346 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024017.690925 CelrUl4ZjYJC2UD0u1 192.168.1.135 56232 192.168.1.107 58211 udp - 0.000217 1040 0 S0 T T 0 D 2 1096 0 0 - Benign From_benign-To_benign-Device_chromecast_tv_assistant
+1677024077.521968 CZkrsF2v2xtoJQMPBf 192.168.1.107 65199 66.63.168.35 5888 tcp - 0.025185 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024018.510203 CyGZS84Gnh08YXE518 192.168.1.135 42351 192.168.1.107 58211 udp - - - - S0 T T 0 D 2 1096 0 0 - Benign From_benign-To_benign-Device_chromecast_tv_assistant
+1677024073.892847 CDC73g2Z1gaGQON9jc 192.168.1.107 65197 66.63.168.35 5888 tcp - 0.026548 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024016.482320 CCs7y14a0dFDq0MIBj 192.168.1.107 58211 239.255.255.250 1900 udp - 3.021239 7700 0 S0 T F 0 D 44 8932 0 0 - Benign From_benign-To_benign-Multicast
+1677024019.534065 COjnex4IFFUtwciV27 192.168.1.135 56840 192.168.1.107 58211 udp - 0.000242 1040 0 S0 T T 0 D 2 1096 0 0 - Benign From_benign-To_benign-Device_chromecast_tv_assistant
+1677024079.067278 CAdlah30OBhZVKUi64 192.168.1.107 65200 66.63.168.35 5888 tcp - 0.016944 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024079.588518 CevnaI2U6G8k8z0f1b 192.168.1.107 65200 66.63.168.35 5888 tcp - 0.016905 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024075.972286 CBkuH64ueNJpCnp1e9 192.168.1.107 65198 66.63.168.35 5888 tcp - 0.027821 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024081.117871 CERkfv17QhQxV3fN5l 192.168.1.107 65201 66.63.168.35 5888 tcp - 0.025278 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024081.639171 C2rf5o3KPxPrNb3hmg 192.168.1.107 65201 66.63.168.35 5888 tcp - 0.025342 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024078.042069 C3VkpM10cwvq4kjudb 192.168.1.107 65199 66.63.168.35 5888 tcp - 0.025193 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024083.186608 CNrFsYJdUFpyGrnX9 192.168.1.107 65202 66.63.168.35 5888 tcp - 0.025730 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024083.709169 C8RzaXScq8HN6qnna 192.168.1.107 65202 66.63.168.35 5888 tcp - 0.025607 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024080.099654 CQGn1V2bARYACgfs3b 192.168.1.107 65200 66.63.168.35 5888 tcp - 0.017080 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024085.257541 C2E9g9cf3COAzZkU4 192.168.1.107 65203 66.63.168.35 5888 tcp - 0.027245 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024085.778395 CT4MWn2zz7KdUuYNO 192.168.1.107 65203 66.63.168.35 5888 tcp - 0.027100 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024082.160527 Cna7sR2vrcBzs10Cjg 192.168.1.107 65201 66.63.168.35 5888 tcp - 0.025219 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024087.370665 CT4Qlo1Oonl5kpArZ6 192.168.1.107 65204 66.63.168.35 5888 tcp - 0.017045 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024087.881366 CVAKIm4KPHGCjtQ4G3 192.168.1.107 65204 66.63.168.35 5888 tcp - 0.017138 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024084.229611 CkHKE51mfhWCFXm8C1 192.168.1.107 65202 66.63.168.35 5888 tcp - 0.025688 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024089.420795 ConsRm2H6h4Si1Ls3f 192.168.1.107 65205 66.63.168.35 5888 tcp - 0.026442 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024017.108205 CfCF743iHGGmMCaQz5 192.168.1.132 1900 192.168.1.107 58211 udp - 12.987435 14208 0 S0 T T 0 D 48 15552 0 0 - Benign From_benign-To_benign
+1677024021.618037 CI4fft1NQK3lx0uCGk 192.168.1.107 3 192.168.1.132 3 icmp - 8.478187 9720 0 OTH T T 0 - 30 10560 0 0 - Benign From_benign-To_benign
+1677024089.939472 C62pH846NS3P0rybT7 192.168.1.107 65205 66.63.168.35 5888 tcp - 0.026492 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024086.301281 C7mAcg3cgyicNLMJS6 192.168.1.107 65203 66.63.168.35 5888 tcp - 0.027155 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024091.494644 COSSXW23Qi8klqjDv2 192.168.1.107 65206 66.63.168.35 5888 tcp - 0.016874 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024092.015571 CarOMC4wO0vWpjvdmk 192.168.1.107 65206 66.63.168.35 5888 tcp - 0.016985 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024088.401787 CZbn3K1Oe5NzRJywyb 192.168.1.107 65204 66.63.168.35 5888 tcp - 0.017041 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024093.553002 CJTpdm3BKhWIfMlmeh 192.168.1.107 65207 66.63.168.35 5888 tcp - 0.028556 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024094.084493 CKtc5F1w2Mn1Ey5hw9 192.168.1.107 65207 66.63.168.35 5888 tcp - 0.028458 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024090.467105 CqZqMd2xdQTdUDev97 192.168.1.107 65205 66.63.168.35 5888 tcp - 0.026568 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024095.644603 C6EC4T2ftY6LIw5kN2 192.168.1.107 65208 66.63.168.35 5888 tcp - 0.016930 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024096.165254 Cl7Vrv33raqXROncV1 192.168.1.107 65208 66.63.168.35 5888 tcp - 0.016908 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024092.536216 CkosnP3wztqruFJMx 192.168.1.107 65206 66.63.168.35 5888 tcp - 0.016971 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024097.707184 C6op2E4Ljp4jsTDpsk 192.168.1.107 65209 66.63.168.35 5888 tcp - 0.025213 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024098.234707 Cc7Rfj2O61u8YgMpG4 192.168.1.107 65209 66.63.168.35 5888 tcp - 0.025108 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024094.614808 CbcvYsMKNwdqwXURa 192.168.1.107 65207 66.63.168.35 5888 tcp - 0.028458 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024099.793326 C0tGqY3NRwiO0eyyE5 192.168.1.107 65210 66.63.168.35 5888 tcp - 0.016971 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024100.314840 CwwHeZ3tOcDFroqDLg 192.168.1.107 65210 66.63.168.35 5888 tcp - 0.017007 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024096.688248 CbavnM3N07fzsm69z 192.168.1.107 65208 66.63.168.35 5888 tcp - 0.016859 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024101.851380 CGK83C1SzAkLj7lc9k 192.168.1.107 65211 66.63.168.35 5888 tcp - 0.017088 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024102.372319 CIZ9ji4nYUm6wxmPEc 192.168.1.107 65211 66.63.168.35 5888 tcp - 0.017106 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024098.766757 CHhywQ1UVZUI4SxlGh 192.168.1.107 65209 66.63.168.35 5888 tcp - 0.025176 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024103.911857 CRYeRZJCzZBZUdeS8 192.168.1.107 65212 66.63.168.35 5888 tcp - 0.016911 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024104.433414 CRBXaP1AWrRC6O7v66 192.168.1.107 65212 66.63.168.35 5888 tcp - 0.017183 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024100.829196 CXrA4xRLCnrvj4X82 192.168.1.107 65210 66.63.168.35 5888 tcp - 0.017227 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024105.969120 CYY9VV30MiO8kD1ym4 192.168.1.107 65213 66.63.168.35 5888 tcp - 0.028082 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024106.498378 CTubtS1U3RHopoBOs 192.168.1.107 65213 66.63.168.35 5888 tcp - 0.028185 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024102.893648 CjVoIoOuJqi70ince 192.168.1.107 65211 66.63.168.35 5888 tcp - 0.017272 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024108.055993 CxaH8A1txhDNsFktNl 192.168.1.107 65214 66.63.168.35 5888 tcp - 0.028389 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024108.586431 CbAN5A4zaYjKpE8vr3 192.168.1.107 65214 66.63.168.35 5888 tcp - 0.028577 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024104.951786 CNB88S3biWfoSkoMe9 192.168.1.107 65212 66.63.168.35 5888 tcp - 0.016853 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024110.146211 CDkRMu1g6lxgE9cpuc 192.168.1.107 65215 66.63.168.35 5888 tcp - 0.030248 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024110.678174 CT1a8x2lLSTMgN7ZZ8 192.168.1.107 65215 66.63.168.35 5888 tcp - 0.030452 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024107.027947 CR3sI26zOo8aYSxH 192.168.1.107 65213 66.63.168.35 5888 tcp - 0.028060 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024112.242441 CQAvqG4M3J0i1y74Fe 192.168.1.107 65216 66.63.168.35 5888 tcp - 0.017121 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024112.764281 CP8sHPAR2s7YPO6Xi 192.168.1.107 65216 66.63.168.35 5888 tcp - 0.016903 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024109.117108 Cdqv6x1y01k6Jl6Z7 192.168.1.107 65214 66.63.168.35 5888 tcp - 0.028539 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024114.303305 CFmu904JhaHhrQEs8g 192.168.1.107 65217 66.63.168.35 5888 tcp - 0.017033 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024114.824308 CpKPZy43NzS1w0M8Ii 192.168.1.107 65217 66.63.168.35 5888 tcp - 0.016952 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024111.211200 CNblokZmDyUbUU6 192.168.1.107 65215 66.63.168.35 5888 tcp - 0.030484 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024116.361304 CyTtkezCegz9JfXPd 192.168.1.107 65218 66.63.168.35 5888 tcp - 0.028442 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024116.895108 CsGI7g48gCYrO6jWX3 192.168.1.107 65218 66.63.168.35 5888 tcp - 0.028384 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024113.285621 CLKn001ZTN1uoMUHLa 192.168.1.107 65216 66.63.168.35 5888 tcp - 0.017128 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024118.453883 CcoMOyC8yGYMFGYU8 192.168.1.107 65219 66.63.168.35 5888 tcp - 0.033360 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024118.984513 CHnmuS1j3LnYTM9vie 192.168.1.107 65219 66.63.168.35 5888 tcp - 0.033366 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024115.342247 CeFMAA4ykpHDtkMTRk 192.168.1.107 65217 66.63.168.35 5888 tcp - 0.016986 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024120.561339 CrrBP4qqBGQg1wHTd 192.168.1.107 65220 66.63.168.35 5888 tcp - 0.017189 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024121.082527 CmV6MA2VIy4ouC94r8 192.168.1.107 65220 66.63.168.35 5888 tcp - 0.017274 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024117.425156 CFEDrEyoGZAtbbuh3 192.168.1.107 65218 66.63.168.35 5888 tcp - 0.028317 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024122.622924 CgFvQu3zVYu7jMVpt8 192.168.1.107 65221 66.63.168.35 5888 tcp - 0.033610 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024123.153136 Cf7rxX3IIk3erogZY3 192.168.1.107 65221 66.63.168.35 5888 tcp - 0.033183 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024119.527716 CisCtc3w5v9rBeGlx8 192.168.1.107 65219 66.63.168.35 5888 tcp - 0.033404 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024124.717459 C4donb1Fx7WqBw32ic 192.168.1.107 65222 66.63.168.35 5888 tcp - 0.016920 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024125.230336 CxT9m11rxWO9FPuMh7 192.168.1.107 65222 66.63.168.35 5888 tcp - 0.017099 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024121.603801 C7Rf5Kca3LqAFTBxk 192.168.1.107 65220 66.63.168.35 5888 tcp - 0.017206 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024126.771622 CoujQtlFmR3juwr74 192.168.1.107 65224 66.63.168.35 5888 tcp - 0.017060 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024127.291366 CkABpWRUiNsdubPKd 192.168.1.107 65224 66.63.168.35 5888 tcp - 0.017064 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024123.683193 CnPXTs08vIv0kQBC4 192.168.1.107 65221 66.63.168.35 5888 tcp - 0.033167 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024128.827944 CMszfi4H5Ws59aWfXg 192.168.1.107 65225 66.63.168.35 5888 tcp - 0.017240 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024129.345510 CMImImYIMxyVvH1M7 192.168.1.107 65225 66.63.168.35 5888 tcp - 0.017084 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024125.752533 CBkoL83I5LoGHv1Q0g 192.168.1.107 65222 66.63.168.35 5888 tcp - 0.017089 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024130.884355 C1dLaN32JMaPsyImK8 192.168.1.107 65226 66.63.168.35 5888 tcp - 0.017141 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024131.404848 CwRvCL1WAUxbSs6l45 192.168.1.107 65226 66.63.168.35 5888 tcp - 0.016976 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024127.809576 CNr3IJ1alXBAxeI2Kj 192.168.1.107 65224 66.63.168.35 5888 tcp - 0.017161 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024132.943527 Cr3duY17M7X7JVdsfd 192.168.1.107 65227 66.63.168.35 5888 tcp - 0.027116 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024133.474747 C59ocq3EahRi3R3Uk3 192.168.1.107 65227 66.63.168.35 5888 tcp - 0.029495 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024129.866185 CBYfFi47HIMF3GYxyb 192.168.1.107 65225 66.63.168.35 5888 tcp - 0.017151 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024135.032953 CIgoER34GH15PyOx2f 192.168.1.107 65228 66.63.168.35 5888 tcp - 0.028298 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024135.564495 CphmXKOYlcavtuvO3 192.168.1.107 65228 66.63.168.35 5888 tcp - 0.027973 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024131.926118 CpA1F5PtaZcyQFVK7 192.168.1.107 65226 66.63.168.35 5888 tcp - 0.017393 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024137.127774 CXNFAz1g5awsQjTiHh 192.168.1.107 65229 66.63.168.35 5888 tcp - 0.016915 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024137.638601 CLKBGu3gVXIkiYJmPc 192.168.1.107 65229 66.63.168.35 5888 tcp - 0.016995 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024134.005125 C4Q8hD1yqKlSksJnX5 192.168.1.107 65227 66.63.168.35 5888 tcp - 0.027224 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024139.171694 Cv9EpE4ogmVfCEaaz9 192.168.1.107 65230 66.63.168.35 5888 tcp - 0.016885 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024139.693434 CGeUbG3NI2M2dW3Sg 192.168.1.107 65230 66.63.168.35 5888 tcp - 0.016845 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024136.097499 Cu9vVh42JHC2miWr7h 192.168.1.107 65228 66.63.168.35 5888 tcp - 0.027972 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024141.235383 CHJ3lMFi0HSj4pSgb 192.168.1.107 65231 66.63.168.35 5888 tcp - 0.026355 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024141.766993 Cb5GWWor61oucgzih 192.168.1.107 65231 66.63.168.35 5888 tcp - 0.026405 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024138.150593 CPheCN3vsQp6fz9MZk 192.168.1.107 65229 66.63.168.35 5888 tcp - 0.017102 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024143.325994 CDbYmF12Wv1l68BIQl 192.168.1.107 65232 66.63.168.35 5888 tcp - 0.033335 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024143.861383 CwTkME2I0wvguOR8n3 192.168.1.107 65232 66.63.168.35 5888 tcp - 0.033298 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024140.214189 CWYqJx2T1BskFb5yh9 192.168.1.107 65230 66.63.168.35 5888 tcp - 0.016894 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024145.430864 C3GZm93IG52OuBMZsd 192.168.1.107 65233 66.63.168.35 5888 tcp - 0.017131 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024145.951271 CBA2QYf6q0RXsj6Ka 192.168.1.107 65233 66.63.168.35 5888 tcp - 0.017104 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024142.298265 CMDGqX2Kx4RhMDz6Tf 192.168.1.107 65231 66.63.168.35 5888 tcp - 0.026514 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024147.492543 C2DP5q4DFJ6I56wcUj 192.168.1.107 65234 66.63.168.35 5888 tcp - 0.017270 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024148.012737 C86kGj4yGB7lIfwKr 192.168.1.107 65234 66.63.168.35 5888 tcp - 0.017198 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024144.395646 CAzE4C2apnsc5Ejon6 192.168.1.107 65232 66.63.168.35 5888 tcp - 0.033403 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024149.550718 Cfgaep12uiSkfb8djh 192.168.1.107 65235 66.63.168.35 5888 tcp - 0.036436 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024150.082479 Ccb0Y42gd9yZSiF638 192.168.1.107 65235 66.63.168.35 5888 tcp - 0.036492 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024146.472797 CCwyLX1OBY7EhCuQm3 192.168.1.107 65233 66.63.168.35 5888 tcp - 0.017119 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024151.658623 Cv8vDx0vSBuWUEmJl 192.168.1.107 65236 66.63.168.35 5888 tcp - 0.025686 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024152.180056 CwLkHxX969OHjWkT5 192.168.1.107 65236 66.63.168.35 5888 tcp - 0.025706 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024148.530102 CZ79ks2Xaz1854Tac6 192.168.1.107 65234 66.63.168.35 5888 tcp - 0.017058 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024153.729092 Cp8NMh2hWW1Qb3z9S 192.168.1.107 65237 66.63.168.35 5888 tcp - 0.028387 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024154.251868 COOGAI7DolZvV5Rj1 192.168.1.107 65237 66.63.168.35 5888 tcp - 0.028491 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024150.620821 CuEPAj4ruAGBXlyMvl 192.168.1.107 65235 66.63.168.35 5888 tcp - 0.036443 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024155.813518 Cuit6U1iu1AyOAV6Rj 192.168.1.107 65238 66.63.168.35 5888 tcp - 0.025320 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024156.345058 CYy1I41o5ihzS0opr5 192.168.1.107 65238 66.63.168.35 5888 tcp - 0.025547 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024152.702056 CtxMloKSw7xbst9De 192.168.1.107 65236 66.63.168.35 5888 tcp - 0.025627 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024157.904883 CnetQN3Tv5CCBjaZvk 192.168.1.107 65239 66.63.168.35 5888 tcp - 0.033171 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024158.439640 CWmJBf4GAopKaLsd7i 192.168.1.107 65239 66.63.168.35 5888 tcp - 0.033108 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024154.782517 CmKEi83uj69NS8DRr 192.168.1.107 65237 66.63.168.35 5888 tcp - 0.028436 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024160.009426 CGLMGRgrNCcDfLqg 192.168.1.107 65240 66.63.168.35 5888 tcp - 0.016912 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024160.519835 C95tym196xzEb1GTt8 192.168.1.107 65240 66.63.168.35 5888 tcp - 0.016799 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024156.877382 Cb89cGeCjmbNOk8qj 192.168.1.107 65238 66.63.168.35 5888 tcp - 0.025294 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024162.058817 Cf6fWG3pod4Z4eWngh 192.168.1.107 65241 66.63.168.35 5888 tcp - 0.026421 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024162.586084 CIRQq42Y4hdX20oE2g 192.168.1.107 65241 66.63.168.35 5888 tcp - 0.026451 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024158.971529 Cx8EgO3DC297HC7Cv8 192.168.1.107 65239 66.63.168.35 5888 tcp - 0.033190 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024164.143925 CLkxTU2K3X8wMcVkMc 192.168.1.107 65242 66.63.168.35 5888 tcp - 0.016986 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024164.665636 ChAV1v3538j2I9ffda 192.168.1.107 65242 66.63.168.35 5888 tcp - 0.017049 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024161.038960 CLW05NQ44V6CjXI1l 192.168.1.107 65240 66.63.168.35 5888 tcp - 0.017039 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024166.204085 CSzfiB4ttGW9BlHBI5 192.168.1.107 65243 66.63.168.35 5888 tcp - 0.025470 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024166.735480 Ckh1zO20mBMfXnQhMa 192.168.1.107 65243 66.63.168.35 5888 tcp - 0.025470 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024163.116674 CHDFTf1I3m4NYgfHv4 192.168.1.107 65241 66.63.168.35 5888 tcp - 0.026351 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024168.291852 CWmnVM1B7oDMDRKeof 192.168.1.107 65244 66.63.168.35 5888 tcp - 0.026340 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024168.825042 C92VOV1D8PcsxgOp5b 192.168.1.107 65244 66.63.168.35 5888 tcp - 0.026294 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024165.186082 CQQ0wx3ADt8hjVe5Bd 192.168.1.107 65242 66.63.168.35 5888 tcp - 0.016905 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024170.389051 C0ApKI1dMaPcPsC2hl 192.168.1.107 65245 66.63.168.35 5888 tcp - 0.029543 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024170.925189 C59Ywh41WiquRbec7d 192.168.1.107 65245 66.63.168.35 5888 tcp - 0.029619 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024167.261714 CyTXYi3pjCMnN9OP8h 192.168.1.107 65243 66.63.168.35 5888 tcp - 0.025589 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024172.486242 Cds8G92rW00y0DaNY1 192.168.1.107 65246 66.63.168.35 5888 tcp - 0.028348 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024173.017412 C9DcqQ1eI1ccEW35A2 192.168.1.107 65246 66.63.168.35 5888 tcp - 0.028272 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024169.358754 CeqzOp3VgZiCS8Wvb4 192.168.1.107 65244 66.63.168.35 5888 tcp - 0.026190 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024174.574127 CYlzjE2XD2MrNVVoRd 192.168.1.107 65247 66.63.168.35 5888 tcp - 0.028153 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024175.106424 CSt4ek4pcfTEYAAYFj 192.168.1.107 65247 66.63.168.35 5888 tcp - 0.028249 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024171.455629 CurnovwmeK61RryQh 192.168.1.107 65245 66.63.168.35 5888 tcp - 0.029549 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024176.658836 CrG7Bx1nxu1yEUAxsf 192.168.1.107 65248 66.63.168.35 5888 tcp - 0.017131 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024177.170197 CmpU8q3F4C3iae1nf5 192.168.1.107 65248 66.63.168.35 5888 tcp - 0.017127 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024173.545312 CZWSk52eJUFsGtxwFe 192.168.1.107 65246 66.63.168.35 5888 tcp - 0.028319 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024178.705640 CFYSaZ3CbVrPTZNpBl 192.168.1.107 65249 66.63.168.35 5888 tcp - 0.025486 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024179.232886 C7Ymhj3XikWbeAymHg 192.168.1.107 65249 66.63.168.35 5888 tcp - 0.025283 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024175.629305 CwTcXI1JiVxwU4tzkd 192.168.1.107 65247 66.63.168.35 5888 tcp - 0.028131 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024180.789803 CXcsRLfLQ939qQccb 192.168.1.107 65250 66.63.168.35 5888 tcp - 0.029967 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024181.321308 CGrjtI2q7Z9Uw28CK3 192.168.1.107 65250 66.63.168.35 5888 tcp - 0.029726 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024177.687851 CrijNt2l4m57tDMyU1 192.168.1.107 65248 66.63.168.35 5888 tcp - 0.017431 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024182.883381 CQv1F9OzvrQnZKfl3 192.168.1.107 65251 66.63.168.35 5888 tcp - 0.029539 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024183.415321 CEIj1M1mlUJUyIRDZ7 192.168.1.107 65251 66.63.168.35 5888 tcp - 0.029513 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024179.760320 CD6JqH3JWXh6hVNtDh 192.168.1.107 65249 66.63.168.35 5888 tcp - 0.025380 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024184.979453 CTrAZV37UWIAJu0ZB2 192.168.1.107 65252 66.63.168.35 5888 tcp - 0.017062 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024185.490562 CSiiCN3Qej9nxxwpN6 192.168.1.107 65252 66.63.168.35 5888 tcp - 0.017055 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024181.853083 CYNCk34VKsKJgVtlO5 192.168.1.107 65250 66.63.168.35 5888 tcp - 0.029750 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024187.022342 CpHbWQrMjEH5B2rA5 192.168.1.107 65253 66.63.168.35 5888 tcp - 0.025429 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024187.543529 Cadsm11GeYWzZoiV67 192.168.1.107 65253 66.63.168.35 5888 tcp - 0.025330 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024183.946479 CWJFQt4SZUvYKRdCsd 192.168.1.107 65251 66.63.168.35 5888 tcp - 0.029756 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024189.122191 C27InHqJc2d9IrXUk 192.168.1.107 65254 66.63.168.35 5888 tcp - 0.017175 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024189.687014 C6ESdk30YNY4DzfIL1 192.168.1.107 65254 66.63.168.35 5888 tcp - 0.017137 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024186.001055 CPbPBq3eIPdb5F7VE6 192.168.1.107 65252 66.63.168.35 5888 tcp - 0.017052 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024191.267252 CxSjeO38bmbe6UPWUg 192.168.1.107 65255 66.63.168.35 5888 tcp - 0.028222 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024191.790707 C3KeeE3HIPri42G1G7 192.168.1.107 65255 66.63.168.35 5888 tcp - 0.028159 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024188.076645 CeLhKl4QZrBvDHfs2k 192.168.1.107 65253 66.63.168.35 5888 tcp - 0.025501 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024193.354139 Cpq7PT8eWmkHns7Xe 192.168.1.107 65256 66.63.168.35 5888 tcp - 0.025518 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024193.881387 CU0camEtaMAICpZ4i 192.168.1.107 65256 66.63.168.35 5888 tcp - 0.025500 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024190.201761 C01zd3z29pWJoOmac 192.168.1.107 65254 66.63.168.35 5888 tcp - 0.017099 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024195.439637 CimnOk4yEFV1swj2z9 192.168.1.107 65257 66.63.168.35 5888 tcp - 0.017002 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024195.951584 CWxJuU397yKoOLO8F5 192.168.1.107 65257 66.63.168.35 5888 tcp - 0.017094 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024136.679983 CiCYuQ1X8dbhHX5hM8 192.168.1.135 45997 192.168.1.107 51923 udp - - - - S0 T T 0 D 2 1096 0 0 - Benign From_benign-To_benign-Device_chromecast_tv_assistant
+1677024192.325229 C76a7ZrwwIPbtL5j5 192.168.1.107 65255 66.63.168.35 5888 tcp - 0.028195 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024137.704386 CmRSjf4EFnpzi4kkG9 192.168.1.135 37962 192.168.1.107 51923 udp - - - - S0 T T 0 D 2 1096 0 0 - Benign From_benign-To_benign-Device_chromecast_tv_assistant
+1677024197.498703 CjAK5n2rGij0gK7m7j 192.168.1.107 65258 66.63.168.35 5888 tcp - 0.026364 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024138.525060 CiGpJt3IFHJJKGsBDi 192.168.1.135 35749 192.168.1.107 51923 udp - 0.000216 1040 0 S0 T T 0 D 2 1096 0 0 - Benign From_benign-To_benign-Device_chromecast_tv_assistant
+1677024198.027641 CWrNkG4RsUV4xrKmK4 192.168.1.107 65258 66.63.168.35 5888 tcp - 0.026357 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024194.410705 CsTWes10XIo8T0R0qd 192.168.1.107 65256 66.63.168.35 5888 tcp - 0.025527 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024136.490237 C3QSPE2QJ9AABme9j3 192.168.1.107 51923 239.255.255.250 1900 udp - 3.021324 7700 0 S0 T F 0 D 44 8932 0 0 - Benign From_benign-To_benign-Multicast
+1677024139.547609 CgUTeV5F74WxtpVG8 192.168.1.135 37877 192.168.1.107 51923 udp - - - - S0 T T 0 D 2 1096 0 0 - Benign From_benign-To_benign-Device_chromecast_tv_assistant
+1677024199.592888 CP3sr62kgKUyJjwuEk 192.168.1.107 65259 66.63.168.35 5888 tcp - 0.018391 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024200.114009 CJ35SZONga0dG6uYl 192.168.1.107 65259 66.63.168.35 5888 tcp - 0.016875 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024196.472202 CZTDzH2e2AgCZ6Fg8d 192.168.1.107 65257 66.63.168.35 5888 tcp - 0.017071 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024201.656049 C0pM940AUSCN4SPqd 192.168.1.107 65260 66.63.168.35 5888 tcp - 0.030139 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024202.187286 CSIepOsZfjiX8ZdP6 192.168.1.107 65260 66.63.168.35 5888 tcp - 0.030325 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024198.557944 CAOmCN1pmhM4IYvEkh 192.168.1.107 65258 66.63.168.35 5888 tcp - 0.026309 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024203.751953 C4Vymr4VvusSYqXJSf 192.168.1.107 65261 66.63.168.35 5888 tcp - 0.028014 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024204.281733 CXdAcbJV9zJhQ20a5 192.168.1.107 65261 66.63.168.35 5888 tcp - 0.028373 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024200.635036 CSLyD73hbu2hENff7e 192.168.1.107 65259 66.63.168.35 5888 tcp - 0.016822 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024205.847416 CmQ12d2gIjRMwYVGh5 192.168.1.107 65262 66.63.168.35 5888 tcp - 0.029335 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024206.374143 CFUCQzrQWy03LMv7e 192.168.1.107 65262 66.63.168.35 5888 tcp - 0.029317 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024202.720874 C8ybjF2I6CteSFgjul 192.168.1.107 65260 66.63.168.35 5888 tcp - 0.030021 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024207.935570 Ci8SZO2IM8i6KvC3xj 192.168.1.107 65263 66.63.168.35 5888 tcp - 0.028231 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024208.459844 CW1IbF2TDy7gq7s6je 192.168.1.107 65263 66.63.168.35 5888 tcp - 0.028214 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024204.814547 CMSAHs2gFfRAd5Yz8c 192.168.1.107 65261 66.63.168.35 5888 tcp - 0.028041 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024137.109010 CfwO6F2nNDPnjdBnxc 192.168.1.132 1900 192.168.1.107 51923 udp - 12.993050 14208 0 S0 T T 0 D 48 15552 0 0 - Benign From_benign-To_benign
+1677024141.624193 CVXwmB3ebvm6ZNJ0zi 192.168.1.107 3 192.168.1.132 3 icmp - 8.478225 9720 0 OTH T T 0 - 30 10560 0 0 - Benign From_benign-To_benign
+1677024210.025714 CEcCm92TmrybrAVOK9 192.168.1.107 65264 66.63.168.35 5888 tcp - 0.026472 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024210.553014 CRd5wH7dAbK5SXXZ2 192.168.1.107 65264 66.63.168.35 5888 tcp - 0.026476 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024206.905998 C8vQrH1KHtGgBNx166 192.168.1.107 65262 66.63.168.35 5888 tcp - 0.029285 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024212.111469 CU7p9C3ckImTRHTjK3 192.168.1.107 65265 66.63.168.35 5888 tcp - 0.017064 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024212.631577 CFo7Ch4DFQTsLzycv8 192.168.1.107 65265 66.63.168.35 5888 tcp - 0.017217 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024208.992037 C0t1f02WhS78e2iwm9 192.168.1.107 65263 66.63.168.35 5888 tcp - 0.028256 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024214.171896 CnDh1C3ofKB3uhAXhb 192.168.1.107 65266 66.63.168.35 5888 tcp - 0.031862 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024214.703308 CMpwDg1POTYdkzAysb 192.168.1.107 65266 66.63.168.35 5888 tcp - 0.031800 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024211.083551 CuHRGA24hudHOA8qW3 192.168.1.107 65264 66.63.168.35 5888 tcp - 0.026600 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024216.271541 CBjwrCXwlbia2eIzj 192.168.1.107 65267 66.63.168.35 5888 tcp - 0.029879 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024216.804416 CN9RxnoZveb6OOxCd 192.168.1.107 65267 66.63.168.35 5888 tcp - 0.029530 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024213.152617 ChL7lAOQ1zfHRI7A2 192.168.1.107 65265 66.63.168.35 5888 tcp - 0.017427 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024218.365840 C92Iso3fo6stUdsHs4 192.168.1.107 65268 66.63.168.35 5888 tcp - 0.016971 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024218.888426 CA5m7L3Yp41yeURoA7 192.168.1.107 65268 66.63.168.35 5888 tcp - 0.017115 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024215.238071 C4R9Ez19u889IdGqc6 192.168.1.107 65266 66.63.168.35 5888 tcp - 0.031776 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024220.419741 CBXb3s1Vq9XQY1ddB3 192.168.1.107 65269 66.63.168.35 5888 tcp - 0.036423 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024220.954364 CABmbY3FYCDAn7kYgi 192.168.1.107 65269 66.63.168.35 5888 tcp - 0.036376 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024217.334798 CujtGq4xwntBW8Nm07 192.168.1.107 65267 66.63.168.35 5888 tcp - 0.029511 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024222.536243 COecwL364RCDpKbTMj 192.168.1.107 65270 66.63.168.35 5888 tcp - 0.017209 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024223.056785 CoTInj2uXtXvz9eB4g 192.168.1.107 65270 66.63.168.35 5888 tcp - 0.017218 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024219.401100 CpFbas4spLkkmL7Ckf 192.168.1.107 65268 66.63.168.35 5888 tcp - 0.017003 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024224.587955 Cu2bB23EnZmOytWY0j 192.168.1.107 65271 66.63.168.35 5888 tcp - 0.027363 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024225.108660 Cp3bZ37O44UgZbmn7 192.168.1.107 65271 66.63.168.35 5888 tcp - 0.027358 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024221.497027 CDpGpD1ecZYSVzn4mb 192.168.1.107 65269 66.63.168.35 5888 tcp - 0.036397 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024226.663214 Cu0pI52kEOnS66zjn6 192.168.1.107 65272 66.63.168.35 5888 tcp - 0.017489 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024227.183383 C830ht2SEqc31EMjR7 192.168.1.107 65272 66.63.168.35 5888 tcp - 0.016991 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024223.568831 Cs0VUZ3NpxEo3d9Mog 192.168.1.107 65270 66.63.168.35 5888 tcp - 0.017411 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024228.719701 C6Csu71Rw6MnUYsSJk 192.168.1.107 65273 66.63.168.35 5888 tcp - 0.017170 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024229.229677 CC8hZ71AVk1JbvEuAc 192.168.1.107 65273 66.63.168.35 5888 tcp - 0.017084 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024225.629798 Ctpyrx4LRiokPOTqRb 192.168.1.107 65271 66.63.168.35 5888 tcp - 0.027404 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024230.768542 C5WEve3YdDQKTLvTxa 192.168.1.107 65274 66.63.168.35 5888 tcp - 0.036667 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024231.307500 CFvYkG21NOetY3YQNb 192.168.1.107 65274 66.63.168.35 5888 tcp - 0.036706 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024227.701841 Cj4Weg1Tp5EFu5GHdj 192.168.1.107 65272 66.63.168.35 5888 tcp - 0.017121 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024232.883070 CfkVLu2NGxyMXtQaa 192.168.1.107 65275 66.63.168.35 5888 tcp - 0.027232 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024233.414773 CiWzRa28pCVYEYZqT7 192.168.1.107 65275 66.63.168.35 5888 tcp - 0.027159 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024229.748589 Cu83vz2HlTr3zKmvVc 192.168.1.107 65273 66.63.168.35 5888 tcp - 0.017190 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024234.973562 CWWBBqb5gGJ2aVAhf 192.168.1.107 65276 66.63.168.35 5888 tcp - 0.025151 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024235.505601 CDUBF22I1XrhmzV5ei 192.168.1.107 65276 66.63.168.35 5888 tcp - 0.025098 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024231.845103 CfgEex16p1Kug3txgd 192.168.1.107 65274 66.63.168.35 5888 tcp - 0.036579 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024237.069329 CdSu6G1PzWQFKO5ff7 192.168.1.107 65277 66.63.168.35 5888 tcp - 0.026431 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024237.598421 C80SCY2rM07oCudZnc 192.168.1.107 65277 66.63.168.35 5888 tcp - 0.026375 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024233.945833 CJXDSE3d5JoCfYZjRg 192.168.1.107 65275 66.63.168.35 5888 tcp - 0.027139 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024239.149243 Cw77b52u6ecYNu4Ki1 192.168.1.107 65278 66.63.168.35 5888 tcp - 0.025244 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024239.671957 CjooIg4pcGOz5JDHP1 192.168.1.107 65278 66.63.168.35 5888 tcp - 0.025227 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024236.036783 Cg1xrv2UQNaGZbyvHk 192.168.1.107 65276 66.63.168.35 5888 tcp - 0.025128 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024241.220744 C9egUw4HEXLR38nCih 192.168.1.107 65279 66.63.168.35 5888 tcp - 0.036468 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024241.752547 CJHfo04N16hlnyT6Hj 192.168.1.107 65279 66.63.168.35 5888 tcp - 0.036524 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024238.120897 Cl6Csg1VvQLc1lwPFd 192.168.1.107 65277 66.63.168.35 5888 tcp - 0.026291 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024243.331473 CgX6pa4N4xenNcnyVg 192.168.1.107 65280 66.63.168.35 5888 tcp - 0.017088 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024243.854628 Cb64B038kC6twtlY3g 192.168.1.107 65280 66.63.168.35 5888 tcp - 0.017055 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024240.194592 C3O8yj4MkP0QZWMS1f 192.168.1.107 65278 66.63.168.35 5888 tcp - 0.025230 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024245.398824 CX5qJN1aTqB6chz2mf 192.168.1.107 65281 66.63.168.35 5888 tcp - 0.016928 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024245.909911 CDQsSF22PA0U8L6Q1d 192.168.1.107 65281 66.63.168.35 5888 tcp - 0.016992 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024242.294869 CraGgs1k3Bl49LHNJl 192.168.1.107 65279 66.63.168.35 5888 tcp - 0.036326 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024247.450653 CWFyzVyvaXYE27Gcd 192.168.1.107 65282 66.63.168.35 5888 tcp - 0.017100 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024247.962445 CFC4mf2E26V70eHNek 192.168.1.107 65282 66.63.168.35 5888 tcp - 0.017164 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024244.377289 CjvpRT1FEKiUVCnvJ 192.168.1.107 65280 66.63.168.35 5888 tcp - 0.017142 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024249.505509 CsBAkn4rZ03NH6csrg 192.168.1.107 65283 66.63.168.35 5888 tcp - 0.017277 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024250.021831 CSyRHY10gC5HuHRYQk 192.168.1.107 65283 66.63.168.35 5888 tcp - 0.017301 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024246.423363 CUbx0o8YbflV9ird 192.168.1.107 65281 66.63.168.35 5888 tcp - 0.016957 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024251.553361 CFQlub4HHQjPVuJcYf 192.168.1.107 65284 66.63.168.35 5888 tcp - 0.025658 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024252.070038 CJHbg93BAEORRIhDJj 192.168.1.107 65284 66.63.168.35 5888 tcp - 0.025658 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024248.482820 CtUn6s1BlU1ptONBli 192.168.1.107 65282 66.63.168.35 5888 tcp - 0.016969 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024253.624181 COKFxU36PjE0HA5FLh 192.168.1.107 65285 66.63.168.35 5888 tcp - 0.030396 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024254.156012 CP4Reu4BWaXqdfPQM6 192.168.1.107 65285 66.63.168.35 5888 tcp - 0.030385 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024250.533729 Cdc637O2GQZusWcU4 192.168.1.107 65283 66.63.168.35 5888 tcp - 0.017108 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024255.718774 C0dmBO1eojF2YRm9qg 192.168.1.107 65286 66.63.168.35 5888 tcp - 0.027955 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024256.249056 C9zwBY1ypIAIWDoF9a 192.168.1.107 65286 66.63.168.35 5888 tcp - 0.028017 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024252.597652 ClxQKYStBhjjAoK61 192.168.1.107 65284 66.63.168.35 5888 tcp - 0.025695 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024257.801233 CJkEp22kPke4PeJzdj 192.168.1.107 65287 66.63.168.35 5888 tcp - 0.028269 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024258.331678 CnNGen3K0HaLsIrZI 192.168.1.107 65287 66.63.168.35 5888 tcp - 0.028469 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024254.687087 Cpy5MJ3VY3FwpgBp8i 192.168.1.107 65285 66.63.168.35 5888 tcp - 0.030343 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024250.148118 CyYZ9pbydD79wo9we 192.168.1.107 56510 224.0.0.252 5355 udp dns 0.099583 506 0 S0 T F 0 D 22 1122 0 0 - Benign From_benign-To_benign-Multicast
+1677024259.894248 CkfbQ44KhtzfQF0of9 192.168.1.107 65288 66.63.168.35 5888 tcp - 0.017075 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024260.414006 C73kZ77Yg27vhFBo5 192.168.1.107 65288 66.63.168.35 5888 tcp - 0.017091 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024256.770510 CuSaIp1Imw7n3l4SLg 192.168.1.107 65286 66.63.168.35 5888 tcp - 0.027955 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024261.956255 C5cv4V1MTzlrROG575 192.168.1.107 65289 66.63.168.35 5888 tcp - 0.037776 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024262.497033 CgUk1721E9GjCa7WM6 192.168.1.107 65289 66.63.168.35 5888 tcp - 0.037498 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024258.862831 C6z4ev4tb5cULVf5Md 192.168.1.107 65287 66.63.168.35 5888 tcp - 0.028380 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024203.323807 CTxPmyQW2fMBdp2ld 192.168.1.107 56508 162.159.136.232 443 udp ssl,quic 1.067294 18672 5264 SF T F 0 Dd 30 19512 22 5880 - Benign From_benign-To_benign-Cloudflare
+1677024264.078034 C6PNErnUrbakg2Ita 192.168.1.107 65290 66.63.168.35 5888 tcp - 0.038575 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024264.621462 C8jCXfcRq4nUkN08i 192.168.1.107 65290 66.63.168.35 5888 tcp - 0.038427 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024260.934882 CW8L3P19TGmZ3hIjp6 192.168.1.107 65288 66.63.168.35 5888 tcp - 0.017105 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024266.207677 CHzEA13keN2JQiHOvb 192.168.1.107 65291 66.63.168.35 5888 tcp - 0.017305 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024266.728451 C3Ybzq4pOgPQvHHso2 192.168.1.107 65291 66.63.168.35 5888 tcp - 0.017079 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024263.037982 CPG2iv1a5t18pUrV3c 192.168.1.107 65289 66.63.168.35 5888 tcp - 0.037534 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024268.259044 CBbnm74KaKpjbJWwZ4 192.168.1.107 65292 66.63.168.35 5888 tcp - 0.033469 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024268.793146 CER16Y3cDRNPrQBrcd 192.168.1.107 65292 66.63.168.35 5888 tcp - 0.033542 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024265.162333 CA8sSm14GjGjczNQMl 192.168.1.107 65290 66.63.168.35 5888 tcp - 0.038450 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024270.368129 C9InCn1Cx1a7Rm7Khc 192.168.1.107 65293 66.63.168.35 5888 tcp - 0.033316 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024270.902587 CzW1Zb1Rx43g6M6u41 192.168.1.107 65293 66.63.168.35 5888 tcp - 0.033392 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024267.241453 CKPY5W1TGrG0GdF87 192.168.1.107 65291 66.63.168.35 5888 tcp - 0.017074 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024272.474489 C9YRbA4kJ2JhSA5lnh 192.168.1.107 65294 66.63.168.35 5888 tcp - 0.027278 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024273.002030 ClFUXz2FneV4831F8j 192.168.1.107 65294 66.63.168.35 5888 tcp - 0.027220 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024269.328356 CNpSAG4JpqR50IHtb2 192.168.1.107 65292 66.63.168.35 5888 tcp - 0.033536 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024274.551123 C7ktIz2CLbDxxRsvF7 192.168.1.107 65295 66.63.168.35 5888 tcp - 0.016970 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024275.063500 CCteUB14tz498wx2U 192.168.1.107 65295 66.63.168.35 5888 tcp - 0.016768 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024271.435278 CpGKxH2Av9eTawxX9b 192.168.1.107 65293 66.63.168.35 5888 tcp - 0.038480 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024276.602490 CNp0j8ysA1oVpj6mb 192.168.1.107 65296 66.63.168.35 5888 tcp - 0.017162 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024277.114334 CFD7Pd2N6sUFlTzi17 192.168.1.107 65296 66.63.168.35 5888 tcp - 0.016934 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024273.522319 CSU5tm4QzIa69pkL25 192.168.1.107 65294 66.63.168.35 5888 tcp - 0.027123 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024278.653453 CiHdth1694SqDgY4O1 192.168.1.107 65297 66.63.168.35 5888 tcp - 0.037419 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024279.195418 CZeSSz4DNXK3Clih9i 192.168.1.107 65297 66.63.168.35 5888 tcp - 0.037272 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024275.584415 CL67gYng1m4EAdjGd 192.168.1.107 65295 66.63.168.35 5888 tcp - 0.017009 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024280.776126 C9pE3L2jQZzs0JHvxe 192.168.1.107 65298 66.63.168.35 5888 tcp - 0.036804 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024281.322826 Ce0uTF4gW9ZMPgxYu2 192.168.1.107 65298 66.63.168.35 5888 tcp - 0.036834 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024277.635012 CrCuqspjPDFgWz124 192.168.1.107 65296 66.63.168.35 5888 tcp - 0.016954 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024282.893605 CTVcnv3dCJGPm04P44 192.168.1.107 65299 66.63.168.35 5888 tcp - 0.026435 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024283.424814 CEdrnK1t62V72D3Qq4 192.168.1.107 65299 66.63.168.35 5888 tcp - 0.026441 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024279.738032 CwKgb61vFnNoLbh2R1 192.168.1.107 65297 66.63.168.35 5888 tcp - 0.037319 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024284.991180 CQOuwj47GTmZSmwil6 192.168.1.107 65300 66.63.168.35 5888 tcp - 0.030238 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024285.521550 CvYx1FuRCG8lPp0ui 192.168.1.107 65300 66.63.168.35 5888 tcp - 0.030280 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024281.855120 CYE7W31KFu5NibqsE3 192.168.1.107 65298 66.63.168.35 5888 tcp - 0.036675 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024287.084016 CTT8K51TpGUwIVY306 192.168.1.107 65301 66.63.168.35 5888 tcp - 0.017032 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024287.604233 C5osZ51IOmCyzeDWJd 192.168.1.107 65301 66.63.168.35 5888 tcp - 0.017096 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024283.958143 CsLl1J3Y5xJt8MZC48 192.168.1.107 65299 66.63.168.35 5888 tcp - 0.026433 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024289.143900 CKtvgP1Rf9ggywB0k8 192.168.1.107 65302 66.63.168.35 5888 tcp - 0.027954 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024289.672578 C6G3G21GGCLbIBLdF 192.168.1.107 65302 66.63.168.35 5888 tcp - 0.027970 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024286.052128 Ci0pfj1R2cuxTbfHBa 192.168.1.107 65300 66.63.168.35 5888 tcp - 0.030305 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024291.232767 C19pd23luKBImRCBwe 192.168.1.107 65303 66.63.168.35 5888 tcp - 0.017077 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024291.744497 CJhJgj3MB8NvdAXTp4 192.168.1.107 65303 66.63.168.35 5888 tcp - 0.017062 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024288.125874 CAHL4K3E8ehn5UCkyl 192.168.1.107 65301 66.63.168.35 5888 tcp - 0.016874 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024293.282177 C2QLkI4okO5DYk2a3 192.168.1.107 65304 66.63.168.35 5888 tcp - 0.026330 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024293.805394 CUEL052UsRuTISTYe6 192.168.1.107 65304 66.63.168.35 5888 tcp - 0.026653 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024290.202125 C6yoTT38g6s9ekMuD9 192.168.1.107 65302 66.63.168.35 5888 tcp - 0.027937 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024295.358397 CROTZIBrm08VDZt24 192.168.1.107 65305 66.63.168.35 5888 tcp - 0.025429 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024295.892212 CP7cMk4iCYvs0EvIlj 192.168.1.107 65305 66.63.168.35 5888 tcp - 0.025105 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024292.264804 CeqX3X3iFJLcxBdDOe 192.168.1.107 65303 66.63.168.35 5888 tcp - 0.017115 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024297.442223 CBStKt2OrSk5x23Abg 192.168.1.107 65306 66.63.168.35 5888 tcp - 0.017098 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024297.957226 Cu5d1w2j1tMCGlpBO6 192.168.1.107 65306 66.63.168.35 5888 tcp - 0.017200 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024294.331528 CYV8Lm4XT9kF4wGmRl 192.168.1.107 65304 66.63.168.35 5888 tcp - 0.026443 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024299.498765 CUUj9G338cN6McPJ2g 192.168.1.107 65307 66.63.168.35 5888 tcp - 0.017178 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024300.020223 CFFFF1gtHyp6tNNR 192.168.1.107 65307 66.63.168.35 5888 tcp - 0.017201 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024296.415855 C6GiasylnlzQT0RTa 192.168.1.107 65305 66.63.168.35 5888 tcp - 0.025461 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024301.564991 Crh4mdvucVrcxJXgf 192.168.1.107 65308 66.63.168.35 5888 tcp - 0.027323 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024302.093154 CiLeLr3JVsAcvYxgEb 192.168.1.107 65308 66.63.168.35 5888 tcp - 0.027301 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024298.479932 Cd3eJA38ugDf3WLSTa 192.168.1.107 65306 66.63.168.35 5888 tcp - 0.017113 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024303.649347 C555QK2CGZbRfOFrnk 192.168.1.107 65309 66.63.168.35 5888 tcp - 0.029525 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024304.171537 CkEIgDGvKpyWBMccb 192.168.1.107 65309 66.63.168.35 5888 tcp - 0.029535 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024300.530829 Co9oYy4HbbLJnuHnf7 192.168.1.107 65307 66.63.168.35 5888 tcp - 0.017156 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024305.736698 C3JlIO1beBSzKroa9i 192.168.1.107 65310 66.63.168.35 5888 tcp - 0.028405 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024306.267690 C41yDg2JhqXAFfDbP 192.168.1.107 65310 66.63.168.35 5888 tcp - 0.028431 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024302.621630 C1ahzxDDUM7mfpJV5 192.168.1.107 65308 66.63.168.35 5888 tcp - 0.027305 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024307.828823 Cygtfw3fnKuySyO8L4 192.168.1.107 65311 66.63.168.35 5888 tcp - 0.028400 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024308.359722 CWh0lt4H8Lb68tWGX3 192.168.1.107 65311 66.63.168.35 5888 tcp - 0.028829 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024304.706580 CAvyQpuzcX0Xpzm8c 192.168.1.107 65309 66.63.168.35 5888 tcp - 0.029528 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024250.120199 CMvTd02Pju86FLjSn1 192.168.1.107 68 192.168.1.1 67 udp dhcp 0.000883 616 600 SF T T 0 Dd 2 672 2 656 - Benign From_benign-To_benign-DHCP
+1677024309.916497 CHoUA73bcwKc8PQcXl 192.168.1.107 65312 66.63.168.35 5888 tcp - 0.026238 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024310.449272 Chk50K31JReHZCJ1fi 192.168.1.107 65312 66.63.168.35 5888 tcp - 0.026277 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024306.799241 CwCSmy30bVKzdmBv5l 192.168.1.107 65310 66.63.168.35 5888 tcp - 0.028345 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024312.015243 CbZwRy4V8r02o3ev9 192.168.1.107 65313 66.63.168.35 5888 tcp - 0.017075 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024312.536656 Ckt1zMKzdGigBQIO5 192.168.1.107 65313 66.63.168.35 5888 tcp - 0.016942 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024308.882148 C5Ytlh414ZVvlHbJ19 192.168.1.107 65311 66.63.168.35 5888 tcp - 0.028504 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024314.076263 CO2WNQ2S1FHzlVvlQg 192.168.1.107 65314 66.63.168.35 5888 tcp - 0.016974 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024314.601993 ChI6f82KiU06Bja9Ze 192.168.1.107 65314 66.63.168.35 5888 tcp - 0.017122 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024310.981532 CO3cDLZcvsGUc9lJa 192.168.1.107 65312 66.63.168.35 5888 tcp - 0.026398 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024316.136096 C1lJ6r97Gf58FdTvb 192.168.1.107 65315 66.63.168.35 5888 tcp - 0.025158 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024256.695846 Cc2F1o1HBq56u6twkk 192.168.1.135 51195 192.168.1.107 55176 udp - 0.000215 1040 0 S0 T T 0 D 2 1096 0 0 - Benign From_benign-To_benign-Device_chromecast_tv_assistant
+1677024316.666948 C97PZkvPlBkG8K2v6 192.168.1.107 65315 66.63.168.35 5888 tcp - 0.025191 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024257.514991 C5atJ6pMD6dVGn1ij 192.168.1.135 35166 192.168.1.107 55176 udp - 0.000215 1040 0 S0 T T 0 D 2 1096 0 0 - Benign From_benign-To_benign-Device_chromecast_tv_assistant
+1677024313.057509 CRvxOD2iDI1AFbd4c7 192.168.1.107 65313 66.63.168.35 5888 tcp - 0.017007 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024258.539082 CTCTyJzCqckUaUzMc 192.168.1.135 47478 192.168.1.107 55176 udp - 0.000255 1040 0 S0 T T 0 D 2 1096 0 0 - Benign From_benign-To_benign-Device_chromecast_tv_assistant
+1677024318.226767 CuU87Q3wl9oxoJCrNk 192.168.1.107 65316 66.63.168.35 5888 tcp - 0.031417 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024318.760305 CmCQJV1QlyiPsHwVHi 192.168.1.107 65316 66.63.168.35 5888 tcp - 0.027023 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024256.491359 C5AtJw2lXdW5qAiXla 192.168.1.107 55176 239.255.255.250 1900 udp - 3.021959 7700 0 S0 T F 0 D 44 8932 0 0 - Benign From_benign-To_benign-Multicast
+1677024259.563179 Cvrzb8psENALhtTjg 192.168.1.135 47764 192.168.1.107 55176 udp - 0.000217 1040 0 S0 T T 0 D 2 1096 0 0 - Benign From_benign-To_benign-Device_chromecast_tv_assistant
+1677024315.114484 CbOTnc3bAGlCCpYytk 192.168.1.107 65314 66.63.168.35 5888 tcp - 0.017276 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024320.317758 Cfzg1418Xe6snoINy4 192.168.1.107 65317 66.63.168.35 5888 tcp - 0.017145 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024320.837458 CcpzYUuUkFjTwEJjk 192.168.1.107 65317 66.63.168.35 5888 tcp - 0.016917 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024317.199036 C5K4Go4JNeahIJTlhi 192.168.1.107 65315 66.63.168.35 5888 tcp - 0.025098 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024322.377592 Cgim1ckimrjYE45N3 192.168.1.107 65318 66.63.168.35 5888 tcp - 0.017071 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024322.897251 CkM5TD4kPAosYdOUL 192.168.1.107 65318 66.63.168.35 5888 tcp - 0.016987 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024319.287975 CxAr4f2vTBy7PJK1n9 192.168.1.107 65316 66.63.168.35 5888 tcp - 0.027178 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024324.433079 Ct9hkx4niSHz7Quubf 192.168.1.107 65319 66.63.168.35 5888 tcp - 0.017044 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024250.396302 Cm4RaB20m1oE6F14C3 192.168.1.107 50466 239.255.255.250 1900 udp - 15.025854 8778 0 S0 T F 0 D 66 10626 0 0 - Benign From_benign-To_benign-Multicast
+1677024324.953571 C9IfMb1uIHI5LP7eza 192.168.1.107 65319 66.63.168.35 5888 tcp - 0.016880 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024321.359455 Cft2S31KxT68sPxNNl 192.168.1.107 65317 66.63.168.35 5888 tcp - 0.017153 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024326.492164 CzF3Wl2xxCTKg6Uhie 192.168.1.107 65320 66.63.168.35 5888 tcp - 0.028356 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024327.028948 CmxV1q3JzqoWINh9Jc 192.168.1.107 65320 66.63.168.35 5888 tcp - 0.028362 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024323.415024 CyYxHw201M2XY1yamk 192.168.1.107 65318 66.63.168.35 5888 tcp - 0.016916 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024328.590106 Cq32kO3CD5SwzTSgAc 192.168.1.107 65321 66.63.168.35 5888 tcp - 0.029371 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024329.125420 CYlbch6Z2nikkPES 192.168.1.107 65321 66.63.168.35 5888 tcp - 0.029355 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024325.472272 C1jLGM1RlXS0lMdtM1 192.168.1.107 65319 66.63.168.35 5888 tcp - 0.016883 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024330.687224 C7STGL3FlEM4GfRrXj 192.168.1.107 65322 66.63.168.35 5888 tcp - 0.017162 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024331.208734 CYsJ4D3BVztLhFpDFa 192.168.1.107 65322 66.63.168.35 5888 tcp - 0.017033 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024327.560432 C4aFhg33UpH3ii9cDj 192.168.1.107 65320 66.63.168.35 5888 tcp - 0.028301 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024332.741432 CZCFDA3IC3R6VU5Hu1 192.168.1.107 65323 66.63.168.35 5888 tcp - 0.017169 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024258.210216 Cs7nfh1mIzD1UXiCBf 192.168.1.132 1900 192.168.1.107 55176 udp - 15.505919 14208 0 S0 T T 0 D 48 15552 0 0 - Benign From_benign-To_benign
+1677024262.579274 Cw74Km24Utvfs937kb 192.168.1.107 3 192.168.1.132 3 icmp - 11.137121 11664 0 OTH T T 0 - 36 12672 0 0 - Benign From_benign-To_benign
+1677024333.252377 CGUFRF14MI9ddEq4Hb 192.168.1.107 65323 66.63.168.35 5888 tcp - 0.017116 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024329.656694 CRK4jy3C687cCyQbSf 192.168.1.107 65321 66.63.168.35 5888 tcp - 0.029438 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024334.797976 Ci4D1W1E7eUu3yddD7 192.168.1.107 65324 66.63.168.35 5888 tcp - 0.017097 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024335.317830 CAQL1A2Rh8p9TgfuIg 192.168.1.107 65324 66.63.168.35 5888 tcp - 0.017095 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024331.722297 CmPcHV3j5YxPgErUQl 192.168.1.107 65322 66.63.168.35 5888 tcp - 0.016990 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024336.916042 C1wFZJ2fpkXRylrKE1 192.168.1.107 65325 66.63.168.35 5888 tcp - 0.017024 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024337.436080 C2WIqB17R7vss9R0Vg 192.168.1.107 65325 66.63.168.35 5888 tcp - 0.016991 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024333.775154 CewV1A1gjh9BhrcfUi 192.168.1.107 65323 66.63.168.35 5888 tcp - 0.017096 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024338.971586 CioMal4U90OgomgoSe 192.168.1.107 65326 66.63.168.35 5888 tcp - 0.017118 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024250.990183 CAUBj42JaaBHuU3m82 192.168.1.132 1900 192.168.1.107 50466 udp - 28.903578 21312 0 S0 T T 0 D 72 23328 0 0 - Benign From_benign-To_benign
+1677024339.483750 CfOfMUSyoN4jknlz6 192.168.1.107 65326 66.63.168.35 5888 tcp - 0.017131 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024335.833389 Cjd1zM3Si7raOPp361 192.168.1.107 65324 66.63.168.35 5888 tcp - 0.017149 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024341.024162 CKQ7BsigyygzGKVg1 192.168.1.107 65327 66.63.168.35 5888 tcp - 0.017143 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024341.545951 C0pIqX1EZTT8p6W2x9 192.168.1.107 65327 66.63.168.35 5888 tcp - 0.017185 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024337.953839 Cyhf6f13Fz5x2w0eT1 192.168.1.107 65325 66.63.168.35 5888 tcp - 0.016972 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024343.083774 CAPMdBagnHhUQdkFe 192.168.1.107 65328 66.63.168.35 5888 tcp - 0.017092 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024343.604757 CK5qKO1VnA8R167zD2 192.168.1.107 65328 66.63.168.35 5888 tcp - 0.016959 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024340.003527 CRdyDY3d5E7cjx6EMl 192.168.1.107 65326 66.63.168.35 5888 tcp - 0.017152 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024345.149348 CKqxrm3YyjOFf9685 192.168.1.107 65329 66.63.168.35 5888 tcp - 0.027218 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024345.670120 CqCzTX3Z8xHAddCcKf 192.168.1.107 65329 66.63.168.35 5888 tcp - 0.027335 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024342.065909 CZYrzi3OhNvgnU91Jj 192.168.1.107 65327 66.63.168.35 5888 tcp - 0.017224 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024347.223617 CfqH8lBi1QuXASCy3 192.168.1.107 65330 66.63.168.35 5888 tcp - 0.026277 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024347.754841 Cntw3Tq39hCRmTEm7 192.168.1.107 65330 66.63.168.35 5888 tcp - 0.026211 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024344.128296 Cx699GtOiAUUXbMN7 192.168.1.107 65328 66.63.168.35 5888 tcp - 0.016868 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024349.312160 CNunVhmPS1MoMYpvg 192.168.1.107 65331 66.63.168.35 5888 tcp - 0.017113 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024349.834418 CCXRau3HjWUUIfuoEa 192.168.1.107 65331 66.63.168.35 5888 tcp - 0.017056 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024346.191527 CasdhI3nJ46SWizBjk 192.168.1.107 65329 66.63.168.35 5888 tcp - 0.027096 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024351.380628 CHgN7t4BIoHzqNFcl2 192.168.1.107 65332 66.63.168.35 5888 tcp - 0.017119 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024351.890839 CJ7E2S11qcCx84Abb1 192.168.1.107 65332 66.63.168.35 5888 tcp - 0.016858 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024348.285936 Cq5QuG2bCHZulEVdu2 192.168.1.107 65330 66.63.168.35 5888 tcp - 0.026304 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024353.426632 C6v5YG3zN1FTJTWyEj 192.168.1.107 65333 66.63.168.35 5888 tcp - 0.028527 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024353.955924 CNQI1c3uMXmN4zDa2b 192.168.1.107 65333 66.63.168.35 5888 tcp - 0.028386 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024350.358204 C5kk6q3YWkZZWsYFj8 192.168.1.107 65331 66.63.168.35 5888 tcp - 0.017184 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024355.517515 CBvP0Q3nc2eqei7nCh 192.168.1.107 65334 66.63.168.35 5888 tcp - 0.038264 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024356.056661 CwN1VS2z2wlHn5uGT8 192.168.1.107 65334 66.63.168.35 5888 tcp - 0.038141 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024352.408712 CxdcdE2KH0fvmq0vE7 192.168.1.107 65332 66.63.168.35 5888 tcp - 0.016798 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024357.639716 CC9KHzahjbnqmVy5 192.168.1.107 65335 66.63.168.35 5888 tcp - 0.036264 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024358.170163 C7lPTM1SF8jAUNfjFc 192.168.1.107 65335 66.63.168.35 5888 tcp - 0.036418 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024354.485690 CGieeTBNFtjydVv7d 192.168.1.107 65333 66.63.168.35 5888 tcp - 0.028363 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024359.745814 CF8Gwx4xORUcvlzHK4 192.168.1.107 65336 66.63.168.35 5888 tcp - 0.028723 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024360.277442 CTBC8u4ufQIYyWzsEh 192.168.1.107 65336 66.63.168.35 5888 tcp - 0.028706 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024356.598634 CV3Po71ZVMZ2koqS27 192.168.1.107 65334 66.63.168.35 5888 tcp - 0.038137 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024361.845165 C7Rlkz1PV5ylHiEGri 192.168.1.107 65337 66.63.168.35 5888 tcp - 0.028507 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024362.377988 CiM2hp3SWZow8hkdr5 192.168.1.107 65337 66.63.168.35 5888 tcp - 0.028621 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024358.708320 COsvIA3KhZWWR8e2Xk 192.168.1.107 65335 66.63.168.35 5888 tcp - 0.036212 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024363.928712 CpJjiO2NOsu9xIQLf 192.168.1.107 65338 66.63.168.35 5888 tcp - 0.017300 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024364.440512 CJTO9ArGgeCpGo9n5 192.168.1.107 65338 66.63.168.35 5888 tcp - 0.017188 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024360.808117 CrFbFc4CjltXgvOSTb 192.168.1.107 65336 66.63.168.35 5888 tcp - 0.028674 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024365.976971 CGrmUSh78tFRCtcNe 192.168.1.107 65339 66.63.168.35 5888 tcp - 0.017162 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024366.498023 CaLO5v1fS6sChe4n4c 192.168.1.107 65339 66.63.168.35 5888 tcp - 0.017126 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024362.899393 CKiU3a4ylB3ga4YZn9 192.168.1.107 65337 66.63.168.35 5888 tcp - 0.028480 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024368.038388 CVUynq4uRcL7LV2462 192.168.1.107 65340 66.63.168.35 5888 tcp - 0.037482 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024368.578169 C7G8xi1WVmXF85BJpf 192.168.1.107 65340 66.63.168.35 5888 tcp - 0.037325 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024364.953954 CU6blE2mZzhHfDb7Ck 192.168.1.107 65338 66.63.168.35 5888 tcp - 0.017118 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024370.159009 CvjObr17JhKYsT7uOl 192.168.1.107 65341 66.63.168.35 5888 tcp - 0.038281 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024370.699307 CSMTkz4V8BQYyv4b22 192.168.1.107 65341 66.63.168.35 5888 tcp - 0.038055 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024367.019970 CXTVcI3RW4LdNqBHnj 192.168.1.107 65339 66.63.168.35 5888 tcp - 0.017402 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024372.277173 C63V8q4p8XzX1soqCg 192.168.1.107 65342 66.63.168.35 5888 tcp - 0.028416 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024372.806503 CmEMYW2OYEqV9sklsg 192.168.1.107 65342 66.63.168.35 5888 tcp - 0.028016 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024369.116381 CvCh9718wUOERDVA9e 192.168.1.107 65340 66.63.168.35 5888 tcp - 0.037391 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024374.365112 CdB5CCTSBkCJ8yquf 192.168.1.107 65343 66.63.168.35 5888 tcp - 0.037674 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024374.906663 CtxCfn23B5eHksZHW4 192.168.1.107 65343 66.63.168.35 5888 tcp - 0.037633 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024371.238185 CdXeOt1S76dlzgmi0c 192.168.1.107 65341 66.63.168.35 5888 tcp - 0.038036 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024376.489586 CT3VAU1gX64Bz6ZlFb 192.168.1.107 65344 66.63.168.35 5888 tcp - 0.037470 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024377.021666 Ca4SiJ3YhYD2ILLXCh 192.168.1.107 65344 66.63.168.35 5888 tcp - 0.037542 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024373.335041 CIUVfd2ipJR3GTjRb3 192.168.1.107 65342 66.63.168.35 5888 tcp - 0.028215 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024378.587702 CZbJLS2gA6wnpWxo46 192.168.1.107 65345 66.63.168.35 5888 tcp - 0.036364 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024379.128552 CMrel81HFFqsDxdsEd 192.168.1.107 65345 66.63.168.35 5888 tcp - 0.036429 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024375.447685 CisTOf2RDrZP4adOW5 192.168.1.107 65343 66.63.168.35 5888 tcp - 0.037606 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024380.699402 Ct0fLrEhllgBdWhr9 192.168.1.107 65346 66.63.168.35 5888 tcp - 0.036196 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024381.231916 C1gd8D4SqsZ58tIqxg 192.168.1.107 65346 66.63.168.35 5888 tcp - 0.036223 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024377.550230 CJ3xzf2isZHeKBw4h9 192.168.1.107 65344 66.63.168.35 5888 tcp - 0.037337 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024382.811369 CGsStK1sd1qhZemE49 192.168.1.107 65347 66.63.168.35 5888 tcp - 0.016898 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024383.323017 C0O6dK2rJf4QE5Yxk 192.168.1.107 65347 66.63.168.35 5888 tcp - 0.016966 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024379.661829 C4q6zWjlaTQSPEVSk 192.168.1.107 65345 66.63.168.35 5888 tcp - 0.036364 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024384.861921 CDEQKH20b1kLC0yghc 192.168.1.107 65348 66.63.168.35 5888 tcp - 0.030304 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024385.394558 CcI2PY2GhecG7JAcb5 192.168.1.107 65348 66.63.168.35 5888 tcp - 0.030390 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024381.769260 CDQmrZQSNXxNpDUr9 192.168.1.107 65346 66.63.168.35 5888 tcp - 0.036321 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024386.983547 CiSqNxJiWgrNlGfR7 192.168.1.107 65349 66.63.168.35 5888 tcp - 0.026187 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024387.515046 C28KJ5ZRJ9y0rTm4c 192.168.1.107 65349 66.63.168.35 5888 tcp - 0.026247 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024383.844771 Cms4SK3TD8LzXhFYag 192.168.1.107 65347 66.63.168.35 5888 tcp - 0.016824 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024389.073009 C3UPKC30GN9adu9WWi 192.168.1.107 65350 66.63.168.35 5888 tcp - 0.027219 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024389.601400 CTpJGJEECt1x73OWe 192.168.1.107 65350 66.63.168.35 5888 tcp - 0.027094 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024385.944386 CC7fPV2tGD82jf5kVc 192.168.1.107 65348 66.63.168.35 5888 tcp - 0.030267 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024391.158245 Cer7Ut3t8QAeDOQT4k 192.168.1.107 65351 66.63.168.35 5888 tcp - 0.033216 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024391.700753 CKXrypfvEJGhxyRkc 192.168.1.107 65351 66.63.168.35 5888 tcp - 0.033334 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024388.046264 CkRsDn4nn6VKiy4yP7 192.168.1.107 65349 66.63.168.35 5888 tcp - 0.026254 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024393.265097 Czrj5oajRnxmoUeGa 192.168.1.107 65352 66.63.168.35 5888 tcp - 0.026622 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024393.794481 C0OCz11cBwctzZrzm 192.168.1.107 65352 66.63.168.35 5888 tcp - 0.026399 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024390.124594 CNpLGQ3MR2ixeYj7j7 192.168.1.107 65350 66.63.168.35 5888 tcp - 0.027170 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024395.357455 CduL5V2oqJlZbNa9tj 192.168.1.107 65353 66.63.168.35 5888 tcp - 0.017491 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024395.878450 CxHCZnBtyDE4OzFrd 192.168.1.107 65353 66.63.168.35 5888 tcp - 0.017451 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024392.231667 Csrja01qhu9zlTpgt5 192.168.1.107 65351 66.63.168.35 5888 tcp - 0.033294 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024397.420805 C1M7xy48TphUWeYcx1 192.168.1.107 65354 66.63.168.35 5888 tcp - 0.017306 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024397.931369 CWfZcu1U75ow6SWTUe 192.168.1.107 65354 66.63.168.35 5888 tcp - 0.017418 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024394.326772 CRUuaopewi0Ic347g 192.168.1.107 65352 66.63.168.35 5888 tcp - 0.026502 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024399.470835 CswSPfy99Lzu5Yfud 192.168.1.107 65355 66.63.168.35 5888 tcp - 0.033548 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024400.003456 C3Ra8G4uc28sJIzf4g 192.168.1.107 65355 66.63.168.35 5888 tcp - 0.033603 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024396.399564 CYMuh34ETrAyGrje94 192.168.1.107 65353 66.63.168.35 5888 tcp - 0.017491 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024401.575670 C7NKgt2Rk0vHxuE7o9 192.168.1.107 65356 66.63.168.35 5888 tcp - 0.029952 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024402.107064 CkQR6G3mQv3Yd0xUGl 192.168.1.107 65356 66.63.168.35 5888 tcp - 0.029986 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024398.449985 CT3Gke24MUPsGgBBCi 192.168.1.107 65354 66.63.168.35 5888 tcp - 0.017479 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024403.667106 C6pYUd0sG19zceNbf 192.168.1.107 65357 66.63.168.35 5888 tcp - 0.026256 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024404.210443 C1aPaZ2Qxpfs07gGi4 192.168.1.107 65357 66.63.168.35 5888 tcp - 0.026186 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024400.539008 Ccs77I11CuZLWDSor8 192.168.1.107 65355 66.63.168.35 5888 tcp - 0.033581 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024405.760229 CmEjJ83Uqyzg3Wsatl 192.168.1.107 65358 66.63.168.35 5888 tcp - 0.017569 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024406.270946 CqWtz34oGLTiO8F132 192.168.1.107 65358 66.63.168.35 5888 tcp - 0.017595 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024402.637020 CmXIPH2L3xjPsqwn78 192.168.1.107 65356 66.63.168.35 5888 tcp - 0.029981 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024407.803867 Cadr9hVjRyr994Dj8 192.168.1.107 65359 66.63.168.35 5888 tcp - 0.017025 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024408.324854 CABOeRELmJlRVXrEh 192.168.1.107 65359 66.63.168.35 5888 tcp - 0.017330 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024404.731675 Cx9hIs2wr9JCZP0GB6 192.168.1.107 65357 66.63.168.35 5888 tcp - 0.026341 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024409.863781 CMlVJZ2cnUKrBTnD1b 192.168.1.107 65360 66.63.168.35 5888 tcp - 0.033307 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024410.399279 CVhIgj2vL62IfRQbmi 192.168.1.107 65360 66.63.168.35 5888 tcp - 0.033185 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024406.784624 Cir2PO3JSSPCPqmP7 192.168.1.107 65358 66.63.168.35 5888 tcp - 0.017771 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024411.973626 CMJkMv2ZRpUQhtJQj8 192.168.1.107 65361 66.63.168.35 5888 tcp - 0.017008 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024412.494370 CF8IaO1uCd4AanyuB5 192.168.1.107 65361 66.63.168.35 5888 tcp - 0.016978 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024408.840725 CDi5l636OgIWqeiai3 192.168.1.107 65359 66.63.168.35 5888 tcp - 0.017078 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024414.032075 Cb35Ld3QrfEL34tZAf 192.168.1.107 65362 66.63.168.35 5888 tcp - 0.033064 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024414.569195 CdTAft3U0g4feHq1C8 192.168.1.107 65362 66.63.168.35 5888 tcp - 0.032998 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024410.936036 CjRVTm3hRImPX0z3Sh 192.168.1.107 65360 66.63.168.35 5888 tcp - 0.033139 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024416.145612 CT2iUj35g7e7io1QUd 192.168.1.107 65363 66.63.168.35 5888 tcp - 0.037830 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024416.687099 CaHy7z2kx6kiGPvrh4 192.168.1.107 65363 66.63.168.35 5888 tcp - 0.037822 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024413.014483 CJUeRr3DJjzBbD03Ij 192.168.1.107 65361 66.63.168.35 5888 tcp - 0.017045 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024418.275081 CCxlwh3yXc9jYF4R8i 192.168.1.107 65364 66.63.168.35 5888 tcp - 0.017029 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024418.796524 CJa6cv26QnrEXHxkYk 192.168.1.107 65364 66.63.168.35 5888 tcp - 0.017078 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024415.104579 CMkZAB1COnCTfPUwRl 192.168.1.107 65362 66.63.168.35 5888 tcp - 0.033102 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024420.332775 CIcfPv1cvtAfnJ9hyd 192.168.1.107 65365 66.63.168.35 5888 tcp - 0.016984 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024420.843555 C8WFYRxM0f03BGum5 192.168.1.107 65365 66.63.168.35 5888 tcp - 0.017199 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024417.234198 Ce2YjSn2n19UCOip2 192.168.1.107 65363 66.63.168.35 5888 tcp - 0.037824 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024422.373618 CgzzQj1K7TbaJZu7zl 192.168.1.107 65366 66.63.168.35 5888 tcp - 0.017041 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024422.894428 CdHnua2yEgjjgIJMja 192.168.1.107 65366 66.63.168.35 5888 tcp - 0.017034 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024419.311374 CXnIf449Guj9ue9cM6 192.168.1.107 65364 66.63.168.35 5888 tcp - 0.017199 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024424.433890 CFy4Ps15mMkfTDslD7 192.168.1.107 65367 66.63.168.35 5888 tcp - 0.033186 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024424.967118 CydzxOiPVCw41ShPd 192.168.1.107 65367 66.63.168.35 5888 tcp - 0.033071 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024365.818093 C40x7z4KMH1kkvLt2a 192.168.1.107 138 192.168.1.255 138 udp - 0.000217 1407 0 S0 T T 0 D 7 1603 0 0 - Benign From_benign-To_benign
+1677024421.354935 Cgm10v1CrreKhafkad 192.168.1.107 65365 66.63.168.35 5888 tcp - 0.016863 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024426.531915 CZEsqDdhP9NuDzju3 192.168.1.107 65368 66.63.168.35 5888 tcp - 0.038659 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024427.070868 CmxmdVrdzCaV208B2 192.168.1.107 65368 66.63.168.35 5888 tcp - 0.038626 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024423.415229 C9IibT2pr2ytkoXXz8 192.168.1.107 65366 66.63.168.35 5888 tcp - 0.016936 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024428.649989 CgtOyb43hJMWrlD2m2 192.168.1.107 65369 66.63.168.35 5888 tcp - 0.028679 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024429.181199 C74hHt9kLMFnv2m5 192.168.1.107 65369 66.63.168.35 5888 tcp - 0.028689 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024425.496909 CijKh639s2rg9ek3r5 192.168.1.107 65367 66.63.168.35 5888 tcp - 0.033202 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024430.750856 CQooOd20PbEZDapZBf 192.168.1.107 65370 66.63.168.35 5888 tcp - 0.032222 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024431.285449 CBukQT33QCMOrBXQ82 192.168.1.107 65370 66.63.168.35 5888 tcp - 0.032235 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024427.610744 CKgbep3vA4NsAPOyVa 192.168.1.107 65368 66.63.168.35 5888 tcp - 0.038737 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024432.853119 Cu5tlj4oMrGJ9IgML4 192.168.1.107 65371 66.63.168.35 5888 tcp - 0.016990 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024433.363254 CtlIu227ZlRY3txZX7 192.168.1.107 65371 66.63.168.35 5888 tcp - 0.017013 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024429.721148 CMzAJb3i2A8BnxVQb 192.168.1.107 65369 66.63.168.35 5888 tcp - 0.028740 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024434.894086 Cwwhzj21MdLI286PQf 192.168.1.107 65372 66.63.168.35 5888 tcp - 0.031724 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024435.426987 C1Xrtl3ctWC9n7yPsg 192.168.1.107 65372 66.63.168.35 5888 tcp - 0.031870 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024431.816506 C3CCnd3OkG5DDdB1a 192.168.1.107 65370 66.63.168.35 5888 tcp - 0.032227 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024436.992278 CQLuSz1RdOETq9BMsb 192.168.1.107 65379 66.63.168.35 5888 tcp - 0.016919 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024377.529634 CpjYpTxfzYsagXTQc 192.168.1.135 34673 192.168.1.107 55177 udp - 0.000238 1040 0 S0 T T 0 D 2 1096 0 0 - Benign From_benign-To_benign-Device_chromecast_tv_assistant
+1677024437.504110 CYI7u24yyGU8M3420l 192.168.1.107 65379 66.63.168.35 5888 tcp - 0.017181 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024378.552884 CfM2HkDh0MvcNWf7a 192.168.1.135 33121 192.168.1.107 55177 udp - - - - S0 T T 0 D 2 1096 0 0 - Benign From_benign-To_benign-Device_chromecast_tv_assistant
+1677024433.874884 C29PaPn1K7oqoay0a 192.168.1.107 65371 66.63.168.35 5888 tcp - 0.017002 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024376.491392 CxeBHuM6z8VojZhM9 192.168.1.107 55177 239.255.255.250 1900 udp - 3.021133 7700 0 S0 T F 0 D 44 8932 0 0 - Benign From_benign-To_benign-Multicast
+1677024439.042093 C5P2Cn397yn9ZTF1gd 192.168.1.107 65380 66.63.168.35 5888 tcp - 0.017037 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024379.576863 CYFzZt0KMeLN6HBpc 192.168.1.135 50538 192.168.1.107 55177 udp - 0.000217 1040 0 S0 T T 0 D 2 1096 0 0 - Benign From_benign-To_benign-Device_chromecast_tv_assistant
+1677024439.553398 CI54kQ3yyuLqGi0Auc 192.168.1.107 65380 66.63.168.35 5888 tcp - 0.016916 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024435.959271 CBrirh3rQdnOeMFJh6 192.168.1.107 65372 66.63.168.35 5888 tcp - 0.031841 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024441.083264 CtnrJw33FYPnF5wmlk 192.168.1.107 65381 66.63.168.35 5888 tcp - 0.026411 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024441.605458 CIBMBbEgvlIKc4TTf 192.168.1.107 65381 66.63.168.35 5888 tcp - 0.026343 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024438.021893 Ci7vlw2OMXzSX7OU3 192.168.1.107 65379 66.63.168.35 5888 tcp - 0.017000 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024443.166806 CBGipOKfVck7SHVD6 192.168.1.107 65382 66.63.168.35 5888 tcp - 0.028407 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024443.697135 CRyblb27Qsd11xZupl 192.168.1.107 65382 66.63.168.35 5888 tcp - 0.028299 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024440.063861 C6aL2o3IBfHXi7Sj29 192.168.1.107 65380 66.63.168.35 5888 tcp - 0.016986 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024445.262677 CJYd6O2ITZDEFusTQ2 192.168.1.107 65383 66.63.168.35 5888 tcp - 0.025160 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024445.784297 CUAvai8rlGPGac8li 192.168.1.107 65383 66.63.168.35 5888 tcp - 0.025143 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024442.137758 CtCQbr8ER5K4Rhsdh 192.168.1.107 65381 66.63.168.35 5888 tcp - 0.026455 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024447.337478 CjDKpq4kQi6IobtaB2 192.168.1.107 65384 66.63.168.35 5888 tcp - 0.016965 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024447.857145 CHk64Y1dYF2wpbolnk 192.168.1.107 65384 66.63.168.35 5888 tcp - 0.017056 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024444.229862 CqCjY34uomUzSvlgRc 192.168.1.107 65382 66.63.168.35 5888 tcp - 0.028338 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024377.099469 CXSniX1stzCkG9J9w9 192.168.1.132 1900 192.168.1.107 55177 udp - 12.690321 14208 0 S0 T T 0 D 48 15552 0 0 - Benign From_benign-To_benign
+1677024381.614808 Ctqb4nHmMjULXNjRb 192.168.1.107 3 192.168.1.132 3 icmp - 8.175376 9720 0 OTH T T 0 - 30 10560 0 0 - Benign From_benign-To_benign
+1677024449.392949 CkrrZc2XKGj5lM92td 192.168.1.107 65385 66.63.168.35 5888 tcp - 0.017278 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024449.914975 Clw1V61BclLZWG6z18 192.168.1.107 65385 66.63.168.35 5888 tcp - 0.017303 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024446.308114 C9oyhsVvgFbNi07Vb 192.168.1.107 65383 66.63.168.35 5888 tcp - 0.025196 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024451.451561 CESvqx268dK2HJdLmd 192.168.1.107 65386 66.63.168.35 5888 tcp - 0.025892 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024451.979081 CnaCJr3jy3sg74mLw7 192.168.1.107 65386 66.63.168.35 5888 tcp - 0.025822 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024448.374800 CFnjsE3abZUV1qCt4j 192.168.1.107 65384 66.63.168.35 5888 tcp - 0.016952 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024453.532615 CZNlKJdtGkKfhyTeb 192.168.1.107 65387 66.63.168.35 5888 tcp - 0.025669 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024454.053976 CMQqQl1kkxuVv3vj9e 192.168.1.107 65387 66.63.168.35 5888 tcp - 0.025631 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024450.433126 CU8LOm3T94intI3qlc 192.168.1.107 65385 66.63.168.35 5888 tcp - 0.017335 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024455.606243 CIrcrg27S6ftqCDFTg 192.168.1.107 65388 66.63.168.35 5888 tcp - 0.029000 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024456.138258 Ck9H4e3BtcWhAabzk8 192.168.1.107 65388 66.63.168.35 5888 tcp - 0.028872 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024452.506192 CrX3XQ1gdqR8nbkL0k 192.168.1.107 65386 66.63.168.35 5888 tcp - 0.026119 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024457.699322 CpGYrs2rF62cS3Ryyh 192.168.1.107 65389 66.63.168.35 5888 tcp - 0.031996 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024458.241283 CtIeQAAO1PnC1wiD2 192.168.1.107 65389 66.63.168.35 5888 tcp - 0.028117 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024454.575261 CpZ6N63ZuSxWAewi56 192.168.1.107 65387 66.63.168.35 5888 tcp - 0.025586 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024459.794387 CkemSMieztATVtybe 192.168.1.107 65390 66.63.168.35 5888 tcp - 0.028366 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024460.328811 CdfUvM2NelMzIC9YLk 192.168.1.107 65390 66.63.168.35 5888 tcp - 0.028344 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024456.670582 CxdCwA2OjCWpPht0oc 192.168.1.107 65388 66.63.168.35 5888 tcp - 0.028790 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024461.897428 CEPkn32lhN5BDQtXx7 192.168.1.107 65391 66.63.168.35 5888 tcp - 0.017294 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024462.422164 CR4DSX2T9XM8BVRh8d 192.168.1.107 65391 66.63.168.35 5888 tcp - 0.016982 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024458.762785 CDCt013NsiWLV0usR1 192.168.1.107 65389 66.63.168.35 5888 tcp - 0.027908 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024463.950530 CtjmQp2a2KiY1zDRMj 192.168.1.107 65392 66.63.168.35 5888 tcp - 0.025168 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024464.477267 CtWlic1rTBvPHnyn18 192.168.1.107 65392 66.63.168.35 5888 tcp - 0.025132 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024460.861805 CLKH5lrm57G8dAkDl 192.168.1.107 65390 66.63.168.35 5888 tcp - 0.028298 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024466.032863 CpBjlX1tJorjy6PAHd 192.168.1.107 65393 66.63.168.35 5888 tcp - 0.016956 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024466.544225 CtuzlE4rFftwL7mtH6 192.168.1.107 65393 66.63.168.35 5888 tcp - 0.016891 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024462.933340 CAxj0f3sO3f9TSFTWe 192.168.1.107 65391 66.63.168.35 5888 tcp - 0.017001 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024468.081865 CyeSnU36XnbyawSq74 192.168.1.107 65394 66.63.168.35 5888 tcp - 0.016833 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024468.591754 CoivjM3a8QtfwSiw3b 192.168.1.107 65394 66.63.168.35 5888 tcp - 0.016721 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024465.006761 CGKSv62WMUJ9RVqwCc 192.168.1.107 65392 66.63.168.35 5888 tcp - 0.025165 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024470.128963 CgfXrP3vOsumVuNQP4 192.168.1.107 65395 66.63.168.35 5888 tcp - 0.016785 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024470.650722 CTtk5b3b6QVBNMaivk 192.168.1.107 65395 66.63.168.35 5888 tcp - 0.016768 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024467.064445 CvrJHF7p96MeUuMDh 192.168.1.107 65393 66.63.168.35 5888 tcp - 0.016699 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024472.195579 CiUmyv3QePR5XoXTKc 192.168.1.107 65396 66.63.168.35 5888 tcp - 0.028046 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024472.727430 C3tPok1Hu61xsvraVl 192.168.1.107 65396 66.63.168.35 5888 tcp - 0.027851 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024469.109847 C74PSn1ZyfR09RCJEk 192.168.1.107 65394 66.63.168.35 5888 tcp - 0.016725 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024474.286369 CgRwCf2rJCD2GjqRLi 192.168.1.107 65397 66.63.168.35 5888 tcp - 0.037352 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024474.827150 CzjB0T2hXZKLulUCF3 192.168.1.107 65397 66.63.168.35 5888 tcp - 0.036900 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024471.169369 CcxebJqp1c5uqxgK9 192.168.1.107 65395 66.63.168.35 5888 tcp - 0.016780 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024476.407399 CJhYex4Jj6OeN4jS4g 192.168.1.107 65398 66.63.168.35 5888 tcp - 0.017208 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024476.929181 Cl2gUO3XSFI1idJ8ff 192.168.1.107 65398 66.63.168.35 5888 tcp - 0.017637 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024473.258826 Cm9IeJ3wRJFMgrhlK8 192.168.1.107 65396 66.63.168.35 5888 tcp - 0.028069 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024478.467292 CHAhuWcCsUVhgjQE2 192.168.1.107 65399 66.63.168.35 5888 tcp - 0.036394 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024479.010184 CpRr0B1i14D2SXNSbk 192.168.1.107 65399 66.63.168.35 5888 tcp - 0.036400 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024475.368227 CsJb4Z1z6d2CQTTehi 192.168.1.107 65397 66.63.168.35 5888 tcp - 0.037050 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024480.580754 C5TqTK3O98FmDdgwc 192.168.1.107 65400 66.63.168.35 5888 tcp - 0.028773 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024481.111841 C7dN4221EwH5Rs0z98 192.168.1.107 65400 66.63.168.35 5888 tcp - 0.028743 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024477.449679 CXHyp72E6f0BDMVUSe 192.168.1.107 65398 66.63.168.35 5888 tcp - 0.017348 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024482.663271 CxhT3P0rMJ8WM6z28 192.168.1.107 65401 66.63.168.35 5888 tcp - 0.017265 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024483.183413 CcrdJS3QXdeutVa5Qd 192.168.1.107 65401 66.63.168.35 5888 tcp - 0.017341 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024479.541173 Cwribh2byNIqLSPMUh 192.168.1.107 65399 66.63.168.35 5888 tcp - 0.036291 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024484.724910 Con3YI3IGga1l41fVc 192.168.1.107 65402 66.63.168.35 5888 tcp - 0.028769 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024485.254860 CDxJg51QtUflFj5t5j 192.168.1.107 65402 66.63.168.35 5888 tcp - 0.028841 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024481.631832 C6dOAJJSs77kbERyd 192.168.1.107 65400 66.63.168.35 5888 tcp - 0.028888 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024486.815943 CMCqWJPivK8NQEeX9 192.168.1.107 65403 66.63.168.35 5888 tcp - 0.027254 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024487.346826 ClChLj2YFTsGJCrHli 192.168.1.107 65403 66.63.168.35 5888 tcp - 0.027282 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024483.705082 CdBHP13IiLWF6W6Og6 192.168.1.107 65401 66.63.168.35 5888 tcp - 0.017293 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024488.905867 CHDfCu7ELCboJvr82 192.168.1.107 65404 66.63.168.35 5888 tcp - 0.036454 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024489.448573 CisPKSlzMwypouHXc 192.168.1.107 65404 66.63.168.35 5888 tcp - 0.036414 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024485.784649 CmDRi42GYncMTecD15 192.168.1.107 65402 66.63.168.35 5888 tcp - 0.028783 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024491.023723 CuafqT4MaVlX5oyWa 192.168.1.107 65405 66.63.168.35 5888 tcp - 0.028973 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024491.556672 Cqes4j1KmOz0gHkJu 192.168.1.107 65405 66.63.168.35 5888 tcp - 0.029108 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024487.877907 C328tC48qhH6jQbb53 192.168.1.107 65403 66.63.168.35 5888 tcp - 0.027297 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024493.117931 CtcQ2114coQDo6MFX8 192.168.1.107 65406 66.63.168.35 5888 tcp - 0.026812 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024493.649393 CAq0sl1g1MA4DbiAv2 192.168.1.107 65406 66.63.168.35 5888 tcp - 0.026740 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024489.982389 CAkper29mm95thB9Nj 192.168.1.107 65404 66.63.168.35 5888 tcp - 0.036389 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024495.207960 C6vR9RMtuHfky5Fmj 192.168.1.107 65407 66.63.168.35 5888 tcp - 0.027159 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024495.739665 ChidgK3TiE44H5gg91 192.168.1.107 65407 66.63.168.35 5888 tcp - 0.026857 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024492.087845 CPyibv3VumBtrCqUB5 192.168.1.107 65405 66.63.168.35 5888 tcp - 0.029044 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024497.299476 CpiHgd3NJbsTixANNb 192.168.1.107 65408 66.63.168.35 5888 tcp - 0.028287 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024497.830937 CLFNqXOA2mP6dhaIj 192.168.1.107 65408 66.63.168.35 5888 tcp - 0.028288 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024494.174996 CnlJKt4gxYAqSHWRX 192.168.1.107 65406 66.63.168.35 5888 tcp - 0.026718 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024499.383177 CihvunIoPuNH76XS5 192.168.1.107 65409 66.63.168.35 5888 tcp - 0.017050 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024499.894817 CoTxxx2ypFUy47iQn3 192.168.1.107 65409 66.63.168.35 5888 tcp - 0.017069 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024496.270853 CEno9G1VRUXilVNjq8 192.168.1.107 65407 66.63.168.35 5888 tcp - 0.026841 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024501.434464 CpkRDM1ejvlKVWL7Ql 192.168.1.107 65410 66.63.168.35 5888 tcp - 0.017217 0 0 REJ T F 0 Sr 2 104 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024501.956000 Cc1sqh3Z4rgYABoge2 192.168.1.107 65410 66.63.168.35 5888 tcp - - - - S0 T F 0 S 1 52 0 0 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024501.634660 CmYDwr3NtWCzDPFXS4 192.168.1.107 3 192.168.1.132 3 icmp - 0.098926 1944 0 OTH T T 0 - 6 2112 0 0 - Benign From_benign-To_benign
+1677024500.416080 C1XqGj35ImKinwSxVc 192.168.1.107 65409 66.63.168.35 5888 tcp - 0.017130 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024499.591948 CuMkt91HVAvPnGiEef 192.168.1.135 48851 192.168.1.107 51002 udp - 0.000001 1040 0 S0 T T 0 D 2 1096 0 0 - Benign From_benign-To_benign-Device_chromecast_tv_assistant
+1677024498.568196 CdnYbJ2nU0ydS1iNAd 192.168.1.135 47382 192.168.1.107 51002 udp - 0.000001 1040 0 S0 T T 0 D 2 1096 0 0 - Benign From_benign-To_benign-Device_chromecast_tv_assistant
+1677024497.546142 CgNM252lVLL17GUQ5g 192.168.1.135 44890 192.168.1.107 51002 udp - 0.000208 1040 0 S0 T T 0 D 2 1096 0 0 - Benign From_benign-To_benign-Device_chromecast_tv_assistant
+1677024497.122769 C4IAbo2CYDsPNTRlY4 192.168.1.132 1900 192.168.1.107 51002 udp - 4.610215 7104 0 S0 T T 0 D 24 7776 0 0 - Benign From_benign-To_benign
+1677024498.353948 CwlHse4RQcPQRxW3x4 192.168.1.107 65408 66.63.168.35 5888 tcp - 0.028506 0 0 REJ T F 0 Sr 2 96 2 80 - Malicious From_malicious-To_malicious-Malware_data_exfiltration-RemcosRAT
+1677024496.500788 CFLFSP3zVlxQJIheja 192.168.1.107 51002 239.255.255.250 1900 udp - 3.021500 7700 0 S0 T F 0 D 44 8932 0 0 - Benign From_benign-To_benign-Multicast
+1677024494.131672 C68Cut3WHGKMFF4JEa 192.168.1.107 51001 142.251.36.131 443 udp ssl,quic 0.133042 3838 5188 SF T F 0 Dd 18 4342 20 5748 - Benign From_benign-To_benign-1e100
+1677024047.230592 CRtnEk375pEcaQcqq 192.168.1.107 49322 142.250.102.188 5228 tcp - 449.856715 27 26 OTH T F 0 DTadtAT 24 1032 24 1252 - Benign From_benign-To_benign-1e100
+1677024034.132464 Cb7CWQ0H7ToEXYEhd 192.168.1.107 49761 87.250.251.15 443 tcp - 450.665278 1 0 OTH T F 0 DTaT 22 902 22 1144 - Unknown (empty)
+1677024024.047746 CgjkWr48R8a4dRKPVf 192.168.1.107 49708 5.255.255.88 443 tcp - 450.594201 1 0 OTH T F 0 DTaT 22 902 22 1144 - Unknown (empty)
+1677024496.520050 CKTklt2dHuPzFghif5 192.168.1.135 38482 192.168.1.107 51002 udp - 0.000216 1040 0 S0 T T 0 D 2 1096 0 0 - Benign From_benign-To_benign-Device_chromecast_tv_assistant
+1677024002.966990 CWuwGW3u99uhiJZAek 192.168.1.107 49323 192.168.1.135 8009 tcp - 496.012311 11000 11000 OTH T T 0 DTdtATtTt 400 38000 200 30000 - Benign From_benign-To_benign-Device_chromecast_tv_assistant
+1677024033.849842 CfyNXH2BVoPq6CZkWe 192.168.1.107 49752 77.88.55.55 443 tcp - 450.657534 1 0 OTH T F 0 DTaT 22 902 22 1144 - Unknown (empty)
+1677024005.317605 Csmqcc1SQBkWaMDiah 192.168.1.107 64746 162.159.136.234 443 tcp - 495.305609 572 11677 OTH T F 0 DTadtAtT 76 4184 84 26714 - Benign From_benign-To_benign-Application_discord
diff --git a/dataset/test18-malicious-ctu-sme-11-win/zeek-labeled/dhcp.log.labeled b/dataset/test18-malicious-ctu-sme-11-win/zeek-labeled/dhcp.log.labeled
new file mode 100644
index 000000000..b4bf790b3
--- /dev/null
+++ b/dataset/test18-malicious-ctu-sme-11-win/zeek-labeled/dhcp.log.labeled
@@ -0,0 +1,10 @@
+#separator \x09
+#set_separator ,
+#empty_field (empty)
+#unset_field -
+#path dhcp
+#open 2024-08-16-09-45-01
+#fields ts uids client_addr server_addr mac host_name client_fqdn domain requested_addr assigned_addr lease_time client_message server_message msg_types duration label detailedlabel
+#types time set[string] addr addr string string string string addr addr interval string string vector[string] interval string string
+1677024250.120199 CMvTd02Pju86FLjSn1 192.168.1.107 192.168.1.1 08:00:27:5e:9a:a6 NWin3 NWin3.StratoLab.org - - 192.168.1.107 600.000000 - - REQUEST,REQUEST,ACK,ACK 0.000883 Benign From_benign-To_benign-DHCP
+#close 2024-08-16-09-45-01
\ No newline at end of file
diff --git a/dataset/test18-malicious-ctu-sme-11-win/zeek-labeled/dns.log.labeled b/dataset/test18-malicious-ctu-sme-11-win/zeek-labeled/dns.log.labeled
new file mode 100644
index 000000000..c9bf96143
--- /dev/null
+++ b/dataset/test18-malicious-ctu-sme-11-win/zeek-labeled/dns.log.labeled
@@ -0,0 +1,31 @@
+#separator \x09
+#set_separator ,
+#empty_field (empty)
+#unset_field -
+#path dns
+#open 2024-08-16-09-45-01
+#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p proto trans_id rtt query qclass qclass_name qtype qtype_name rcode rcode_name AA TC RD RA Z answers TTLs rejected label detailedlabel
+#types time string addr port addr port enum count interval string count string count string count string bool bool bool bool count vector[string] vector[interval] bool string string
+1677024250.148118 CyYZ9pbydD79wo9we 192.168.1.107 56510 224.0.0.252 5355 udp 48298 - nwin3 1 C_INTERNET 255 * - - F F F F 0 - - F Benign From_benign-To_benign-Multicast
+1677024250.148119 CyYZ9pbydD79wo9we 192.168.1.107 56510 224.0.0.252 5355 udp 48298 - nwin3 1 C_INTERNET 255 * - - F F F F 0 - - F Benign From_benign-To_benign-Multicast
+1677024250.148119 CyYZ9pbydD79wo9we 192.168.1.107 56510 224.0.0.252 5355 udp 48298 - nwin3 1 C_INTERNET 255 * - - F F F F 0 - - F Benign From_benign-To_benign-Multicast
+1677024250.148119 CyYZ9pbydD79wo9we 192.168.1.107 56510 224.0.0.252 5355 udp 48298 - nwin3 1 C_INTERNET 255 * - - F F F F 0 - - F Benign From_benign-To_benign-Multicast
+1677024250.148119 CyYZ9pbydD79wo9we 192.168.1.107 56510 224.0.0.252 5355 udp 48298 - nwin3 1 C_INTERNET 255 * - - F F F F 0 - - F Benign From_benign-To_benign-Multicast
+1677024250.148119 CyYZ9pbydD79wo9we 192.168.1.107 56510 224.0.0.252 5355 udp 48298 - nwin3 1 C_INTERNET 255 * - - F F F F 0 - - F Benign From_benign-To_benign-Multicast
+1677024250.148119 CyYZ9pbydD79wo9we 192.168.1.107 56510 224.0.0.252 5355 udp 48298 - nwin3 1 C_INTERNET 255 * - - F F F F 0 - - F Benign From_benign-To_benign-Multicast
+1677024250.148119 CyYZ9pbydD79wo9we 192.168.1.107 56510 224.0.0.252 5355 udp 48298 - nwin3 1 C_INTERNET 255 * - - F F F F 0 - - F Benign From_benign-To_benign-Multicast
+1677024250.148220 CyYZ9pbydD79wo9we 192.168.1.107 56510 224.0.0.252 5355 udp 48298 - nwin3 1 C_INTERNET 255 * - - F F F F 0 - - F Benign From_benign-To_benign-Multicast
+1677024250.148220 CyYZ9pbydD79wo9we 192.168.1.107 56510 224.0.0.252 5355 udp 48298 - nwin3 1 C_INTERNET 255 * - - F F F F 0 - - F Benign From_benign-To_benign-Multicast
+1677024250.148220 CyYZ9pbydD79wo9we 192.168.1.107 56510 224.0.0.252 5355 udp 48298 - nwin3 1 C_INTERNET 255 * - - F F F F 0 - - F Benign From_benign-To_benign-Multicast
+1677024250.247383 CyYZ9pbydD79wo9we 192.168.1.107 56510 224.0.0.252 5355 udp 48298 - nwin3 1 C_INTERNET 255 * - - F F F F 0 - - F Benign From_benign-To_benign-Multicast
+1677024250.247384 CyYZ9pbydD79wo9we 192.168.1.107 56510 224.0.0.252 5355 udp 48298 - nwin3 1 C_INTERNET 255 * - - F F F F 0 - - F Benign From_benign-To_benign-Multicast
+1677024250.247624 CyYZ9pbydD79wo9we 192.168.1.107 56510 224.0.0.252 5355 udp 48298 - nwin3 1 C_INTERNET 255 * - - F F F F 0 - - F Benign From_benign-To_benign-Multicast
+1677024250.247624 CyYZ9pbydD79wo9we 192.168.1.107 56510 224.0.0.252 5355 udp 48298 - nwin3 1 C_INTERNET 255 * - - F F F F 0 - - F Benign From_benign-To_benign-Multicast
+1677024250.247624 CyYZ9pbydD79wo9we 192.168.1.107 56510 224.0.0.252 5355 udp 48298 - nwin3 1 C_INTERNET 255 * - - F F F F 0 - - F Benign From_benign-To_benign-Multicast
+1677024250.247624 CyYZ9pbydD79wo9we 192.168.1.107 56510 224.0.0.252 5355 udp 48298 - nwin3 1 C_INTERNET 255 * - - F F F F 0 - - F Benign From_benign-To_benign-Multicast
+1677024250.247624 CyYZ9pbydD79wo9we 192.168.1.107 56510 224.0.0.252 5355 udp 48298 - nwin3 1 C_INTERNET 255 * - - F F F F 0 - - F Benign From_benign-To_benign-Multicast
+1677024250.247624 CyYZ9pbydD79wo9we 192.168.1.107 56510 224.0.0.252 5355 udp 48298 - nwin3 1 C_INTERNET 255 * - - F F F F 0 - - F Benign From_benign-To_benign-Multicast
+1677024250.247624 CyYZ9pbydD79wo9we 192.168.1.107 56510 224.0.0.252 5355 udp 48298 - nwin3 1 C_INTERNET 255 * - - F F F F 0 - - F Benign From_benign-To_benign-Multicast
+1677024250.247625 CyYZ9pbydD79wo9we 192.168.1.107 56510 224.0.0.252 5355 udp 48298 - nwin3 1 C_INTERNET 255 * - - F F F F 0 - - F Benign From_benign-To_benign-Multicast
+1677024250.247701 CyYZ9pbydD79wo9we 192.168.1.107 56510 224.0.0.252 5355 udp 48298 - nwin3 1 C_INTERNET 255 * - - F F F F 0 - - F Benign From_benign-To_benign-Multicast
+#close 2024-08-16-09-45-01
\ No newline at end of file
diff --git a/dataset/test18-malicious-ctu-sme-11-win/zeek-labeled/notice.log.labeled b/dataset/test18-malicious-ctu-sme-11-win/zeek-labeled/notice.log.labeled
new file mode 100644
index 000000000..ce963763d
--- /dev/null
+++ b/dataset/test18-malicious-ctu-sme-11-win/zeek-labeled/notice.log.labeled
@@ -0,0 +1,11 @@
+#separator \x09
+#set_separator ,
+#empty_field (empty)
+#unset_field -
+#path notice
+#open 2024-08-16-09-45-01
+#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p fuid file_mime_type file_desc proto note msg sub src dst p n peer_descr actions email_dest suppress_for remote_location.country_code remote_location.region remote_location.city remote_location.latitude remote_location.longitude label detailedlabel
+#types time string addr port addr port string string string enum enum string string addr addr port count string set[enum] set[string] interval string string string double double string string
+1677024063.062951 - - - - - - - - - CaptureLoss::Too_Little_Traffic Only observed 0 TCP ACKs and was expecting at least 1. - - - - - - Notice::ACTION_LOG (empty) 3600.000000 - - - - - (empty) (empty)
+1677024501.956000 - - - - - - - - - CaptureLoss::Too_Little_Traffic Only observed 0 TCP ACKs and was expecting at least 1. - - - - - - Notice::ACTION_LOG (empty) 3600.000000 - - - - - (empty) (empty)
+#close 2024-08-16-09-45-01
\ No newline at end of file
diff --git a/dataset/test18-malicious-ctu-sme-11-win/zeek-labeled/quic.log.labeled b/dataset/test18-malicious-ctu-sme-11-win/zeek-labeled/quic.log.labeled
new file mode 100644
index 000000000..3d24258eb
--- /dev/null
+++ b/dataset/test18-malicious-ctu-sme-11-win/zeek-labeled/quic.log.labeled
@@ -0,0 +1,11 @@
+#separator \x09
+#set_separator ,
+#empty_field (empty)
+#unset_field -
+#path quic
+#open 2024-08-16-09-45-01
+#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p version client_initial_dcid client_scid server_scid server_name client_protocol history label detailedlabel
+#types time string addr port addr port string string string string string string string string string
+1677024203.323807 CTxPmyQW2fMBdp2ld 192.168.1.107 56508 162.159.136.232 443 1 bf9f35805f9c8e38 (empty) 0197e3a65ca1c4b5fd97fca626a1f94b9d117fba discord.com h3 ISIZZZZIIIIisiIIhh Benign From_benign-To_benign-Cloudflare
+1677024494.131672 C68Cut3WHGKMFF4JEa 192.168.1.107 51001 142.251.36.131 443 1 15d5e665e3a46b8f (empty) d5d5e665e3a46b8f clientservices.googleapis.com h3 ISIZZZZisiHH Benign From_benign-To_benign-1e100
+#close 2024-08-16-09-45-01
\ No newline at end of file
diff --git a/dataset/test18-malicious-ctu-sme-11-win/zeek-labeled/ssl.log.labeled b/dataset/test18-malicious-ctu-sme-11-win/zeek-labeled/ssl.log.labeled
new file mode 100644
index 000000000..f7d51f0e2
--- /dev/null
+++ b/dataset/test18-malicious-ctu-sme-11-win/zeek-labeled/ssl.log.labeled
@@ -0,0 +1,11 @@
+#separator \x09
+#set_separator ,
+#empty_field (empty)
+#unset_field -
+#path ssl
+#open 2024-08-16-09-45-01
+#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p version cipher curve server_name resumed last_alert next_protocol established ssl_history cert_chain_fps client_cert_chain_fps sni_matches_cert validation_status label detailedlabel
+#types time string addr port addr port string string string string bool string string bool string vector[string] vector[string] bool string string string
+1677024203.323807 CTxPmyQW2fMBdp2ld 192.168.1.107 56508 162.159.136.232 443 TLSv13 TLS_AES_128_GCM_SHA256 x25519 discord.com T - - F Cs - - - - Benign From_benign-To_benign-Cloudflare
+1677024494.131672 C68Cut3WHGKMFF4JEa 192.168.1.107 51001 142.251.36.131 443 TLSv13 TLS_AES_128_GCM_SHA256 x25519 clientservices.googleapis.com T - - F Cs - - - - Benign From_benign-To_benign-1e100
+#close 2024-08-16-09-45-01
\ No newline at end of file
diff --git a/dataset/test18-malicious-ctu-sme-11-win/zeek-labeled/telemetry.log.labeled b/dataset/test18-malicious-ctu-sme-11-win/zeek-labeled/telemetry.log.labeled
new file mode 100644
index 000000000..e96f5232a
--- /dev/null
+++ b/dataset/test18-malicious-ctu-sme-11-win/zeek-labeled/telemetry.log.labeled
@@ -0,0 +1,9 @@
+#separator \x09
+#set_separator ,
+#empty_field (empty)
+#unset_field -
+#path telemetry
+#open 2024-08-16-09-45-01
+#fields ts peer metric_type name labels label_values value label detailedlabel
+#types time string string string vector[string] vector[string] double string string
+#close 2024-08-16-09-45-01
\ No newline at end of file
diff --git a/docker/P2P-image/Dockerfile b/docker/P2P-image/Dockerfile
index 53e3d3dd7..7695d5355 100644
--- a/docker/P2P-image/Dockerfile
+++ b/docker/P2P-image/Dockerfile
@@ -76,7 +76,7 @@ RUN pip install --ignore-installed --no-cache-dir -r install/requirements.txt \
&& npm install
# set use_p2p to yes in the config file
-RUN sed -i "s/use_p2p : 'no'/use_p2p : 'yes'/" config/slips.yaml
+RUN sed -i "s/use_p2p : False/use_p2p : True/" config/slips.yaml
# build the pigeon and add pigeon to path
RUN cd p2p4slips \
diff --git a/docker/macosm1-P2P-image/Dockerfile b/docker/macosm1-P2P-image/Dockerfile
index 0acc4515a..6e8c53582 100644
--- a/docker/macosm1-P2P-image/Dockerfile
+++ b/docker/macosm1-P2P-image/Dockerfile
@@ -71,8 +71,8 @@ RUN pip install --ignore-installed --no-cache-dir -r install/requirements.txt \
&& cd modules/kalipso \
&& npm install
-# Change the configuration file to have use_p2p = yes
-RUN sed -i "s/use_p2p : 'no'/use_p2p : 'yes'/" config/slips.yaml
+RUN sed -i "s/use_p2p : False/use_p2p : True/" config/slips.yaml
+
# build the pigeon and add pigeon to path
RUN cd p2p4slips \
diff --git a/docker/macosm1-P2P-image/requirements-macos-m1-docker.txt b/docker/macosm1-P2P-image/requirements-macos-m1-docker.txt
index 017712d08..046e735f7 100644
--- a/docker/macosm1-P2P-image/requirements-macos-m1-docker.txt
+++ b/docker/macosm1-P2P-image/requirements-macos-m1-docker.txt
@@ -1,7 +1,7 @@
maxminddb==2.6.2
numpy==1.26.4
-watchdog==4.0.1
-redis==3.5.3
+watchdog==5.0.0
+redis==5.0.8
urllib3==2.2.2
pandas==2.2.2
tzlocal==5.2
@@ -13,13 +13,13 @@ colorama==0.4.6
Keras
validators==0.33.0
ipwhois==1.2.0
-matplotlib==3.9.1
+matplotlib==3.9.2
recommonmark==0.7.1
scikit_learn
slackclient==2.9.4
psutil==6.0.0
six==1.16.0
-pytest==8.2.2
+pytest==8.3.2
pytest-mock==3.14.0
pytest-xdist==3.6.1
scipy==1.14.0
@@ -33,15 +33,15 @@ whois==1.20240129.2
wheel
flask
tldextract==5.1.2
-tqdm==4.66.4
+tqdm==4.66.5
termcolor==2.4.0
viztracer==0.16.3
yappi==1.6.0
pytest-sugar==1.0.0
-memray==1.13.3
+memray==1.13.4
aid_hash
-black==24.4.2
-ruff==0.5.2
-pre-commit==3.7.1
-coverage==7.6.0
+black==24.8.0
+ruff==0.6.2
+pre-commit==3.8.0
+coverage==7.6.1
pyyaml
diff --git a/docker/macosm1-image/requirements-macos-m1-docker.txt b/docker/macosm1-image/requirements-macos-m1-docker.txt
index 017712d08..046e735f7 100644
--- a/docker/macosm1-image/requirements-macos-m1-docker.txt
+++ b/docker/macosm1-image/requirements-macos-m1-docker.txt
@@ -1,7 +1,7 @@
maxminddb==2.6.2
numpy==1.26.4
-watchdog==4.0.1
-redis==3.5.3
+watchdog==5.0.0
+redis==5.0.8
urllib3==2.2.2
pandas==2.2.2
tzlocal==5.2
@@ -13,13 +13,13 @@ colorama==0.4.6
Keras
validators==0.33.0
ipwhois==1.2.0
-matplotlib==3.9.1
+matplotlib==3.9.2
recommonmark==0.7.1
scikit_learn
slackclient==2.9.4
psutil==6.0.0
six==1.16.0
-pytest==8.2.2
+pytest==8.3.2
pytest-mock==3.14.0
pytest-xdist==3.6.1
scipy==1.14.0
@@ -33,15 +33,15 @@ whois==1.20240129.2
wheel
flask
tldextract==5.1.2
-tqdm==4.66.4
+tqdm==4.66.5
termcolor==2.4.0
viztracer==0.16.3
yappi==1.6.0
pytest-sugar==1.0.0
-memray==1.13.3
+memray==1.13.4
aid_hash
-black==24.4.2
-ruff==0.5.2
-pre-commit==3.7.1
-coverage==7.6.0
+black==24.8.0
+ruff==0.6.2
+pre-commit==3.8.0
+coverage==7.6.1
pyyaml
diff --git a/docs/datasets.md b/docs/datasets.md
index dfbfea981..482885fc9 100644
--- a/docs/datasets.md
+++ b/docs/datasets.md
@@ -67,4 +67,11 @@ Capture
- sudo tcpdump -n -s0 -i eno1 port 53 or \(host 147.32.80.37 and host testing.com\) -v -w test-cc-capture-2.pcap
Connection
-- while [ 1 ]; do curl https://testing.com; sleep $(echo "scale=2; 2+$RANDOM / 20000" | bc); done
\ No newline at end of file
+- while [ 1 ]; do curl https://testing.com; sleep $(echo "scale=2; 2+$RANDOM / 20000" | bc); done
+
+### test18-malicious-ctu-sme-11-win
+This capture is a short part of the [CTU-SME-11](https://zenodo.org/records/7958259) dataset, capture Experiment-VM-Microsoft-Windows7full-3, day 2023-02-22. It consists of only the first 5000 packets of that capture.
+
+#### Labels
+The labels were assigned by hand by an expert. The labeling configuration file is `labels.config`, and the logs were labeled with the [netflowlabeler](https://github.com/stratosphereips/netflowlabeler) tool.
+
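+To work with these labeled logs programmatically, the minimal Python sketch below
+(illustrative only; not part of Slips or of the dataset) counts the `label` values in a
+labeled Zeek log. It assumes the standard tab-separated Zeek format with a `#fields`
+header and `label`/`detailedlabel` as the last two columns, as in the labeled files above.
+
+```python
+from collections import Counter
+
+def count_labels(path: str) -> Counter:
+    """Count the label column of a netflowlabeler-labeled Zeek log."""
+    counts = Counter()
+    fields = []
+    with open(path) as f:
+        for line in f:
+            line = line.rstrip("\n")
+            if line.startswith("#fields"):
+                # header line: "#fields<TAB>ts<TAB>uid<TAB>...<TAB>label<TAB>detailedlabel"
+                fields = line.split("\t")[1:]
+            elif line and not line.startswith("#"):
+                row = dict(zip(fields, line.split("\t")))
+                counts[row.get("label", "-")] += 1
+    return counts
+
+# hypothetical usage, following the dataset layout added in this commit
+print(count_labels(
+    "dataset/test18-malicious-ctu-sme-11-win/zeek-labeled/conn.log.labeled"
+))
+```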
diff --git a/docs/detection_modules.md b/docs/detection_modules.md
index 724f8cfba..69661e26d 100644
--- a/docs/detection_modules.md
+++ b/docs/detection_modules.md
@@ -471,6 +471,10 @@ By default, the files are stored in the Slips directory ```modules/ThreatIntelli
are deleted after slips is done reading them.
+Domains found in remote TI feeds are considered invalid, and are therefore discarded by Slips,
+if their suffix doesn't exist in
+https://publicsuffix.org/list/public_suffix_list.dat
+
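+As an illustration, such a suffix check can be done with the `tldextract` package that is
+already listed in Slips' requirements; the snippet below is a simplified sketch of the idea,
+not the exact Slips implementation. A domain whose suffix is not in the public suffix list
+yields an empty `suffix`:
+
+```python
+import tldextract
+
+def has_valid_suffix(domain: str) -> bool:
+    # tldextract resolves the suffix against the public suffix list;
+    # an empty suffix means the TLD is not a known public suffix.
+    return bool(tldextract.extract(domain).suffix)
+
+print(has_valid_suffix("example.com"))        # True
+print(has_valid_suffix("something.notatld"))  # False -> would be discarded
+```
+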
### Commenting a remote TI feed
If you have a remote file link that you wish to comment and remove from the database
diff --git a/docs/images/slips.gif b/docs/images/slips.gif
index 64a7b87a4..c7cc5defb 100644
Binary files a/docs/images/slips.gif and b/docs/images/slips.gif differ
diff --git a/fides b/fides
index bfac47728..642e1e847 160000
--- a/fides
+++ b/fides
@@ -1 +1 @@
-Subproject commit bfac47728172d3a4bbb27a5bb53ceef424e45e4f
+Subproject commit 642e1e847b0457f38028615a2888eddb3fa187e9
diff --git a/install/install.sh b/install/install.sh
index 6df042fc3..e256b0a38 100755
--- a/install/install.sh
+++ b/install/install.sh
@@ -1,74 +1,151 @@
#!/bin/sh
+
+print_green() {
+ # Prints text in green color
+ echo "[+] \e[32m$1\e[0m\n"
+}
+
+exit_on_cmd_failure() {
+ if [ $? -ne 0 ]; then
+ echo "Problem installing Slips. Aborting."
+ exit 1
+ fi
+}
+
+
+
+# Function to check if zeek or bro is available
+check_zeek_or_bro() {
+ if which zeek > /dev/null 2>&1 || which bro > /dev/null 2>&1; then
+ return 0 # Indicate success (found)
+ else
+ return 1 # Indicate failure (not found)
+ fi
+}
+
+
+
+# to disable prompts
+export DEBIAN_FRONTEND=noninteractive
+
+print_green "Setting up local timezone"
+
+ln -snf /usr/share/zoneinfo/$TZ /etc/localtime
+echo $TZ > /etc/timezone
+
+exit_on_cmd_failure
+
+print_green "Running apt update"
sudo apt-get update
-echo "[+] Installing slips dependencies ...\n"
-sudo apt-get install cmake make gcc g++ flex bison libpcap-dev libssl-dev python3 python3-dev swig zlib1g-dev
-sudo apt install -y --no-install-recommends \
+
+exit_on_cmd_failure
+
+print_green "Installing Slips dependencies ..."
+ sudo apt-get install -y --no-install-recommends \
+ cmake \
+ make \
+ gcc \
+ g++ \
+ flex \
+ bison \
+ libpcap-dev \
+ libssl-dev \
+ swig \
+ zlib1g-dev \
wget \
ca-certificates \
git \
curl \
gnupg \
+ lsb-release \
+ software-properties-common \
+ build-essential \
+ file \
+ lsof \
+ iptables \
+ iproute2 \
+ nfdump \
+ tshark \
+ whois \
+ yara \
+ net-tools \
+ vim \
+ less \
+ unzip \
+ python3-certifi \
+ python3-dev \
+ python3-tzlocal \
+ python3-pip \
+ golang \
+ notify-osd \
+ libnotify-bin \
lsb-release
-echo 'deb http://download.opensuse.org/repositories/security:/zeek/xUbuntu_20.04/ /' | sudo tee /etc/apt/sources.list.d/security:zeek.list
-curl -fsSL https://download.opensuse.org/repositories/security:zeek/xUbuntu_20.04/Release.key | gpg --dearmor | sudo tee /etc/apt/trusted.gpg.d/security_zeek.gpg > /dev/null
-curl -fsSL https://packages.redis.io/gpg | sudo gpg --dearmor -o /usr/share/keyrings/redis-archive-keyring.gpg
-echo "deb [signed-by=/usr/share/keyrings/redis-archive-keyring.gpg] https://packages.redis.io/deb $(lsb_release -cs) main" | sudo tee /etc/apt/sources.list.d/redis.list
+exit_on_cmd_failure
-sudo apt-get update
+print_green "Installing Zeek"
+UBUNTU_VERSION=$(lsb_release -r | awk '{print $2}')
+ZEEK_REPO_URL="download.opensuse.org/repositories/security:/zeek/xUbuntu_${UBUNTU_VERSION}"
+
+# Add the repository to the sources list
+echo "deb http://${ZEEK_REPO_URL}/ /" | tee /etc/apt/sources.list.d/security:zeek.list \
+&& curl -fsSL "https://${ZEEK_REPO_URL}/Release.key" | gpg --dearmor | tee /etc/apt/trusted.gpg.d/security_zeek.gpg > /dev/null \
+&& sudo apt update && sudo apt install -y --no-install-recommends zeek
-echo "[+] Installing Slips dependencies ...\n"
-sudo apt install -y --no-install-recommends \
- python3 \
- redis \
- zeek \
- python3-pip \
- python3-certifi \
- python3-dev \
- build-essential \
- file \
- lsof \
- net-tools \
- iproute2 \
- iptables \
- python3-tzlocal \
- nfdump \
- tshark \
- git \
- whois \
- golang \
- notify-osd \
- yara \
- libnotify-bin
-
-echo "[+] Installing zeek ..."
# create a symlink to zeek so that slips can find it
-sudo ln -s /opt/zeek/bin/zeek /usr/local/bin/bro
+ln -s /opt/zeek/bin/zeek /usr/local/bin/bro
export PATH=$PATH:/usr/local/zeek/bin
echo "export PATH=$PATH:/usr/local/zeek/bin" >> ~/.bashrc
+# dont continue with slips installation if zeek isn't installed
+if ! check_zeek_or_bro; then
+ echo "Problem installing Slips. Aborting."
+ exit 1
+fi
+
+
+print_green "Installing Redis"
+curl -fsSL https://packages.redis.io/gpg | gpg --dearmor -o /usr/share/keyrings/redis-archive-keyring.gpg
+echo "deb [signed-by=/usr/share/keyrings/redis-archive-keyring.gpg] https://packages.redis.io/deb $(lsb_release -cs) main" > /etc/apt/sources.list.d/redis.list
+sudo apt-get update
+sudo apt install -y --no-install-recommends redis
+
+exit_on_cmd_failure
+
+print_green "Installing Python requirements"
+
+python3 -m pip install --upgrade pip \
+&& pip3 install --ignore-installed -r install/requirements.txt \
+&& pip3 install --ignore-installed six
+
+exit_on_cmd_failure
-echo "[+] Executing 'python3 -m pip install --upgrade pip'"
-python3 -m pip install --upgrade pip
-echo "[+] Executing 'pip3 install -r install/requirements.txt'"
-pip3 install -r install/requirements.txt
-echo "[+] Executing pip3 install --ignore-installed six"
-pip3 install --ignore-installed six
# For Kalipso
-echo "[+] Downloading nodejs v19 and npm dependencies"
-curl -fsSL https://deb.nodesource.com/setup_21.x | sudo -E bash - && sudo apt install -y --no-install-recommends nodejs
-cd ./modules/kalipso && npm install
-cd ../..
+print_green "Installing nodejs and npm dependencies"
+curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.7/install.sh | sudo bash - \
+ && export NVM_DIR="$HOME/.nvm" \
+ && [ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" \
+ && nvm install 22 \
+ && cd modules/kalipso && npm install \
+ && cd ../..
-echo "[+] Installing p2p4slips\n"
+exit_on_cmd_failure
+
+print_green "Installing p2p4slips"
# build the pigeon and Add pigeon to path
git submodule init && git submodule update && cd p2p4slips && go build && export PATH=$PATH:$(pwd) >> ~/.bashrc && cd ..
+exit_on_cmd_failure
# running slips for the first time
-echo "[+] Executing 'redis-server --daemonize yes'\n"
-redis-server --daemonize yes
\ No newline at end of file
+print_green "Executing 'redis-server --daemonize yes'"
+redis-server --daemonize yes
+
+exit_on_cmd_failure
+
+print_green "Successfully installed Slips."
diff --git a/install/requirements.txt b/install/requirements.txt
index 017712d08..046e735f7 100644
--- a/install/requirements.txt
+++ b/install/requirements.txt
@@ -1,7 +1,7 @@
maxminddb==2.6.2
numpy==1.26.4
-watchdog==4.0.1
-redis==3.5.3
+watchdog==5.0.0
+redis==5.0.8
urllib3==2.2.2
pandas==2.2.2
tzlocal==5.2
@@ -13,13 +13,13 @@ colorama==0.4.6
Keras
validators==0.33.0
ipwhois==1.2.0
-matplotlib==3.9.1
+matplotlib==3.9.2
recommonmark==0.7.1
scikit_learn
slackclient==2.9.4
psutil==6.0.0
six==1.16.0
-pytest==8.2.2
+pytest==8.3.2
pytest-mock==3.14.0
pytest-xdist==3.6.1
scipy==1.14.0
@@ -33,15 +33,15 @@ whois==1.20240129.2
wheel
flask
tldextract==5.1.2
-tqdm==4.66.4
+tqdm==4.66.5
termcolor==2.4.0
viztracer==0.16.3
yappi==1.6.0
pytest-sugar==1.0.0
-memray==1.13.3
+memray==1.13.4
aid_hash
-black==24.4.2
-ruff==0.5.2
-pre-commit==3.7.1
-coverage==7.6.0
+black==24.8.0
+ruff==0.6.2
+pre-commit==3.8.0
+coverage==7.6.1
pyyaml
diff --git a/managers/process_manager.py b/managers/process_manager.py
index 5a4d6395a..44cd8e592 100644
--- a/managers/process_manager.py
+++ b/managers/process_manager.py
@@ -96,11 +96,6 @@ def is_pbar_supported(self) -> bool:
):
return False
- if self.main.stdout != "":
- # this means that stdout was redirected to a file,
- # no need to print the progress bar
- return False
-
if (
self.main.args.growing
or self.main.args.input_module
@@ -110,9 +105,9 @@ def is_pbar_supported(self) -> bool:
return True
- def start_output_process(self, current_stdout, stderr, slips_logfile):
+ def start_output_process(self, stderr, slips_logfile, stdout=""):
output_process = Output(
- stdout=current_stdout,
+ stdout=stdout,
stderr=stderr,
slips_logfile=slips_logfile,
verbose=self.main.args.verbose or 0,
@@ -132,7 +127,6 @@ def start_progress_bar(self):
self.main.args.output,
self.main.redis_port,
self.termination_event,
- stdout=self.main.stdout,
pipe=self.pbar_recv_pipe,
slips_mode=self.main.mode,
pbar_finished=self.pbar_finished,
@@ -717,6 +711,7 @@ def shutdown_gracefully(self):
hitlist = self.get_hitlist_in_order()
to_kill_first: List[Process] = hitlist[0]
to_kill_last: List[Process] = hitlist[1]
+
self.termination_event.set()
# to make sure we only warn the user once about
diff --git a/managers/redis_manager.py b/managers/redis_manager.py
index ecadc4dc3..6acfa0a64 100644
--- a/managers/redis_manager.py
+++ b/managers/redis_manager.py
@@ -71,8 +71,7 @@ def load_redis_db(self, redis_port):
def load_db(self):
self.input_type = "database"
- # self.input_information = 'database'
- self.main.db.start(6379)
+ self.main.db.init_redis_server()
# this is where the db will be loaded
redis_port = 32850
@@ -212,17 +211,10 @@ def print_port_in_use(self, port: int):
f"\nOr kill your open redis ports using: ./slips.py -k "
)
- def check_if_port_is_in_use(self, port: int) -> bool:
- if port == 6379:
- # even if it's already in use, slips should override it
- return False
-
+ def close_slips_if_port_in_use(self, port: int):
if utils.is_port_in_use(port):
self.print_port_in_use(port)
self.main.terminate_slips()
- return True
-
- return False
def get_pid_of_redis_server(self, port: int) -> int:
"""
@@ -336,6 +328,31 @@ def get_port_of_redis_server(self, pid: int) -> Union[int, bool]:
# pid wasn't found using the above cmd
return False
+ def get_redis_port(self) -> int:
+ """
+ returns the redis server port to use based on the given args -P,
+ -m, or the default port.
+ if all ports are unavailable, this function terminates slips
+ """
+ if self.main.args.port:
+ redis_port = int(self.main.args.port)
+ # if the default port is already in use, slips should override it
+ if redis_port != 6379:
+ # close slips if port is in use
+ self.close_slips_if_port_in_use(redis_port)
+ elif self.main.args.multiinstance:
+ redis_port = self.get_random_redis_port()
+ if not redis_port:
+ # all ports are unavailable
+ inp = input("Press Enter to close all ports.\n")
+ if inp == "":
+ self.close_all_ports()
+ self.main.terminate_slips()
+ else:
+ # even if this port is in use, it will be overwritten by slips
+ redis_port = 6379
+ return redis_port
+
def flush_redis_server(self, pid: int = None, port: int = None):
"""
Flush the redis server on this pid, only 1 param should be
diff --git a/modules/flowalerts/conn.py b/modules/flowalerts/conn.py
index 0638f522b..93cb76b6d 100644
--- a/modules/flowalerts/conn.py
+++ b/modules/flowalerts/conn.py
@@ -814,8 +814,8 @@ def is_dns_conn():
timestamp,
)
- def analyze(self):
- if msg := self.flowalerts.get_msg("new_flow"):
+ def analyze(self, msg):
+ if utils.is_msg_intended_for(msg, "new_flow"):
new_flow = json.loads(msg["data"])
profileid = new_flow["profileid"]
twid = new_flow["twid"]
@@ -937,7 +937,7 @@ def analyze(self):
flow_type, smac, profileid, twid, uid, timestamp
)
- if msg := self.flowalerts.get_msg("tw_closed"):
+ if utils.is_msg_intended_for(msg, "tw_closed"):
profileid_tw = msg["data"].split("_")
profileid = f"{profileid_tw[0]}_{profileid_tw[1]}"
twid = profileid_tw[-1]
diff --git a/modules/flowalerts/dns.py b/modules/flowalerts/dns.py
index be5ad35cc..209f00d8e 100644
--- a/modules/flowalerts/dns.py
+++ b/modules/flowalerts/dns.py
@@ -86,7 +86,7 @@ def is_cname_contacted(self, answers, contacted_ips) -> bool:
check if any ip of the given CNAMEs is contacted
"""
for CNAME in answers:
- if not validators.domain(CNAME):
+ if not utils.is_valid_domain(CNAME):
# it's an ip
continue
ips = self.db.get_domain_resolution(CNAME)
@@ -444,9 +444,8 @@ def check_dns_arpa_scan(self, domain, stime, profileid, twid, uid):
self.dns_arpa_queries.pop(profileid)
return True
- def analyze(self):
- msg = self.flowalerts.get_msg("new_dns")
- if not msg:
+ def analyze(self, msg):
+ if not utils.is_msg_intended_for(msg, "new_dns"):
return False
data = json.loads(msg["data"])
diff --git a/modules/flowalerts/downloaded_file.py b/modules/flowalerts/downloaded_file.py
index 63616dd22..fb64224c3 100644
--- a/modules/flowalerts/downloaded_file.py
+++ b/modules/flowalerts/downloaded_file.py
@@ -3,6 +3,7 @@
from slips_files.common.abstracts.flowalerts_analyzer import (
IFlowalertsAnalyzer,
)
+from slips_files.common.slips_utils import utils
class DownloadedFile(IFlowalertsAnalyzer):
@@ -33,9 +34,8 @@ def check_malicious_ssl(self, ssl_info):
self.set_evidence.malicious_ssl(ssl_info, ssl_info_from_db)
- def analyze(self):
- msg = self.flowalerts.get_msg("new_downloaded_file")
- if not msg:
+ def analyze(self, msg):
+ if not utils.is_msg_intended_for(msg, "new_downloaded_file"):
return
ssl_info = json.loads(msg["data"])
diff --git a/modules/flowalerts/flowalerts.py b/modules/flowalerts/flowalerts.py
index feed114d7..4904221ff 100644
--- a/modules/flowalerts/flowalerts.py
+++ b/modules/flowalerts/flowalerts.py
@@ -24,7 +24,6 @@ class FlowAlerts(IModule):
def init(self):
self.subscribe_to_channels()
self.whitelist = Whitelist(self.logger, self.db)
-
self.dns = DNS(self.db, flowalerts=self)
self.software = Software(self.db, flowalerts=self)
self.notice = Notice(self.db, flowalerts=self)
@@ -36,42 +35,45 @@ def init(self):
self.conn = Conn(self.db, flowalerts=self)
def subscribe_to_channels(self):
- self.c1 = self.db.subscribe("new_flow")
- self.c2 = self.db.subscribe("new_ssh")
- self.c3 = self.db.subscribe("new_notice")
- self.c4 = self.db.subscribe("new_ssl")
- self.c5 = self.db.subscribe("tw_closed")
- self.c6 = self.db.subscribe("new_dns")
- self.c7 = self.db.subscribe("new_downloaded_file")
- self.c8 = self.db.subscribe("new_smtp")
- self.c9 = self.db.subscribe("new_software")
- self.c10 = self.db.subscribe("new_weird")
- self.c11 = self.db.subscribe("new_tunnel")
-
- self.channels = {
- "new_flow": self.c1,
- "new_ssh": self.c2,
- "new_notice": self.c3,
- "new_ssl": self.c4,
- "tw_closed": self.c5,
- "new_dns": self.c6,
- "new_downloaded_file": self.c7,
- "new_smtp": self.c8,
- "new_software": self.c9,
- "new_weird": self.c10,
- "new_tunnel": self.c11,
- }
+ channels = (
+ "new_flow",
+ "new_ssh",
+ "new_notice",
+ "new_ssl",
+ "tw_closed",
+ "new_dns",
+ "new_downloaded_file",
+ "new_smtp",
+ "new_software",
+ "new_tunnel",
+ )
+ for channel in channels:
+ channel_obj = self.db.subscribe(channel)
+ self.channels.update({channel: channel_obj})
def pre_main(self):
utils.drop_root_privs()
+ self.analyzers_map = {
+ "new_downloaded_file": self.downloaded_file.analyze,
+ "new_notice": self.notice.analyze,
+ "new_smtp": self.smtp.analyze,
+ "new_flow": [self.conn.analyze, self.ssl.analyze],
+ "new_dns": self.dns.analyze,
+ "tw_closed": self.conn.analyze,
+ "new_ssh": self.ssh.analyze,
+ "new_software": self.software.analyze,
+ "new_tunnel": self.tunnel.analyze,
+ "new_ssl": self.ssl.analyze,
+ }
def main(self):
- self.conn.analyze()
- self.notice.analyze()
- self.dns.analyze()
- self.smtp.analyze()
- self.ssl.analyze()
- self.ssh.analyze()
- self.downloaded_file.analyze()
- self.tunnel.analyze()
- self.software.analyze()
+ for channel, analyzers in self.analyzers_map.items():
+ msg = self.get_msg(channel)
+ if not msg:
+ continue
+
+ if isinstance(analyzers, list):
+ for analyzer in analyzers:
+ analyzer(msg)
+ else:
+ analyzers(msg)
diff --git a/modules/flowalerts/notice.py b/modules/flowalerts/notice.py
index d19510347..82ea98195 100644
--- a/modules/flowalerts/notice.py
+++ b/modules/flowalerts/notice.py
@@ -3,6 +3,7 @@
from slips_files.common.abstracts.flowalerts_analyzer import (
IFlowalertsAnalyzer,
)
+from slips_files.common.slips_utils import utils
class Notice(IFlowalertsAnalyzer):
@@ -54,9 +55,8 @@ def check_password_guessing(self, flow, uid, twid):
self.set_evidence.pw_guessing(msg, timestamp, twid, uid, by="Zeek")
- def analyze(self):
- msg = self.flowalerts.get_msg("new_notice")
- if not msg:
+ def analyze(self, msg):
+ if not utils.is_msg_intended_for(msg, "new_notice"):
return False
data = json.loads(msg["data"])
diff --git a/modules/flowalerts/smtp.py b/modules/flowalerts/smtp.py
index 1b40988f5..7f5520297 100644
--- a/modules/flowalerts/smtp.py
+++ b/modules/flowalerts/smtp.py
@@ -68,9 +68,8 @@ def check_smtp_bruteforce(self, profileid, twid, flow):
# remove all 3 logins that caused this alert
self.smtp_bruteforce_cache[profileid] = ([], [])
- def analyze(self):
- msg = self.flowalerts.get_msg("new_smtp")
- if not msg:
+ def analyze(self, msg):
+ if not utils.is_msg_intended_for(msg, "new_smtp"):
return
smtp_info = json.loads(msg["data"])
diff --git a/modules/flowalerts/software.py b/modules/flowalerts/software.py
index db5448771..88a147d81 100644
--- a/modules/flowalerts/software.py
+++ b/modules/flowalerts/software.py
@@ -3,6 +3,7 @@
from slips_files.common.abstracts.flowalerts_analyzer import (
IFlowalertsAnalyzer,
)
+from slips_files.common.slips_utils import utils
class Software(IFlowalertsAnalyzer):
@@ -59,9 +60,8 @@ def check_multiple_ssh_versions(
)
return True
- def analyze(self):
- msg = self.flowalerts.get_msg("new_software")
- if not msg:
+ def analyze(self, msg):
+ if not utils.is_msg_intended_for(msg, "new_software"):
return
msg = json.loads(msg["data"])
diff --git a/modules/flowalerts/ssh.py b/modules/flowalerts/ssh.py
index 4ee0dc63d..1e9f2f3c3 100644
--- a/modules/flowalerts/ssh.py
+++ b/modules/flowalerts/ssh.py
@@ -6,6 +6,7 @@
IFlowalertsAnalyzer,
)
from slips_files.common.parsers.config_parser import ConfigParser
+from slips_files.common.slips_utils import utils
class SSH(IFlowalertsAnalyzer):
@@ -152,9 +153,8 @@ def check_ssh_password_guessing(
# reset the counter
del self.password_guessing_cache[cache_key]
- def analyze(self):
- msg = self.flowalerts.get_msg("new_ssh")
- if not msg:
+ def analyze(self, msg):
+ if not utils.is_msg_intended_for(msg, "new_ssh"):
return
data = msg["data"]
diff --git a/modules/flowalerts/ssl.py b/modules/flowalerts/ssl.py
index d3dad0c37..b1c0a2422 100644
--- a/modules/flowalerts/ssl.py
+++ b/modules/flowalerts/ssl.py
@@ -244,11 +244,12 @@ def detect_doh(
self.set_evidence.doh(daddr, profileid, twid, timestamp, uid)
self.db.set_ip_info(daddr, {"is_doh_server": True})
- def analyze(self):
+ def analyze(self, msg: dict):
if not self.ssl_thread_started:
self.ssl_waiting_thread.start()
self.ssl_thread_started = True
- if msg := self.flowalerts.get_msg("new_ssl"):
+
+ if utils.is_msg_intended_for(msg, "new_ssl"):
data = msg["data"]
data = json.loads(data)
flow = data["flow"]
@@ -297,6 +298,6 @@ def analyze(self):
uid,
)
- if msg := self.get_msg("new_flow"):
+ if utils.is_msg_intended_for(msg, "new_flow"):
new_flow = json.loads(msg["data"])
self.check_non_ssl_port_443_conns(new_flow)
diff --git a/modules/flowalerts/tunnel.py b/modules/flowalerts/tunnel.py
index e9ca1aefb..dede8dfe1 100644
--- a/modules/flowalerts/tunnel.py
+++ b/modules/flowalerts/tunnel.py
@@ -3,6 +3,7 @@
from slips_files.common.abstracts.flowalerts_analyzer import (
IFlowalertsAnalyzer,
)
+from slips_files.common.slips_utils import utils
class Tunnel(IFlowalertsAnalyzer):
@@ -24,10 +25,7 @@ def check_gre_tunnel(self, tunnel_info: dict):
self.set_evidence.GRE_tunnel(tunnel_info)
- def analyze(self):
- msg = self.flowalerts.get_msg("new_tunnel")
- if not msg:
- return
-
- msg = json.loads(msg["data"])
- self.check_gre_tunnel(msg)
+ def analyze(self, msg):
+ if utils.is_msg_intended_for(msg, "new_tunnel"):
+ msg = json.loads(msg["data"])
+ self.check_gre_tunnel(msg)
diff --git a/modules/ip_info/asn_info.py b/modules/ip_info/asn_info.py
index 0bb7f319e..f30f06601 100644
--- a/modules/ip_info/asn_info.py
+++ b/modules/ip_info/asn_info.py
@@ -117,6 +117,7 @@ def cache_ip_range(self, ip: str):
ipwhois.exceptions.HTTPLookupError,
ipwhois.exceptions.ASNRegistryError,
ipwhois.exceptions.ASNParseError,
+ ipwhois.exceptions.HTTPRateLimitError,
):
# private ip or RDAP lookup failed. don't cache
# or ASN lookup failed with no more methods to try
diff --git a/modules/p2ptrust/trust/base_model.py b/modules/p2ptrust/trust/base_model.py
index 30339db64..6ce499438 100644
--- a/modules/p2ptrust/trust/base_model.py
+++ b/modules/p2ptrust/trust/base_model.py
@@ -16,6 +16,7 @@ def __init__(self, logger: Output, trustdb):
IObservable.__init__(self)
self.add_observer(self.logger)
self.reliability_weight = 0.7
+ self.name = "BaseModel"
def print(self, text, verbose=1, debug=0):
"""
diff --git a/modules/p2ptrust/trust/trustdb.py b/modules/p2ptrust/trust/trustdb.py
index 663a46ac6..9fdbf639e 100644
--- a/modules/p2ptrust/trust/trustdb.py
+++ b/modules/p2ptrust/trust/trustdb.py
@@ -116,10 +116,6 @@ def insert_slips_score(
):
if timestamp is None:
timestamp = time.time()
- else:
- k = 3
- timestamp = time.time()
- print("###################3Slips score timeout: ", timestamp)
parameters = (ip, score, confidence, timestamp)
self.conn.execute(
"INSERT INTO slips_reputation (ipaddress, score, confidence, update_time) "
@@ -243,115 +239,105 @@ def get_ip_of_peer(self, peerid):
return last_update_time, ip
return False, False
- def get_opinion_on_ip(self, ipaddress: str):
+ def get_reports_for_ip(self, ipaddress):
"""
- :param ipaddress: The ip we're asking other peers about
+ Returns a list of all reports for the given IP address.
"""
reports_cur = self.conn.execute(
- "SELECT reports.reporter_peerid AS reporter_peerid,"
- " MAX(reports.update_time) AS report_timestamp,"
- " reports.score AS report_score,"
- " reports.confidence AS report_confidence,"
- " reports.reported_key AS reported_ip "
+ "SELECT reports.reporter_peerid, reports.update_time, reports.score, "
+ " reports.confidence, reports.reported_key "
"FROM reports "
- "WHERE reports.reported_key = ?"
- " AND reports.key_type = 'ip' "
- "GROUP BY reports.reporter_peerid;",
+ "WHERE reports.reported_key = ? AND reports.key_type = 'ip'"
+ "ORDER BY reports.update_time DESC;",
(ipaddress,),
)
+ return reports_cur.fetchall()
+
+ def get_reporter_ip(self, reporter_peerid, report_timestamp):
+ """
+ Returns the IP address of the reporter at the time of the report.
+ """
+ ip_cur = self.conn.execute(
+ "SELECT MAX(update_time), ipaddress "
+ "FROM peer_ips "
+ "WHERE update_time <= ? AND peerid = ? "
+ "ORDER BY update_time DESC "
+ "LIMIT 1;",
+ (report_timestamp, reporter_peerid),
+ )
+ if res := ip_cur.fetchone():
+ return res[1]
+ return None
+ def get_reporter_reliability(self, reporter_peerid):
+ """
+ Returns the latest reliability score for the given peer.
+ """
+ go_reliability_cur = self.conn.execute(
+ "SELECT reliability "
+ "FROM go_reliability "
+ "WHERE peerid = ? "
+ "ORDER BY update_time DESC "
+ "LIMIT 1;"
+ )
+ if res := go_reliability_cur.fetchone():
+ return res[0]
+ return None
+
+ def get_reporter_reputation(self, reporter_ipaddress):
+ """
+ Returns the latest reputation score and confidence for the given IP address.
+ """
+ slips_reputation_cur = self.conn.execute(
+ "SELECT score, confidence "
+ "FROM slips_reputation "
+ "WHERE ipaddress = ? "
+ "ORDER BY update_time DESC "
+ "LIMIT 1;",
+ (reporter_ipaddress,),
+ )
+ if res := slips_reputation_cur.fetchone():
+ return res
+ return None, None
+
+ def get_opinion_on_ip(self, ipaddress):
+ """
+ Returns a list of tuples, where each tuple contains the report score, report confidence,
+ reporter reliability, reporter score, and reporter confidence for a given IP address.
+ """
+ reports = self.get_reports_for_ip(ipaddress)
reporters_scores = []
- # iterate over all peers that reported the ip
for (
reporter_peerid,
report_timestamp,
report_score,
report_confidence,
reported_ip,
- ) in reports_cur.fetchall():
- # get the ip address the reporting peer had when doing the report
- ip_cur = self.conn.execute(
- "SELECT MAX(update_time) AS ip_update_time, ipaddress "
- "FROM peer_ips "
- "WHERE update_time <= ? AND peerid = ?;",
- (report_timestamp, reporter_peerid),
+ ) in reports:
+ reporter_ipaddress = self.get_reporter_ip(
+ reporter_peerid, report_timestamp
)
- _, reporter_ipaddress = ip_cur.fetchone()
- # TODO: handle empty response
-
- # prevent peers from reporting about themselves
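+            # prevent peers from reporting about themselves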
if reporter_ipaddress == ipaddress:
continue
- # get the most recent score and confidence for the given IP-peerID pair
- parameters_dict = {
- "peerid": reporter_peerid,
- "ipaddress": reporter_ipaddress,
- }
-
- # probably what this query means is:
- # get latest reports by this peer, whether using it's peer ID or IP
- # within this time range: last update time until now
-
- slips_reputation_cur = self.conn.execute(
- "SELECT * FROM ( "
- " SELECT b.update_time AS lower_bound, "
- " COALESCE( "
- " MIN(lj.min_update_time), strftime('%s','now')"
- " ) AS upper_bound, "
- " b.ipaddress AS ipaddress, "
- " b.peerid AS peerid "
- " FROM peer_ips b "
- " LEFT JOIN( "
- " SELECT a.update_time AS min_update_time "
- " FROM peer_ips a "
- " WHERE a.peerid = :peerid OR a.ipaddress = :ipaddress "
- " ORDER BY min_update_time "
- " ) lj "
- " ON lj.min_update_time > b.update_time "
- " WHERE b.peerid = :peerid AND b.ipaddress = :ipaddress "
- " GROUP BY lower_bound "
- " ORDER BY lower_bound DESC "
- " ) x "
- "LEFT JOIN slips_reputation sr USING (ipaddress) "
- "WHERE sr.update_time <= x.upper_bound AND "
- " sr.update_time >= x.lower_bound "
- "ORDER BY sr.update_time DESC "
- "LIMIT 1 "
- ";",
- parameters_dict,
+ reporter_reliability = self.get_reporter_reliability(
+ reporter_peerid
)
- data = slips_reputation_cur.fetchone()
- if data is None:
- self.print(f"No slips reputation data for {parameters_dict}")
+ if reporter_reliability is None:
continue
- go_reliability_cur = self.conn.execute(
- "SELECT reliability FROM main.go_reliability WHERE peerid = ? ORDER BY update_time DESC LIMIT 1;",
- (reporter_peerid,),
+ reporter_score, reporter_confidence = self.get_reporter_reputation(
+ reporter_ipaddress
)
- reliability = go_reliability_cur.fetchone()
- if reliability is None:
- self.print(f"No reliability for {reporter_peerid}")
+ if reporter_score is None or reporter_confidence is None:
continue
- reliability = reliability[0]
-
- (
- _,
- _,
- _,
- _,
- _,
- reporter_score,
- reporter_confidence,
- reputation_update_time,
- ) = data
+
reporters_scores.append(
(
report_score,
report_confidence,
- reliability,
+ reporter_reliability,
reporter_score,
reporter_confidence,
)
diff --git a/modules/progress_bar/progress_bar.py b/modules/progress_bar/progress_bar.py
index e0bf40f7a..6ca5692c2 100644
--- a/modules/progress_bar/progress_bar.py
+++ b/modules/progress_bar/progress_bar.py
@@ -33,12 +33,10 @@ class PBar(IModule):
def init(
self,
- stdout: str = None,
pipe: Connection = None,
slips_mode: str = None,
pbar_finished: Event = None,
):
- self.stdout: str = stdout
self.slips_mode: str = slips_mode
self.pipe = pipe
self.done_reading_flows = False
@@ -62,7 +60,7 @@ def initialize_pbar(self, msg: dict):
total=self.total_flows,
leave=True,
colour="green",
- desc="Flows read",
+ desc="Flows Processed",
mininterval=0, # defines how long to wait between each refresh.
unit=" flow",
smoothing=1,
diff --git a/modules/rnn_cc_detection/rnn_cc_detection.py b/modules/rnn_cc_detection/rnn_cc_detection.py
index 2eec94661..68d50e6c1 100644
--- a/modules/rnn_cc_detection/rnn_cc_detection.py
+++ b/modules/rnn_cc_detection/rnn_cc_detection.py
@@ -2,7 +2,6 @@
import json
from typing import Dict
import numpy as np
-from typing import Optional
from tensorflow.keras.models import load_model
from slips_files.common.slips_utils import utils
@@ -186,23 +185,21 @@ def handle_new_letters(self, msg: Dict):
if "tcp" not in tupleid.lower():
return
-
+
if "established" not in state.lower():
return
# to reduce false positives
threshold = 0.99
# function to convert each letter of behavioral model to ascii
- behavioral_model = self.convert_input_for_module(
- pre_behavioral_model
- )
+ behavioral_model = self.convert_input_for_module(pre_behavioral_model)
# predict the score of behavioral model being c&c channel
self.print(
f"predicting the sequence: {pre_behavioral_model}",
3,
0,
)
- score = self.tcpmodel.predict(behavioral_model, verbose = 0)
+ score = self.tcpmodel.predict(behavioral_model, verbose=0)
self.print(
f" >> sequence: {pre_behavioral_model}. "
f"final prediction score: {score[0][0]:.20f}",
@@ -216,9 +213,7 @@ def handle_new_letters(self, msg: Dict):
if len(pre_behavioral_model) >= threshold_confidence:
confidence = 1
else:
- confidence = (
- len(pre_behavioral_model) / threshold_confidence
- )
+ confidence = len(pre_behavioral_model) / threshold_confidence
uid = msg["uid"]
stime = flow["starttime"]
self.set_evidence_cc_channel(
@@ -240,7 +235,6 @@ def handle_new_letters(self, msg: Dict):
# detection
self.db.publish("check_jarm_hash", json.dumps(to_send))
-
def handle_tw_closed(self, msg: Dict):
"""handles msgs from the tw_closed channel"""
profileid_tw = msg["data"].split("_")
@@ -263,5 +257,6 @@ def pre_main(self):
def main(self):
if msg := self.get_msg("new_letters"):
self.handle_new_letters(msg)
- elif msg := self.get_msg("tw_closed"):
+
+ if msg := self.get_msg("tw_closed"):
self.handle_tw_closed(msg)
diff --git a/modules/rnn_cc_detection/strato_letters_exporter.py b/modules/rnn_cc_detection/strato_letters_exporter.py
index a4a4785a3..7cd357cdf 100644
--- a/modules/rnn_cc_detection/strato_letters_exporter.py
+++ b/modules/rnn_cc_detection/strato_letters_exporter.py
@@ -55,4 +55,4 @@ def export(self, profileid: str, twid: str):
outtuple: str
info: List[str, List[float]]
letters = info[0]
- writer.writerow([f"{saddr}-{outtuple}", letters])
+ writer.writerow([f"{saddr}-{outtuple}-{twid}", letters])
diff --git a/modules/timeline/timeline.py b/modules/timeline/timeline.py
index b2ba1b8d1..ca99e0b65 100644
--- a/modules/timeline/timeline.py
+++ b/modules/timeline/timeline.py
@@ -2,6 +2,7 @@
import sys
import time
import json
+from typing import Any
from slips_files.common.parsers.config_parser import ConfigParser
from slips_files.common.slips_utils import utils
@@ -29,335 +30,305 @@ def init(self):
self.is_human_timestamp = conf.timeline_human_timestamp()
self.analysis_direction = conf.analysis_direction()
- def process_timestamp(self, timestamp: float) -> str:
+ def convert_timestamp_to_slips_format(self, timestamp: float) -> str:
if self.is_human_timestamp:
timestamp = utils.convert_format(timestamp, utils.alerts_format)
return str(timestamp)
+ def validate_bytes(self, bytes: Any) -> int:
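+        # byte counts may come in as non-int values from some flow readers;
+        # treat anything that isn't an int as 0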
+ if not isinstance(bytes, int):
+ bytes = 0
+ return bytes
+
+ def is_inbound_traffic(self, profileid, daddr) -> bool:
+ """return True if profileid's IP is the same as the daddr"""
+ profile_ip = profileid.split("_")[1]
+ return self.analysis_direction == "all" and str(daddr) == str(
+ profile_ip
+ )
+
+ def process_dns_altflow(self, alt_flow: dict):
+ answer = alt_flow["answers"]
+ if "NXDOMAIN" in alt_flow["rcode_name"]:
+ answer = "NXDOMAIN"
+ dns_activity = {
+ "query": alt_flow["query"],
+ "answers": answer,
+ }
+ alt_activity = {
+ "info": dns_activity,
+ "critical warning": "",
+ }
+ return alt_activity
+
+ def process_http_altflow(self, alt_flow: dict):
+ http_data_all = {
+ "Request": alt_flow["method"]
+ + " http://"
+ + alt_flow["host"]
+ + alt_flow["uri"],
+ "Status Code": str(alt_flow["status_code"])
+ + "/"
+ + alt_flow["status_msg"],
+ "MIME": str(alt_flow["resp_mime_types"]),
+ "UA": alt_flow["user_agent"],
+ }
+        # if any of the fields are empty, do not include them
+ http_data = {
+ k: v for k, v in http_data_all.items() if v != "" and v != "/"
+ }
+ return {"info": http_data}
+
+ def process_ssl_altflow(self, alt_flow: dict):
+ if alt_flow["validation_status"] == "ok":
+ validation = "Yes"
+ resumed = "False"
+ elif not alt_flow["validation_status"] and alt_flow["resumed"] is True:
+            # If there is no validation and it is a resumed ssl,
+            # it means that there was a previous connection with
+            # the validation data, so we cannot say for sure.
+ validation = "??"
+ resumed = "True"
+ else:
+ # If the validation is not ok and not empty
+ validation = "No"
+ resumed = "False"
+ # if there is no CN
+ subject = (
+ alt_flow["subject"].split(",")[0]
+ if alt_flow["subject"]
+ else "????"
+ )
+ # We put server_name instead of dns resolution
+ ssl_activity = {
+ "server_name": subject,
+ "trusted": validation,
+ "resumed": resumed,
+ "version": alt_flow["version"],
+ "dns_resolution": alt_flow["server_name"],
+ }
+ return {"info": ssl_activity}
+
+ def process_ssh_altflow(self, alt_flow: dict):
+ success = (
+ "Successful" if alt_flow["auth_success"] else "Not Successful"
+ )
+ ssh_activity = {
+ "login": success,
+ "auth_attempts": alt_flow["auth_attempts"],
+ "client": alt_flow["client"],
+ "server": alt_flow["client"],
+ }
+ return {"info": ssh_activity}
+
+ def process_altflow(self, profileid, twid, uid) -> dict:
+ alt_flow: dict = self.db.get_altflow_from_uid(profileid, twid, uid)
+ altflow_info = {"info": ""}
+
+ if not alt_flow:
+ return altflow_info
+
+ flow_type = alt_flow["type_"]
+ flow_type_map = {
+ "dns": self.process_dns_altflow,
+ "http": self.process_http_altflow,
+ "ssl": self.process_ssl_altflow,
+ "ssh": self.process_ssh_altflow,
+ }
+ try:
+ altflow_info = flow_type_map[flow_type](alt_flow)
+ except KeyError:
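+            # flow types without a handler keep the default empty info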
+ pass
+ return altflow_info
+
+ def get_dns_resolution(self, ip):
+ """
+ returns a list or a str with the dns resolution of the given ip
+ """
+ dns_resolution: dict = self.db.get_dns_resolution(ip)
+ dns_resolution: list = dns_resolution.get("domains", [])
+
+        # take only the last resolution if there are more than 3,
+        # because otherwise it does not fit in the timeline.
+ if len(dns_resolution) > 3:
+ dns_resolution = dns_resolution[-1]
+
+ if not dns_resolution:
+ dns_resolution = "????"
+ return dns_resolution
+
+ def process_tcp_udp_flow(
+ self, profileid: str, dport_name: str, flow: dict
+ ):
+ dur = round(float(flow["dur"]), 3)
+ daddr = flow["daddr"]
+ state = flow["state"]
+ stime = flow["ts"]
+ dport = flow["dport"]
+ proto = flow["proto"].upper()
+ sbytes = self.validate_bytes(flow["sbytes"])
+ allbytes = self.validate_bytes(flow["allbytes"])
+ timestamp_human = self.convert_timestamp_to_slips_format(stime)
+
+ critical_warning_dport_name = ""
+ if not dport_name:
+ dport_name = "????"
+ critical_warning_dport_name = (
+ "Protocol not recognized by Slips nor Zeek."
+ )
+
+ activity = {
+ "timestamp": timestamp_human,
+ "dport_name": dport_name,
+ "preposition": (
+ "from" if self.is_inbound_traffic(profileid, daddr) else "to"
+ ),
+ "dns_resolution": self.get_dns_resolution(daddr),
+ "daddr": daddr,
+ "dport/proto": f"{str(dport)}/{proto}",
+ "state": state,
+ "warning": "No data exchange!" if not allbytes else "",
+ "info": "",
+ "sent": sbytes,
+ "recv": allbytes - sbytes,
+ "tot": allbytes,
+ "duration": dur,
+ "critical warning": critical_warning_dport_name,
+ }
+ return activity
+
+ def process_icmp_flow(self, profileid: str, dport_name: str, flow: dict):
+ sport = flow["sport"]
+ dport = flow["dport"]
+ stime = flow["ts"]
+ saddr = flow["saddr"]
+ daddr = flow["daddr"]
+ dur = round(float(flow["dur"]), 3)
+ allbytes = self.validate_bytes(flow["allbytes"])
+ timestamp_human = self.convert_timestamp_to_slips_format(stime)
+
+ extra_info = {}
+ warning = ""
+
+ # Zeek format
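+        # zeek stores the ICMP type as an int in the sport field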
+ if isinstance(sport, int):
+ icmp_types = {
+ 11: "ICMP Time Exceeded in Transit",
+ 3: "ICMP Destination Net Unreachable",
+ 8: "PING echo",
+ }
+ try:
+ dport_name = icmp_types[sport]
+ except KeyError:
+ dport_name = "ICMP Unknown type"
+ extra_info["type"] = f"0x{str(sport)}"
+
+ # Argus format
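+        # argus encodes the ICMP type/code as a hex string in the sport field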
+ elif isinstance(sport, str):
+ icmp_types_str = {
+ "0x0008": "PING echo",
+ "0x0103": "ICMP Host Unreachable",
+ "0x0303": "ICMP Port Unreachable",
+ "0x000b": "",
+ "0x0003": "ICMP Destination Net Unreachable",
+ }
+
+ dport_name = icmp_types_str.get(sport, "ICMP Unknown type")
+
+ if sport == "0x0303":
+ warning = f"Unreachable port is {int(dport, 16)}"
+
+ activity = {
+ "timestamp": timestamp_human,
+ "dport_name": dport_name,
+ "preposition": "from",
+ "saddr": saddr,
+ "size": allbytes,
+ "duration": dur,
+ }
+
+ extra_info.update(
+ {
+ "dns_resolution": "",
+ "daddr": daddr,
+ "dport/proto": f"{sport}/ICMP",
+ "state": "",
+ "warning": warning,
+ "sent": "",
+ "recv": "",
+ "tot": "",
+ "critical warning": "",
+ }
+ )
+ activity.update(extra_info)
+ return activity
+
+ def process_igmp_flow(self, profileid: str, dport_name: str, flow: dict):
+ stime = flow["ts"]
+ dur = round(float(flow["dur"]), 3)
+ saddr = flow["daddr"]
+ allbytes = self.validate_bytes(flow["allbytes"])
+ timestamp_human = self.convert_timestamp_to_slips_format(stime)
+ return {
+ "timestamp": timestamp_human,
+ "dport_name": "IGMP",
+ "preposition": "from",
+ "saddr": saddr,
+ "size": allbytes,
+ "duration": dur,
+ }
+
+ def interpret_dport(self, flow) -> str:
+ """tries to get a meaningful name of the dport used
+ in the given flow"""
+ dport_name = flow.get("appproto", "")
+        # suricata sets appproto to "failed" when it can't detect the protocol
+ if not dport_name or dport_name == "failed":
+ dport = flow["dport"]
+ proto = flow["proto"]
+ dport_name = self.db.get_port_info(f"{dport}/{proto.lower()}")
+ dport_name = "" if not dport_name else dport_name.upper()
+ return dport_name
+
def process_flow(self, profileid, twid, flow, timestamp: float):
"""
Process the received flow for this profileid and twid
so its printed by the logprocess later
"""
- timestamp_human = self.process_timestamp(timestamp)
-
try:
- # Convert the common fields to something that can be interpreted
uid = next(iter(flow))
- flow_dict = json.loads(flow[uid])
- profile_ip = profileid.split("_")[1]
- dur = round(float(flow_dict["dur"]), 3)
- saddr = flow_dict["saddr"]
- sport = flow_dict["sport"]
- daddr = flow_dict["daddr"]
- dport = flow_dict["dport"]
- proto = flow_dict["proto"].upper()
- dport_name = flow_dict.get("appproto", "")
- # suricata does this
- dport_name = "" if dport_name == "failed" else dport_name
- if not dport_name:
- dport_name = self.db.get_port_info(
- f"{str(dport)}/{proto.lower()}"
- )
- if dport_name:
- dport_name = dport_name.upper()
- else:
- dport_name = dport_name.upper()
- state = flow_dict["state"]
- allbytes = flow_dict["allbytes"]
- if not isinstance(allbytes, int):
- allbytes = 0
-
- # allbytes_human are sorted wrong in the interface, thus we sticked to original byte size.
- # # Convert the bytes into human readable
- # if int(allbytes) < 1024:
- # # In bytes
- # allbytes_human = '{:.2f}{}'.format(float(allbytes), 'b')
- # elif int(allbytes) > 1024 and int(allbytes) < 1048576:
- # # In Kb
- # allbytes_human = '{:.2f}{}'.format(
- # float(allbytes) / 1024, 'Kb'
- # )
- # elif int(allbytes) > 1048576 and int(allbytes) < 1073741824:
- # # In Mb
- # allbytes_human = '{:.2f}{}'.format(
- # float(allbytes) / 1024 / 1024, 'Mb'
- # )
- # elif int(allbytes) > 1073741824:
- # # In Bg
- # allbytes_human = '{:.2f}{}'.format(
- # float(allbytes) / 1024 / 1024 / 1024, 'Gb'
- # )
-
- sbytes = flow_dict["sbytes"]
- if not isinstance(sbytes, int):
- sbytes = 0
-
- # Now that we have the flow processed. Try to interpret it and create the activity line
- # Record Activity
- activity = {}
+ flow: dict = json.loads(flow[uid])
+ proto = flow["proto"].upper()
+ dport_name = self.interpret_dport(flow)
+            # interpret the given flow and create an activity line to
+ # display in slips Web interface/Kalipso
# Change the format of timeline in the case of inbound
- # flows for external IP, i.e direction 'all' and destination IP == profile IP.
+ # flows for external IP, i.e direction 'all' and destination IP
+ # == profile IP.
# If not changed, it would have printed 'IP1 https asked to IP1'.
- if "TCP" in proto or "UDP" in proto:
- warning_empty = ""
- critical_warning_dport_name = ""
- if self.analysis_direction == "all" and str(daddr) == str(
- profile_ip
- ):
- dns_resolution = self.db.get_dns_resolution(daddr)
- dns_resolution = dns_resolution.get("domains", [])
-
- # we should take only one resolution, if there is more than 3, because otherwise it does not fit in the timeline.
- if len(dns_resolution) > 3:
- dns_resolution = dns_resolution[-1]
-
- if not dns_resolution:
- dns_resolution = "????"
-
- # Check if the connection sent anything!
- if not allbytes:
- warning_empty = "No data exchange!"
-
- # Check if slips and zeek know dport_name!
- if not dport_name:
- dport_name = "????"
- critical_warning_dport_name = (
- "Protocol not recognized by Slips nor Zeek."
- )
-
- activity = {
- "timestamp": timestamp_human,
- "dport_name": dport_name,
- "preposition": "from",
- "dns_resolution": dns_resolution,
- "saddr": saddr,
- "daddr": daddr,
- "dport/proto": f"{str(dport)}/{proto}",
- "state": state,
- "warning": warning_empty,
- "info": "",
- "sent": sbytes,
- "recv": allbytes - sbytes,
- "tot": allbytes,
- "duration": dur,
- "critical warning": critical_warning_dport_name,
- }
-
- else:
- # Check if the connection sent anything!
- if not allbytes:
- warning_empty = "No data exchange!"
-
- # Check if slips and zeek know dport_name!
- if not dport_name:
- dport_name = "????"
- critical_warning_dport_name = (
- "Protocol not recognized by Slips nor Zeek."
- )
- dns_resolution = self.db.get_dns_resolution(daddr)
- dns_resolution = dns_resolution.get("domains", [])
-
- # we should take only one resolution, if there is more than 3, because otherwise it does not fit in the timeline.
- if len(dns_resolution) > 3:
- dns_resolution = dns_resolution[-1]
-
- if not dns_resolution:
- dns_resolution = "????"
- activity = {
- "timestamp": timestamp_human,
- "dport_name": dport_name,
- "preposition": "to",
- "dns_resolution": dns_resolution,
- "daddr": daddr,
- "dport/proto": f"{str(dport)}/{proto}",
- "state": state,
- "warning": warning_empty,
- "info": "",
- "sent": sbytes,
- "recv": allbytes - sbytes,
- "tot": allbytes,
- "duration": dur,
- "critical warning": critical_warning_dport_name,
- }
-
- elif "ICMP" in proto:
- extra_info = {}
- warning = ""
- if isinstance(sport, int):
- # zeek puts the number
- if sport == 11:
- dport_name = "ICMP Time Excedded in Transit"
-
- elif sport == 3:
- dport_name = "ICMP Destination Net Unreachable"
-
- elif sport == 8:
- dport_name = "PING echo"
-
- else:
- dport_name = "ICMP Unknown type"
- extra_info = {
- "type": f"0x{str(sport)}",
- }
-
- elif isinstance(sport, str):
- # Argus puts in hex the values of the ICMP
- if "0x0008" in sport:
- dport_name = "PING echo"
- elif "0x0103" in sport:
- dport_name = "ICMP Host Unreachable"
- elif "0x0303" in sport:
- dport_name = "ICMP Port Unreachable"
- warning = f"unreachable port is {int(dport, 16)}"
- elif "0x000b" in sport:
- dport_name = ""
- elif "0x0003" in sport:
- dport_name = "ICMP Destination Net Unreachable"
- else:
- dport_name = "ICMP Unknown type"
-
- activity = {
- "timestamp": timestamp_human,
- "dport_name": dport_name,
- "preposition": "from",
- "saddr": saddr,
- "size": allbytes,
- "duration": dur,
- }
-
- extra_info.update(
- {
- "dns_resolution": "",
- "daddr": daddr,
- "dport/proto": f"{sport}/ICMP",
- "state": "",
- "warning": warning,
- "sent": "",
- "recv": "",
- "tot": "",
- "critical warning": "",
- }
- )
-
- activity.update(extra_info)
-
- elif "IGMP" in proto:
- dport_name = "IGMP"
- activity = {
- "timestamp": timestamp_human,
- "dport_name": dport_name,
- "preposition": "from",
- "saddr": saddr,
- "size": allbytes,
- "duration": dur,
- }
+ proto_handlers = {
+ "TCP": self.process_tcp_udp_flow,
+ "UDP": self.process_tcp_udp_flow,
+ "ICMP": self.process_icmp_flow,
+ "IPV6-ICMP": self.process_icmp_flow,
+ "IPV4-ICMP": self.process_icmp_flow,
+ "IGMP": self.process_igmp_flow,
+ }
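+            # protocols without a handler get an empty activity dict; only
+            # the alt flow info (if any) ends up in the timeline line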
+ if proto in proto_handlers:
+ activity = proto_handlers[proto](profileid, dport_name, flow)
+ else:
+ activity = {}
#################################
# Now process the alternative flows
- # Sometimes we need to wait a little to give time to Zeek to find the related flow since they are read very fast together.
+ # Sometimes we need to wait a little to give time to Zeek to find
+ # the related flow since they are read very fast together.
# This should be improved algorithmically probably
time.sleep(0.05)
- alt_flow: dict = self.db.get_altflow_from_uid(profileid, twid, uid)
-
- alt_activity = {}
- http_data = {}
- if alt_flow:
- flow_type = alt_flow["type_"]
- self.print(
- f"Received an altflow of type {flow_type}: {alt_flow}",
- 3,
- 0,
- )
- if "dns" in flow_type:
- answer = alt_flow["answers"]
- if "NXDOMAIN" in alt_flow["rcode_name"]:
- answer = "NXDOMAIN"
- dns_activity = {
- "query": alt_flow["query"],
- "answers": answer,
- }
- alt_activity = {
- "info": dns_activity,
- "critical warning": "",
- }
- elif flow_type == "http":
- http_data_all = {
- "Request": alt_flow["method"]
- + " http://"
- + alt_flow["host"]
- + alt_flow["uri"],
- "Status Code": str(alt_flow["status_code"])
- + "/"
- + alt_flow["status_msg"],
- "MIME": str(alt_flow["resp_mime_types"]),
- "UA": alt_flow["user_agent"],
- }
- # if any of fields are empty, do not include them
- http_data = {
- k: v
- for k, v in http_data_all.items()
- if v != "" and v != "/"
- }
- alt_activity = {"info": http_data}
- elif flow_type == "ssl":
- if alt_flow["validation_status"] == "ok":
- validation = "Yes"
- resumed = "False"
- elif (
- not alt_flow["validation_status"]
- and alt_flow["resumed"] is True
- ):
- # If there is no validation and it is a resumed ssl.
- # It means that there was a previous connection with
- # the validation data. We can not say Say it
- validation = "??"
- resumed = "True"
- else:
- # If the validation is not ok and not empty
- validation = "No"
- resumed = "False"
- # if there is no CN
- subject = (
- alt_flow["subject"].split(",")[0]
- if alt_flow["subject"]
- else "????"
- )
- # We put server_name instead of dns resolution
- ssl_activity = {
- "server_name": subject,
- "trusted": validation,
- "resumed": resumed,
- "version": alt_flow["version"],
- "dns_resolution": alt_flow["server_name"],
- }
- alt_activity = {"info": ssl_activity}
- elif flow_type == "ssh":
- success = (
- "Successful"
- if alt_flow["auth_success"]
- else "Not Successful"
- )
- ssh_activity = {
- "login": success,
- "auth_attempts": alt_flow["auth_attempts"],
- "client": alt_flow["client"],
- "server": alt_flow["client"],
- }
- alt_activity = {"info": ssh_activity}
-
- elif activity:
- alt_activity = {"info": ""}
-
- # Combine the activity of normal flows and activity of alternative flows and store in the DB for this profileid and twid
+ alt_activity = self.process_altflow(profileid, twid, uid)
+ # Combine the activity of normal flows and activity of alternative
+ # flows and store in the DB for this profileid and twid
activity.update(alt_activity)
- if activity:
- self.db.add_timeline_line(profileid, twid, activity, timestamp)
- self.print(
- f"Activity of Profileid: {profileid}, TWid {twid}: "
- f"{activity}",
- 3,
- 0,
- )
+ self.db.add_timeline_line(profileid, twid, activity, timestamp)
except Exception:
exception_line = sys.exc_info()[2].tb_lineno
@@ -374,7 +345,6 @@ def main(self):
# Main loop function
if msg := self.get_msg("new_flow"):
mdata = msg["data"]
- # Convert from json to dict
mdata = json.loads(mdata)
profileid = mdata["profileid"]
twid = mdata["twid"]
diff --git a/modules/update_manager/update_manager.py b/modules/update_manager/update_manager.py
index 860696f3d..7b2589aac 100644
--- a/modules/update_manager/update_manager.py
+++ b/modules/update_manager/update_manager.py
@@ -12,7 +12,6 @@
)
import requests
-import validators
from exclusiveprocess import (
Lock,
CannotAcquireLock,
@@ -859,7 +858,8 @@ def parse_ja3_feed(self, url, ja3_feed_path: str) -> bool:
)
except IndexError:
self.print(
- f"IndexError Description column: {description_column}. Line: {line}",
+ f"IndexError Description column: "
+ f"{description_column}. Line: {line}",
0,
1,
)
@@ -883,7 +883,8 @@ def parse_ja3_feed(self, url, ja3_feed_path: str) -> bool:
)
else:
self.print(
- f"The data {data} is not valid. It was found in {filename}.",
+ f"The data {data} is not valid. "
+ f"It was found in {filename}.",
3,
3,
)
@@ -960,7 +961,7 @@ def parse_json_ti_feed(self, link_to_download, ti_file_path: str) -> bool:
if diff > self.interval:
continue
domain = ioc["DomainAddress"]
- if not validators.domain(domain):
+ if not utils.is_valid_domain(domain):
continue
malicious_domains_dict[domain] = json.dumps(
{
diff --git a/slips/daemon.py b/slips/daemon.py
index 1f3762d39..de909483f 100644
--- a/slips/daemon.py
+++ b/slips/daemon.py
@@ -28,12 +28,15 @@ def __init__(self, slips):
self.read_configuration()
if not self.slips.args.stopdaemon:
self.prepare_output_dir()
+ self.pid = self.read_pidfile()
+
+ def read_pidfile(self) -> Optional[int]:
# Get the pid from pidfile
try:
- with open(self.pidfile, "r") as pidfile:
- self.pid = int(pidfile.read().strip())
+ with open(self.pidfile) as pidfile:
+ return int(pidfile.read().strip())
except (IOError, FileNotFoundError):
- self.pid = None
+ return None
def print(self, text):
"""Prints output to logsfile specified in slips.yaml"""
@@ -45,7 +48,8 @@ def create_std_streams(self):
std_streams = [self.stderr, self.stdout, self.logsfile]
for file in std_streams:
- # we don't want to clear the stdout or the logsfile when we stop the daemon using -S
+ # we don't want to clear the stdout or the logsfile when we stop
+ # the daemon using -S
if "-S" in sys.argv and file != self.stderr:
continue
# create the file if it doesn't exist or clear it if it exists
@@ -297,7 +301,7 @@ def stop(self):
self.logsfile = "slips.log"
self.prepare_std_streams(output_dir)
self.logger = self.slips.proc_man.start_output_process(
- self.stdout, self.stderr, self.logsfile
+ self.stderr, self.logsfile, stdout=self.stdout
)
self.slips.add_observer(self.logger)
self.db = DBManager(
diff --git a/slips/main.py b/slips/main.py
index cdee34ce2..15929b374 100644
--- a/slips/main.py
+++ b/slips/main.py
@@ -48,6 +48,7 @@ def __init__(self, testing=False):
self.input_type = False
self.proc_man = ProcessManager(self)
# in testing mode we manually set the following params
+ # TODO use mocks instead of this testing param
if not testing:
self.args = self.conf.get_args()
self.pid = os.getpid()
@@ -499,7 +500,7 @@ def setup_print_levels(self):
if self.args.debug is None:
self.args.debug = self.conf.debug()
- # Limit any debuggisity to > 0
+ # Debug levels must be > 0
self.args.debug = max(self.args.debug, 0)
def print_version(self):
@@ -570,46 +571,48 @@ def is_total_flows_unknown(self) -> bool:
or self.input_type in ("stdin", "pcap", "interface")
)
+ def get_slips_logfile(self) -> str:
+ if self.mode == "daemonized":
+ return self.daemon.stdout
+ elif self.mode == "interactive":
+ return os.path.join(self.args.output, "slips.log")
+
+ def get_slips_error_file(self) -> str:
+ if self.mode == "daemonized":
+ return self.daemon.stderr
+ elif self.mode == "interactive":
+ return os.path.join(self.args.output, "errors.log")
+
def start(self):
"""Main Slips Function"""
try:
self.print_version()
print("https://stratosphereips.org")
print("-" * 27)
-
self.setup_print_levels()
-
+ stderr: str = self.get_slips_error_file()
+ slips_logfile: str = self.get_slips_logfile()
# if stdout is redirected to a file,
# tell output.py to redirect it's output as well
- (
- current_stdout,
- stderr,
- slips_logfile,
- ) = self.checker.check_output_redirection()
- self.stdout = current_stdout
self.logger = self.proc_man.start_output_process(
- current_stdout, stderr, slips_logfile
+ stderr, slips_logfile
)
self.add_observer(self.logger)
- # get the port that is going to be used for this instance of slips
- if self.args.port:
- self.redis_port = int(self.args.port)
- # close slips if port is in use
- self.redis_man.check_if_port_is_in_use(self.redis_port)
- elif self.args.multiinstance:
- self.redis_port = self.redis_man.get_random_redis_port()
- if not self.redis_port:
- # all ports are unavailable
- inp = input("Press Enter to close all ports.\n")
- if inp == "":
- self.redis_man.close_all_ports()
- self.terminate_slips()
- else:
- # even if this port is in use, it will be overwritten by slips
- self.redis_port = 6379
+ self.redis_port: int = self.redis_man.get_redis_port()
+            # don't start the redis server if it's already running
+ start_redis_server = not utils.is_port_in_use(self.redis_port)
+ try:
+ self.db = DBManager(
+ self.logger,
+ self.args.output,
+ self.redis_port,
+ start_redis_server=start_redis_server,
+ )
+ except RuntimeError as e:
+ self.print(str(e), 1, 1)
+ self.terminate_slips()
- self.db = DBManager(self.logger, self.args.output, self.redis_port)
self.db.set_input_metadata(
{
"output_dir": self.args.output,
diff --git a/slips_files/common/abstracts/flowalerts_analyzer.py b/slips_files/common/abstracts/flowalerts_analyzer.py
index 9eee8d3f1..485d31670 100644
--- a/slips_files/common/abstracts/flowalerts_analyzer.py
+++ b/slips_files/common/abstracts/flowalerts_analyzer.py
@@ -34,11 +34,8 @@ def init(self):
initializing the module
"""
- def get_msg(self, channel_name):
- return self.flowalerts.get_msg(channel_name)
-
@abstractmethod
- def analyze(self) -> bool:
+ def analyze(self, msg: dict) -> bool:
"""
Analyzes a certain flow type and runs all supported detections
returns True if there was a detection
diff --git a/slips_files/common/abstracts/module.py b/slips_files/common/abstracts/module.py
index 5c3cc6ccb..4a9f5684c 100644
--- a/slips_files/common/abstracts/module.py
+++ b/slips_files/common/abstracts/module.py
@@ -2,7 +2,10 @@
import traceback
from abc import ABC, abstractmethod
from multiprocessing import Process, Event
-from typing import Dict
+from typing import (
+ Dict,
+ Optional,
+)
from slips_files.core.output import Output
from slips_files.common.slips_utils import utils
@@ -44,7 +47,7 @@ def __init__(
# set its own channels
# tracks whether or not in the last iteration there was a msg
# received in that channel
- self.channel_tracker = self.init_channel_tracker()
+ self.channel_tracker: Dict[str, dict] = self.init_channel_tracker()
@property
@abstractmethod
@@ -76,7 +79,9 @@ def init_channel_tracker(self) -> Dict[str, bool]:
"""
tracker = {}
for channel_name in self.channels:
- tracker[channel_name] = False
+ tracker[channel_name] = {
+ "msg_received": False,
+ }
return tracker
@abstractmethod
@@ -90,6 +95,15 @@ def init(self, **kwargs):
initializing the module
"""
+ def is_msg_received_in_any_channel(self) -> bool:
+ """
+ return True if a msg was received in any channel of the ones
+ this module is subscribed to
+ """
+ return any(
+ info["msg_received"] for info in self.channel_tracker.values()
+ )
+
def should_stop(self) -> bool:
"""
The module should stop on the following 2 conditions
@@ -98,13 +112,12 @@ def should_stop(self) -> bool:
2. the termination event is set by the process_manager.py
"""
if (
- any(self.channel_tracker.values())
+ self.is_msg_received_in_any_channel()
or not self.termination_event.is_set()
):
# this module is still receiving msgs,
# don't stop
return False
-
return True
def print(self, text, verbose=1, debug=0, log_to_logfiles_only=False):
@@ -157,16 +170,21 @@ def pre_main(self):
executed once before the main loop
"""
- def get_msg(self, channel_name):
- message = self.db.get_message(self.channels[channel_name])
- if utils.is_msg_intended_for(message, channel_name):
- self.channel_tracker[channel_name] = True
+ def get_msg(self, channel: str) -> Optional[dict]:
+ message = self.db.get_message(self.channels[channel])
+ if utils.is_msg_intended_for(message, channel):
+ self.channel_tracker[channel]["msg_received"] = True
+ self.db.incr_msgs_received_in_channel(self.name, channel)
return message
- else:
- self.channel_tracker[channel_name] = False
- return False
- def run(self):
+ self.channel_tracker[channel]["msg_received"] = False
+
+ def print_traceback(self):
+ exception_line = sys.exc_info()[2].tb_lineno
+ self.print(f"Problem in pre_main() line {exception_line}", 0, 1)
+ self.print(traceback.format_exc(), 0, 1)
+
+ def run(self) -> bool:
"""
This is the loop function, it runs non-stop as long as
the module is running
@@ -180,24 +198,32 @@ def run(self):
self.shutdown_gracefully()
return True
except Exception:
- exception_line = sys.exc_info()[2].tb_lineno
- self.print(f"Problem in pre_main() line {exception_line}", 0, 1)
- self.print(traceback.format_exc(), 0, 1)
+ self.print_traceback()
return True
- try:
- while not self.should_stop():
- # keep running main() in a loop as long as the module is
- # online
+ keyboard_int_ctr = 0
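+        # count ctrl+c presses: the first one lets should_stop() decide when
+        # to exit, the second one forces an immediate return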
+ while True:
+ try:
+ if self.should_stop():
+ self.shutdown_gracefully()
+ return True
+
# if a module's main() returns 1, it means there's an
# error and it needs to stop immediately
error: bool = self.main()
if error:
self.shutdown_gracefully()
- except KeyboardInterrupt:
- self.shutdown_gracefully()
- except Exception:
- self.print(f"Problem in {self.name}", 0, 1)
- self.print(traceback.format_exc(), 0, 1)
- return True
+ except KeyboardInterrupt:
+ keyboard_int_ctr += 1
+
+ if keyboard_int_ctr >= 2:
+                    # on the second ctrl+c, Slips stops immediately
+ return True
+
+                # on the first ctrl+c, keep looping until should_stop()
+                # returns True
+ continue
+ except Exception:
+ self.print_traceback()
+ return False
diff --git a/slips_files/common/parsers/config_parser.py b/slips_files/common/parsers/config_parser.py
index ce2de6820..a35c0f5f6 100644
--- a/slips_files/common/parsers/config_parser.py
+++ b/slips_files/common/parsers/config_parser.py
@@ -28,11 +28,8 @@ def read_config_file(self, configfile: str) -> dict:
"""
reads slips configuration file, slips.conf/slips.yaml is the default file
"""
- # try:
with open(configfile) as source:
return yaml.safe_load(source)
- # except (IOError, TypeError, yaml.YAMLError):
- # pass
def get_config_file(self):
"""
diff --git a/slips_files/common/slips_utils.py b/slips_files/common/slips_utils.py
index 5f7d37be3..bb2a00441 100644
--- a/slips_files/common/slips_utils.py
+++ b/slips_files/common/slips_utils.py
@@ -110,6 +110,10 @@ def sanitize(self, input_string):
return sanitized_string
+ def is_valid_domain(self, domain: str) -> bool:
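+        # a domain is considered valid only if tldextract finds both a
+        # registered domain part and a known public suffix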
+ extracted = tldextract.extract(domain)
+ return bool(extracted.domain) and bool(extracted.suffix)
+
def detect_data_type(self, data):
"""
Detects the type of incoming data:
@@ -131,23 +135,11 @@ def detect_data_type(self, data):
if validators.md5(data):
return "md5"
- if validators.domain(data):
- return "domain"
-
- # some ti files have / at the end of domains, remove it
- if data.endswith("/"):
- data = data[:-1]
-
- domain = data
- if domain.startswith("http://"):
- data = data[7:]
- elif domain.startswith("https://"):
- data = data[8:]
+ if validators.url(data):
+ return "url"
- if validators.domain(data):
+ if self.is_valid_domain(data):
return "domain"
- elif "/" in data:
- return "url"
if validators.sha256(data):
return "sha256"
diff --git a/slips_files/core/database/database_manager.py b/slips_files/core/database/database_manager.py
index fe7b02d04..047280be5 100644
--- a/slips_files/core/database/database_manager.py
+++ b/slips_files/core/database/database_manager.py
@@ -33,6 +33,7 @@ def __init__(
self.rdb = RedisDB(
self.logger, redis_port, start_redis_server, **kwargs
)
+
# in some rare cases we don't wanna start sqlite,
# like when using -S
# we just want to connect to redis to get the PIDs
@@ -551,6 +552,18 @@ def get_outtuples_from_profile_tw(self, *args, **kwargs):
def get_intuples_from_profile_tw(self, *args, **kwargs):
return self.rdb.get_intuples_from_profile_tw(*args, **kwargs)
+ def incr_msgs_received_in_channel(self, *args, **kwargs):
+ return self.rdb.incr_msgs_received_in_channel(*args, **kwargs)
+
+ def get_enabled_modules(self, *args, **kwargs):
+ return self.rdb.get_enabled_modules(*args, **kwargs)
+
+ def get_msgs_received_at_runtime(self, *args, **kwargs):
+ return self.rdb.get_msgs_received_at_runtime(*args, **kwargs)
+
+ def get_msgs_published_in_channel(self, *args, **kwargs):
+ return self.rdb.get_msgs_published_in_channel(*args, **kwargs)
+
def get_dhcp_flows(self, *args, **kwargs):
return self.rdb.get_dhcp_flows(*args, **kwargs)
diff --git a/slips_files/core/database/redis_db/database.py b/slips_files/core/database/redis_db/database.py
index e65fb2843..57959839c 100644
--- a/slips_files/core/database/redis_db/database.py
+++ b/slips_files/core/database/redis_db/database.py
@@ -15,7 +15,12 @@
import ipaddress
import sys
import validators
-from typing import List, Dict, Optional
+from typing import (
+ List,
+ Dict,
+ Optional,
+ Tuple,
+)
RUNNING_IN_DOCKER = os.environ.get("IS_IN_A_DOCKER_CONTAINER", False)
@@ -114,17 +119,22 @@ def __new__(
if cls.redis_port not in cls._instances:
cls._set_redis_options()
cls._read_configuration()
- if cls.start():
- cls._instances[cls.redis_port] = super().__new__(cls)
- # By default the slips internal time is
- # 0 until we receive something
- cls.set_slips_internal_time(0)
- if not cls.get_slips_start_time():
- cls._set_slips_start_time()
- # useful for debugging using 'CLIENT LIST' redis cmd
- cls.r.client_setname("Slips-DB")
- else:
- return False
+ initialized, err = cls.init_redis_server()
+ if not initialized:
+ raise RuntimeError(
+ f"Failed to connect to the redis server "
+ f"on port {cls.redis_port}: {err}"
+ )
+
+ cls._instances[cls.redis_port] = super().__new__(cls)
+ # By default the slips internal time is
+ # 0 until we receive something
+ cls.set_slips_internal_time(0)
+ if not cls.get_slips_start_time():
+ cls._set_slips_start_time()
+ # useful for debugging using 'CLIENT LIST' redis cmd
+ cls.r.client_setname("Slips-DB")
+
return cls._instances[cls.redis_port]
def __init__(
@@ -200,11 +210,21 @@ def get_slips_start_time(cls):
return start_time
@classmethod
- def start(cls) -> bool:
- """Flushes and Starts the DB and"""
+ def init_redis_server(cls) -> Tuple[bool, str]:
+ """
+        Starts the redis server, connects to it, and adjusts redis
+        options.
+ Returns a tuple of (connection status, error message).
+ """
try:
- if not cls.connect_to_redis_server():
- return False
+ if cls.start_server:
+ # starts the redis server using cli.
+ # we don't need that when using -k
+ cls._start_a_redis_server()
+
+ connected, err = cls.connect_to_redis_server()
+ if not connected:
+ return False, err
if (
cls.deletePrevdb
@@ -230,15 +250,19 @@ def start(cls) -> bool:
# occurs without throwing errors in slips
# Even if the DB is not deleted. We need to delete some temp data
cls.r.delete("zeekfiles")
- return True
+ return True, ""
+ except RuntimeError as err:
+ return False, str(err)
+
except redis.exceptions.ConnectionError as ex:
- print(
- f"[DB] Can't connect to redis on port {cls.redis_port}: {ex}"
+ return False, (
+ f"Redis ConnectionError: "
+ f"Can't connect to redis on port "
+ f"{cls.redis_port}: {ex}"
)
- return False
@staticmethod
- def start_redis_instance(port: int, db: int) -> redis.StrictRedis:
+ def _connect(port: int, db: int) -> redis.StrictRedis:
# set health_check_interval to avoid redis ConnectionReset errors:
# if the connection is idle for more than health_check_interval seconds,
# a round trip PING/PONG will be attempted before next redis cmd.
@@ -259,21 +283,45 @@ def start_redis_instance(port: int, db: int) -> redis.StrictRedis:
)
@classmethod
- def connect_to_redis_server(cls) -> bool:
+ def _start_a_redis_server(cls) -> bool:
+ cmd = (
+ f"redis-server {cls._conf_file} --port {cls.redis_port} "
+ f" --daemonize yes"
+ )
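+        # --daemonize yes makes redis fork to the background, so
+        # communicate() returns right after the server starts (or fails)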
+ process = subprocess.Popen(
+ cmd,
+ cwd=os.getcwd(),
+ shell=True,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ )
+ stdout, stderr = process.communicate()
+ stderr = stderr.decode("utf-8")
+ stdout = stdout.decode("utf-8")
+
+        # check the return code to make sure redis started properly.
+        # if the redis server is already in use, the return code will be 0,
+        # but we don't care because we checked it in main before starting
+        # the DBManager()
+ if process.returncode != 0:
+ raise RuntimeError(
+ f"database._start_a_redis_server: "
+ f"Redis did not start properly.\n{stderr}\n{stdout}"
+ )
+
+ return True
+
+ @classmethod
+ def connect_to_redis_server(cls) -> Tuple[bool, str]:
"""
Connects to the given port and Sets r and rcache
+ Returns a tuple of (bool, error message).
"""
- if cls.start_server:
- # starts the redis server using cli. we don't need that when using -k
- os.system(
- f"redis-server {cls._conf_file} --port {cls.redis_port} > /dev/null 2>&1"
- )
try:
# db 0 changes everytime we run slips
- cls.r = cls.start_redis_instance(cls.redis_port, 0)
-
+ cls.r = cls._connect(cls.redis_port, 0)
# port 6379 db 0 is cache, delete it using -cc flag
- cls.rcache = cls.start_redis_instance(6379, 1)
+ cls.rcache = cls._connect(6379, 1)
# fix ConnectionRefused error by giving redis time to open
time.sleep(1)
@@ -282,9 +330,9 @@ def connect_to_redis_server(cls) -> bool:
# when you try to execute a command on the server.
# so make sure it's established first
cls.r.client_list()
- return True
- except redis.exceptions.ConnectionError:
- return False
+ return True, ""
+ except Exception as e:
+ return False, f"database.connect_to_redis_server: {e}"
@classmethod
def close_redis_server(cls, redis_port):
@@ -318,9 +366,15 @@ def _set_slips_start_time(cls):
now = utils.convert_format(datetime.now(), utils.alerts_format)
cls.r.set("slips_start_time", now)
- def publish(self, channel, data):
- """Publish something"""
- self.r.publish(channel, data)
+ def publish(self, channel, msg):
+ """Publish a msg in the given channel"""
+ # keeps track of how many msgs were published in the given channel
+ self.r.hincrby("msgs_published_at_runtime", channel, 1)
+ self.r.publish(channel, msg)
+
+ def get_msgs_published_in_channel(self, channel: str) -> int:
+ """returns the number of msgs published in a channel"""
+ return self.r.hget("msgs_published_at_runtime", channel)
def subscribe(self, channel: str, ignore_subscribe_messages=True):
"""Subscribe to channel"""
@@ -499,7 +553,13 @@ def get_label_count(self, label):
"""
return self.r.zscore("labels", label)
- def get_disabled_modules(self) -> dict:
+ def get_enabled_modules(self) -> List[str]:
+ """
+ Returns a list of the loaded/enabled modules
+ """
+ return self.r.hkeys("PIDs")
+
+ def get_disabled_modules(self) -> List[str]:
if disabled_modules := self.r.hget("analysis", "disabled_modules"):
return json.loads(disabled_modules)
else:
@@ -1163,11 +1223,11 @@ def get_asn_cache(self, first_octet=False):
else:
return self.rcache.hgetall("cached_asn")
- def store_pid(self, process, pid):
+ def store_pid(self, process: str, pid: int):
"""
Stores each started process or module with it's PID
:param pid: int
- :param process: str
+ :param process: module name, str
"""
self.r.hset("PIDs", process, pid)
@@ -1415,3 +1475,16 @@ def store_std_file(self, **kwargs):
def get_stdfile(self, file_type):
return self.r.get(file_type)
+
+ def incr_msgs_received_in_channel(self, module: str, channel: str):
+ """increments the number of msgs received by a module in the given
+ channel by 1"""
+ self.r.hincrby(f"{module}_msgs_received_at_runtime", channel, 1)
+
+ def get_msgs_received_at_runtime(self, module: str) -> Dict[str, int]:
+ """
+        returns the channels this module is subscribed to, and how
+ many msgs were received on each one
+ :returns: {channel_name: number_of_msgs, ...}
+ """
+ return self.r.hgetall(f"{module}_msgs_received_at_runtime")
diff --git a/slips_files/core/helpers/checker.py b/slips_files/core/helpers/checker.py
index a72d82149..1f662d7a1 100644
--- a/slips_files/core/helpers/checker.py
+++ b/slips_files/core/helpers/checker.py
@@ -1,7 +1,5 @@
import os
-import subprocess
import sys
-from typing import Tuple
import psutil
@@ -34,7 +32,8 @@ def check_input_type(self) -> tuple:
if self.main.args.input_module:
input_information = "input_module"
input_type = self.main.args.input_module
- # this is the default value of the type of flows slips reads from a module
+ # this is the default value of the type of flows slips reads from
+ # a module
line_type = "zeek"
return input_type, input_information, line_type
@@ -170,12 +169,16 @@ def delete_blocking_chain(self):
child.kill()
def clear_redis_cache(self):
+ redis_cache_default_server_port = 6379
+ redis_cache_server_pid = self.main.redis_man.get_pid_of_redis_server(
+ redis_cache_default_server_port
+ )
print("Deleting Cache DB in Redis.")
self.main.redis_man.clear_redis_cache_database()
self.main.input_information = ""
self.main.zeek_dir = ""
self.main.redis_man.log_redis_server_pid(
- 6379, self.main.redis_man.get_pid_of_redis_server(6379)
+ redis_cache_default_server_port, redis_cache_server_pid
)
self.main.terminate_slips()
@@ -193,38 +196,8 @@ def input_module_exists(self, module):
# this function assumes that the module is created in module/name/name.py
if f"{module}.py" not in os.listdir(f"modules/{module}/"):
print(
- f"{module} is not available in modules/{module}/{module}.py. Stopping slips"
+ f"{module} is not available in modules/{module}/{module}.py. "
+ f"Stopping Slips."
)
return False
-
return True
-
- def check_output_redirection(self) -> Tuple[str, str, str]:
- """
- Determine where slips will place stdout,
- stderr and logfile based on slips mode
- @return (current_stdout, stderr, slips_logfile)
- current_stdout will be '' if it's not redirected to a file
- """
- # lsof will provide a list of all open fds belonging to slips
- command = f"lsof -p {self.main.pid}"
- result = subprocess.run(command.split(), capture_output=True)
- # Get command output
- output = result.stdout.decode("utf-8")
- # if stdout is being redirected we'll find '1w' in one of the lines
- # 1 means stdout, w means write mode
- # by default, stdout is not redirected
- current_stdout = ""
- for line in output.splitlines():
- if "1w" in line:
- # stdout is redirected, get the file
- current_stdout = line.split(" ")[-1]
- break
-
- if self.main.mode == "daemonized":
- stderr = self.main.daemon.stderr
- slips_logfile = self.main.daemon.stdout
- else:
- stderr = os.path.join(self.main.args.output, "errors.log")
- slips_logfile = os.path.join(self.main.args.output, "slips.log")
- return current_stdout, stderr, slips_logfile
diff --git a/slips_files/core/helpers/filemonitor.py b/slips_files/core/helpers/filemonitor.py
index a00629fd5..61c19f34f 100644
--- a/slips_files/core/helpers/filemonitor.py
+++ b/slips_files/core/helpers/filemonitor.py
@@ -20,14 +20,14 @@
import json
import time
from watchdog.events import RegexMatchingEventHandler
-from slips_files.common.imports import *
+from slips_files.common.slips_utils import utils
class FileEventHandler(RegexMatchingEventHandler):
REGEX = [r".*\.log$", r".*\.conf$"]
def __init__(self, dir_to_monitor, input_type, db):
- super().__init__(self.REGEX)
+ super().__init__(regexes=self.REGEX)
self.dir_to_monitor = dir_to_monitor
utils.drop_root_privs()
self.db = db
diff --git a/slips_files/core/helpers/whitelist/whitelist_parser.py b/slips_files/core/helpers/whitelist/whitelist_parser.py
index 17f8666ac..7a2e65cfb 100644
--- a/slips_files/core/helpers/whitelist/whitelist_parser.py
+++ b/slips_files/core/helpers/whitelist/whitelist_parser.py
@@ -94,7 +94,7 @@ def set_number_of_columns(self, line: str) -> None:
self.NUMBER_OF_WHITELIST_COLUMNS: int = len(line.split(","))
def update_whitelisted_domains(self, domain: str, info: Dict[str, str]):
- if not validators.domain(domain):
+ if not utils.is_valid_domain(domain):
return
self.whitelisted_domains[domain] = info
diff --git a/slips_files/core/input.py b/slips_files/core/input.py
index 188536358..3815051e4 100644
--- a/slips_files/core/input.py
+++ b/slips_files/core/input.py
@@ -201,8 +201,7 @@ def is_ignored_file(self, filepath: str) -> bool:
:param filepath: full path to a zeek log file
"""
filename_without_ext = Path(filepath).stem
- if filename_without_ext not in SUPPORTED_LOGFILES:
- return True
+ return filename_without_ext not in SUPPORTED_LOGFILES
def get_file_handle(self, filename):
# Update which files we know about
@@ -221,11 +220,14 @@ def get_file_handle(self, filename):
# delete the old .log file, that has a timestamp in its name.
except FileNotFoundError:
# for example dns.log
- # zeek changes the dns.log file name every 1d, it adds a timestamp to it
- # it doesn't create the new dns.log until a new dns request occurs
- # if slips tries to read from the old dns.log now it won't find it
- # because it's been renamed and the new one isn't created yet
- # simply continue until the new log file is created and added to the zeek_files list
+            # zeek renames dns.log every 1d by adding a timestamp to it,
+            # and it doesn't create the new dns.log until a new dns
+            # request occurs.
+            # if slips tries to read from the old dns.log now, it won't
+            # find it because it's been renamed and the new one isn't
+            # created yet. simply continue until the new log file is
+            # created and added to the zeek_files list
return False
return file_handler
@@ -337,7 +339,6 @@ def get_earliest_line(self):
# and there is still no files for us.
# To cover this case, just refresh the list of files
self.zeek_files = self.db.get_all_zeek_files()
- # time.sleep(1)
return False, False
# to fix the problem of evidence being generated BEFORE their corresponding flows are added to our db
@@ -484,8 +485,8 @@ def read_zeek_folder(self):
def print_lines_read(self):
self.print(
- f"We read everything. No more input. "
- f"Stopping input process. Sent {self.lines} lines"
+ f"Done reading all flows. Stopping the input process. "
+ f"Sent {self.lines} lines for the profiler process."
)
def stdin(self):
@@ -522,7 +523,8 @@ def read_from_stdin(self) -> bool:
return True
def handle_binetflow(self):
- # the number of flows returned by get_flows_number contains the header, so subtract that
+        # the number of flows returned by get_flows_number contains the
+        # header, so subtract that
self.total_flows = self.get_flows_number(self.given_path) - 1
self.db.set_input_metadata({"total_flows": self.total_flows})
@@ -686,7 +688,8 @@ def handle_pcap_and_interface(self) -> int:
connlog_path = os.path.join(self.zeek_dir, "conn.log")
self.print(
- f"Number of zeek generated flows in conn.log: {self.get_flows_number(connlog_path)}",
+ f"Number of zeek generated flows in conn.log: "
+ f"{self.get_flows_number(connlog_path)}",
2,
0,
)
@@ -899,7 +902,6 @@ def handle_cyst(self):
self.give_profiler(line_info)
self.lines += 1
self.print("Done reading 1 CYST flow.\n ", 0, 3)
- time.sleep(2)
self.is_done_processing()
diff --git a/slips_files/core/output.py b/slips_files/core/output.py
index 9f55ffedd..9926946ae 100644
--- a/slips_files/core/output.py
+++ b/slips_files/core/output.py
@@ -17,7 +17,6 @@
from multiprocessing.connection import Connection
from multiprocessing import Event
import sys
-import io
from pathlib import Path
from datetime import datetime
import os
@@ -46,7 +45,6 @@ def __init__(
self,
verbose=1,
debug=0,
- stdout="",
stderr="output/errors.log",
slips_logfile="output/slips.log",
input_type=False,
@@ -54,12 +52,14 @@ def __init__(
has_pbar: bool = False,
pbar_finished: Event = None,
stop_daemon: bool = None,
+ stdout="",
):
super().__init__()
# when running slips using -e , this var is set and we only
# print all msgs with debug lvl less than it
self.verbose = verbose
self.debug = debug
+ self.stdout = stdout
self.input_type = input_type
self.has_pbar = has_pbar
self.pbar_finished: Event = pbar_finished
@@ -85,10 +85,6 @@ def __init__(
utils.change_logfiles_ownership(
self.slips_logfile, self.UID, self.GID
)
- self.stdout = stdout
- if stdout != "":
- self.change_stdout()
-
if self.verbose > 2:
print(f"Verbosity: {self.verbose}. Debugging: {self.debug}")
@@ -149,23 +145,6 @@ def log_line(self, msg: dict):
slips_logfile.write(f"{date_time} [{sender}] {msg}\n")
self.slips_logfile_lock.release()
- def change_stdout(self):
- """
- to be able to print the stats to the output file
- """
- # io.TextIOWrapper creates a file object of this file
- # Pass 0 to open() to switch output buffering off
- # (only allowed in binary mode)
- # write_through= True, to flush the buffer to disk, from there the
- # file can read it.
- # without it, the file writer keeps the information in a local buffer
- # that's not accessible to the file.
- stdout = io.TextIOWrapper(
- open(self.stdout, "wb", 0), write_through=True
- )
- sys.stdout = stdout
- return stdout
-
def print(self, sender: str, txt: str, end="\n"):
"""
prints the given txt whether using tqdm or using print()
diff --git a/slips_files/core/profiler.py b/slips_files/core/profiler.py
index 0bf49f15a..f9077425e 100644
--- a/slips_files/core/profiler.py
+++ b/slips_files/core/profiler.py
@@ -465,6 +465,7 @@ def main(self):
# stop and no new fows are coming
if self.check_for_stop_msg(msg):
return 1
+
line: dict = msg["line"]
input_type: str = msg["input_type"]
total_flows: int = msg.get("total_flows", 0)
diff --git a/tests/common_test_utils.py b/tests/common_test_utils.py
index 01547926b..84ae7c182 100644
--- a/tests/common_test_utils.py
+++ b/tests/common_test_utils.py
@@ -4,6 +4,7 @@
import binascii
import subprocess
import base64
+from typing import Dict
IS_IN_A_DOCKER_CONTAINER = os.environ.get("IS_IN_A_DOCKER_CONTAINER", False)
@@ -62,6 +63,23 @@ def create_output_dir(dirname):
return path
+def msgs_published_are_eq_msgs_received_by_each_module(db) -> bool:
+ """
+    This function checks that all modules received all msgs that were
+    published in the channels they are subscribed to
+ """
+ for module in db.get_enabled_modules():
+ # get channels subscribed to by this module
+ msg_tracker: Dict[str, int] = db.get_msgs_received_at_runtime(module)
+
+ for channel, msgs_received in msg_tracker.items():
+ msgs_received: int
+ channel: str
+ assert db.get_msgs_published_in_channel(channel) == msgs_received
+
+ return True
+
+
def check_for_text(txt, output_dir):
"""function to parse slips_output file and check for a given string"""
slips_output = os.path.join(output_dir, "slips_output.txt")
@@ -101,29 +119,16 @@ def has_ignored_errors(line):
return True
-def has_errors(output_dir):
+def assert_no_errors(output_dir):
"""function to parse slips_output file and check for errors"""
error_files = ("slips_output.txt", "errors.log")
error_files = [os.path.join(output_dir, file) for file in error_files]
- # we can't redirect stderr to a file and check it because we catch all exceptions in slips
+ # we can't redirect stderr to a file and check it because we catch all
+ # exceptions in slips
for file in error_files:
with open(file, "r") as f:
for line in f:
if has_ignored_errors(line):
continue
-
- if has_error_keywords(line):
- return True
-
- return False
-
-
-alerts_file = "alerts.log"
-
-
-def run_slips(cmd):
- """runs slips and waits for it to end"""
- slips = subprocess.Popen(cmd, stdin=subprocess.PIPE, shell=True)
- return_code = slips.wait()
- return return_code
+ assert not has_error_keywords(line), line
diff --git a/tests/integration_tests/test_config_files.py b/tests/integration_tests/test_config_files.py
index 02d6fed17..c410ec440 100644
--- a/tests/integration_tests/test_config_files.py
+++ b/tests/integration_tests/test_config_files.py
@@ -7,7 +7,7 @@
from tests.common_test_utils import (
is_evidence_present,
create_output_dir,
- has_errors,
+ assert_no_errors,
check_for_text,
)
from tests.module_factory import ModuleFactory
@@ -54,47 +54,51 @@ def test_conf_file(pcap_path, expected_profiles, output_dir, redis_port):
f"-P {redis_port} "
f"> {output_file} 2>&1"
)
+ print("running slips ...")
# this function returns when slips is done
os.system(command)
-
- assert has_errors(output_dir) is False
-
+ print("Slip is done, checking for errors in the output dir.")
+ assert_no_errors(output_dir)
+ print("Comparing profiles with expected profiles")
database = ModuleFactory().create_db_manager_obj(
- redis_port, output_dir=output_dir
+ redis_port, output_dir=output_dir, start_redis_server=False
)
profiles = database.get_profiles_len()
# expected_profiles is more than 50 because we're using direction = all
assert profiles > expected_profiles
-
+ print("Checking for a random evidence")
log_file = os.path.join(output_dir, alerts_file)
# testing disabled_detections param in the configuration file
disabled_evidence = "a connection without DNS resolution"
assert is_evidence_present(log_file, disabled_evidence) is False
-
+ print("Testing time_window_width param.")
# testing time_window_width param in the configuration file
assert check_for_text("in the last 115740 days", output_dir) is True
+ print("Make sure slips didn't delete zeek files.")
# test delete_zeek_files param
zeek_output_dir = database.get_zeek_output_dir()[2:]
assert zeek_output_dir not in os.listdir()
-
+ print("Test storing a copy of zeek files.")
# test store_a_copy_of_zeek_files
assert "zeek_files" in os.listdir(output_dir)
-
+ print("Checking metadata directory")
# test metadata_dir
assert "metadata" in os.listdir(output_dir)
metadata_path = os.path.join(output_dir, "metadata")
for file in ("test.yaml", "whitelist.conf", "info.txt"):
+ print(f"checking if {file} in the metadata path {metadata_path}")
assert file in os.listdir(metadata_path)
+ print("Checking malicious label count")
# test label=malicious
assert int(database.get_label_count("malicious")) > 370
-
# test disable
for module in ["template", "ensembling", "Flow ML Detection"]:
+ print(f"Checking if {module} is disabled")
assert module in database.get_disabled_modules()
-
+ print("Deleting the output directory")
shutil.rmtree(output_dir)
@@ -125,9 +129,9 @@ def test_conf_file2(pcap_path, expected_profiles, output_dir, redis_port):
f"-P {redis_port} "
f"> {output_file} 2>&1"
)
- # this function returns when slips is done
+ print("running slips ...")
os.system(command)
-
- assert has_errors(output_dir) is False
-
+ print("Slip is done, checking for errors in the output dir.")
+ assert_no_errors(output_dir)
+ print("Deleting the output directory")
shutil.rmtree(output_dir)
diff --git a/tests/integration_tests/test_dataset.py b/tests/integration_tests/test_dataset.py
index ab91f4f35..4098ee234 100644
--- a/tests/integration_tests/test_dataset.py
+++ b/tests/integration_tests/test_dataset.py
@@ -7,7 +7,8 @@
run_slips,
is_evidence_present,
create_output_dir,
- has_errors,
+ assert_no_errors,
+ msgs_published_are_eq_msgs_received_by_each_module,
)
from tests.module_factory import ModuleFactory
import pytest
@@ -67,21 +68,27 @@ def test_binetflow(
output_dir = create_output_dir(output_dir)
output_file = os.path.join(output_dir, "slips_output.txt")
- command = f"./slips.py -e 1 -t -o {output_dir} -P {redis_port} -f {binetflow_path} > {output_file} 2>&1"
+ command = (
+ f"./slips.py -e 1 -t "
+ f"-o {output_dir} "
+ f"-P {redis_port} "
+ f"-f {binetflow_path} "
+ f"> {output_file} 2>&1"
+ )
# this function returns when slips is done
run_slips(command)
- assert has_errors(output_dir) is False
+ assert_no_errors(output_dir)
- database = ModuleFactory().create_db_manager_obj(
- redis_port, output_dir=output_dir
+ db = ModuleFactory().create_db_manager_obj(
+ redis_port, output_dir=output_dir, start_redis_server=False
)
- profiles = database.get_profiles_len()
+ profiles = db.get_profiles_len()
assert profiles > expected_profiles
+ assert msgs_published_are_eq_msgs_received_by_each_module(db)
log_file = os.path.join(output_dir, alerts_file)
assert is_evidence_present(log_file, expected_evidence) is True
-
shutil.rmtree(output_dir)
@@ -102,26 +109,27 @@ def test_binetflow(
)
def test_suricata(suricata_path, output_dir, redis_port, expected_evidence):
output_dir = create_output_dir(output_dir)
- # we have an established flow in suricata file to this port 8760/udp
- # {"timestamp":"2021-06-06T15:57:37.272281+0200","flow_id":1630350322382106,"event_type":"flow",
- # "src_ip":"192.168.1.129","src_port":36101,"dest_ip":"122.248.252.67","dest_port":8760,"proto":
- # "UDP","app_proto":"failed","flow":{"pkts_toserver":2,"pkts_toclient":2,"bytes_toserver":256,
- # "bytes_toclient":468,"start":"2021-06-07T04:26:27.668954+0200","end":"2021-06-07T04:26:27.838624+0200"
- # ,"age":0,"state":"established","reason":"shutdown","alerted":false},"host":"stratosphere.org"}
-
output_file = os.path.join(output_dir, "slips_output.txt")
- command = f"./slips.py -e 1 -t -f {suricata_path} -o {output_dir} -P {redis_port} > {output_file} 2>&1"
+ command = (
+ f"./slips.py -e 1 -t "
+ f"-f {suricata_path} "
+ f"-o {output_dir} "
+ f"-P {redis_port} "
+ f"> {output_file} 2>&1"
+ )
# this function returns when slips is done
run_slips(command)
- assert has_errors(output_dir) is False
+ assert_no_errors(output_dir)
- database = ModuleFactory().create_db_manager_obj(
- redis_port, output_dir=output_dir
+ db = ModuleFactory().create_db_manager_obj(
+ redis_port, output_dir=output_dir, start_redis_server=False
)
- profiles = database.get_profiles_len()
- # todo the profiles should be way more than 10, maybe 76, but it varies each run, we need to sy why
+ profiles = db.get_profiles_len()
+ # todo the profiles should be way more than 10, maybe 76, but it varies
+ # on each run; we need to see why
assert profiles > 10
+ assert msgs_published_are_eq_msgs_received_by_each_module(db)
log_file = os.path.join(output_dir, alerts_file)
assert any(is_evidence_present(log_file, ev) for ev in expected_evidence)
@@ -145,19 +153,26 @@ def test_nfdump(nfdump_path, output_dir, redis_port):
# expected_evidence = 'Connection to unknown destination port 902/TCP'
output_file = os.path.join(output_dir, "slips_output.txt")
- command = f"./slips.py -e 1 -t -f {nfdump_path} -o {output_dir} -P {redis_port} > {output_file} 2>&1"
+ command = (
+ f"./slips.py -e 1 -t "
+ f"-f {nfdump_path} "
+ f"-o {output_dir} "
+ f"-P {redis_port} "
+ f"> {output_file} 2>&1"
+ )
# this function returns when slips is done
run_slips(command)
- database = ModuleFactory().create_db_manager_obj(
- redis_port, output_dir=output_dir
+ db = ModuleFactory().create_db_manager_obj(
+ redis_port, output_dir=output_dir, start_redis_server=False
)
- profiles = database.get_profiles_len()
- assert has_errors(output_dir) is False
+ profiles = db.get_profiles_len()
+ assert_no_errors(output_dir)
# make sure slips generated profiles for this file (can't
# put the number of profiles exactly because slips
# doesn't generate a const number of profiles per file)
assert profiles > 0
+ assert msgs_published_are_eq_msgs_received_by_each_module(db)
# log_file = os.path.join(output_dir, alerts_file)
# assert is_evidence_present(log_file, expected_evidence) == True
diff --git a/tests/integration_tests/test_pcap_dataset.py b/tests/integration_tests/test_pcap_dataset.py
index 642a7d253..b2c137efc 100644
--- a/tests/integration_tests/test_pcap_dataset.py
+++ b/tests/integration_tests/test_pcap_dataset.py
@@ -2,7 +2,7 @@
run_slips,
is_evidence_present,
create_output_dir,
- has_errors,
+ assert_no_errors,
)
from tests.module_factory import ModuleFactory
import pytest
@@ -41,7 +41,7 @@ def test_pcap(
command = f"./slips.py -e 1 -t -f {pcap_path} -o {output_dir} -P {redis_port} > {output_file} 2>&1"
# this function returns when slips is done
run_slips(command)
- assert has_errors(output_dir) is False
+ assert_no_errors(output_dir)
db = ModuleFactory().create_db_manager_obj(
redis_port, output_dir=output_dir
diff --git a/tests/integration_tests/test_portscans.py b/tests/integration_tests/test_portscans.py
index af5ba347a..e528ef4ef 100644
--- a/tests/integration_tests/test_portscans.py
+++ b/tests/integration_tests/test_portscans.py
@@ -1,18 +1,15 @@
import pytest
-from ...slips import *
import shutil
import os
-
from tests.common_test_utils import (
run_slips,
is_evidence_present,
create_output_dir,
- has_errors,
+ assert_no_errors,
)
from tests.module_factory import ModuleFactory
-
alerts_file = "alerts.log"
@@ -32,19 +29,24 @@ def test_horizontal(path, output_dir, redis_port):
"""
output_dir = create_output_dir(output_dir)
- expected_evidence = "Horizontal port scan to port 80/TCP. From 10.0.2.112"
+ expected_evidence = (
+ "Horizontal port scan to port 80/TCP. " "From 10.0.2.112"
+ )
output_file = os.path.join(output_dir, "slips_output.txt")
- command = f"./slips.py -e 1 -t -f {path} -o {output_dir} -P {redis_port} > {output_file} 2>&1"
+ command = (
+ f"./slips.py -e 1 -t -f {path} "
+ f" -o {output_dir} "
+ f"-P {redis_port} > {output_file} 2>&1"
+ )
# this function returns when slips is done
run_slips(command)
database = ModuleFactory().create_db_manager_obj(
- redis_port, output_dir=output_dir
+ redis_port, output_dir=output_dir, start_redis_server=False
)
- assert has_errors(output_dir) is False
-
+ assert_no_errors(output_dir)
# make sure slips generated profiles for this file (can't
# put the number of profiles exactly because slips
# doesn't generate a const number of profiles per file)
@@ -52,7 +54,7 @@ def test_horizontal(path, output_dir, redis_port):
assert profiles > 0
log_file = os.path.join(output_dir, alerts_file)
- assert is_evidence_present(log_file, expected_evidence) == True
+ assert is_evidence_present(log_file, expected_evidence)
shutil.rmtree(output_dir)
@@ -72,15 +74,18 @@ def test_vertical(path, output_dir, redis_port):
)
output_file = os.path.join(output_dir, "slips_output.txt")
- command = f"./slips.py -e 1 -t -f {path} -o {output_dir} -P {redis_port} > {output_file} 2>&1"
+ command = (
+ f"./slips.py -e 1 -t -f {path} "
+ f" -o {output_dir}"
+ f" -P {redis_port} > {output_file} 2>&1"
+ )
# this function returns when slips is done
run_slips(command)
database = ModuleFactory().create_db_manager_obj(
- redis_port, output_dir=output_dir
+ redis_port, output_dir=output_dir, start_redis_server=False
)
-
- assert has_errors(output_dir) is False
+ assert_no_errors(output_dir)
# make sure slips generated profiles for this file (can't
# put the number of profiles exactly because slips
@@ -89,6 +94,6 @@ def test_vertical(path, output_dir, redis_port):
assert profiles > 0
log_file = os.path.join(output_dir, alerts_file)
- assert is_evidence_present(log_file, expected_evidence) == True
+ assert is_evidence_present(log_file, expected_evidence)
shutil.rmtree(output_dir)
diff --git a/tests/integration_tests/test_zeek_dataset.py b/tests/integration_tests/test_zeek_dataset.py
index be159fe5a..6b96c5d88 100644
--- a/tests/integration_tests/test_zeek_dataset.py
+++ b/tests/integration_tests/test_zeek_dataset.py
@@ -2,7 +2,7 @@
run_slips,
is_evidence_present,
create_output_dir,
- has_errors,
+ assert_no_errors,
)
from tests.module_factory import ModuleFactory
import pytest
@@ -83,7 +83,7 @@ def test_zeek_dir(
command = f"./slips.py -e 1 -t -f {zeek_dir_path} -o {output_dir} -P {redis_port} > {output_file} 2>&1"
# this function returns when slips is done
run_slips(command)
- assert has_errors(output_dir) is False
+ assert_no_errors(output_dir)
database = ModuleFactory().create_db_manager_obj(
redis_port, output_dir=output_dir
@@ -107,7 +107,8 @@ def test_zeek_dir(
(
"dataset/test9-mixed-zeek-dir/conn.log",
4,
- "non-HTTP established connection",
+ "non-HTTP established connection", # the flows with uid
+ # CAwUdr34dVnyOwbUuj should trigger this
"test9-conn_log_only/",
6659,
),
@@ -133,7 +134,7 @@ def test_zeek_conn_log(
command = f"./slips.py -e 1 -t -f {conn_log_path} -o {output_dir} -P {redis_port} > {output_file} 2>&1"
# this function returns when slips is done
run_slips(command)
- assert has_errors(output_dir) is False
+ assert_no_errors(output_dir)
database = ModuleFactory().create_db_manager_obj(
redis_port, output_dir=output_dir
diff --git a/tests/module_factory.py b/tests/module_factory.py
index a10acb6da..f5c3321ec 100644
--- a/tests/module_factory.py
+++ b/tests/module_factory.py
@@ -1,20 +1,31 @@
import shutil
-from unittest.mock import patch, Mock
+from unittest.mock import (
+ patch,
+ Mock,
+ MagicMock,
+ mock_open,
+)
import os
from modules.flowalerts.conn import Conn
+from slips_files.core.database.database_manager import DBManager
+
+from slips_files.core.helpers.notify import Notify
from modules.flowalerts.dns import DNS
+from multiprocessing.connection import Connection
from modules.flowalerts.downloaded_file import DownloadedFile
+from modules.progress_bar.progress_bar import PBar
from modules.flowalerts.notice import Notice
from modules.flowalerts.smtp import SMTP
from modules.flowalerts.software import Software
from modules.flowalerts.ssh import SSH
from modules.flowalerts.ssl import SSL
from modules.flowalerts.tunnel import Tunnel
+from modules.p2ptrust.trust.trustdb import TrustDB
+from modules.p2ptrust.utils.go_director import GoDirector
from slips.main import Main
from modules.update_manager.update_manager import UpdateManager
from modules.leak_detector.leak_detector import LeakDetector
-from slips_files.core.database.database_manager import DBManager
from slips_files.core.profiler import Profiler
from slips_files.core.output import Output
from modules.threat_intelligence.threat_intelligence import ThreatIntel
@@ -32,27 +43,27 @@
from managers.process_manager import ProcessManager
from managers.redis_manager import RedisManager
from modules.ip_info.asn_info import ASN
-from multiprocessing import Queue, Event, Semaphore
+from multiprocessing import Queue, Event
from slips_files.core.helpers.flow_handler import FlowHandler
from slips_files.core.helpers.symbols_handler import SymbolHandler
from modules.network_discovery.horizontal_portscan import HorizontalPortscan
from modules.network_discovery.network_discovery import NetworkDiscovery
from modules.network_discovery.vertical_portscan import VerticalPortscan
+from modules.p2ptrust.trust.base_model import BaseModel
from modules.arp.arp import ARP
+from slips.daemon import Daemon
+from slips_files.core.helpers.checker import Checker
+from modules.cesnet.cesnet import CESNET
+from slips_files.common.markov_chains import Matrix
from slips_files.core.evidence_structure.evidence import (
Attacker,
Direction,
Evidence,
- EvidenceType,
- IDEACategory,
IoCType,
ProfileID,
Proto,
- Tag,
- ThreatLevel,
TimeWindow,
Victim,
-
)
@@ -71,232 +82,251 @@ def check_zeek_or_bro():
return False
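+# dotted paths where modules, core classes, and helpers look up DBManager;
+# the @patch decorators below use them to swap in a mock db per test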
+MODULE_DB_MANAGER = "slips_files.common.abstracts.module.DBManager"
+CORE_DB_MANAGER = "slips_files.common.abstracts.core.DBManager"
+DB_MANAGER = "slips_files.core.database.database_manager.DBManager"
+
+
class ModuleFactory:
def __init__(self):
- # same db as in conftest
self.profiler_queue = Queue()
self.input_queue = Queue()
- self.dummy_termination_event = Event()
- self.logger = Mock() # Output()
+ self.logger = Mock()
def get_default_db(self):
"""default is o port 6379, this is the one we're using in conftest"""
return self.create_db_manager_obj(6379)
def create_db_manager_obj(
- self, port, output_dir="output/", flush_db=False
+ self,
+ port,
+ output_dir="output/",
+ flush_db=False,
+ start_redis_server=True,
):
+ """
+ flush_db is False by default because we use this function to check
+ the db after integration tests to make sure everything's going fine
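+ start_redis_server is False when integration tests just need to attach
+ to the redis server slips already started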
+ """
# to prevent config/redis.conf from being overwritten
with patch(
"slips_files.core.database.redis_db.database.RedisDB._set_redis_options",
return_value=Mock(),
):
- db = DBManager(self.logger, output_dir, port, flush_db=flush_db)
+ db = DBManager(
+ self.logger,
+ output_dir,
+ port,
+ flush_db=flush_db,
+ start_redis_server=start_redis_server,
+ )
db.r = db.rdb.r
- db.print = do_nothing
+ db.print = Mock()
assert db.get_used_redis_port() == port
return db
- def create_main_obj(self, input_information):
+ def create_main_obj(self):
"""returns an instance of Main() class in slips.py"""
main = Main(testing=True)
- main.input_information = input_information
+ main.input_information = ""
main.input_type = "pcap"
main.line_type = False
return main
+ @patch(MODULE_DB_MANAGER, name="mock_db")
def create_http_analyzer_obj(self, mock_db):
- with patch.object(DBManager, "create_sqlite_db", return_value=Mock()):
- http_analyzer = HTTPAnalyzer(
- self.logger,
- "dummy_output_dir",
- 6379,
- self.dummy_termination_event,
- )
- http_analyzer.db.rdb = mock_db
+ http_analyzer = HTTPAnalyzer(
+ self.logger,
+ "dummy_output_dir",
+ 6379,
+ Mock(),
+ )
# override the self.print function to avoid broken pipes
- http_analyzer.print = do_nothing
+ http_analyzer.print = Mock()
return http_analyzer
+ @patch(MODULE_DB_MANAGER, name="mock_db")
def create_virustotal_obj(self, mock_db):
- with patch.object(DBManager, "create_sqlite_db", return_value=Mock()):
- virustotal = VT(
- self.logger,
- "dummy_output_dir",
- 6379,
- self.dummy_termination_event,
- )
- virustotal.db.rdb = mock_db
-
- # override the self.print function to avoid broken pipes
- virustotal.print = do_nothing
- virustotal.__read_configuration = read_configuration
- virustotal.key_file = (
- "/media/alya/W/SLIPPS/modules/virustotal/api_key_secret"
+ virustotal = VT(
+ self.logger,
+ "dummy_output_dir",
+ 6379,
+ Mock(),
)
+ virustotal.print = Mock()
+ virustotal.__read_configuration = Mock()
return virustotal
+ @patch(MODULE_DB_MANAGER, name="mock_db")
def create_arp_obj(self, mock_db):
- with patch.object(DBManager, "create_sqlite_db", return_value=Mock()):
+ with patch(
+ "modules.arp.arp.ARP.wait_for_arp_scans", return_value=Mock()
+ ):
arp = ARP(
self.logger,
"dummy_output_dir",
6379,
- self.dummy_termination_event,
+ Mock(),
)
- arp.db.rdb = mock_db
- # override the self.print function to avoid broken pipes
- arp.print = do_nothing
+ arp.print = Mock()
return arp
+ @patch(MODULE_DB_MANAGER, name="mock_db")
def create_blocking_obj(self, mock_db):
- with patch.object(DBManager, "create_sqlite_db", return_value=Mock()):
- blocking = Blocking(
- self.logger,
- "dummy_output_dir",
- 6379,
- self.dummy_termination_event,
- )
- blocking.db.rdb = mock_db
-
+ blocking = Blocking(
+ self.logger,
+ "dummy_output_dir",
+ 6379,
+ Mock(),
+ )
# override the print function to avoid broken pipes
- blocking.print = do_nothing
+ blocking.print = Mock()
return blocking
+ @patch(MODULE_DB_MANAGER, name="mock_db")
def create_flowalerts_obj(self, mock_db):
- with patch.object(DBManager, "create_sqlite_db", return_value=Mock()):
- flowalerts = FlowAlerts(
- self.logger,
- "dummy_output_dir",
- 6379,
- self.dummy_termination_event,
- )
- flowalerts.db.rdb = mock_db
+ flowalerts = FlowAlerts(
+ self.logger,
+ "dummy_output_dir",
+ 6379,
+ Mock(),
+ )
# override the self.print function to avoid broken pipes
- flowalerts.print = do_nothing
+ flowalerts.print = Mock()
return flowalerts
+ @patch(DB_MANAGER, name="mock_db")
def create_dns_analyzer_obj(self, mock_db):
- flowalerts = self.create_flowalerts_obj(mock_db)
+ flowalerts = self.create_flowalerts_obj()
return DNS(flowalerts.db, flowalerts=flowalerts)
+ @patch(DB_MANAGER, name="mock_db")
def create_notice_analyzer_obj(self, mock_db):
- flowalerts = self.create_flowalerts_obj(mock_db)
+ flowalerts = self.create_flowalerts_obj()
return Notice(flowalerts.db, flowalerts=flowalerts)
+ @patch(DB_MANAGER, name="mock_db")
def create_smtp_analyzer_obj(self, mock_db):
- flowalerts = self.create_flowalerts_obj(mock_db)
+ flowalerts = self.create_flowalerts_obj()
return SMTP(flowalerts.db, flowalerts=flowalerts)
+ @patch(DB_MANAGER, name="mock_db")
def create_ssl_analyzer_obj(self, mock_db):
- flowalerts = self.create_flowalerts_obj(mock_db)
- return SSL(flowalerts.db, flowalerts=flowalerts)
+ flowalerts = self.create_flowalerts_obj()
+ with patch(
+ "modules.flowalerts.ssl.SSL"
+ ".wait_for_ssl_flows_to_appear_in_connlog",
+ return_value=Mock(),
+ ):
+ ssl = SSL(flowalerts.db, flowalerts=flowalerts)
+ return ssl
+ @patch(DB_MANAGER, name="mock_db")
def create_ssh_analyzer_obj(self, mock_db):
- flowalerts = self.create_flowalerts_obj(mock_db)
+ flowalerts = self.create_flowalerts_obj()
return SSH(flowalerts.db, flowalerts=flowalerts)
+ @patch(DB_MANAGER, name="mock_db")
def create_downloaded_file_analyzer_obj(self, mock_db):
- flowalerts = self.create_flowalerts_obj(mock_db)
+ flowalerts = self.create_flowalerts_obj()
return DownloadedFile(flowalerts.db, flowalerts=flowalerts)
+ @patch(DB_MANAGER, name="mock_db")
def create_tunnel_analyzer_obj(self, mock_db):
- flowalerts = self.create_flowalerts_obj(mock_db)
+ flowalerts = self.create_flowalerts_obj()
return Tunnel(flowalerts.db, flowalerts=flowalerts)
+ @patch(DB_MANAGER, name="mock_db")
def create_conn_analyzer_obj(self, mock_db):
- flowalerts = self.create_flowalerts_obj(mock_db)
+ flowalerts = self.create_flowalerts_obj()
return Conn(flowalerts.db, flowalerts=flowalerts)
+ @patch(DB_MANAGER, name="mock_db")
def create_software_analyzer_obj(self, mock_db):
- flowalerts = self.create_flowalerts_obj(mock_db)
+ flowalerts = self.create_flowalerts_obj()
return Software(flowalerts.db, flowalerts=flowalerts)
+ @patch(CORE_DB_MANAGER, name="mock_db")
def create_input_obj(
self, input_information, input_type, mock_db, line_type=False
):
zeek_tmp_dir = os.path.join(os.getcwd(), "zeek_dir_for_testing")
- dummy_semaphore = Semaphore(0)
- with patch.object(DBManager, "create_sqlite_db", return_value=Mock()):
- input = Input(
- Output(),
- "dummy_output_dir",
- 6379,
- self.dummy_termination_event,
- is_input_done=dummy_semaphore,
- profiler_queue=self.profiler_queue,
- input_type=input_type,
- input_information=input_information,
- cli_packet_filter=None,
- zeek_or_bro=check_zeek_or_bro(),
- zeek_dir=zeek_tmp_dir,
- line_type=line_type,
- is_profiler_done_event=self.dummy_termination_event,
- )
- input.db.rdb = mock_db
+ input = Input(
+ Output(),
+ "dummy_output_dir",
+ 6379,
+ is_input_done=Mock(),
+ profiler_queue=self.profiler_queue,
+ input_type=input_type,
+ input_information=input_information,
+ cli_packet_filter=None,
+ zeek_or_bro=check_zeek_or_bro(),
+ zeek_dir=zeek_tmp_dir,
+ line_type=line_type,
+ is_profiler_done_event=Mock(),
+ termination_event=Mock(),
+ )
input.is_done_processing = do_nothing
input.bro_timeout = 1
# override the print function to avoid broken pipes
- input.print = do_nothing
+ input.print = Mock()
input.stop_queues = do_nothing
input.testing = True
return input
+ @patch(MODULE_DB_MANAGER, name="mock_db")
def create_ip_info_obj(self, mock_db):
- with patch.object(DBManager, "create_sqlite_db", return_value=Mock()):
- ip_info = IPInfo(
- self.logger,
- "dummy_output_dir",
- 6379,
- self.dummy_termination_event,
- )
- ip_info.db.rdb = mock_db
+ ip_info = IPInfo(
+ self.logger,
+ "dummy_output_dir",
+ 6379,
+ Mock(),
+ )
# override the self.print function to avoid broken pipes
- ip_info.print = do_nothing
+ ip_info.print = Mock()
return ip_info
+ @patch(DB_MANAGER, name="mock_db")
def create_asn_obj(self, mock_db):
return ASN(mock_db)
+ @patch(MODULE_DB_MANAGER, name="mock_db")
def create_leak_detector_obj(self, mock_db):
# this file will be used for storing the module output
# and deleted when the tests are done
test_pcap = "dataset/test7-malicious.pcap"
yara_rules_path = "tests/yara_rules_for_testing/rules/"
compiled_yara_rules_path = "tests/yara_rules_for_testing/compiled/"
- with patch.object(DBManager, "create_sqlite_db", return_value=Mock()):
- leak_detector = LeakDetector(
- self.logger,
- "dummy_output_dir",
- 6379,
- self.dummy_termination_event,
- )
- leak_detector.db.rdb = mock_db
- # override the self.print function to avoid broken pipes
- leak_detector.print = do_nothing
- # this is the path containing 1 yara rule for testing, it matches every pcap
+ leak_detector = LeakDetector(
+ self.logger,
+ "dummy_output_dir",
+ 6379,
+ Mock(),
+ )
+ leak_detector.print = Mock()
+ # this is the path containing 1 yara rule for testing,
+ # it matches every pcap
leak_detector.yara_rules_path = yara_rules_path
leak_detector.compiled_yara_rules_path = compiled_yara_rules_path
leak_detector.pcap = test_pcap
return leak_detector
+ @patch(CORE_DB_MANAGER, name="mock_db")
def create_profiler_obj(self, mock_db):
- dummy_semaphore = Semaphore(0)
profiler = Profiler(
self.logger,
"output/",
6379,
- self.dummy_termination_event,
- is_profiler_done=dummy_semaphore,
+ Mock(),
+ is_profiler_done=Mock(),
profiler_queue=self.input_queue,
- is_profiler_done_event=self.dummy_termination_event,
+ is_profiler_done_event=Mock(),
)
-
# override the self.print function to avoid broken pipes
- profiler.print = do_nothing
+ profiler.print = Mock()
profiler.whitelist_path = "tests/test_whitelist.conf"
profiler.db = mock_db
return profiler
@@ -305,94 +335,125 @@ def create_redis_manager_obj(self, main):
return RedisManager(main)
def create_process_manager_obj(self):
- return ProcessManager(self.create_main_obj(""))
+ return ProcessManager(self.create_main_obj())
def create_utils_obj(self):
return utils
+ @patch(MODULE_DB_MANAGER, name="mock_db")
def create_threatintel_obj(self, mock_db):
- with patch.object(DBManager, "create_sqlite_db", return_value=Mock()):
- threatintel = ThreatIntel(
- self.logger,
- "dummy_output_dir",
- 6379,
- self.dummy_termination_event,
- )
- threatintel.db = mock_db
+ threatintel = ThreatIntel(
+ self.logger,
+ "dummy_output_dir",
+ 6379,
+ Mock(),
+ )
# override the self.print function to avoid broken pipes
- threatintel.print = do_nothing
+ threatintel.print = Mock()
return threatintel
+ @patch(MODULE_DB_MANAGER, name="mock_db")
def create_update_manager_obj(self, mock_db):
- with patch.object(DBManager, "create_sqlite_db", return_value=Mock()):
- update_manager = UpdateManager(
- self.logger,
- "dummy_output_dir",
- 6379,
- self.dummy_termination_event,
- )
-
- update_manager.db.rdb = mock_db
-
+ update_manager = UpdateManager(
+ self.logger,
+ "dummy_output_dir",
+ 6379,
+ Mock(),
+ )
# override the self.print function to avoid broken pipes
- update_manager.print = do_nothing
+ update_manager.print = Mock()
return update_manager
+ @patch(MODULE_DB_MANAGER, name="mock_db")
def create_whitelist_obj(self, mock_db):
- with patch.object(DBManager, "create_sqlite_db", return_value=Mock()):
- whitelist = Whitelist(self.logger, mock_db)
- whitelist.db.rdb = mock_db
-
+ whitelist = Whitelist(self.logger, mock_db)
# override the self.print function to avoid broken pipes
- whitelist.print = do_nothing
+ whitelist.print = Mock()
whitelist.whitelist_path = "tests/test_whitelist.conf"
return whitelist
+ @patch(MODULE_DB_MANAGER, name="mock_db")
def create_flow_handler_obj(self, flow, mock_db):
- with patch.object(DBManager, "create_sqlite_db", return_value=Mock()):
- symbol = SymbolHandler(self.logger, mock_db)
- flow_handler = FlowHandler(mock_db, symbol, flow)
- return flow_handler
+ symbol = SymbolHandler(self.logger, mock_db)
+ flow_handler = FlowHandler(mock_db, symbol, flow)
+ flow_handler.profileid = "profile_id"
+ flow_handler.twid = "timewindow_id"
+ return flow_handler
+ @patch(DB_MANAGER, name="mock_db")
def create_horizontal_portscan_obj(self, mock_db):
- with patch.object(DBManager, "create_sqlite_db", return_value=Mock()):
- horizontal_ps = HorizontalPortscan(mock_db)
- return horizontal_ps
+ horizontal_ps = HorizontalPortscan(mock_db)
+ return horizontal_ps
+ @patch(DB_MANAGER, name="mock_db")
def create_vertical_portscan_obj(self, mock_db):
- with patch.object(DBManager, "create_sqlite_db", return_value=Mock()):
- vertical_ps = VerticalPortscan(mock_db)
- return vertical_ps
+ vertical_ps = VerticalPortscan(mock_db)
+ return vertical_ps
+ @patch(MODULE_DB_MANAGER, name="mock_db")
def create_urlhaus_obj(self, mock_db):
"""Create an instance of URLhaus."""
urlhaus = URLhaus(mock_db)
return urlhaus
+ @patch(MODULE_DB_MANAGER, name="mock_db")
def create_set_evidence_helper(self, mock_db):
"""Create an instance of SetEvidenceHelper."""
set_evidence_helper = SetEvidnceHelper(mock_db)
return set_evidence_helper
- def create_attacker_obj(self, value="192.168.1.1", direction=Direction.SRC, attacker_type=IoCType.IP):
- return Attacker(direction=direction, attacker_type=attacker_type, value=value)
-
- def create_victim_obj(self, value="192.168.1.2", direction=Direction.DST, victim_type=IoCType.IP):
- return Victim(direction=direction, victim_type=victim_type, value=value)
-
+ def create_output_obj(self):
+ return Output()
+
+ def create_attacker_obj(
+ self,
+ value="192.168.1.1",
+ direction=Direction.SRC,
+ attacker_type=IoCType.IP,
+ ):
+ return Attacker(
+ direction=direction, attacker_type=attacker_type, value=value
+ )
+
+ def create_victim_obj(
+ self,
+ value="192.168.1.2",
+ direction=Direction.DST,
+ victim_type=IoCType.IP,
+ ):
+ return Victim(
+ direction=direction, victim_type=victim_type, value=value
+ )
+
def create_profileid_obj(self, ip="192.168.1.3"):
return ProfileID(ip=ip)
-
- def create_timewindow_obj(self,number=1):
+
+ def create_timewindow_obj(self, number=1):
return TimeWindow(number=number)
-
+
def create_proto_obj(self):
return Proto
-
- def create_evidence_obj(self, evidence_type, description, attacker, threat_level,
- category, victim, profile, timewindow, uid, timestamp,
- proto, port, source_target_tag, id, conn_count, confidence):
+
+ def create_evidence_obj(
+ self,
+ evidence_type,
+ description,
+ attacker,
+ threat_level,
+ category,
+ victim,
+ profile,
+ timewindow,
+ uid,
+ timestamp,
+ proto,
+ port,
+ source_target_tag,
+ id,
+ conn_count,
+ confidence,
+ ):
return Evidence(
evidence_type=evidence_type,
description=description,
@@ -409,12 +470,117 @@ def create_evidence_obj(self, evidence_type, description, attacker, threat_level
source_target_tag=source_target_tag,
id=id,
conn_count=conn_count,
- confidence=confidence
+ confidence=confidence,
)
+ @patch(MODULE_DB_MANAGER, name="mock_db")
def create_network_discovery_obj(self, mock_db):
- with patch('modules.network_discovery.network_discovery.NetworkDiscovery.__init__', return_value=None):
- network_discovery = NetworkDiscovery(mock_db)
- network_discovery.db = mock_db
+ network_discovery = NetworkDiscovery(
+ self.logger,
+ "dummy_output_dir",
+ 6379,
+ Mock(),
+ )
return network_discovery
+ def create_markov_chain_obj(self):
+ return Matrix()
+
+ def create_checker_obj(self):
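+ # minimal stand-in for Main() carrying only the attributes Checker reads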
+ mock_main = Mock()
+ mock_main.args = MagicMock()
+ mock_main.args.output = "test_output"
+ mock_main.args.verbose = "0"
+ mock_main.args.debug = "0"
+ mock_main.redis_man = Mock()
+ mock_main.terminate_slips = Mock()
+ mock_main.print_version = Mock()
+ mock_main.get_input_file_type = Mock()
+ mock_main.handle_flows_from_stdin = Mock()
+ mock_main.pid = 12345
+
+ checker = Checker(mock_main)
+ return checker
+
+ @patch(MODULE_DB_MANAGER, name="mock_db")
+ def create_go_director_obj(self, mock_db):
+ with patch("modules.p2ptrust.utils.utils.send_evaluation_to_go"):
+ go_director = GoDirector(
+ logger=self.logger,
+ trustdb=Mock(spec=TrustDB),
+ db=mock_db,
+ storage_name="test_storage",
+ override_p2p=False,
+ gopy_channel="test_gopy",
+ pygo_channel="test_pygo",
+ p2p_reports_logfile="test_reports.log",
+ )
+ go_director.print = Mock()
+ return go_director
+
+ @patch(MODULE_DB_MANAGER, name="mock_db")
+ def create_progress_bar_obj(self, mock_db):
+ mock_pipe = Mock(spec=Connection)
+ mock_pbar_finished = Mock(spec=Event)
+ pbar = PBar(
+ self.logger,
+ "dummy_output_dir",
+ 6379,
+ Mock(),
+ )
+ pbar.init(
+ pipe=mock_pipe,
+ slips_mode="normal",
+ pbar_finished=mock_pbar_finished,
+ )
+ pbar.print = Mock()
+
+ return pbar
+
+ @patch(DB_MANAGER, name="mock_db")
+ def create_daemon_object(self, mock_db):
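+ # patch pid-file and config reads so constructing the Daemon touches no real files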
+ with (
+ patch("slips.daemon.Daemon.read_pidfile", return_type=None),
+ patch("slips.daemon.Daemon.read_configuration"),
+ patch("builtins.open", mock_open(read_data=None)),
+ ):
+ daemon = Daemon(MagicMock())
+ daemon.stderr = "errors.log"
+ daemon.stdout = "slips.log"
+ daemon.stdin = "/dev/null"
+ daemon.logsfile = "slips.log"
+ daemon.pidfile_dir = "/tmp"
+ daemon.pidfile = os.path.join(daemon.pidfile_dir, "slips_daemon.lock")
+ daemon.daemon_start_lock = "slips_daemon_start"
+ daemon.daemon_stop_lock = "slips_daemon_stop"
+ return daemon
+
+ @patch("sqlite3.connect", name="sqlite_mock")
+ def create_trust_db_obj(self, sqlite_mock):
+ trust_db = TrustDB(self.logger, Mock(), drop_tables_on_startup=False)
+ trust_db.conn = Mock()
+ trust_db.print = Mock()
+ return trust_db
+
+ def create_base_model_obj(self):
+ logger = Mock(spec=Output)
+ trustdb = Mock()
+ return BaseModel(logger, trustdb)
+
+ def create_notify_obj(self):
+ notify = Notify()
+ return notify
+
+ @patch(MODULE_DB_MANAGER, name="mock_db")
+ def create_cesnet_obj(self, mock_db):
+ output_dir = "dummy_output_dir"
+ redis_port = 6379
+ termination_event = MagicMock()
+ cesnet = CESNET(self.logger, output_dir, redis_port, termination_event)
+ cesnet.wclient = MagicMock()
+ cesnet.node_info = [
+ {"Name": "TestNode", "Type": ["IPS"], "SW": ["Slips"]}
+ ]
+
+ cesnet.print = Mock()
+ return cesnet
diff --git a/tests/test_arp.py b/tests/test_arp.py
index 884c42ded..4753cb0d4 100644
--- a/tests/test_arp.py
+++ b/tests/test_arp.py
@@ -2,51 +2,315 @@
from tests.module_factory import ModuleFactory
import json
+import ipaddress
+import pytest
+from slips_files.core.evidence_structure.evidence import EvidenceType
+
-# random values for testing
profileid = "profile_192.168.1.1"
twid = "timewindow1"
-# check_arp_scan is tested in test_dataset.py, check arp-only unit test
-def test_check_dstip_outside_localnet(mock_db):
- ARP = ModuleFactory().create_arp_obj(mock_db)
- daddr = "1.1.1.1"
+@pytest.mark.parametrize(
+ "daddr, saddr, expected_result",
+ [
+ # Test case 1: IP outside local network
+ ("1.1.1.1", "192.168.1.1", True),
+ # Test case 2: IP inside local network
+ ("192.168.1.2", "192.168.1.1", False),
+ # Test case 3: Multicast address
+ ("224.0.0.1", "192.168.1.1", False),
+ # Test case 4: Link-local address
+ ("169.254.1.1", "192.168.1.1", False),
+ # Test case 5: Same subnet, different IP
+ ("192.168.1.100", "192.168.1.1", False),
+ # Test case 6: ARP probe (source 0.0.0.0)
+ ("192.168.1.2", "0.0.0.0", False),
+ # Test case 7: ARP probe (destination 0.0.0.0)
+ ("0.0.0.0", "192.168.1.1", False),
+ ],
+)
+def test_check_dstip_outside_localnet(daddr, saddr, expected_result):
+ ARP = ModuleFactory().create_arp_obj()
+ profileid = f"profile_{saddr}"
+ twid = "timewindow1"
uid = "1234"
- saddr = "192.168.1.1"
ts = "1632214645.783595"
- assert (
- ARP.check_dstip_outside_localnet(
- profileid, twid, daddr, uid, saddr, ts
- )
- is True
+
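+ # set the home network explicitly so the outside-localnet check doesn't
+ # depend on the host's interfaces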
+ ARP.home_network = [ipaddress.IPv4Network("192.168.0.0/16")]
+
+ result = ARP.check_dstip_outside_localnet(
+ profileid, twid, daddr, uid, saddr, ts
)
+ assert result == expected_result
-def test_detect_unsolicited_arp(mock_db):
- ARP = ModuleFactory().create_arp_obj(mock_db)
+@pytest.mark.parametrize(
+ "dst_mac, dst_hw, src_mac, src_hw, expected_result",
+ [
+ # Test case 1: Valid unsolicited ARP
+ (
+ "ff:ff:ff:ff:ff:ff",
+ "ff:ff:ff:ff:ff:ff",
+ "44:11:44:11:44:11",
+ "44:11:44:11:44:11",
+ True,
+ ),
+ # Test case 2: Invalid dst_mac
+ (
+ "00:11:22:33:44:55",
+ "ff:ff:ff:ff:ff:ff",
+ "44:11:44:11:44:11",
+ "44:11:44:11:44:11",
+ None,
+ ),
+ # Test case 3: Invalid dst_hw
+ (
+ "ff:ff:ff:ff:ff:ff",
+ "00:11:22:33:44:55",
+ "44:11:44:11:44:11",
+ "44:11:44:11:44:11",
+ None,
+ ),
+ # Test case 4: Invalid src_mac
+ # (all zeros)
+ (
+ "ff:ff:ff:ff:ff:ff",
+ "ff:ff:ff:ff:ff:ff",
+ "00:00:00:00:00:00",
+ "44:11:44:11:44:11",
+ None,
+ ),
+ # Test case 5: Invalid src_hw
+ # (all zeros)
+ (
+ "ff:ff:ff:ff:ff:ff",
+ "ff:ff:ff:ff:ff:ff",
+ "44:11:44:11:44:11",
+ "00:00:00:00:00:00",
+ None,
+ ),
+ # Test case 6: Alternative valid case
+ # (dst_hw all zeros)
+ (
+ "ff:ff:ff:ff:ff:ff",
+ "00:00:00:00:00:00",
+ "44:11:44:11:44:11",
+ "44:11:44:11:44:11",
+ None,
+ ),
+ ],
+)
+def test_detect_unsolicited_arp(
+ dst_mac, dst_hw, src_mac, src_hw, expected_result
+):
+ ARP = ModuleFactory().create_arp_obj()
+ profileid = "profile_192.168.1.1"
+ twid = "timewindow1"
uid = "1234"
ts = "1632214645.783595"
- dst_mac = "ff:ff:ff:ff:ff:ff"
- dst_hw = "ff:ff:ff:ff:ff:ff"
- src_mac = "44:11:44:11:44:11"
- src_hw = "44:11:44:11:44:11"
- assert (
- ARP.detect_unsolicited_arp(
- profileid, twid, uid, ts, dst_mac, src_mac, dst_hw, src_hw
- )
- is True
+
+ result = ARP.detect_unsolicited_arp(
+ profileid, twid, uid, ts, dst_mac, src_mac, dst_hw, src_hw
)
+ assert result == expected_result
+
+
+def test_detect_MITM_ARP_attack_with_original_ip():
+ ARP = ModuleFactory().create_arp_obj()
+ twid = "timewindow1"
+ uid = "1234"
+ ts = "1636305825.755132"
+ saddr = "192.168.1.3"
+ original_ip = "192.168.1.1"
+ gateway_ip = "192.168.1.254"
+ gateway_mac = "aa:bb:cc:dd:ee:ff"
+ src_mac = "44:11:44:11:44:11"
+
+ ARP.db.get_ip_of_mac.return_value = json.dumps([f"profile_{original_ip}"])
+ ARP.db.get_gateway_ip.return_value = gateway_ip
+ ARP.db.get_gateway_mac.return_value = gateway_mac
+
+ result = ARP.detect_MITM_ARP_attack(twid, uid, saddr, ts, src_mac)
+ assert result is True
+
+
+def test_detect_MITM_ARP_attack_same_ip():
+ ARP = ModuleFactory().create_arp_obj()
+ twid = "timewindow1"
+ uid = "1234"
+ ts = "1636305825.755132"
+ saddr = "192.168.1.1"
+ original_ip = "192.168.1.1"
+ gateway_ip = "192.168.1.254"
+ gateway_mac = "aa:bb:cc:dd:ee:ff"
+ src_mac = "44:11:44:11:44:11"
+
+ ARP.db.get_ip_of_mac.return_value = json.dumps([f"profile_{original_ip}"])
+ ARP.db.get_gateway_ip.return_value = gateway_ip
+ ARP.db.get_gateway_mac.return_value = gateway_mac
+
+ result = ARP.detect_MITM_ARP_attack(twid, uid, saddr, ts, src_mac)
+ assert result is None
+
+
+def test_detect_mitm_arp_attack_gateway_mac():
+ ARP = ModuleFactory().create_arp_obj()
+ twid = "timewindow1"
+ uid = "1234"
+ ts = "1636305825.755132"
+ saddr = "192.168.1.3"
+ original_ip = "192.168.1.1"
+ gateway_ip = "192.168.1.254"
+ gateway_mac = "44:11:44:11:44:11"
+ src_mac = "44:11:44:11:44:11"
+
+ ARP.db.get_ip_of_mac.return_value = json.dumps([f"profile_{original_ip}"])
+ ARP.db.get_gateway_ip.return_value = gateway_ip
+ ARP.db.get_gateway_mac.return_value = gateway_mac
+ result = ARP.detect_MITM_ARP_attack(twid, uid, saddr, ts, src_mac)
+ assert result is True
-def test_detect_MITM_ARP_attack(mock_db):
- ARP = ModuleFactory().create_arp_obj(mock_db)
- # add a mac addr to this profile
- src_mac = "2e:a4:18:f8:3d:02"
- # now in this flow we have another ip '192.168.1.3' pretending to have the same src_mac
+def test_detect_MITM_ARP_attack_gateway_ip_as_victim():
+ ARP = ModuleFactory().create_arp_obj()
+ twid = "timewindow1"
uid = "1234"
ts = "1636305825.755132"
saddr = "192.168.1.3"
- mock_db.get_ip_of_mac.return_value = json.dumps([profileid])
- assert ARP.detect_MITM_ARP_attack(twid, uid, saddr, ts, src_mac) is True
+ original_ip = "192.168.1.254"
+ gateway_ip = "192.168.1.254"
+ gateway_mac = "aa:bb:cc:dd:ee:ff"
+ src_mac = "44:11:44:11:44:11"
+
+ ARP.db.get_ip_of_mac.return_value = json.dumps([f"profile_{original_ip}"])
+ ARP.db.get_gateway_ip.return_value = gateway_ip
+ ARP.db.get_gateway_mac.return_value = gateway_mac
+
+ result = ARP.detect_MITM_ARP_attack(twid, uid, saddr, ts, src_mac)
+ assert result is True
+
+
+def test_detect_MITM_ARP_attack_no_original_ip():
+ ARP = ModuleFactory().create_arp_obj()
+ twid = "timewindow1"
+ uid = "1234"
+ ts = "1636305825.755132"
+ saddr = "192.168.1.3"
+ gateway_ip = "192.168.1.254"
+ gateway_mac = "aa:bb:cc:dd:ee:ff"
+ src_mac = "44:11:44:11:44:11"
+
+ ARP.db.get_ip_of_mac.return_value = None
+ ARP.db.get_gateway_ip.return_value = gateway_ip
+ ARP.db.get_gateway_mac.return_value = gateway_mac
+
+ result = ARP.detect_MITM_ARP_attack(twid, uid, saddr, ts, src_mac)
+ assert result is None
+
+
+def test_set_evidence_arp_scan():
+ """Tests set_evidence_arp_scan function"""
+
+ ARP = ModuleFactory().create_arp_obj()
+ ts = "1632214645.783595"
+ uids = ["5678", "1234"]
+ conn_count = 5
+
+ ARP.set_evidence_arp_scan(ts, profileid, twid, uids, conn_count)
+
+ ARP.db.set_evidence.assert_called_once()
+ call_args = ARP.db.set_evidence.call_args[0]
+ evidence = call_args[0]
+ assert evidence.evidence_type == EvidenceType.ARP_SCAN
+ assert evidence.attacker.value == "192.168.1.1"
+ assert evidence.conn_count == conn_count
+ assert set(evidence.uid) == set(uids)
+
+
+@pytest.mark.parametrize(
+ "operation, dst_hw, expected_result",
+ [
+ # Test case 1: Valid gratuitous ARP
+ # (reply, broadcast dst_hw)
+ ("reply", "ff:ff:ff:ff:ff:ff", True),
+ # Test case 2: Valid gratuitous ARP
+ # (reply, all-zero dst_hw)
+ ("reply", "00:00:00:00:00:00", True),
+ # Test case 3: Not gratuitous (request)
+ ("request", "ff:ff:ff:ff:ff:ff", False),
+ # Test case 4: Not gratuitous (unicast dst_hw)
+ ("reply", "00:11:22:33:44:55", False),
+ ],
+)
+def test_check_if_gratutitous_arp(operation, dst_hw, expected_result):
+ """Tests check_if_gratutitous_ARP function"""
+ arp = ModuleFactory().create_arp_obj()
+ result = arp.check_if_gratutitous_ARP(dst_hw, operation)
+ assert result == expected_result
+
+
+# def test_wait_for_arp_scans():
+# ARP = ModuleFactory().create_arp_obj()
+# ARP.pending_arp_scan_evidence = Queue()
+# ARP.time_to_wait = 0.1
+# evidence1 = (
+# "1636305825.755132",
+# "profile_192.168.1.1",
+# "timewindow1",
+# ["uid1"],
+# 5,
+# )
+# evidence2 = (
+# "1636305826.755132",
+# "profile_192.168.1.1",
+# "timewindow1",
+# ["uid2"],
+# 6,
+# )
+# evidence3 = (
+# "1636305827.755132",
+# "profile_192.168.1.2",
+# "timewindow1",
+# ["uid3"],
+# 7,
+# )
+#
+# ARP.pending_arp_scan_evidence.put(evidence1)
+# ARP.pending_arp_scan_evidence.put(evidence2)
+# ARP.pending_arp_scan_evidence.put(evidence3)
+#
+# ARP.set_evidence_arp_scan = MagicMock()
+#
+# thread = threading.Thread(target=ARP.wait_for_arp_scans)
+# thread.daemon = True
+# thread.start()
+#
+# time.sleep(1)
+# expected_calls = [
+# call(
+# "1636305826.755132",
+# "profile_192.168.1.1",
+# "timewindow1",
+# ["uid1", "uid2"],
+# 6,
+# ),
+# call(
+# "1636305827.755132",
+# "profile_192.168.1.2",
+# "timewindow1",
+# ["uid3"],
+# 7,
+# ),
+# ]
+# (
+# ARP.set_evidence_arp_scan.assert_has_calls(
+# expected_calls, any_order=True
+# )
+# )
+# assert ARP.set_evidence_arp_scan.call_count == 2
+# assert ARP.pending_arp_scan_evidence.empty()
+# ARP.stop_thread = True
+# thread.join(timeout=1)
+#
diff --git a/tests/test_asn_info.py b/tests/test_asn_info.py
new file mode 100644
index 000000000..3764347ff
--- /dev/null
+++ b/tests/test_asn_info.py
@@ -0,0 +1,432 @@
+"""Unit test for modules/ip_info/ip_info.py"""
+
+from tests.module_factory import ModuleFactory
+import pytest
+from unittest.mock import Mock, patch, call
+import time
+import json
+
+
+@pytest.mark.parametrize(
+ "ip_address, expected_asn_info",
+ [
+ # Testcase 1: IP with known ASN info
+ (
+ "108.200.116.255",
+ {"asn": {"number": "AS7018", "org": "ATT-INTERNET4"}},
+ ),
+ # Testcase 2: IP with no ASN info
+ (
+ "0.0.0.0",
+ {},
+ ),
+ # Testcase 3: Private IP address
+ (
+ "192.168.1.1",
+ {},
+ ),
+ ],
+)
+def test_get_asn_info_from_geolite(ip_address, expected_asn_info):
+ asn_info = ModuleFactory().create_asn_obj()
+ assert asn_info.get_asn_info_from_geolite(ip_address) == expected_asn_info
+
+
+@pytest.mark.parametrize(
+ "ip_address, expected_whois_info, expected_cached_data",
+ [
+ # Testcase 1: Cache miss, successful ASN lookup
+ (
+ "8.8.8.8",
+ {
+ "asn_description": "GOOGLE, US",
+ "asn_cidr": "8.8.8.0/24",
+ "asn": "15169",
+ },
+ {"asn": {"number": "AS15169", "org": "GOOGLE, US"}},
+ ),
+ # Testcase 2: Cache miss, successful ASN lookup, different IP
+ (
+ "1.1.1.1",
+ {
+ "asn_description": "CLOUDFLARENET, US",
+ "asn_cidr": "1.1.1.0/24",
+ "asn": "13335",
+ },
+ {"asn": {"number": "AS13335", "org": "CLOUDFLARENET, US"}},
+ ),
+ # Testcase 3: Cache hit, return cached data
+ (
+ "8.8.8.8",
+ {
+ "asn_description": "GOOGLE, US",
+ "asn_cidr": "8.8.8.0/24",
+ "asn": "15169",
+ },
+ {"asn": {"number": "AS15169", "org": "GOOGLE, US"}},
+ ),
+ # Testcase 4: IP with lookup failure
+ (
+ "192.168.1.1",
+ None,
+ False,
+ ),
+ ],
+)
+def test_cache_ip_range(ip_address, expected_whois_info, expected_cached_data):
+ asn_info = ModuleFactory().create_asn_obj()
+
+ with patch("ipwhois.IPWhois.lookup_rdap") as mock_lookup_rdap:
+ mock_lookup_rdap.return_value = expected_whois_info
+ result = asn_info.cache_ip_range(ip_address)
+ assert result == expected_cached_data
+
+
+@pytest.mark.parametrize(
+ "ip_address, first_octet, cached_data, expected_result",
+ [
+ # Testcase 1: IP in cached range
+ (
+ "192.168.1.100",
+ "192",
+ json.dumps(
+ {"192.168.0.0/16": {"org": "Test Org", "number": "AS12345"}}
+ ),
+ {"asn": {"org": "Test Org", "number": "AS12345"}},
+ ),
+ # Testcase 2: IP not in cached range
+ (
+ "10.0.0.1",
+ "10",
+ json.dumps(
+ {"192.168.0.0/16": {"org": "Test Org", "number": "AS12345"}}
+ ),
+ None,
+ ),
+ # Testcase 3: No cached data for first octet
+ (
+ "172.16.0.1",
+ "172",
+ None,
+ None,
+ ),
+ # Testcase 4: Invalid IP
+ (
+ "invalid_ip",
+ None,
+ None,
+ None,
+ ),
+ # Testcase 5: Cached range without 'number'
+ (
+ "192.168.1.100",
+ "192",
+ json.dumps({"192.168.0.0/16": {"org": "Test Org"}}),
+ {"asn": {"org": "Test Org"}},
+ ),
+ ],
+)
+def test_get_cached_asn(ip_address, first_octet, cached_data, expected_result):
+ asn_info = ModuleFactory().create_asn_obj()
+
+ with patch(
+ "slips_files.common.slips_utils.utils.get_first_octet"
+ ) as mock_get_first_octet:
+ mock_get_first_octet.return_value = first_octet
+
+ asn_info.db.get_asn_cache.return_value = cached_data
+ result = asn_info.get_cached_asn(ip_address)
+ assert result == expected_result
+
+
+@pytest.mark.parametrize(
+ "cached_data, update_period, expected_result",
+ [
+ # Testcase 1: No cached data
+ (
+ None,
+ 3600,
+ True,
+ ),
+ # Testcase 2: Cached data with no timestamp
+ (
+ {"asn": {}},
+ 3600,
+ True,
+ ),
+ # Testcase 3: Cached data with old timestamp
+ (
+ {"asn": {"timestamp": time.time() - 7200}},
+ 3600,
+ True,
+ ),
+ # Testcase 4: Cached data with recent timestamp
+ (
+ {"asn": {"timestamp": time.time() - 1800}},
+ 3600,
+ False,
+ ),
+ ],
+)
+def test_update_asn(cached_data, update_period, expected_result):
+ asn_info = ModuleFactory().create_asn_obj()
+ result = asn_info.update_asn(cached_data, update_period)
+ assert result == expected_result
+
+
+@pytest.mark.parametrize(
+ "ip_address, is_ignored, api_status_code, api_text, "
+ "mock_get_side_effect, expected_result",
+ [
+ # Testcase 1: Valid IP with ASN info
+ (
+ "8.8.8.8",
+ False,
+ 200,
+ json.dumps({"as": "AS15169 Google LLC"}),
+ None,
+ {"asn": {"number": "AS15169", "org": "Google LLC"}},
+ ),
+ # Testcase 2: Valid IP without ASN info
+ (
+ "1.1.1.1",
+ False,
+ 200,
+ json.dumps({"as": ""}),
+ None,
+ None,
+ ),
+ # Testcase 3: API request fails
+ (
+ "192.168.1.1",
+ False,
+ 404,
+ "",
+ None,
+ {},
+ ),
+ # Testcase 4: Ignored IP
+ (
+ "127.0.0.1",
+ True,
+ None,
+ None,
+ None,
+ {},
+ ),
+ ],
+)
+def test_get_asn_online(
+ ip_address,
+ is_ignored,
+ api_status_code,
+ api_text,
+ mock_get_side_effect,
+ expected_result,
+):
+ asn_info = ModuleFactory().create_asn_obj()
+
+ with patch(
+ "slips_files.common.slips_utils.utils.is_ignored_ip"
+ ) as mock_is_ignored_ip:
+ mock_is_ignored_ip.return_value = is_ignored
+
+ with patch("requests.get") as mock_get:
+ mock_response = Mock()
+ mock_response.status_code = api_status_code
+ mock_response.text = api_text
+ mock_get.return_value = mock_response
+ mock_get.side_effect = mock_get_side_effect
+
+ result = asn_info.get_asn_online(ip_address)
+ assert result == expected_result
+
+
+@pytest.mark.parametrize(
+ "ip, cached_ip_info, asn, expected_call",
+ [
+ # Testcase 1: Update with new ASN info
+ (
+ "192.168.1.1",
+ {},
+ {"asn": {"number": "AS12345", "org": "Test Org"}},
+ (
+ "192.168.1.1",
+ {
+ "asn": {"number": "AS12345", "org": "Test Org"},
+ "timestamp": 1625097600,
+ },
+ ),
+ ),
+ # Testcase 2: Update existing ASN info
+ (
+ "10.0.0.1",
+ {"country": "US"},
+ {"asn": {"number": "AS67890", "org": "Another Org"}},
+ (
+ "10.0.0.1",
+ {
+ "country": "US",
+ "asn": {"number": "AS67890", "org": "Another Org"},
+ "timestamp": 1625097600,
+ },
+ ),
+ ),
+ # Testcase 3: Update with empty ASN info
+ (
+ "172.16.0.1",
+ {"some_key": "some_value"},
+ {},
+ (
+ "172.16.0.1",
+ {
+ "some_key": "some_value",
+ "timestamp": 1625097600,
+ },
+ ),
+ ),
+ ],
+)
+def test_update_ip_info(ip, cached_ip_info, asn, expected_call):
+ asn_info = ModuleFactory().create_asn_obj()
+
+ with patch("time.time", return_value=1625097600):
+ asn_info.update_ip_info(ip, cached_ip_info, asn)
+
+ asn_info.db.set_ip_info.assert_called_once_with(*expected_call)
+ expected_cached_ip_info = expected_call[1]
+ assert cached_ip_info == expected_cached_ip_info
+
+
+@pytest.mark.parametrize(
+ "ip, cached_ip_info, cached_asn, cache_ip_range_result, "
+ "geolite_asn, online_asn, expected_result, expected_calls",
+ [
+ # Testcase 1: ASN found in cached range
+ (
+ "192.168.1.1",
+ {},
+ {"asn": {"number": "AS12345", "org": "Cached Org"}},
+ None,
+ None,
+ None,
+ {"asn": {"number": "AS12345", "org": "Cached Org"}},
+ [call.get_cached_asn("192.168.1.1")],
+ ),
+ # Testcase 2: ASN found by cache_ip_range
+ (
+ "8.8.8.8",
+ {},
+ None,
+ {"asn": {"number": "AS15169", "org": "Google LLC"}},
+ None,
+ None,
+ {"asn": {"number": "AS15169", "org": "Google LLC"}},
+ [call.get_cached_asn("8.8.8.8"), call.cache_ip_range("8.8.8.8")],
+ ),
+ # Testcase 3: ASN found in GeoLite database
+ (
+ "1.1.1.1",
+ {},
+ None,
+ None,
+ {"asn": {"number": "AS13335", "org": "Cloudflare, Inc."}},
+ None,
+ {"asn": {"number": "AS13335", "org": "Cloudflare, Inc."}},
+ [
+ call.get_cached_asn("1.1.1.1"),
+ call.cache_ip_range("1.1.1.1"),
+ call.get_asn_info_from_geolite("1.1.1.1"),
+ ],
+ ),
+ # Testcase 4: ASN found online
+ (
+ "203.0.113.1",
+ {},
+ None,
+ None,
+ None,
+ {"asn": {"number": "AS64496", "org": "Example ISP"}},
+ {"asn": {"number": "AS64496", "org": "Example ISP"}},
+ [
+ call.get_cached_asn("203.0.113.1"),
+ call.cache_ip_range("203.0.113.1"),
+ call.get_asn_info_from_geolite("203.0.113.1"),
+ call.get_asn_online("203.0.113.1"),
+ ],
+ ),
+ ],
+)
+def test_get_asn_with_result(
+ ip,
+ cached_ip_info,
+ cached_asn,
+ cache_ip_range_result,
+ geolite_asn,
+ online_asn,
+ expected_result,
+ expected_calls,
+):
+ asn_info = ModuleFactory().create_asn_obj()
+
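+ # expected fallback order: cached ASN, then cached ranges, then the
+ # GeoLite db, then the online lookup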
+ with patch.object(
+ asn_info, "get_cached_asn", return_value=cached_asn
+ ) as mock_get_cached_asn, patch.object(
+ asn_info, "cache_ip_range", return_value=cache_ip_range_result
+ ) as mock_cache_ip_range, patch.object(
+ asn_info, "get_asn_info_from_geolite", return_value=geolite_asn
+ ) as mock_get_geolite, patch.object(
+ asn_info, "get_asn_online", return_value=online_asn
+ ) as mock_get_online, patch.object(
+ asn_info, "update_ip_info"
+ ) as mock_update_ip_info:
+ asn_info.get_asn(ip, cached_ip_info)
+
+ actual_calls = (
+ mock_get_cached_asn.mock_calls
+ + mock_cache_ip_range.mock_calls
+ + mock_get_geolite.mock_calls
+ + mock_get_online.mock_calls
+ )
+ assert actual_calls == expected_calls
+
+ mock_update_ip_info.assert_called_once_with(
+ ip, cached_ip_info, expected_result
+ )
+
+
+def test_get_asn_without_result():
+ """Testcase: ASN not found anywhere."""
+ ip = "10.0.0.1"
+ cached_ip_info = {}
+ expected_calls = [
+ call.get_cached_asn("10.0.0.1"),
+ call.cache_ip_range("10.0.0.1"),
+ call.get_asn_info_from_geolite("10.0.0.1"),
+ call.get_asn_online("10.0.0.1"),
+ ]
+
+ asn_info = ModuleFactory().create_asn_obj()
+
+ with patch.object(
+ asn_info, "get_cached_asn", return_value=None
+ ) as mock_get_cached_asn, patch.object(
+ asn_info, "cache_ip_range", return_value=None
+ ) as mock_cache_ip_range, patch.object(
+ asn_info, "get_asn_info_from_geolite", return_value=None
+ ) as mock_get_geolite, patch.object(
+ asn_info, "get_asn_online", return_value=None
+ ) as mock_get_online, patch.object(
+ asn_info, "update_ip_info"
+ ) as mock_update_ip_info:
+ asn_info.get_asn(ip, cached_ip_info)
+
+ actual_calls = (
+ mock_get_cached_asn.mock_calls
+ + mock_cache_ip_range.mock_calls
+ + mock_get_geolite.mock_calls
+ + mock_get_online.mock_calls
+ )
+ assert actual_calls == expected_calls
+
+ mock_update_ip_info.assert_not_called()
diff --git a/tests/test_base_model.py b/tests/test_base_model.py
new file mode 100644
index 000000000..0b6977d56
--- /dev/null
+++ b/tests/test_base_model.py
@@ -0,0 +1,164 @@
+import pytest
+from unittest.mock import Mock
+from tests.module_factory import ModuleFactory
+
+
+@pytest.mark.parametrize(
+ "ipaddr, reports, expected_score, expected_confidence",
+ [ # testcase1: get opinion with one report
+ ("192.168.1.1", [(0.8, 0.9, 0.7, 0.8, 0.9)], 0.8, 0.9),
+ # testcase2: get opinion with multiple reports
+ (
+ "172.16.0.1",
+ [(0.6, 0.7, 0.8, 0.7, 0.8), (0.7, 0.8, 0.9, 0.8, 0.9)],
+ 0.65,
+ 0.75,
+ ),
+ ],
+)
+def test_get_opinion_on_ip_with_reports(
+ ipaddr, reports, expected_score, expected_confidence
+):
+ base_model = ModuleFactory().create_base_model_obj()
+ base_model.trustdb.get_opinion_on_ip.return_value = reports
+ base_model.assemble_peer_opinion = Mock(
+ return_value=(expected_score, expected_confidence)
+ )
+
+ score, confidence = base_model.get_opinion_on_ip(ipaddr)
+
+ base_model.trustdb.get_opinion_on_ip.assert_called_once_with(ipaddr)
+ base_model.assemble_peer_opinion.assert_called_once_with(reports)
+ base_model.trustdb.update_cached_network_opinion.assert_called_once_with(
+ "ip", ipaddr, expected_score, expected_confidence, 0
+ )
+ assert score == expected_score
+ assert confidence == expected_confidence
+
+
+def test_get_opinion_on_ip_no_reports():
+ base_model = ModuleFactory().create_base_model_obj()
+ base_model.trustdb.get_opinion_on_ip.return_value = []
+
+ base_model.assemble_peer_opinion = Mock()
+ base_model.trustdb.update_cached_network_opinion = Mock()
+
+ ipaddr = "10.0.0.1"
+ score, confidence = base_model.get_opinion_on_ip(ipaddr)
+
+ base_model.trustdb.get_opinion_on_ip.assert_called_once_with(ipaddr)
+ base_model.assemble_peer_opinion.assert_not_called()
+ base_model.trustdb.update_cached_network_opinion.assert_not_called()
+ assert score is None
+ assert confidence is None
+
+
+@pytest.mark.parametrize(
+ "reliability, score, confidence, expected_trust",
+ [
+ # testcase1: compute peer trust with normal values
+ (0.8, 0.9, 0.7, 0.595),
+ # testcase2: compute peer trust with mid-range values
+ (0.5, 0.6, 0.8, 0.415),
+ # testcase3: compute peer trust with maximum values
+ (1.0, 1.0, 1.0, 0.85),
+ # testcase4: compute peer trust with minimum values
+ (0.0, 0.0, 0.0, 0.0),
+ ],
+)
+def test_compute_peer_trust(reliability, score, confidence, expected_trust):
+ base_model = ModuleFactory().create_base_model_obj()
+ result = base_model.compute_peer_trust(reliability, score, confidence)
+ assert pytest.approx(result, 0.001) == expected_trust
+
+
+@pytest.mark.parametrize(
+ "text, verbose, debug",
+ [
+ # testcase1: Print a test message with normal verbosity
+ ("Test message", 1, 0),
+ # testcase2: Print a debug message
+ ("Debug message", 0, 1),
+ # testcase3: Print a verbose message
+ ("Verbose message", 2, 0),
+ # testcase4: Print an error message
+ ("Error message", 0, 3),
+ ],
+)
+def test_print(text, verbose, debug):
+ base_model = ModuleFactory().create_base_model_obj()
+ base_model.notify_observers = Mock()
+
+ base_model.print(text, verbose, debug)
+
+ base_model.notify_observers.assert_called_once_with(
+ {
+ "from": base_model.name,
+ "txt": text,
+ "verbose": verbose,
+ "debug": debug,
+ }
+ )
+
+
+@pytest.mark.parametrize(
+ "data, expected_score, expected_confidence",
+ [
+ # testcase1: assemble opinion with one report
+ ([(0.8, 0.9, 0.7, 0.8, 0.9)], 0.8, 0.5445),
+ # testcase2: assemble opinion with multiple reports
+ (
+ [(0.6, 0.7, 0.8, 0.7, 0.8), (0.7, 0.8, 0.9, 0.8, 0.9)],
+ 0.6517774343122101,
+ 0.46599999999999997,
+ ),
+ # testcase3: assemble opinion with diverse reports
+ (
+ [
+ (0.9, 0.8, 0.6, 0.7, 0.8),
+ (0.5, 0.6, 0.9, 0.8, 0.7),
+ (0.3, 0.4, 0.7, 0.6, 0.5),
+ ],
+ 0.5707589285714285,
+ 0.30233333333333334,
+ ),
+ ],
+)
+def test_assemble_peer_opinion(data, expected_score, expected_confidence):
+ base_model = ModuleFactory().create_base_model_obj()
+
+ score, confidence = base_model.assemble_peer_opinion(data)
+
+ assert pytest.approx(score, 0.0001) == expected_score
+ assert pytest.approx(confidence, 0.0001) == expected_confidence
+
+
+@pytest.mark.parametrize(
+ "peers, expected_weighted_trust",
+ [
+ # testcase1: normalize single peer reputation
+ ([0.5], [1.0]),
+ # testcase2: normalize multiple peer reputations
+ (
+ [0.7, 0.3, -0.2],
+ [0.4473684210526316, 0.34210526315789475, 0.2105263157894737],
+ ),
+ # testcase3: normalize peer reputations including extremes
+ ([1.0, 0.0, -1.0], [0.6666666666666666, 0.3333333333333333, 0.0]),
+ # testcase4: normalize peer reputations with all negative values
+ (
+ [-0.2, -0.5, -0.8],
+ [0.5333333333333333, 0.3333333333333333, 0.1333333333333333],
+ ),
+ ],
+)
+def test_normalize_peer_reputations(peers, expected_weighted_trust):
+ base_model = ModuleFactory().create_base_model_obj()
+
+ weighted_trust = base_model.normalize_peer_reputations(peers)
+
+ assert len(weighted_trust) == len(expected_weighted_trust)
+ for calculated, expected in zip(weighted_trust, expected_weighted_trust):
+ assert pytest.approx(calculated, 0.0001) == expected
+
+ assert pytest.approx(sum(weighted_trust), 0.0001) == 1.0
diff --git a/tests/test_cesnet.py b/tests/test_cesnet.py
new file mode 100644
index 000000000..b6e960804
--- /dev/null
+++ b/tests/test_cesnet.py
@@ -0,0 +1,125 @@
+import pytest
+from unittest.mock import MagicMock
+from tests.module_factory import ModuleFactory
+
+
+@pytest.mark.parametrize(
+ "evidence_in_idea, expected_output",
+ [
+ # testcase1: Remove private IPv4
+ (
+ {"Source": [{"IP4": ["192.168.1.100"]}, {"IP4": ["8.8.8.8"]}]},
+ {"Source": [{"IP4": ["8.8.8.8"]}]},
+ ),
+ # testcase2: Remove private IPv6
+ (
+ {"Target": [{"IP6": ["fd00::1"]}, {"IP6": ["2001:db8::1"]}]},
+ {"Target": [{"IP6": ["2001:db8::1"]}]},
+ ),
+ # testcase3: Keep public IPs
+ (
+ {"Source": [{"IP4": ["1.1.1.1"]}]},
+ {"Source": [{"IP4": ["1.1.1.1"]}]},
+ ),
+ # testcase4: Remove all IPs (should result in empty dict)
+ (
+ {
+ "Source": [{"IP4": ["10.0.0.1"]}],
+ "Target": [{"IP6": ["fc00::1"]}],
+ },
+ {},
+ ),
+ ],
+)
+def test_remove_private_ips(evidence_in_idea, expected_output):
+ cesnet = ModuleFactory().create_cesnet_obj()
+ result = cesnet.remove_private_ips(evidence_in_idea)
+ assert result == expected_output
+
+
+@pytest.mark.parametrize(
+ "evidence_in_idea, expected_output",
+ [
+ # testcase1: Valid alert with Source
+ ({"Source": [{"IP4": ["8.8.8.8"]}]}, True),
+ # testcase2: Valid alert with Target
+ ({"Target": [{"IP6": ["2001:db8::1"]}]}, True),
+ # testcase3: Invalid alert (no Source or Target)
+ ({}, False),
+ ],
+)
+def test_is_valid_alert(evidence_in_idea, expected_output):
+ cesnet = ModuleFactory().create_cesnet_obj()
+ result = cesnet.is_valid_alert(evidence_in_idea)
+ assert result == expected_output
+
+
+@pytest.mark.parametrize(
+ "events, expected_output",
+ [
+ # testcase1: Single valid event
+ (
+ [
+ {
+ "Source": [{"IP4": ["8.8.8.8"]}],
+ "Category": ["Malware"],
+ "Description": "Test",
+ "Node": [{"Name": "Test", "SW": ["TestSW"]}],
+ }
+ ],
+ 1,
+ ),
+ # testcase2: Multiple events, one invalid
+ (
+ [
+ {
+ "Source": [{"IP4": ["8.8.8.8"]}],
+ "Category": ["Malware"],
+ "Description": "Test1",
+ "Node": [{"Name": "Test1", "SW": ["TestSW1"]}],
+ },
+ {}, # Invalid event
+ {
+ "Source": [{"IP6": ["2001:db8::1"]}],
+ "Category": ["Intrusion"],
+ "Description": "Test2",
+ "Node": [{"Name": "Test2", "SW": ["TestSW2"]}],
+ },
+ ],
+ 2,
+ ),
+ # testcase3: All invalid events
+ ([{}, {}, {}], 0),
+ # testcase4: Events with multiple source IPs
+ (
+ [
+ {
+ "Source": [
+ {"IP4": ["192.168.1.100", "8.8.8.8"]},
+ {"IP6": ["2001:db8::1"]},
+ ],
+ "Category": ["Malware"],
+ "Description": "Test",
+ "Node": [{"Name": "Test", "SW": ["TestSW"]}],
+ },
+ ],
+ 2,
+ ),
+ ],
+)
+def test_import_alerts(events, expected_output):
+ cesnet = ModuleFactory().create_cesnet_obj()
+
+ cesnet.wclient = MagicMock()
+ cesnet.wclient.getEvents = MagicMock(return_value=events)
+ cesnet.db = MagicMock()
+ cesnet.db.add_ips_to_IoC = MagicMock()
+ cesnet.print = MagicMock()
+
+ cesnet.import_alerts()
+
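+ # import_alerts() should push all extracted source IPs to the DB in a single batched call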
+ assert cesnet.db.add_ips_to_IoC.call_count == 1
+
+ src_ips = cesnet.db.add_ips_to_IoC.call_args[0][0]
+
+ assert len(src_ips) == expected_output
diff --git a/tests/test_checker.py b/tests/test_checker.py
new file mode 100644
index 000000000..4ee81e78c
--- /dev/null
+++ b/tests/test_checker.py
@@ -0,0 +1,215 @@
+import os
+from unittest import mock
+import psutil
+import pytest
+from unittest.mock import patch
+from tests.module_factory import ModuleFactory
+
+
+def test_clear_redis_cache():
+ checker = ModuleFactory().create_checker_obj()
+ checker.clear_redis_cache()
+ checker.main.redis_man.clear_redis_cache_database.assert_called_once()
+ assert checker.main.input_information == ""
+ assert checker.main.zeek_dir == ""
+ checker.main.redis_man.log_redis_server_pid.assert_called_once_with(
+ 6379, mock.ANY
+ )
+ checker.main.terminate_slips.assert_called_once()
+
+
+@pytest.mark.parametrize(
+ "args, expected_calls",
+ [
+ # Test case 1: Help flag
+ ({"help": True}, ["print_version", "terminate_slips"]),
+ # Test case 2: Interface and filepath flags
+ (
+ {"interface": "eth0", "filepath": "/path/to/file"},
+ ["terminate_slips"],
+ ),
+ # Test case 3: Interface/filepath with input_module
+ ({"interface": "eth0", "input_module": "module"}, ["terminate_slips"]),
+ # Test case 4: Save/db flag without root privileges
+ ({"save": True}, ["terminate_slips"]),
+ # Test case 5: Invalid verbose/debug value
+ ({"verbose": "4"}, ["terminate_slips"]),
+ # Test case 6: Redis not running
+ ({}, ["terminate_slips"]),
+ # Test case 7: Invalid config file
+ ({"config": "/nonexistent/path"}, ["terminate_slips"]),
+ # Test case 8: Invalid interface
+ ({"interface": "nonexistent0"}, ["terminate_slips"]),
+ # Test case 9: Invalid input module
+ ({"input_module": "nonexistent_module"}, ["terminate_slips"]),
+ # Test case 10: Blocking without interface
+ ({"blocking": True}, ["terminate_slips"]),
+ # Test case 11: Version flag
+ ({"version": True}, ["print_version", "terminate_slips"]),
+ # Test case 12: Blocking with interface but not root
+ ({"interface": "eth0", "blocking": True}, ["terminate_slips"]),
+ # Test case 13: Clear blocking without root
+ ({"clearblocking": True}, ["terminate_slips"]),
+ # Test case 14: Save and load DB simultaneously
+ ({"save": True, "db": True}, ["terminate_slips"]),
+ ],
+)
+def test_check_given_flags(args, expected_calls, monkeypatch):
+
+ checker = ModuleFactory().create_checker_obj()
+ checker.main.terminate_slips.reset_mock()
+ checker.main.print_version.reset_mock()
+
+ for arg, value in args.items():
+ setattr(checker.main.args, arg, value)
+
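+ # simulate a non-root user with missing paths, a stopped redis, and unknown input modules,
+ # so every parametrized flag combination ends in terminate_slips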
+ monkeypatch.setattr(os, "getuid", lambda: 1000)
+ monkeypatch.setattr(os, "geteuid", lambda: 1000)
+ monkeypatch.setattr(os.path, "exists", lambda x: False)
+ monkeypatch.setattr(psutil, "net_if_addrs", lambda: {"eth0": None})
+ checker.main.redis_man.check_redis_database.return_value = False
+ checker.input_module_exists = mock.MagicMock(return_value=False)
+
+ checker.check_given_flags()
+
+ for method_name in expected_calls:
+ method = getattr(checker.main, method_name)
+ assert (
+ method.called
+ ), f"Expected '{method_name}' to be called, but it was not."
+
+
+def test_check_given_flags_root_user(monkeypatch):
+ checker = ModuleFactory().create_checker_obj()
+ checker.main.args.clearblocking = True
+ monkeypatch.setattr(os, "geteuid", lambda: 0)
+
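+ # with euid 0 (root), the clearblocking flag should trigger deleting the blocking chain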
+ with mock.patch.object(checker, "delete_blocking_chain") as mock_delete:
+ checker.check_given_flags()
+ mock_delete.assert_called_once()
+ checker.main.terminate_slips.assert_called()
+
+
+def test_check_input_type_interface():
+
+ checker = ModuleFactory().create_checker_obj()
+ checker.main.args.interface = "eth0"
+ checker.main.args.filepath = None
+ checker.main.args.db = None
+ checker.main.args.input_module = None
+
+ result = checker.check_input_type()
+ assert result == ("interface", "eth0", False)
+
+
+def test_check_input_type_db():
+
+ checker = ModuleFactory().create_checker_obj()
+ checker.main.args.interface = None
+ checker.main.args.filepath = None
+ checker.main.args.db = True
+ checker.main.args.input_module = None
+
+ checker.main.redis_man.load_db = mock.MagicMock()
+
+ result = checker.check_input_type()
+ assert result is None
+ checker.main.redis_man.load_db.assert_called_once()
+
+
+def test_check_input_type_input_module():
+
+ checker = ModuleFactory().create_checker_obj()
+ checker.main.args.interface = None
+ checker.main.args.filepath = None
+ checker.main.args.db = None
+ checker.main.args.input_module = "zeek"
+
+ result = checker.check_input_type()
+ assert result == ("zeek", "input_module", "zeek")
+
+
+@pytest.mark.parametrize(
+ "filepath, is_file, is_dir, expected_result",
+ [
+ # Test case 1: Filepath input (file)
+ ("/path/to/file", True, False, ("mock_type", "/path/to/file", False)),
+ # Test case 2: Filepath input (directory)
+ ("/path/to/dir", False, True, ("mock_type", "/path/to/dir", False)),
+ ],
+)
+def test_check_input_type_filepath(filepath, is_file, is_dir, expected_result):
+ checker = ModuleFactory().create_checker_obj()
+ checker.main.args.interface = None
+ checker.main.args.filepath = filepath
+ checker.main.args.db = None
+ checker.main.args.input_module = None
+
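+ # stub the filesystem checks and file-type detection so only the input-type branching is exercised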
+ with mock.patch("os.path.isfile", return_value=is_file), mock.patch(
+ "os.path.isdir", return_value=is_dir
+ ), mock.patch.object(
+ checker.main, "get_input_file_type", return_value="mock_type"
+ ):
+
+ result = checker.check_input_type()
+ assert result == expected_result
+
+
+def test_check_input_type_stdin():
+
+ checker = ModuleFactory().create_checker_obj()
+ checker.main.args.interface = None
+ checker.main.args.filepath = "stdin-type"
+ checker.main.args.db = None
+ checker.main.args.input_module = None
+
+ with mock.patch("os.path.isfile", return_value=False), mock.patch(
+ "os.path.isdir", return_value=False
+ ), mock.patch.object(
+ checker.main,
+ "handle_flows_from_stdin",
+ return_value=("mock_type", "mock_line_type"),
+ ):
+
+ result = checker.check_input_type()
+ assert result == ("mock_type", "stdin-type", "mock_line_type")
+
+
+def test_check_input_type_no_input():
+
+ checker = ModuleFactory().create_checker_obj()
+ checker.main.args.interface = None
+ checker.main.args.filepath = None
+ checker.main.args.db = None
+ checker.main.args.input_module = None
+
+ with pytest.raises(SystemExit) as excinfo:
+ checker.check_input_type()
+
+ assert excinfo.value.code == -1
+
+
+@pytest.mark.parametrize(
+ "module_name, available_modules, module_dir_content, expected_result",
+ [
+ # Test case 1: Module exists and is correctly structured
+ ("valid_module", ["valid_module"], ["valid_module.py"], True),
+ # Test case 2: Module directory doesn't exist
+ ("nonexistent_module", ["other_module"], [], False),
+ # Test case 3: Module directory exists but .py file is missing
+ (
+ "incomplete_module",
+ ["incomplete_module"],
+ ["other_file.txt"],
+ False,
+ ),
+ ],
+)
+def test_input_module_exists(
+ module_name, available_modules, module_dir_content, expected_result
+):
+ checker = ModuleFactory().create_checker_obj()
+ with patch("os.listdir") as mock_listdir:
+ mock_listdir.side_effect = [available_modules, module_dir_content]
+ result = checker.input_module_exists(module_name)
+ assert result == expected_result
diff --git a/tests/test_conn.py b/tests/test_conn.py
index dd9daf049..20148b47f 100644
--- a/tests/test_conn.py
+++ b/tests/test_conn.py
@@ -67,7 +67,6 @@
],
)
def test_is_p2p(
- mock_db,
dport,
proto,
daddr,
@@ -75,7 +74,7 @@ def test_is_p2p(
expected_result,
expected_final_p2p_daddrs,
):
- conn = ModuleFactory().create_conn_analyzer_obj(mock_db)
+ conn = ModuleFactory().create_conn_analyzer_obj()
conn.p2p_daddrs = initial_p2p_daddrs.copy()
result = conn.is_p2p(dport, proto, daddr)
assert result == expected_result
@@ -93,7 +92,6 @@ def test_is_p2p(
)
def test_check_unknown_port(
mocker,
- mock_db,
dport,
proto,
expected_result,
@@ -101,9 +99,9 @@ def test_check_unknown_port(
mock_is_ftp_port,
mock_port_belongs_to_an_org,
):
- conn = ModuleFactory().create_conn_analyzer_obj(mock_db)
- mock_db.get_port_info.return_value = mock_port_info
- mock_db.is_ftp_port.return_value = mock_is_ftp_port
+ conn = ModuleFactory().create_conn_analyzer_obj()
+ conn.db.get_port_info.return_value = mock_port_info
+ conn.db.is_ftp_port.return_value = mock_is_ftp_port
flowalerts_mock = mocker.patch(
"modules.flowalerts.conn.Conn.port_belongs_to_an_org"
)
@@ -117,12 +115,12 @@ def test_check_unknown_port(
)
-def test_check_unknown_port_true_case(mocker, mock_db):
- conn = ModuleFactory().create_conn_analyzer_obj(mock_db)
+def test_check_unknown_port_true_case(mocker):
+ conn = ModuleFactory().create_conn_analyzer_obj()
dport = "12345"
proto = "tcp"
- mock_db.get_port_info.return_value = None
- mock_db.is_ftp_port.return_value = False
+ conn.db.get_port_info.return_value = None
+ conn.db.is_ftp_port.return_value = False
mocker.patch.object(conn, "port_belongs_to_an_org", return_value=False)
mocker.patch.object(conn, "is_p2p", return_value=False)
mock_set_evidence = mocker.patch.object(conn.set_evidence, "unknown_port")
@@ -167,18 +165,18 @@ def test_check_unknown_port_true_case(mocker, mock_db):
],
)
def test_check_multiple_reconnection_attempts(
- mocker, mock_db, origstate, saddr, daddr, dport, uids, expected_calls
+ mocker, origstate, saddr, daddr, dport, uids, expected_calls
):
"""
Tests the check_multiple_reconnection_attempts function
with various scenarios.
"""
- conn = ModuleFactory().create_conn_analyzer_obj(mock_db)
+ conn = ModuleFactory().create_conn_analyzer_obj()
mock_set_evidence = mocker.patch(
"modules.flowalerts.set_evidence."
"SetEvidnceHelper.multiple_reconnection_attempts"
)
- mock_db.get_reconnections_for_tw.return_value = {}
+ conn.db.get_reconnections_for_tw.return_value = {}
for uid in uids:
conn.check_multiple_reconnection_attempts(
@@ -202,8 +200,8 @@ def test_check_multiple_reconnection_attempts(
("8.8.8.8", False),
],
)
-def test_is_ignored_ip_data_upload(mock_db, ip_address, expected_result):
- conn = ModuleFactory().create_conn_analyzer_obj(mock_db)
+def test_is_ignored_ip_data_upload(ip_address, expected_result):
+ conn = ModuleFactory().create_conn_analyzer_obj()
conn.gateway = "192.168.1.1"
assert conn.is_ignored_ip_data_upload(ip_address) is expected_result
@@ -243,8 +241,8 @@ def test_is_ignored_ip_data_upload(mock_db, ip_address, expected_result):
),
],
)
-def test_get_sent_bytes(mock_db, all_flows, expected_bytes_sent):
- conn = ModuleFactory().create_conn_analyzer_obj(mock_db)
+def test_get_sent_bytes(all_flows, expected_bytes_sent):
+ conn = ModuleFactory().create_conn_analyzer_obj()
bytes_sent = conn.get_sent_bytes(all_flows)
assert bytes_sent == expected_bytes_sent
@@ -262,13 +260,13 @@ def test_get_sent_bytes(mock_db, all_flows, expected_bytes_sent):
],
)
def test_check_data_upload(
- mocker, mock_db, sbytes, daddr, expected_result, expected_call_count
+ mocker, sbytes, daddr, expected_result, expected_call_count
):
"""
Tests the check_data_upload function with
various scenarios for data upload.
"""
- conn = ModuleFactory().create_conn_analyzer_obj(mock_db)
+ conn = ModuleFactory().create_conn_analyzer_obj()
mock_set_evidence = mocker.patch(
"modules.flowalerts.set_evidence." "SetEvidnceHelper.data_exfiltration"
)
@@ -304,7 +302,6 @@ def test_check_data_upload(
)
def test_should_ignore_conn_without_dns(
mocker,
- mock_db,
flow_type,
appproto,
daddr,
@@ -316,9 +313,9 @@ def test_should_ignore_conn_without_dns(
):
"""Tests the should_ignore_conn_without_dns
function with various scenarios."""
- conn = ModuleFactory().create_conn_analyzer_obj(mock_db)
- mock_db.get_input_type.return_value = input_type
- mock_db.is_doh_server.return_value = is_doh_server
+ conn = ModuleFactory().create_conn_analyzer_obj()
+ conn.db.get_input_type.return_value = input_type
+ conn.db.is_doh_server.return_value = is_doh_server
conn.dns_analyzer = Mock()
conn.dns_analyzer.is_dns_server = Mock(return_value=is_dns_server)
@@ -369,18 +366,17 @@ def test_should_ignore_conn_without_dns(
],
)
def test_check_if_resolution_was_made_by_different_version(
- mock_db,
profileid,
daddr,
mock_get_the_other_ip_version_return_value,
mock_get_dns_resolution_return_value,
expected_result,
):
- conn = ModuleFactory().create_conn_analyzer_obj(mock_db)
- mock_db.get_the_other_ip_version.return_value = (
+ conn = ModuleFactory().create_conn_analyzer_obj()
+ conn.db.get_the_other_ip_version.return_value = (
mock_get_the_other_ip_version_return_value
)
- mock_db.get_dns_resolution.return_value = (
+ conn.db.get_dns_resolution.return_value = (
mock_get_dns_resolution_return_value
)
@@ -430,12 +426,12 @@ def test_check_if_resolution_was_made_by_different_version(
],
)
def test_check_conn_to_port_0(
- mocker, mock_db, sport, dport, proto, saddr, daddr, expected_calls
+ mocker, sport, dport, proto, saddr, daddr, expected_calls
):
"""
Tests the check_conn_to_port_0 function with various scenarios.
"""
- conn = ModuleFactory().create_conn_analyzer_obj(mock_db)
+ conn = ModuleFactory().create_conn_analyzer_obj()
mock_set_evidence = mocker.patch(
"modules.flowalerts.set_evidence."
"SetEvidnceHelper.for_port_0_connection"
@@ -508,7 +504,6 @@ def test_check_conn_to_port_0(
)
def test_check_non_http_port_80_conns(
mocker,
- mock_db,
state,
daddr,
dport,
@@ -521,7 +516,7 @@ def test_check_non_http_port_80_conns(
Tests the check_non_http_port_80_conns
function with various scenarios.
"""
- conn = ModuleFactory().create_conn_analyzer_obj(mock_db)
+ conn = ModuleFactory().create_conn_analyzer_obj()
mock_set_evidence = mocker.patch(
"modules.flowalerts.set_evidence."
"SetEvidnceHelper.non_http_port_80_conn"
@@ -590,9 +585,9 @@ def test_check_non_http_port_80_conns(
],
)
def test_check_long_connection(
- mocker, mock_db, dur, daddr, saddr, expected_result, expected_evidence_call
+ mocker, dur, daddr, saddr, expected_result, expected_evidence_call
):
- conn = ModuleFactory().create_conn_analyzer_obj(mock_db)
+ conn = ModuleFactory().create_conn_analyzer_obj()
conn.long_connection_threshold = 1500
mock_set_evidence = mocker.patch(
"modules.flowalerts.set_evidence." "SetEvidnceHelper.long_connection"
@@ -664,7 +659,6 @@ def test_check_long_connection(
)
def test_port_belongs_to_an_org(
mocker,
- mock_db,
daddr,
portproto,
org_info,
@@ -673,10 +667,10 @@ def test_port_belongs_to_an_org(
is_ip_in_org,
expected_result,
):
- conn = ModuleFactory().create_conn_analyzer_obj(mock_db)
- mock_db.get_organization_of_port.return_value = org_info
- mock_db.get_mac_vendor_from_profile.return_value = mac_vendor
- mock_db.get_ip_identification.return_value = ip_identification
+ conn = ModuleFactory().create_conn_analyzer_obj()
+ conn.db.get_organization_of_port.return_value = org_info
+ conn.db.get_mac_vendor_from_profile.return_value = mac_vendor
+ conn.db.get_ip_identification.return_value = ip_identification
mocker.patch.object(
conn.whitelist.org_analyzer, "is_ip_in_org", return_value=is_ip_in_org
)
@@ -733,14 +727,14 @@ def test_port_belongs_to_an_org(
],
)
def test_check_device_changing_ips(
- mocker, mock_db, flow_type, smac, old_ip_list, saddr, expected_calls
+ mocker, flow_type, smac, old_ip_list, saddr, expected_calls
):
- conn = ModuleFactory().create_conn_analyzer_obj(mock_db)
+ conn = ModuleFactory().create_conn_analyzer_obj()
mock_set_evidence = mocker.patch.object(
conn.set_evidence, "device_changing_ips"
)
- mock_db.was_ip_seen_in_connlog_before.return_value = expected_calls == 0
- mock_db.get_ip_of_mac.return_value = old_ip_list
+ conn.db.was_ip_seen_in_connlog_before.return_value = expected_calls == 0
+ conn.db.get_ip_of_mac.return_value = old_ip_list
conn.check_device_changing_ips(
flow_type, smac, f"profile_{saddr}", twid, uid, timestamp
@@ -807,7 +801,6 @@ def test_check_device_changing_ips(
)
def test_is_well_known_org(
mocker,
- mock_db,
ip,
ip_info,
is_ip_asn_in_org_asn,
@@ -815,8 +808,8 @@ def test_is_well_known_org(
is_ip_in_org,
expected_result,
):
- conn = ModuleFactory().create_conn_analyzer_obj(mock_db)
- mock_db.get_ip_info.return_value = ip_info
+ conn = ModuleFactory().create_conn_analyzer_obj()
+ conn.db.get_ip_info.return_value = ip_info
mock_is_ip_asn_in_org_asn = mocker.patch(
"slips_files.core.helpers.whitelist.organization_whitelist."
"OrgAnalyzer.is_ip_asn_in_org_asn"
@@ -877,18 +870,18 @@ def test_is_well_known_org(
],
)
def test_check_different_localnet_usage(
- mocker, mock_db, saddr, daddr, dport, proto, what_to_check, expected_calls
+ mocker, saddr, daddr, dport, proto, what_to_check, expected_calls
):
"""
Tests the check_different_localnet_usage function
with various scenarios.
"""
- conn = ModuleFactory().create_conn_analyzer_obj(mock_db)
+ conn = ModuleFactory().create_conn_analyzer_obj()
mock_set_evidence = mocker.patch(
"modules.flowalerts.set_evidence."
"SetEvidnceHelper.different_localnet_usage"
)
- mock_db.get_local_network.return_value = "192.168.1.0/24"
+ conn.db.get_local_network.return_value = "192.168.1.0/24"
conn.check_different_localnet_usage(
saddr,
@@ -933,19 +926,18 @@ def test_check_different_localnet_usage(
],
)
def test_check_connection_to_local_ip(
- mocker, mock_db, daddr, dport, proto, saddr, expected_calls
+ mocker, daddr, dport, proto, saddr, expected_calls
):
"""
Tests the check_connection_to_local_ip function with various scenarios.
"""
- conn = ModuleFactory().create_conn_analyzer_obj(mock_db)
+ conn = ModuleFactory().create_conn_analyzer_obj()
mock_set_evidence = mocker.patch.object(
conn.set_evidence, "conn_to_private_ip"
)
- mock_db.get_gateway_ip.return_value = "192.168.1.1"
+ conn.db.get_gateway_ip.return_value = "192.168.1.1"
conn.check_connection_to_local_ip(
daddr, dport, proto, saddr, twid, uid, timestamp
)
assert mock_set_evidence.call_count == expected_calls
-
diff --git a/tests/test_daemon.py b/tests/test_daemon.py
new file mode 100644
index 000000000..543a27290
--- /dev/null
+++ b/tests/test_daemon.py
@@ -0,0 +1,250 @@
+import os
+import pytest
+from unittest.mock import patch, mock_open, call
+from exclusiveprocess import CannotAcquireLock
+from tests.module_factory import ModuleFactory
+from io import StringIO
+import signal
+import sys
+
+
+@pytest.mark.parametrize(
+ "test_message, expected_log_content",
+ [ # testcase1: Simple message
+ ("Test message 1", "Test message 1\n"),
+ # testcase2: Multiline message
+ ("Multiline\nmessage", "Multiline\nmessage\n"),
+ # testcase3: Empty message
+ ("", "\n"),
+ ],
+)
+def test_print(test_message, expected_log_content, tmpdir):
+ output_dir = tmpdir.mkdir("output")
+ daemon = ModuleFactory().create_daemon_object()
+ daemon.logsfile = os.path.join(output_dir, daemon.logsfile)
+ daemon.print(test_message)
+ with open(daemon.logsfile, "r") as f:
+ log_content = f.read()
+ assert log_content == expected_log_content
+
+
+@pytest.mark.parametrize(
+ "argv, stderr, stdout, logsfile, expected_files",
+ [ # testcase1: Create all streams
+ (
+ [],
+ "errors.log",
+ "slips.log",
+ "slips.log",
+ ["errors.log", "slips.log"],
+ ),
+ # testcase2: Create only stderr when stopping
+ (["-S"], "errors.log", "slips.log", "slips.log", ["errors.log"]),
+ ],
+)
+def test_create_std_streams(
+ argv, stderr, stdout, logsfile, expected_files, tmpdir
+):
+ output_dir = tmpdir.mkdir("output")
+ daemon = ModuleFactory().create_daemon_object()
+
+ daemon.stderr = stderr
+ daemon.stdout = stdout
+ daemon.logsfile = logsfile
+
+ daemon.prepare_std_streams(str(output_dir))
+
+ with patch.object(sys, "argv", argv):
+ daemon.create_std_streams()
+
+ expected_paths = [os.path.join(output_dir, f) for f in expected_files]
+ created_files = [
+ os.path.join(output_dir, f) for f in os.listdir(output_dir)
+ ]
+ assert sorted(created_files) == sorted(expected_paths)
+
+
+@pytest.mark.parametrize(
+ "output_dir, expected_stderr, " "expected_stdout, expected_logsfile",
+ [ # testcase1: Using /var/log/slips/ directory
+ (
+ "/var/log/slips/",
+ "/var/log/slips/errors.log",
+ "/var/log/slips/slips.log",
+ "/var/log/slips/slips.log",
+ ),
+ # testcase2: Using custom output directory
+ (
+ "/tmp/slips",
+ "/tmp/slips/errors.log",
+ "/tmp/slips/slips.log",
+ "/tmp/slips/slips.log",
+ ),
+ ],
+)
+def test_prepare_std_streams(
+ output_dir,
+ expected_stderr,
+ expected_stdout,
+ expected_logsfile,
+):
+ daemon = ModuleFactory().create_daemon_object()
+ daemon.prepare_std_streams(output_dir)
+ assert daemon.stderr == expected_stderr
+ assert daemon.stdout == expected_stdout
+ assert daemon.logsfile == expected_logsfile
+
+
+@patch("os.fork")
+@patch("os.setsid")
+@patch("os.umask")
+@patch("os.dup2")
+@patch("builtins.open", new_callable=mock_open)
+@patch("sys.stdin")
+@patch("sys.stdout")
+@patch("sys.stderr")
+def test_daemonize(
+ mock_stderr,
+ mock_stdout,
+ mock_stdin,
+ mock_open,
+ mock_dup2,
+ mock_umask,
+ mock_setsid,
+ mock_fork,
+):
+ mock_stdin.fileno.return_value = 0
+ mock_stdout.fileno.return_value = 1
+ mock_stderr.fileno.return_value = 2
+
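+ # both fork() calls return 0, so the test follows the child path of the double fork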
+ mock_fork.side_effect = [0, 0]
+ daemon = ModuleFactory().create_daemon_object()
+ daemon.daemonize()
+
+ assert mock_fork.call_count == 2
+
+ mock_setsid.assert_called_once()
+ mock_umask.assert_called_once_with(0)
+
+ assert mock_dup2.call_count == 3
+
+ mock_open.assert_called_with(daemon.pidfile, "w+")
+ mock_open().write.assert_called_once()
+
+
+@patch("os.fork")
+@patch("sys.stderr", new_callable=StringIO)
+def test_daemonize_fork_error(mock_stderr, mock_fork):
+ mock_fork.side_effect = OSError("Fork failed")
+ daemon = ModuleFactory().create_daemon_object()
+ with pytest.raises(SystemExit):
+ daemon.daemonize()
+
+ assert "Fork #1 failed" in mock_stderr.getvalue()
+
+
+@pytest.mark.parametrize(
+ "file_content, expected_result",
+ [
+ # Test case 1: Valid daemon info
+ (
+ "# Some comment\n"
+ "Date,Time,Port,DB,InputType,OutputDir,"
+ "PID,IsDaemon\n"
+ "2023-07-25,10:00:00,6379,redis,pcap,"
+ "/tmp/output,12345,True\n",
+ ("6379", "/tmp/output", "12345"),
+ ),
+ # Test case 2: Multiple entries, last one is daemon
+ (
+ "2023-07-25,09:00:00,6380,redis,pcap,"
+ "/tmp/output1,12344,False\n"
+ "2023-07-25,10:00:00,6379,redis,pcap,"
+ "/tmp/output2,12345,True\n",
+ ("6379", "/tmp/output2", "12345"),
+ ),
+ # Test case 3: Empty file
+ ("", None),
+ ],
+)
+def test_get_last_opened_daemon_info(file_content, expected_result):
+ daemon = ModuleFactory().create_daemon_object()
+ daemon.slips.redis_man.running_logfile = "mock_logfile.txt"
+
+ with patch("builtins.open", mock_open(read_data=file_content)):
+ result = daemon.get_last_opened_daemon_info()
+
+ assert result == expected_result
+
+
+@pytest.mark.parametrize(
+ "pidfile_exists, expected_output, " "expected_remove_calls",
+ [
+ # Test case 1: pidfile exists and is deleted
+ (True, ["pidfile deleted."], [call("/tmp/slips_daemon.lock")]),
+ # Test case 2: pidfile doesn't exist
+ (
+ False,
+ [
+ "Can't delete pidfile, /tmp/slips_daemon.lock doesn't exist.",
+ "Either Daemon stopped normally or an error occurred.",
+ ],
+ [],
+ ),
+ ],
+)
+def test_delete_pidfile(
+ pidfile_exists, expected_output, expected_remove_calls
+):
+ daemon = ModuleFactory().create_daemon_object()
+ with patch("os.path.exists", return_value=pidfile_exists), patch(
+ "os.remove"
+ ) as mock_remove, patch.object(daemon, "print") as mock_print:
+ daemon.delete_pidfile()
+
+ mock_remove.assert_has_calls(expected_remove_calls)
+ mock_print.assert_has_calls([call(line) for line in expected_output])
+
+
+@pytest.mark.parametrize(
+ "pid, os_kill_side_effect",
+ [
+ # Test case 1: Successfully kill the daemon
+ (12345, None),
+ # Test case 2: Daemon already killed
+ (12345, ProcessLookupError),
+ ],
+)
+def test_killdaemon(pid, os_kill_side_effect):
+ daemon = ModuleFactory().create_daemon_object()
+ daemon.pid = str(pid)
+
+ with patch("os.kill", side_effect=os_kill_side_effect) as mock_kill:
+ daemon.killdaemon()
+
+ mock_kill.assert_called_once_with(pid, signal.SIGTERM)
+
+
+@pytest.mark.parametrize(
+ "pid, lock_side_effect," " expected_result",
+ [
+ # Testcase1: pid exists, lock acquired
+ (12345, None, True),
+ # Testcase2: no pid, lock acquired
+ (None, None, False),
+ # Testcase3: pid exists, lock not acquired
+ (12345, CannotAcquireLock(), True),
+ # Testcase4: no pid, lock not acquired
+ (None, CannotAcquireLock(), False),
+ ],
+)
+def test_is_running(pid, lock_side_effect, expected_result):
+ daemon = ModuleFactory().create_daemon_object()
+ daemon.pid = pid
+
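+ # _is_running() should depend only on whether a pid was stored,
+ # not on whether the exclusive lock can be acquired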
+ with patch("exclusiveprocess.Lock") as mock_lock:
+ mock_lock.return_value.__enter__.side_effect = lock_side_effect
+
+ result = daemon._is_running()
+
+ assert result == expected_result
diff --git a/tests/test_database.py b/tests/test_database.py
index aa7563095..3a60f28e1 100644
--- a/tests/test_database.py
+++ b/tests/test_database.py
@@ -1,5 +1,4 @@
import redis
-import os
import json
import time
import pytest
@@ -45,21 +44,10 @@
"",
)
-# this should always be the first unit test in this file
-# because we don't want another unit test adding the same flow before this one
-
-db = ModuleFactory().create_db_manager_obj(6379, flush_db=True)
-
-
-def add_flow():
- db.add_flow(flow, "", profileid, twid, label="benign")
-
def test_getProfileIdFromIP():
"""unit test for add_profile and getProfileIdFromIP"""
-
- # clear the database before running this test
- os.system("./slips.py -c slips.yaml -cc")
+ db = ModuleFactory().create_db_manager_obj(6379, flush_db=True)
# add a profile
db.add_profile("profile_192.168.1.1", "00:00", "1")
@@ -70,6 +58,7 @@ def test_getProfileIdFromIP():
def test_timewindows():
"""unit tests for addNewTW , getLastTWforProfile and
getFirstTWforProfile"""
+ db = ModuleFactory().create_db_manager_obj(6379, flush_db=True)
profileid = "profile_192.168.1.1"
# add a profile
db.add_profile(profileid, "00:00", "1")
@@ -81,24 +70,20 @@ def test_timewindows():
assert db.get_last_twid_of_profile(profileid) == ("timewindow2", 3700.0)
-def getSlipsInternalTime():
- """return a random time for testing"""
- return 50.0
-
-
def test_add_ips():
+ db = ModuleFactory().create_db_manager_obj(6379, flush_db=True)
# add a profile
db.add_profile(profileid, "00:00", "1")
# add a tw to that profile
db.add_new_tw(profileid, "timewindow1", 0.0)
# make sure ip is added
assert db.add_ips(profileid, twid, flow, "Server") is True
- hash_id = f"{profileid}_{twid}"
- stored_dstips = db.r.hget(hash_id, "SrcIPs")
- assert stored_dstips == '{"192.168.1.1": 1}'
+ stored_src_ips = db.r.hget(f"{profileid}_{twid}", "SrcIPs")
+ assert stored_src_ips == '{"192.168.1.1": 1}'
def test_add_port():
+ db = ModuleFactory().create_db_manager_obj(6379, flush_db=True)
new_flow = flow
new_flow.state = "Not Established"
db.add_port(profileid, twid, flow, "Server", "Dst")
@@ -109,6 +94,7 @@ def test_add_port():
def test_set_evidence():
+ db = ModuleFactory().create_db_manager_obj(6379, flush_db=True)
attacker: Attacker = Attacker(
direction=Direction.SRC, attacker_type=IoCType.IP, value=test_ip
)
@@ -141,6 +127,7 @@ def test_set_evidence():
def test_setInfoForDomains():
"""tests setInfoForDomains, setNewDomain and getDomainData"""
+ db = ModuleFactory().create_db_manager_obj(6379, flush_db=True)
domain = "www.google.com"
domain_data = {"threatintelligence": "sample data"}
db.set_info_for_domains(domain, domain_data)
@@ -151,6 +138,7 @@ def test_setInfoForDomains():
def test_subscribe():
+ db = ModuleFactory().create_db_manager_obj(6379, flush_db=True)
# invalid channel
assert db.subscribe("invalid_channel") is False
# valid channel, should return a pubsub object
@@ -159,6 +147,7 @@ def test_subscribe():
def test_profile_moddule_labels():
"""tests set and get_profile_module_label"""
+ db = ModuleFactory().create_db_manager_obj(6379, flush_db=True)
module_label = "malicious"
module_name = "test"
db.set_profile_module_label(profileid, module_name, module_label)
@@ -168,6 +157,7 @@ def test_profile_moddule_labels():
def test_add_mac_addr_to_profile():
+ db = ModuleFactory().create_db_manager_obj(6379, flush_db=True)
ipv4 = "192.168.1.5"
profileid_ipv4 = f"profile_{ipv4}"
mac_addr = "00:00:5e:00:53:af"
@@ -199,6 +189,7 @@ def test_add_mac_addr_to_profile():
def test_get_the_other_ip_version():
+ db = ModuleFactory().create_db_manager_obj(6379, flush_db=True)
# profileid is ipv4
ipv6 = "2001:0db8:85a3:0000:0000:8a2e:0370:7334"
db.set_ipv6_of_profile(profileid, ipv6)
@@ -226,6 +217,7 @@ def test_get_the_other_ip_version():
],
)
def test_add_tuple(tupleid: str, symbol, expected_direction, role, flow):
+ db = ModuleFactory().create_db_manager_obj(6379, flush_db=True)
db.add_tuple(profileid, twid, tupleid, symbol, role, flow)
assert symbol[0] in db.r.hget(
f"profile_{flow.saddr}_{twid}", expected_direction
@@ -243,6 +235,7 @@ def test_add_tuple(tupleid: str, symbol, expected_direction, role, flow):
def test_update_max_threat_level(
max_threat_level, cur_threat_level, expected_max
):
+ db = ModuleFactory().create_db_manager_obj(6379, flush_db=True)
db.set_max_threat_level(profileid, max_threat_level)
assert (
db.update_max_threat_level(profileid, cur_threat_level) == expected_max
diff --git a/tests/test_dns.py b/tests/test_dns.py
index 116a6b0b4..c7ca25fd7 100644
--- a/tests/test_dns.py
+++ b/tests/test_dns.py
@@ -26,10 +26,8 @@
("example.com", "NXDOMAIN", False),
],
)
-def test_should_detect_dns_without_conn(
- mock_db, domain, rcode_name, expected_result
-):
- dns = ModuleFactory().create_dns_analyzer_obj(mock_db)
+def test_should_detect_dns_without_conn(domain, rcode_name, expected_result):
+ dns = ModuleFactory().create_dns_analyzer_obj()
assert (
dns.should_detect_dns_without_conn(domain, rcode_name)
== expected_result
@@ -63,10 +61,10 @@ def test_should_detect_dns_without_conn(
],
)
def test_is_cname_contacted(
- mock_db, answers, cname_resolution, contacted_ips, expected_result
+ answers, cname_resolution, contacted_ips, expected_result
):
- dns = ModuleFactory().create_dns_analyzer_obj(mock_db)
- mock_db.get_domain_resolution.return_value = cname_resolution
+ dns = ModuleFactory().create_dns_analyzer_obj()
+ dns.db.get_domain_resolution.return_value = cname_resolution
assert dns.is_cname_contacted(answers, contacted_ips) is expected_result
@@ -85,11 +83,11 @@ def test_is_cname_contacted(
],
)
def test_detect_young_domains(
- mock_db, domain, answers, age, should_detect, expected_result
+ domain, answers, age, should_detect, expected_result
):
- dns = ModuleFactory().create_dns_analyzer_obj(mock_db)
+ dns = ModuleFactory().create_dns_analyzer_obj()
dns.should_detect_young_domain = Mock(return_value=should_detect)
- mock_db.get_domain_data.return_value = {"Age": age}
+ dns.db.get_domain_data.return_value = {"Age": age}
assert (
dns.detect_young_domains(
@@ -109,11 +107,11 @@ def test_detect_young_domains(
],
)
def test_detect_young_domains_other_cases(
- mock_db, domain, answers, domain_data, expected_result
+ domain, answers, domain_data, expected_result
):
- dns = ModuleFactory().create_dns_analyzer_obj(mock_db)
+ dns = ModuleFactory().create_dns_analyzer_obj()
dns.should_detect_young_domain = Mock(return_value=True)
- mock_db.get_domain_data.return_value = domain_data
+ dns.db.get_domain_data.return_value = domain_data
result = dns.detect_young_domains(
domain, answers, timestamp, profileid, twid, uid
@@ -121,11 +119,11 @@ def test_detect_young_domains_other_cases(
assert result is expected_result
dns.should_detect_young_domain.assert_called_once_with(domain)
- mock_db.get_domain_data.assert_called_once_with(domain)
+ dns.db.get_domain_data.assert_called_once_with(domain)
-def test_extract_ips_from_dns_answers(mock_db):
- dns = ModuleFactory().create_dns_analyzer_obj(mock_db)
+def test_extract_ips_from_dns_answers():
+ dns = ModuleFactory().create_dns_analyzer_obj()
answers = [
"192.168.1.1",
"2001:db8::1",
@@ -149,17 +147,11 @@ def test_extract_ips_from_dns_answers(mock_db):
],
)
def test_is_connection_made_by_different_version(
- mocker, mock_db, contacted_ips, other_ip, expected_result
+ mocker, contacted_ips, other_ip, expected_result
):
- dns = ModuleFactory().create_dns_analyzer_obj(mock_db)
- mocker.patch.object(
- dns.db,
- "get_all_contacted_ips_in_profileid_twid",
- return_value=contacted_ips,
- )
- mocker.patch.object(
- dns.db, "get_the_other_ip_version", return_value=other_ip
- )
+ dns = ModuleFactory().create_dns_analyzer_obj()
+ dns.db.get_all_contacted_ips_in_profileid_twid.return_value = contacted_ips
+ dns.db.get_the_other_ip_version.return_value = other_ip
assert (
dns.is_connection_made_by_different_version(profileid, twid, "8.8.8.8")
@@ -177,8 +169,8 @@ def test_is_connection_made_by_different_version(
("Hello world!", False),
],
)
-def test_estimate_shannon_entropy(mock_db, string, expected_result):
- dns = ModuleFactory().create_dns_analyzer_obj(mock_db)
+def test_estimate_shannon_entropy(string, expected_result):
+ dns = ModuleFactory().create_dns_analyzer_obj()
dns.shannon_entropy_threshold = 4.0
entropy = dns.estimate_shannon_entropy(string)
@@ -196,52 +188,43 @@ def test_estimate_shannon_entropy(mock_db, string, expected_result):
],
)
def test_check_invalid_dns_answers_call_counts(
- mocker,
- mock_db,
domain,
answers,
expected_evidence_calls,
expected_db_deletes,
):
- dns = ModuleFactory().create_dns_analyzer_obj(mock_db)
-
- mock_set_evidence = mocker.patch.object(
- dns.set_evidence, "invalid_dns_answer"
- )
- mock_delete_dns_resolution = mocker.patch.object(
- mock_db, "delete_dns_resolution"
- )
-
- profileid, twid, timestamp, uid = ("profile1", "tw1", 1234567890, "uid1")
-
+ dns = ModuleFactory().create_dns_analyzer_obj()
+ profileid, twid, timestamp, uid = (
+ "profileid_1.1.1.1",
+ "timewindow1",
+ 1234567890,
+ "uid1",
+ )
+ dns.set_evidence.invalid_dns_answer = Mock()
dns.check_invalid_dns_answers(
domain, answers, profileid, twid, timestamp, uid
)
- assert mock_set_evidence.call_count == expected_evidence_calls
- assert mock_delete_dns_resolution.call_count == expected_db_deletes
-
-
-def test_check_invalid_dns_answers_with_invalid_answer(mocker, mock_db):
- dns = ModuleFactory().create_dns_analyzer_obj(mock_db)
-
- mock_set_evidence = mocker.patch.object(
- dns.set_evidence, "invalid_dns_answer"
- )
- mock_delete_dns_resolution = mocker.patch.object(
- mock_db, "delete_dns_resolution"
+ assert (
+ dns.set_evidence.invalid_dns_answer.call_count
+ == expected_evidence_calls
)
+ assert dns.db.delete_dns_resolution.call_count == expected_db_deletes
- domain, answers = "example.com", ["127.0.0.1"]
+def test_check_invalid_dns_answers_with_invalid_answer():
+ dns = ModuleFactory().create_dns_analyzer_obj()
+ dns.set_evidence.invalid_dns_answer = Mock()
+ dns.db.delete_dns_resolution = Mock()
+ domain, answers = "example.com", ["127.0.0.1"]
dns.check_invalid_dns_answers(
domain, answers, profileid, twid, timestamp, uid
)
- mock_set_evidence.assert_called_once_with(
+ dns.set_evidence.invalid_dns_answer.assert_called_once_with(
domain, answers[0], profileid, twid, timestamp, uid
)
- mock_delete_dns_resolution.assert_called_once_with(answers[0])
+ dns.db.delete_dns_resolution.assert_called_once_with(answers[0])
@pytest.mark.parametrize(
@@ -267,8 +250,8 @@ def test_check_invalid_dns_answers_with_invalid_answer(mocker, mock_db):
),
],
)
-def test_check_dns_arpa_scan(mock_db, domains, timestamps, expected_result):
- dns = ModuleFactory().create_dns_analyzer_obj(mock_db)
+def test_check_dns_arpa_scan(domains, timestamps, expected_result):
+ dns = ModuleFactory().create_dns_analyzer_obj()
dns.arpa_scan_threshold = 10
for i, (domain, ts) in enumerate(zip(domains, timestamps)):
@@ -288,22 +271,21 @@ def test_check_dns_arpa_scan(mock_db, domains, timestamps, expected_result):
("192.168.1.100", Exception("DNS timeout error"), False),
],
)
-def test_is_dns_server(
- mock_db, test_ip, mock_query_side_effect, expected_result
-):
- dns = ModuleFactory().create_dns_analyzer_obj(mock_db)
+def test_is_dns_server(test_ip, mock_query_side_effect, expected_result):
+ dns = ModuleFactory().create_dns_analyzer_obj()
with patch("dns.query.udp", side_effect=mock_query_side_effect):
result = dns.is_dns_server(test_ip)
assert result == expected_result
-def test_read_configuration(mock_db):
+def test_read_configuration():
"""Test if read_configuration correctly reads the entropy threshold."""
- dns = ModuleFactory().create_dns_analyzer_obj(mock_db)
+ dns = ModuleFactory().create_dns_analyzer_obj()
with patch(
- "slips_files.common.parsers.config_parser.ConfigParser.get_entropy_threshold",
+ "slips_files.common.parsers.config_parser.ConfigParser."
+ "get_entropy_threshold",
return_value=3.5,
):
dns.read_configuration()
@@ -311,28 +293,23 @@ def test_read_configuration(mock_db):
assert dns.shannon_entropy_threshold == 3.5
-def test_check_high_entropy_dns_answers_with_call(mocker, mock_db):
- dns = ModuleFactory().create_dns_analyzer_obj(mock_db)
+def test_check_high_entropy_dns_answers_with_call():
+ dns = ModuleFactory().create_dns_analyzer_obj()
dns.shannon_entropy_threshold = 4.0
domain = "example.com"
answers = ["A 1.2.3.4", "TXT abcdefghijklmnopqrstuvwxyz1234567890"]
expected_entropy = 4.5
+ dns.estimate_shannon_entropy = Mock()
+ dns.estimate_shannon_entropy.return_value = expected_entropy
- mock_estimate_entropy = mocker.patch.object(
- dns, "estimate_shannon_entropy", return_value=expected_entropy
- )
-
- mock_set_evidence = mocker.patch.object(
- dns.set_evidence, "suspicious_dns_answer"
- )
+ dns.set_evidence.suspicious_dns_answer = Mock()
dns.check_high_entropy_dns_answers(
domain, answers, daddr, profileid, twid, timestamp, uid
)
- assert mock_set_evidence.call_count == 1
- mock_set_evidence.assert_called_once_with(
+ dns.set_evidence.suspicious_dns_answer.assert_called_once_with(
domain,
answers[1],
expected_entropy,
@@ -342,7 +319,7 @@ def test_check_high_entropy_dns_answers_with_call(mocker, mock_db):
timestamp,
uid,
)
- assert mock_estimate_entropy.call_count == 1
+ assert dns.estimate_shannon_entropy.call_count == 1
@pytest.mark.parametrize(
@@ -363,26 +340,21 @@ def test_check_high_entropy_dns_answers_with_call(mocker, mock_db):
],
)
def test_check_high_entropy_dns_answers_no_call(
- mocker, mock_db, domain, answers, expected_entropy
+ domain, answers, expected_entropy
):
- dns = ModuleFactory().create_dns_analyzer_obj(mock_db)
+ dns = ModuleFactory().create_dns_analyzer_obj()
dns.shannon_entropy_threshold = 4.0
-
- mock_estimate_entropy = mocker.patch.object(
- dns, "estimate_shannon_entropy", return_value=expected_entropy
- )
-
- mock_set_evidence = mocker.patch.object(
- dns.set_evidence, "suspicious_dns_answer"
- )
+ dns.estimate_shannon_entropy = Mock()
+ dns.estimate_shannon_entropy.return_value = expected_entropy
+ dns.set_evidence.suspicious_dns_answer = Mock()
dns.check_high_entropy_dns_answers(
domain, answers, daddr, profileid, twid, timestamp, uid
)
- assert mock_set_evidence.call_count == 0
+ assert dns.set_evidence.suspicious_dns_answer.call_count == 0
expected_estimate_calls = sum("TXT" in answer for answer in answers)
- assert mock_estimate_entropy.call_count == expected_estimate_calls
+ assert dns.estimate_shannon_entropy.call_count == expected_estimate_calls
@pytest.mark.parametrize(
@@ -441,54 +413,37 @@ def test_check_high_entropy_dns_answers_no_call(
),
],
)
-def test_analyze(mocker, mock_db, test_case, expected_calls):
- dns = ModuleFactory().create_dns_analyzer_obj(mock_db)
-
- mock_check_dns_without_connection = mocker.patch.object(
- dns, "check_dns_without_connection"
- )
- mock_check_high_entropy_dns_answers = mocker.patch.object(
- dns, "check_high_entropy_dns_answers"
- )
- mock_check_invalid_dns_answers = mocker.patch.object(
- dns, "check_invalid_dns_answers"
- )
- mock_detect_dga = mocker.patch.object(dns, "detect_dga")
- mock_detect_young_domains = mocker.patch.object(
- dns, "detect_young_domains"
- )
- mock_check_dns_arpa_scan = mocker.patch.object(dns, "check_dns_arpa_scan")
-
- dns.flowalerts = Mock(
- get_msg=Mock(
- side_effect=[
- {"data": test_case["data"]},
- {"data": test_case.get("data2", False)},
- ]
- )
- )
-
- dns.analyze()
+def test_analyze_new_flow_msg(mocker, test_case, expected_calls):
+ dns = ModuleFactory().create_dns_analyzer_obj()
+ dns.connections_checked_in_dns_conn_timer_thread = []
+ dns.check_dns_without_connection = Mock()
+ dns.check_high_entropy_dns_answers = Mock()
+ dns.check_invalid_dns_answers = Mock()
+ dns.detect_dga = Mock()
+ dns.detect_young_domains = Mock()
+ dns.check_dns_arpa_scan = Mock()
+
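+ # feed a single new_dns message straight to analyze() and count which checks it triggers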
+ dns.analyze({"channel": "new_dns", "data": test_case["data"]})
assert (
- mock_check_dns_without_connection.call_count
+ dns.check_dns_without_connection.call_count
== expected_calls["check_dns_without_connection"]
)
assert (
- mock_check_high_entropy_dns_answers.call_count
+ dns.check_high_entropy_dns_answers.call_count
== expected_calls["check_high_entropy_dns_answers"]
)
assert (
- mock_check_invalid_dns_answers.call_count
+ dns.check_invalid_dns_answers.call_count
== expected_calls["check_invalid_dns_answers"]
)
- assert mock_detect_dga.call_count == expected_calls["detect_dga"]
+ assert dns.detect_dga.call_count == expected_calls["detect_dga"]
assert (
- mock_detect_young_domains.call_count
+ dns.detect_young_domains.call_count
== expected_calls["detect_young_domains"]
)
assert (
- mock_check_dns_arpa_scan.call_count
+ dns.check_dns_arpa_scan.call_count
== expected_calls["check_dns_arpa_scan"]
)
@@ -547,34 +502,31 @@ def test_analyze(mocker, mock_db, test_case, expected_calls):
],
)
def test_detect_dga_no_alert(
- mocker,
- mock_db,
rcode_name,
query,
initial_nxdomains,
expected_nxdomains,
expected_result,
):
- dns = ModuleFactory().create_dns_analyzer_obj(mock_db)
+ dns = ModuleFactory().create_dns_analyzer_obj()
dns.nxdomains = initial_nxdomains
dns.nxdomains_threshold = 10
- mocker.patch.object(
- dns.flowalerts.whitelist.domain_analyzer,
- "is_whitelisted",
- return_value=False,
+ dns.flowalerts.whitelist.domain_analyzer.is_whitelisted = Mock()
+ dns.flowalerts.whitelist.domain_analyzer.is_whitelisted.return_value = (
+ False
)
- mock_set_evidence = mocker.patch.object(dns.set_evidence, "dga")
+ dns.set_evidence.dga = Mock()
result = dns.detect_dga(rcode_name, query, timestamp, profileid, twid, uid)
assert result == expected_result
assert dns.nxdomains == expected_nxdomains
- mock_set_evidence.assert_not_called()
+ dns.set_evidence.dga.assert_not_called()
-def test_detect_dga_alert(mocker, mock_db):
- dns = ModuleFactory().create_dns_analyzer_obj(mock_db)
+def test_detect_dga_alert(mocker):
+ dns = ModuleFactory().create_dns_analyzer_obj()
initial_nxdomains = {
f"{profileid}_{twid}": (
@@ -595,12 +547,12 @@ def test_detect_dga_alert(mocker, mock_db):
dns.nxdomains = initial_nxdomains
dns.nxdomains_threshold = 10
- mocker.patch.object(
- dns.flowalerts.whitelist.domain_analyzer,
- "is_whitelisted",
- return_value=False,
+ dns.flowalerts.whitelist.domain_analyzer.is_whitelisted = Mock()
+ dns.flowalerts.whitelist.domain_analyzer.is_whitelisted.return_value = (
+ False
)
- mock_set_evidence = mocker.patch.object(dns.set_evidence, "dga")
+
+ dns.set_evidence.dga = Mock()
result = dns.detect_dga(
"NXDOMAIN", "example10.com", timestamp, profileid, twid, uid
@@ -608,22 +560,20 @@ def test_detect_dga_alert(mocker, mock_db):
expected_result = True
assert result == expected_result
assert dns.nxdomains == {f"{profileid}_{twid}": ([], [])}
- mock_set_evidence.assert_called_once_with(
+ dns.set_evidence.dga.assert_called_once_with(
10, timestamp, profileid, twid, [uid] * 10
)
-def test_detect_dga_whitelisted(mocker, mock_db):
- dns = ModuleFactory().create_dns_analyzer_obj(mock_db)
+def test_detect_dga_whitelisted(mocker):
+ dns = ModuleFactory().create_dns_analyzer_obj()
dns.nxdomains = {}
dns.nxdomains_threshold = 10
- mocker.patch.object(
- dns.flowalerts.whitelist.domain_analyzer,
- "is_whitelisted",
- return_value=True,
- )
- mock_set_evidence = mocker.patch.object(dns.set_evidence, "dga")
+ dns.flowalerts.whitelist.domain_analyzer.is_whitelisted = Mock()
+ dns.flowalerts.whitelist.domain_analyzer.is_whitelisted.return_value = True
+
+ dns.set_evidence.dga = Mock()
result = dns.detect_dga(
"NXDOMAIN", "example.com", timestamp, profileid, twid, uid
@@ -632,7 +582,7 @@ def test_detect_dga_whitelisted(mocker, mock_db):
expected_result = False
assert result == expected_result
assert dns.nxdomains == {}
- mock_set_evidence.assert_not_called()
+ dns.set_evidence.dga.assert_not_called()
@pytest.mark.parametrize(
@@ -644,20 +594,20 @@ def test_detect_dga_whitelisted(mocker, mock_db):
],
ids=["arpa_domain", "local_domain"],
)
-def test_detect_dga_special_domains(mocker, mock_db, query, expected_result):
- dns = ModuleFactory().create_dns_analyzer_obj(mock_db)
+def test_detect_dga_special_domains(mocker, query, expected_result):
+ dns = ModuleFactory().create_dns_analyzer_obj()
dns.nxdomains = {}
dns.nxdomains_threshold = 10
- mocker.patch.object(
- dns.flowalerts.whitelist.domain_analyzer,
- "is_whitelisted",
- return_value=False,
+ dns.flowalerts.whitelist.domain_analyzer.is_whitelisted = Mock()
+ dns.flowalerts.whitelist.domain_analyzer.is_whitelisted.return_value = (
+ False
)
- mock_set_evidence = mocker.patch.object(dns.set_evidence, "dga")
+
+ dns.set_evidence.dga = Mock()
result = dns.detect_dga("NXDOMAIN", query, timestamp, profileid, twid, uid)
assert result == expected_result
assert dns.nxdomains == {}
- mock_set_evidence.assert_not_called()
+ dns.set_evidence.dga.assert_not_called()
diff --git a/tests/test_downloaded_file.py b/tests/test_downloaded_file.py
index bfd8ff379..1ad0954e1 100644
--- a/tests/test_downloaded_file.py
+++ b/tests/test_downloaded_file.py
@@ -1,8 +1,9 @@
"""Unit test for modules/flowalerts/download_file.py"""
+from unittest.mock import Mock
+
from tests.module_factory import ModuleFactory
import json
-from unittest.mock import MagicMock
import pytest
@@ -47,19 +48,18 @@
),
],
)
-def test_check_malicious_ssl(
- mocker, mock_db, ssl_info, db_result, expected_call_count
-):
- downloadfile = ModuleFactory().create_downloaded_file_analyzer_obj(mock_db)
- mock_set_evidence = mocker.patch.object(
- downloadfile.set_evidence, "malicious_ssl"
- )
+def test_check_malicious_ssl(mocker, ssl_info, db_result, expected_call_count):
+ downloadfile = ModuleFactory().create_downloaded_file_analyzer_obj()
+ downloadfile.set_evidence.malicious_ssl = Mock()
- mock_db.get_ssl_info.return_value = db_result
+ downloadfile.db.get_ssl_info.return_value = db_result
downloadfile.check_malicious_ssl(ssl_info)
- assert mock_set_evidence.call_count == expected_call_count
- mock_set_evidence.assert_has_calls(
+ assert (
+ downloadfile.set_evidence.malicious_ssl.call_count
+ == expected_call_count
+ )
+ downloadfile.set_evidence.malicious_ssl.assert_has_calls(
[mocker.call(ssl_info, db_result)] * expected_call_count
)
@@ -87,28 +87,21 @@ def test_check_malicious_ssl(
({"data": json.dumps({"type": "not_zeek", "flow": {}})}, 1),
],
)
-def test_analyze_with_data(mocker, mock_db, msg, expected_call_count):
- mock_flowalerts = MagicMock()
- mock_flowalerts.get_msg.return_value = msg
- downloadfile = ModuleFactory().create_downloaded_file_analyzer_obj(mock_db)
- downloadfile.flowalerts = mock_flowalerts
- mock_check_malicious_ssl = mocker.patch.object(
- downloadfile, "check_malicious_ssl"
- )
- downloadfile.analyze()
- mock_flowalerts.get_msg.assert_called_once_with("new_downloaded_file")
- assert mock_check_malicious_ssl.call_count == expected_call_count
- mock_check_malicious_ssl.assert_called_with(json.loads(msg["data"]))
+def test_analyze_with_data(msg, expected_call_count):
+ downloadfile = ModuleFactory().create_downloaded_file_analyzer_obj()
+ downloadfile.check_malicious_ssl = Mock()
+ msg.update({"channel": "new_downloaded_file"})
+ downloadfile.analyze(msg)
-def test_analyze_no_msg(mocker, mock_db):
- mock_flowalerts = MagicMock()
- mock_flowalerts.get_msg.return_value = None
- downloadfile = ModuleFactory().create_downloaded_file_analyzer_obj(mock_db)
- downloadfile.flowalerts = mock_flowalerts
- mock_check_malicious_ssl = mocker.patch.object(
- downloadfile, "check_malicious_ssl"
+ assert downloadfile.check_malicious_ssl.call_count == expected_call_count
+ downloadfile.check_malicious_ssl.assert_called_with(
+ json.loads(msg["data"])
)
- downloadfile.analyze()
- (mock_flowalerts.get_msg.assert_called_once_with("new_downloaded_file"))
- mock_check_malicious_ssl.assert_not_called()
+
+
+def test_analyze_no_msg(mocker):
+ downloadfile = ModuleFactory().create_downloaded_file_analyzer_obj()
+ downloadfile.check_malicious_ssl = Mock()
+ downloadfile.analyze({})
+ downloadfile.check_malicious_ssl.assert_not_called()
diff --git a/tests/test_evidence.py b/tests/test_evidence.py
index 7a9515e44..9fd7cf4a0 100644
--- a/tests/test_evidence.py
+++ b/tests/test_evidence.py
@@ -1,4 +1,3 @@
-from uuid import uuid4
from tests.module_factory import ModuleFactory
import pytest
from slips_files.core.evidence_structure.evidence import validate_timestamp
@@ -18,7 +17,6 @@
Recon,
Attempt,
evidence_to_dict,
- dict_to_evidence,
)
@@ -42,7 +40,7 @@
"tcp",
80,
Tag.RECON,
- str(uuid4()),
+ "d4afbe1a-1cb9-4db4-9fac-74f2da6f5f34",
10,
0.8,
),
@@ -61,7 +59,7 @@
"udp",
53,
Tag.RECON,
- str(uuid4()),
+ "d243119b-2aae-4d7a-8ea1-edf3c6e72f4a",
5,
0.5,
),
@@ -73,11 +71,11 @@ def test_evidence_post_init(
attacker_value,
threat_level,
category,
+ victim_value,
profile_ip,
timewindow_number,
uid,
timestamp,
- victim_value,
proto_value,
port,
source_target_tag,
@@ -97,22 +95,22 @@ def test_evidence_post_init(
)
proto = ModuleFactory().create_proto_obj()[proto_value.upper()]
evidence = ModuleFactory().create_evidence_obj(
- evidence_type=evidence_type,
- description=description,
- attacker=attacker,
- threat_level=threat_level,
- category=category,
- victim=victim,
- profile=profile,
- timewindow=timewindow,
- uid=uid,
- timestamp=timestamp,
- proto=proto,
- port=port,
- source_target_tag=source_target_tag,
- id=id,
- conn_count=conn_count,
- confidence=confidence,
+ evidence_type,
+ description,
+ attacker,
+ threat_level,
+ category,
+ victim,
+ profile,
+ timewindow,
+ uid,
+ timestamp,
+ proto,
+ port,
+ source_target_tag,
+ id,
+ conn_count,
+ confidence,
)
assert evidence.evidence_type == evidence_type
assert evidence.description == description
@@ -183,7 +181,7 @@ def test_evidence_post_init_invalid_uid():
"tcp",
80,
Tag.RECON,
- str(uuid4()),
+ "d243119b-2aae-4d7a-8ea1-edf3c6e72f4a",
10,
0.8,
),
@@ -202,7 +200,7 @@ def test_evidence_post_init_invalid_uid():
"udp",
53,
Tag.RECON,
- str(uuid4()),
+ "d243119b-2aae-4d7a-8ea1-e4f3c6e72f4a",
5,
0.5,
),
@@ -221,7 +219,7 @@ def test_evidence_post_init_invalid_uid():
"icmp",
0,
Tag.MALWARE,
- str(uuid4()),
+ "d243119b-2aae-4d7a-8ea1-eef3c6e72f4a",
1000,
1.0,
),
diff --git a/tests/test_flow_handler.py b/tests/test_flow_handler.py
index e26a51e31..3be53df00 100644
--- a/tests/test_flow_handler.py
+++ b/tests/test_flow_handler.py
@@ -2,15 +2,14 @@
import pytest
from unittest.mock import Mock, call
-from slips_files.core.helpers.flow_handler import FlowHandler
from slips_files.core.flows.zeek import DHCP
import json
from dataclasses import asdict
-def test_is_supported_flow_not_ts(flow, mock_db):
+def test_is_supported_flow_not_ts(flow):
flow.starttime = None
- flow_handler = ModuleFactory().create_flow_handler_obj(flow, mock_db)
+ flow_handler = ModuleFactory().create_flow_handler_obj(flow)
assert flow_handler.is_supported_flow() is False
@@ -23,99 +22,84 @@ def test_is_supported_flow_not_ts(flow, mock_db):
],
)
def test_is_supported_flow_without_ts(
- flow_type: str, expected_val: bool, flow, mock_db
+ flow_type: str, expected_val: bool, flow
):
# just change the flow_type
flow.type_ = flow_type
- flow_handler = ModuleFactory().create_flow_handler_obj(flow, mock_db)
+ flow_handler = ModuleFactory().create_flow_handler_obj(flow)
assert flow_handler.is_supported_flow() == expected_val
# testing handle_dns
def test_handle_dns():
- mock_db = Mock()
flow = Mock()
- flow_handler = FlowHandler(mock_db, None, flow)
- flow_handler.twid = "timewindow_id"
- flow_handler.profileid = "profile_id"
+ flow_handler = ModuleFactory().create_flow_handler_obj(flow)
flow_handler.handle_dns()
- mock_db.add_out_dns.assert_called_with(
+ flow_handler.db.add_out_dns.assert_called_with(
flow_handler.profileid, flow_handler.twid, flow
)
- mock_db.add_altflow.assert_called_with(
+ flow_handler.db.add_altflow.assert_called_with(
flow, flow_handler.profileid, flow_handler.twid, "benign"
)
# testing handle_ftp
def test_handle_ftp():
- mock_db = Mock()
flow = Mock()
flow.used_port = 21 # Assuming FTP typically uses port 21
- flow_handler = FlowHandler(mock_db, None, flow)
- flow_handler.profileid = "profile_id"
- flow_handler.twid = "timewindow_id"
+ flow_handler = ModuleFactory().create_flow_handler_obj(flow)
flow_handler.handle_ftp()
- mock_db.set_ftp_port.assert_called_with(21)
- mock_db.add_altflow.assert_called_with(
+ flow_handler.db.set_ftp_port.assert_called_with(21)
+ flow_handler.db.add_altflow.assert_called_with(
flow, flow_handler.profileid, flow_handler.twid, "benign"
)
# testing handle_http
def test_handle_http():
- mock_db = Mock()
flow = Mock()
- flow_handler = FlowHandler(mock_db, None, flow)
- flow_handler.profileid = "profile_id"
- flow_handler.twid = "timewindow_id"
+ flow_handler = ModuleFactory().create_flow_handler_obj(flow)
flow_handler.handle_http()
- mock_db.add_out_http.assert_called_with(
+ flow_handler.db.add_out_http.assert_called_with(
flow_handler.profileid, flow_handler.twid, flow
)
- mock_db.add_altflow.assert_called_with(
+ flow_handler.db.add_altflow.assert_called_with(
flow, flow_handler.profileid, flow_handler.twid, "benign"
)
# testing handle_ssl
-def test_handle_ssl(flow, mock_db):
- flow_handler = FlowHandler(mock_db, None, flow)
- flow_handler.profileid = "profile_id"
- flow_handler.twid = "timewindow_id"
+def test_handle_ssl(flow):
+ flow_handler = ModuleFactory().create_flow_handler_obj(flow)
flow_handler.handle_ssl()
- mock_db.add_out_ssl.assert_called_with(
+ flow_handler.db.add_out_ssl.assert_called_with(
flow_handler.profileid, flow_handler.twid, flow
)
- mock_db.add_altflow.assert_called_with(
+ flow_handler.db.add_altflow.assert_called_with(
flow, flow_handler.profileid, flow_handler.twid, "benign"
)
# testing handle_ssh
-def test_handle_ssh(flow, mock_db):
- flow_handler = FlowHandler(mock_db, None, flow)
- flow_handler.profileid = "profile_id"
- flow_handler.twid = "timewindow_id"
+def test_handle_ssh(flow):
+ flow_handler = ModuleFactory().create_flow_handler_obj(flow)
flow_handler.handle_ssh()
- mock_db.add_out_ssh.assert_called_with(
+ flow_handler.db.add_out_ssh.assert_called_with(
flow_handler.profileid, flow_handler.twid, flow
)
- mock_db.add_altflow.assert_called_with(
+ flow_handler.db.add_altflow.assert_called_with(
flow, flow_handler.profileid, flow_handler.twid, "benign"
)
# testing handle_weird
-def test_handle_weird(flow, mock_db):
- flow_handler = FlowHandler(mock_db, None, flow)
- flow_handler.profileid = "profile_id"
- flow_handler.twid = "timewindow_id"
+def test_handle_weird(flow):
+ flow_handler = ModuleFactory().create_flow_handler_obj(flow)
flow_handler.handle_weird()
expected_payload = {
@@ -123,19 +107,18 @@ def test_handle_weird(flow, mock_db):
"twid": flow_handler.twid,
"flow": asdict(flow),
}
- mock_db.publish.assert_called_with(
+ flow_handler.db.publish.assert_called_with(
"new_weird", json.dumps(expected_payload)
)
- mock_db.add_altflow.assert_called_with(
+ flow_handler.db.add_altflow.assert_called_with(
flow, flow_handler.profileid, flow_handler.twid, "benign"
)
# testing handle_tunnel
-def test_handle_tunnel(flow, mock_db):
- flow_handler = FlowHandler(mock_db, None, flow)
- flow_handler.profileid = "profile_id"
- flow_handler.twid = "timewindow_id"
+def test_handle_tunnel(flow):
+ flow_handler = ModuleFactory().create_flow_handler_obj(flow)
+
flow_handler.handle_tunnel()
expected_payload = {
@@ -143,30 +126,28 @@ def test_handle_tunnel(flow, mock_db):
"twid": flow_handler.twid,
"flow": asdict(flow),
}
- mock_db.publish.assert_called_with(
+ flow_handler.db.publish.assert_called_with(
"new_tunnel", json.dumps(expected_payload)
)
- mock_db.add_altflow.assert_called_with(
+ flow_handler.db.add_altflow.assert_called_with(
flow, flow_handler.profileid, flow_handler.twid, "benign"
)
# testing handle_conn
-def test_handle_conn(flow, mock_db, mocker):
- flow_handler = FlowHandler(mock_db, None, flow)
- flow_handler.profileid = "profile_id"
- flow_handler.twid = "timewindow_id"
+def test_handle_conn(flow):
+ flow_handler = ModuleFactory().create_flow_handler_obj(flow)
flow.daddr = "192.168.1.1"
flow.dport = 80
flow.proto = "tcp"
- mock_symbol = mocker.Mock()
+ mock_symbol = Mock()
mock_symbol.compute.return_value = ("A", "B", "C")
flow_handler.symbol = mock_symbol
flow_handler.handle_conn()
- mock_db.add_tuple.assert_called_with(
+ flow_handler.db.add_tuple.assert_called_with(
flow_handler.profileid,
flow_handler.twid,
"192.168.1.1-80-tcp",
@@ -174,10 +155,10 @@ def test_handle_conn(flow, mock_db, mocker):
"Client",
flow,
)
- mock_db.add_ips.assert_called_with(
+ flow_handler.db.add_ips.assert_called_with(
flow_handler.profileid, flow_handler.twid, flow, "Client"
)
- mock_db.add_port.assert_has_calls(
+ flow_handler.db.add_port.assert_has_calls(
[
call(
flow_handler.profileid,
@@ -195,10 +176,10 @@ def test_handle_conn(flow, mock_db, mocker):
),
]
)
- mock_db.add_flow.assert_called_with(
+ flow_handler.db.add_flow.assert_called_with(
flow, flow_handler.profileid, flow_handler.twid, "benign"
)
- mock_db.add_mac_addr_to_profile.assert_called_with(
+ flow_handler.db.add_mac_addr_to_profile.assert_called_with(
flow_handler.profileid, flow.smac
)
if not flow_handler.running_non_stop:
@@ -208,10 +189,8 @@ def test_handle_conn(flow, mock_db, mocker):
# testing handle_files
-def test_handle_files(flow, mock_db):
- flow_handler = FlowHandler(mock_db, None, flow)
- flow_handler.profileid = "profile_id"
- flow_handler.twid = "timewindow_id"
+def test_handle_files(flow):
+ flow_handler = ModuleFactory().create_flow_handler_obj(flow)
flow_handler.handle_files()
@@ -221,27 +200,22 @@ def test_handle_files(flow, mock_db):
"profileid": flow_handler.profileid,
"twid": flow_handler.twid,
}
- mock_db.publish.assert_called_with(
+ flow_handler.db.publish.assert_called_with(
"new_downloaded_file", json.dumps(expected_payload)
)
- mock_db.add_altflow.assert_called_with(
+ flow_handler.db.add_altflow.assert_called_with(
flow, flow_handler.profileid, flow_handler.twid, "benign"
)
# testing handle_arp
-def test_handle_arp(flow, mock_db, mocker):
- flow_handler = FlowHandler(mock_db, None, flow)
- flow_handler.profileid = "profile_id"
- flow_handler.twid = "timewindow_id"
+def test_handle_arp(flow):
+ flow_handler = ModuleFactory().create_flow_handler_obj(flow)
flow.dmac = "aa:bb:cc:dd:ee:ff"
flow.smac = "ff:ee:dd:cc:bb:aa"
flow.daddr = "192.168.1.1"
flow.saddr = "192.168.1.2"
-
- mock_publisher = mocker.Mock()
- flow_handler.publisher = mock_publisher
-
+ flow_handler.publisher = Mock()
flow_handler.handle_arp()
expected_payload = {
@@ -249,24 +223,23 @@ def test_handle_arp(flow, mock_db, mocker):
"profileid": flow_handler.profileid,
"twid": flow_handler.twid,
}
- mock_db.publish.assert_called_with("new_arp", json.dumps(expected_payload))
- mock_db.add_mac_addr_to_profile.assert_called_with(
+ flow_handler.db.publish.assert_called_with(
+ "new_arp", json.dumps(expected_payload)
+ )
+ flow_handler.db.add_mac_addr_to_profile.assert_called_with(
flow_handler.profileid, flow.smac
)
- mock_publisher.new_MAC.assert_has_calls(
+ flow_handler.publisher.new_MAC.assert_has_calls(
[call(flow.dmac, flow.daddr), call(flow.smac, flow.saddr)]
)
- mock_db.add_altflow.assert_called_with(
+ flow_handler.db.add_altflow.assert_called_with(
flow, flow_handler.profileid, flow_handler.twid, "benign"
)
# testing handle_smtp
-def test_handle_smtp(flow, mock_db):
- flow_handler = FlowHandler(mock_db, None, flow)
- flow_handler.profileid = "profile_id"
- flow_handler.twid = "timewindow_id"
-
+def test_handle_smtp(flow):
+ flow_handler = ModuleFactory().create_flow_handler_obj(flow)
flow_handler.handle_smtp()
expected_payload = {
@@ -274,57 +247,52 @@ def test_handle_smtp(flow, mock_db):
"profileid": flow_handler.profileid,
"twid": flow_handler.twid,
}
- mock_db.publish.assert_called_with(
+ flow_handler.db.publish.assert_called_with(
"new_smtp", json.dumps(expected_payload)
)
- mock_db.add_altflow.assert_called_with(
+ flow_handler.db.add_altflow.assert_called_with(
flow, flow_handler.profileid, flow_handler.twid, "benign"
)
# testing handle_software
-def test_handle_software(flow, mock_db, mocker):
- flow_handler = FlowHandler(mock_db, None, flow)
- flow_handler.profileid = "profile_id"
- flow_handler.twid = "timewindow_id"
-
- mock_publisher = mocker.Mock()
- flow_handler.publisher = mock_publisher
+def test_handle_software(flow):
+ flow_handler = ModuleFactory().create_flow_handler_obj(flow)
+ flow_handler.publisher = Mock()
flow_handler.handle_software()
- mock_db.add_software_to_profile.assert_called_with(
+ flow_handler.db.add_software_to_profile.assert_called_with(
flow_handler.profileid, flow
)
- mock_publisher.new_software.assert_called_with(
+ flow_handler.publisher.new_software.assert_called_with(
flow_handler.profileid, flow
)
- mock_db.add_altflow.assert_called_with(
+ flow_handler.db.add_altflow.assert_called_with(
flow, flow_handler.profileid, flow_handler.twid, "benign"
)
# testing handle_notice
-def test_handle_notice(flow, mock_db):
- flow_handler = FlowHandler(mock_db, None, flow)
- flow_handler.profileid = "profile_id"
- flow_handler.twid = "timewindow_id"
+def test_handle_notice(flow):
+ flow_handler = ModuleFactory().create_flow_handler_obj(flow)
+
flow.note = "Gateway_addr_identified: 192.168.1.1"
flow.msg = "Gateway_addr_identified: 192.168.1.1"
flow_handler.handle_notice()
- mock_db.add_out_notice.assert_called_with(
+ flow_handler.db.add_out_notice.assert_called_with(
flow_handler.profileid, flow_handler.twid, flow
)
- mock_db.set_default_gateway.assert_called_with("IP", "192.168.1.1")
- mock_db.add_altflow.assert_called_with(
+ flow_handler.db.set_default_gateway.assert_called_with("IP", "192.168.1.1")
+ flow_handler.db.add_altflow.assert_called_with(
flow, flow_handler.profileid, flow_handler.twid, "benign"
)
# testing handle_dhcp
-def test_handle_dhcp(mock_db, mocker):
+def test_handle_dhcp():
flow = DHCP(
starttime=1234567890,
uids=["uid1", "uid2", "uid3"],
@@ -336,25 +304,24 @@ def test_handle_dhcp(mock_db, mocker):
host_name="test-host",
requested_addr="192.168.1.4",
)
- flow_handler = FlowHandler(mock_db, None, flow)
- flow_handler.profileid = "profile_id"
- flow_handler.twid = "timewindow_id"
-
- mock_publisher = mocker.Mock()
- flow_handler.publisher = mock_publisher
-
+ flow_handler = ModuleFactory().create_flow_handler_obj(flow)
+ flow_handler.publisher = Mock()
flow_handler.handle_dhcp()
- mock_publisher.new_MAC.assert_called_with(flow.smac, flow.saddr)
- mock_db.add_mac_addr_to_profile.assert_called_with(
+ flow_handler.publisher.new_MAC.assert_called_with(flow.smac, flow.saddr)
+ flow_handler.db.add_mac_addr_to_profile.assert_called_with(
flow_handler.profileid, flow.smac
)
- mock_db.store_dhcp_server.assert_called_with("192.168.1.1")
- mock_db.mark_profile_as_dhcp.assert_called_with(flow_handler.profileid)
- mock_publisher.new_dhcp.assert_called_with(flow_handler.profileid, flow)
+ flow_handler.db.store_dhcp_server.assert_called_with("192.168.1.1")
+ flow_handler.db.mark_profile_as_dhcp.assert_called_with(
+ flow_handler.profileid
+ )
+ flow_handler.publisher.new_dhcp.assert_called_with(
+ flow_handler.profileid, flow
+ )
for uid in flow.uids:
flow.uid = uid
- mock_db.add_altflow.assert_called_with(
+ flow_handler.db.add_altflow.assert_called_with(
flow, flow_handler.profileid, flow_handler.twid, "benign"
)
diff --git a/tests/test_flowalerts.py b/tests/test_flowalerts.py
deleted file mode 100644
index 93903bb89..000000000
--- a/tests/test_flowalerts.py
+++ /dev/null
@@ -1,193 +0,0 @@
-"""Unit test for modules/flowalerts/flowalerts.py"""
-
-from unittest.mock import Mock
-
-from tests.module_factory import ModuleFactory
-import json
-from numpy import arange
-
-# dummy params used for testing
-profileid = "profile_192.168.1.1"
-twid = "timewindow1"
-uid = "CAeDWs37BipkfP21u8"
-timestamp = 1635765895.037696
-saddr = "192.168.1.1"
-daddr = "192.168.1.2"
-dst_profileid = f"profile_{daddr}"
-
-
-def test_port_belongs_to_an_org(mock_db):
- flowalerts = ModuleFactory().create_conn_analyzer_obj(mock_db)
-
- # belongs to apple
- portproto = "65509/tcp"
-
- # mock the db response to say that the org of this port
- # is apple and the mac vendor of the
- # given profile is also apple
- mock_db.get_organization_of_port.return_value = json.dumps(
- {"ip": [], "org_name": "apple"}
- )
- mock_db.get_mac_vendor_from_profile.return_value = "apple"
-
- assert (
- flowalerts.port_belongs_to_an_org(daddr, portproto, profileid) is True
- )
-
- # doesn't belong to any org
- portproto = "78965/tcp"
- # expectations
- mock_db.get_organization_of_port.return_value = None
- assert (
- flowalerts.port_belongs_to_an_org(daddr, portproto, profileid) is False
- )
-
-
-def test_check_unknown_port(mocker, mock_db):
- flowalerts = ModuleFactory().create_conn_analyzer_obj(mock_db)
- # database.set_port_info('23/udp', 'telnet')
- mock_db.get_port_info.return_value = "telnet"
- # now we have info 23 udp
- assert (
- flowalerts.check_unknown_port(
- "23", "udp", daddr, profileid, twid, uid, timestamp, "Established"
- )
- is False
- )
-
- # test when the port is unknown
- mock_db.get_port_info.return_value = None
- mock_db.is_ftp_port.return_value = False
- # mock the flowalerts call to port_belongs_to_an_org
- flowalerts_mock = mocker.patch(
- "modules.flowalerts.flowalerts.Conn.port_belongs_to_an_org"
- )
- flowalerts_mock.return_value = False
-
- assert (
- flowalerts.check_unknown_port(
- "1337",
- "udp",
- daddr,
- profileid,
- twid,
- uid,
- timestamp,
- "Established",
- )
- is True
- )
-
-
-def test_check_if_resolution_was_made_by_different_version(mock_db):
- flowalerts = ModuleFactory().create_conn_analyzer_obj(mock_db)
-
- # now this ipv6 belongs to the same profileid, is supposed to be
- # the other version of the ipv4 of the used profileid
- mock_db.get_the_other_ip_version.return_value = json.dumps(
- "2001:0db8:85a3:0000:0000:8a2e:0370:7334"
- )
- # now the daddr given to check_if_resolution_was_made_by_different_version()
- # is supposed to be resolved by the ipv6 of the profile, not the ipv4
- mock_db.get_dns_resolution.return_value = {
- "resolved-by": "2001:0db8:85a3:0000:0000:8a2e:0370:7334"
- }
-
- # give flowalerts the ipv4 and the daddr, it should detect that it
- # was resolved by the other version
- assert (
- flowalerts.check_if_resolution_was_made_by_different_version(
- profileid, daddr
- )
- is True
- )
-
- # check the case when the resolution wasn't done by another IP
- mock_db.get_the_other_ip_version.return_value = json.dumps(
- "2001:0db8:85a3:0000:0000:8a2e:0370:7334"
- )
- mock_db.get_dns_resolution.return_value = {"resolved-by": []}
-
- assert (
- flowalerts.check_if_resolution_was_made_by_different_version(
- profileid, "2.3.4.5"
- )
- is False
- )
-
-
-def test_check_dns_arpa_scan(mock_db):
- flowalerts = ModuleFactory().create_dns_analyzer_obj(mock_db)
- # make 10 different arpa scans
- for ts in arange(0, 1, 1 / 10):
- is_arpa_scan = flowalerts.check_dns_arpa_scan(
- f"{ts}example.in-addr.arpa", timestamp + ts, profileid, twid, uid
- )
-
- assert is_arpa_scan is True
-
-
-def test_check_multiple_ssh_versions(mock_db):
- flowalerts = ModuleFactory().create_software_analyzer_obj(mock_db)
- # in the first flow, we only have 1 ssh client in use
- # so no version incompatibility
- mock_db.get_software_from_profile.return_value = {
- "SSH::CLIENT": {
- "version-major": 8,
- "version-minor": 1,
- "uid": "YTYwNjBiMjIxZDkzOWYyYTc4",
- }
- }
-
- flow2 = {
- "starttime": 1632302619.444328,
- "uid": "M2VhNTA3ZmZiYjU3OGMxMzJk",
- "saddr": "192.168.1.247",
- "daddr": "192.168.1.50",
- "software": "SSH::CLIENT",
- "unparsed_version": "OpenSSH_9.1",
- "version_major": 9,
- "version_minor": 1,
- "type_": "software",
- }
-
- # in flow 2 slips should detect a client version change
- assert flowalerts.check_multiple_ssh_versions(flow2, "timewindow1") is True
-
-
-def test_detect_dga(mock_db):
- flowalerts = ModuleFactory().create_dns_analyzer_obj(mock_db)
- rcode_name = "NXDOMAIN"
- flowalerts.whitelist.domain_analyzer.is_whitelisted = Mock()
- flowalerts.whitelist.domain_analyzer.is_whitelisted.return_value = False
-
- for i in range(10):
- dga_detected = flowalerts.detect_dga(
- rcode_name,
- f"example{i}.com",
- timestamp,
- profileid,
- twid,
- uid,
- )
- assert dga_detected is True
-
-
-def test_detect_young_domains(mock_db):
- flowalerts = ModuleFactory().create_dns_analyzer_obj(mock_db)
- domain = "example.com"
- answers = ["192.168.1.1", "192.168.1.2", "192.168.1.3", "CNAME_HERE.com"]
-
- # age in days
- mock_db.get_domain_data.return_value = {"Age": 50}
- assert flowalerts.detect_young_domains(
- domain, answers, timestamp, profileid, twid, uid
- )
-
- # more than the age threshold
- mock_db.get_domain_data.return_value = {"Age": 1000}
- assert not (
- flowalerts.detect_young_domains(
- domain, answers, timestamp, profileid, twid, uid
- )
- )
diff --git a/tests/test_go_director.py b/tests/test_go_director.py
new file mode 100644
index 000000000..22f6625c8
--- /dev/null
+++ b/tests/test_go_director.py
@@ -0,0 +1,611 @@
+from unittest.mock import Mock, patch
+import pytest
+from tests.module_factory import ModuleFactory
+import tempfile
+import os
+
+
+@pytest.mark.parametrize(
+ "data_dict, expected_method, expected_args",
+ [
+ # Test case 1: Handling a valid peer update message
+ (
+ {
+ "message_type": "peer_update",
+ "message_contents": {
+ "peerid": "test_peer",
+ "ip": "192.168.1.1",
+ "reliability": 0.8,
+ },
+ },
+ "process_go_update",
+ [{"peerid": "test_peer", "ip": "192.168.1.1", "reliability": 0.8}],
+ ),
+ # Test case 2: Handling a valid go_data message
+ (
+ {
+ "message_type": "go_data",
+ "message_contents": {
+ "reporter": "test_reporter",
+ "report_time": 1649445643,
+ "message": "eyJtZXNzYWdlX3R5cGUiOiJyZXBvcnQiLCJrZXkiOiIxOTIuMTY4LjE"
+ "uMSIsImtleV90eXBlIjoiaXAiLCJldmFsdWF0aW9uX3R5cGUiOiJzY29y"
+ "ZV9jb25maWRlbmNlIiwiZXZhbHVhdGlvbiI6eyJzY29yZSI6MC41LCJjb2"
+ "5maWRlbmNlIjowLjh9fQ==",
+ },
+ },
+ "process_go_data",
+ [
+ {
+ "reporter": "test_reporter",
+ "report_time": 1649445643,
+ "message": "eyJtZXNzYWdlX3R5cGUiOiJyZXBvcnQiLCJrZXkiOiIxOTIuMTY4LjEuM"
+ "SIsImtleV90eXBlIjoiaXAiLCJldmFsdWF0aW9uX3R5cGUiOiJzY29yZV9jb"
+ "25maWRlbmNlIiwiZXZhbHVhdGlvbiI6eyJzY29yZSI6MC41LCJjb25maWRlbm"
+ "NlIjowLjh9fQ==",
+ }
+ ],
+ ),
+ ],
+)
+def test_handle_gopy_data(data_dict, expected_method, expected_args):
+ go_director = ModuleFactory().create_go_director_obj()
+
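+ # patch the handler named by the parametrization so this test only checks
+ # that handle_gopy_data routes the message to it with the right payload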
+ with patch.object(go_director, expected_method) as mock_method:
+ go_director.handle_gopy_data(data_dict)
+ mock_method.assert_called_once_with(*expected_args)
+
+
+@pytest.mark.parametrize(
+ "data_dict, expected_print_args",
+ [
+ # Test case 1: Handling missing keys in the message
+ (
+ {"invalid_key": "value"},
+ (
+ "Json from the pigeon: {'invalid_key': 'value'} doesn't contain expected values "
+ "message_type or message_contents",
+ 0,
+ 1,
+ ),
+ ),
+ # Test case 2: Handling an invalid message type
+ (
+ {"message_type": "invalid_type", "message_contents": {}},
+ ("Invalid command: invalid_type", 0, 2),
+ ),
+ ],
+)
+def test_handle_gopy_data_error_cases(data_dict, expected_print_args):
+ go_director = ModuleFactory().create_go_director_obj()
+
+ go_director.handle_gopy_data(data_dict)
+ go_director.print.assert_called_once_with(*expected_print_args)
+
+
+@pytest.mark.parametrize(
+ "report, expected_method, expected_args",
+ [
+ # Test case 1: Handling a valid report message
+ (
+ {
+ "reporter": "test_reporter",
+ "report_time": 1649445643,
+ "message": "eyJtZXNzYWdlX3R5cGUiOiJyZXBvcnQiLCJrZXkiOiIxOTIuMT"
+ "Y4LjEuMSIsImtleV90eXBlIjoiaXAiLCJldmFsdWF0aW9uX3R5cG"
+ "UiOiJzY29yZV9jb25maWRlbmNlIiwiZXZhbHVhdGlvbiI6eyJzY29yZ"
+ "SI6MC41LCJjb25maWRlbmNlIjowLjh9fQ==",
+ },
+ "process_message_report",
+ [
+ "test_reporter",
+ 1649445643,
+ {
+ "message_type": "report",
+ "key": "192.168.1.1",
+ "key_type": "ip",
+ "evaluation_type": "score_confidence",
+ "evaluation": {"score": 0.5, "confidence": 0.8},
+ },
+ ],
+ ),
+ # Test case 2: Handling a valid request message
+ (
+ {
+ "reporter": "test_reporter",
+ "report_time": 1649445643,
+ "message": "eyJtZXNzYWdlX3R5cGUiOiJyZXF1ZXN0IiwiZXZhbHVhdGl"
+ "vbl90eXBlIjoic2NvcmVfY29uZmlkZW5jZSIsImtleV90eXBlIjoiaXA"
+ "iLCJrZXkiOiIxOTIuMTY4LjEuMSJ9",
+ },
+ "process_message_request",
+ [
+ "test_reporter",
+ 1649445643,
+ {
+ "message_type": "request",
+ "evaluation_type": "score_confidence",
+ "key_type": "ip",
+ "key": "192.168.1.1",
+ },
+ ],
+ ),
+ ],
+)
+def test_process_go_data(report, expected_method, expected_args):
+ go_director = ModuleFactory().create_go_director_obj()
+
+ with patch.object(go_director, expected_method) as mock_method:
+ go_director.process_go_data(report)
+ mock_method.assert_called_once_with(*expected_args)
+
+
+@pytest.mark.parametrize(
+ "message, expected_message_type, expected_data",
+ [
+ # Test case 1: Valid base64 encoded JSON message
+ (
+ "eyJtZXNzYWdlX3R5cGUiOiJyZXBvcnQiLCJrZXkiOiIxOTIuMTY4LjEuMSIsImtleV90eXBlIjoiaXAiLCJldmFsd"
+ "WF0aW9uX3R5cGUiOiJzY29yZV9jb25maWRlbmNlIiwiZXZhbHVhdGlvbiI6ey"
+ "JzY29yZSI6MC41LCJjb25maWRlbmNlIjowLjh9fQ==",
+ "report",
+ {
+ "message_type": "report",
+ "key": "192.168.1.1",
+ "key_type": "ip",
+ "evaluation_type": "score_confidence",
+ "evaluation": {"score": 0.5, "confidence": 0.8},
+ },
+ ),
+ # Test case 2: Invalid base64 encoded message
+ ("invalid_base64_string", "", {}),
+ # Test case 3: Valid base64 but invalid JSON message
+ ("eyJpbmZvIjoiYmFkIGpzb24ifQ==", "", {}),
+ # Test case 4: Valid base64 and JSON but missing 'message_type' key
+ ("eyJrZXkiOiIxOTIuMTY4LjEuMSJ9", "", {}),
+ ],
+)
+def test_validate_message(message, expected_message_type, expected_data):
+ go_director = ModuleFactory().create_go_director_obj()
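+ # the invalid inputs (cases 2-4) are all expected to come back as an empty message_type and an empty dict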
+ message_type, data = go_director.validate_message(message)
+ assert message_type == expected_message_type
+ assert data == expected_data
+
+
+@pytest.mark.parametrize(
+ "data, expected_result",
+ [
+ # Test case 1: Valid request data
+ (
+ {
+ "key": "192.168.1.1",
+ "key_type": "ip",
+ "evaluation_type": "score_confidence",
+ },
+ True,
+ ),
+ # Test case 2: Missing key
+ ({"key_type": "ip", "evaluation_type": "score_confidence"}, False),
+ # Test case 3: Invalid key type
+ (
+ {
+ "key": "192.168.1.1",
+ "key_type": "invalid_type",
+ "evaluation_type": "score_confidence",
+ },
+ False,
+ ),
+ # Test case 4: Invalid IP address
+ (
+ {
+ "key": "invalid_ip",
+ "key_type": "ip",
+ "evaluation_type": "score_confidence",
+ },
+ False,
+ ),
+ # Test case 5: Invalid evaluation type
+ (
+ {
+ "key": "192.168.1.1",
+ "key_type": "ip",
+ "evaluation_type": "invalid_evaluation",
+ },
+ False,
+ ),
+ ],
+)
+def test_validate_message_request(
+ data,
+ expected_result,
+):
+ go_director = ModuleFactory().create_go_director_obj()
+ result = go_director.validate_message_request(data)
+ assert result == expected_result
+
+
+@pytest.mark.parametrize(
+ "ip, reporter, score, confidence, timestamp, "
+ "profileid_of_attacker, "
+ "expected_description, expected_threat_level",
+ [
+ # Test case 1: Basic test with valid data
+ (
+ "192.168.1.1",
+ "test_reporter",
+ 0.5,
+ 0.8,
+ 1649445643,
+ "profile_192.168.1.1",
+ "attacking another peer: " "(test_reporter). confidence: 0.8 ",
+ "medium",
+ ),
+ # Test case 2: Test with a different score and confidence
+ (
+ "10.0.0.1",
+ "another_reporter",
+ 0.9,
+ 0.6,
+ 1649445644,
+ "profile_10.0.0.1",
+ "attacking another peer: " "(another_reporter). confidence: 0.6 ",
+ "critical",
+ ),
+ ],
+)
+def test_set_evidence_p2p_report(
+ ip,
+ reporter,
+ score,
+ confidence,
+ timestamp,
+ profileid_of_attacker,
+ expected_description,
+ expected_threat_level,
+):
+ go_director = ModuleFactory().create_go_director_obj()
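+ # trustdb is a mock here; give get_ip_of_peer a return value before calling set_evidence_p2p_report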
+ go_director.trustdb.get_ip_of_peer.return_value = (timestamp, "")
+
+ go_director.set_evidence_p2p_report(
+ ip, reporter, score, confidence, timestamp, profileid_of_attacker
+ )
+
+ go_director.db.set_evidence.assert_called_once()
+ call_args = go_director.db.set_evidence.call_args[0][0]
+ assert call_args.attacker.value == ip
+ assert expected_description in call_args.description
+ assert call_args.threat_level == expected_threat_level
+
+
+def test_read_configuration():
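+ # ConfigParser is patched so that read_configuration stores 3600.0 as go_director.width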
+ with patch(
+ "slips_files.common.parsers.config_parser.ConfigParser",
+ return_value=3600.0,
+ ):
+ go_director = ModuleFactory().create_go_director_obj()
+ go_director.read_configuration()
+ assert go_director.width == 3600.0
+
+
+@pytest.mark.parametrize(
+ "text, expected_log_content",
+ [ # Test case 1: Basic log message
+ ("Test log message", " - Test log message\n"),
+ # Test case 2: Another log message
+ ("Another test message", " - Another test message\n"),
+ ],
+)
+def test_log(
+ text,
+ expected_log_content,
+):
+ go_director = ModuleFactory().create_go_director_obj()
+
+ with tempfile.NamedTemporaryFile(mode="w+", delete=False) as temp_file:
+ temp_filename = temp_file.name
+
+ with patch.object(go_director, "reports_logfile", temp_file):
+ go_director.log(text)
+
+ with open(temp_filename, "r") as f:
+ log_content = f.read()
+ os.unlink(temp_filename)
+ assert expected_log_content in log_content
+
+
+def test_process_message_request_valid_request():
+ """Test handling of valid requests when override_p2p is False."""
+ go_director = ModuleFactory().create_go_director_obj()
+ go_director.override_p2p = False
+
+ data = {
+ "key": "192.168.1.1",
+ "key_type": "ip",
+ "evaluation_type": "score_confidence",
+ }
+
+ with patch.object(
+ go_director, "respond_to_message_request"
+ ) as mock_respond:
+ go_director.process_message_request("test_reporter", 1649445643, data)
+
+ mock_respond.assert_called_once_with("192.168.1.1", "test_reporter")
+ go_director.print.assert_called_once_with(
+ "[The Network -> Slips] request about "
+ "192.168.1.1 from: test_reporter"
+ )
+
+
+@pytest.mark.parametrize(
+ "data, expected_print_args",
+ [
+ # Test Case: Invalid Key Type
+ (
+ {
+ "key": "192.168.1.1",
+ "key_type": "invalid_type",
+ "evaluation_type": "score_confidence",
+ },
+ ("Module can't process key " "type invalid_type", 0, 2),
+ ),
+ # Test Case: Invalid Key
+ (
+ {
+ "key": "invalid_ip",
+ "key_type": "ip",
+ "evaluation_type": "score_confidence",
+ },
+ (
+ "Provided key invalid_ip isn't a "
+ "valid value for it's type ip",
+ 0,
+ 2,
+ ),
+ ),
+ # Test Case: Invalid Evaluation Type
+ (
+ {
+ "key": "192.168.1.1",
+ "key_type": "ip",
+ "evaluation_type": "invalid_evaluation",
+ },
+ (
+ "Module can't process evaluation " "type invalid_evaluation",
+ 0,
+ 2,
+ ),
+ ),
+ ],
+)
+def test_process_message_request_invalid_request(data, expected_print_args):
+ """Test handling of invalid requests (regardless of override_p2p)."""
+ go_director = ModuleFactory().create_go_director_obj()
+
+ go_director.process_message_request("test_reporter", 1649445643, data)
+ go_director.print.assert_called_once_with(*expected_print_args)
+
+
+def test_process_message_request_override_p2p():
+ """Test behavior when override_p2p is True."""
+ go_director = ModuleFactory().create_go_director_obj()
+ go_director.override_p2p = True
+ go_director.request_func = Mock()
+ data = {
+ "key": "192.168.1.1",
+ "key_type": "ip",
+ "evaluation_type": "score_confidence",
+ }
+
+ go_director.process_message_request("test_reporter", 1649445643, data)
+
+ go_director.request_func.assert_called_once_with(
+ "192.168.1.1", "test_reporter"
+ )
+
+
+@pytest.mark.parametrize(
+ "reporter, report_time, key_type, " "key, evaluation, expected_error",
+ [
+ ( # testcase1:Score value is out of bounds
+ "test_reporter",
+ 1649445643,
+ "ip",
+ "192.168.1.1",
+ {"score": 1.5, "confidence": 0.8},
+ "Score value is out of bounds",
+ ),
+ ( # testcase2:Confidence value is out of bounds
+ "test_reporter",
+ 1649445643,
+ "ip",
+ "192.168.1.1",
+ {"score": 0.5, "confidence": 1.2},
+ "Confidence value is out of bounds",
+ ),
+ ( # testcase3:Score or confidence are missing
+ "test_reporter",
+ 1649445643,
+ "ip",
+ "192.168.1.1",
+ {"score": 0.5},
+ "Score or confidence are missing",
+ ),
+ ( # testcase4:Score or confidence have wrong data type
+ "test_reporter",
+ 1649445643,
+ "ip",
+ "192.168.1.1",
+ {"score": "invalid", "confidence": 0.8},
+ "Score or confidence have wrong data type",
+ ),
+ ],
+)
+def test_process_evaluation_score_confidence_invalid(
+ reporter, report_time, key_type, key, evaluation, expected_error
+):
+ go_director = ModuleFactory().create_go_director_obj()
+
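+ # when validation fails, nothing should be stored and no evidence should be set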
+ with patch.object(go_director, "print") as mock_print, patch.object(
+ go_director.trustdb, "insert_new_go_report"
+ ) as mock_insert, patch.object(
+ go_director.db, "store_p2p_report"
+ ) as mock_store, patch.object(
+ go_director.db, "add_profile"
+ ) as mock_add_profile, patch.object(
+ go_director, "set_evidence_p2p_report"
+ ) as mock_set_evidence:
+ go_director.process_evaluation_score_confidence(
+ reporter, report_time, key_type, key, evaluation
+ )
+
+ mock_print.assert_called_with(expected_error, 0, 2)
+ mock_insert.assert_not_called()
+ mock_store.assert_not_called()
+ mock_add_profile.assert_not_called()
+ mock_set_evidence.assert_not_called()
+
+
+def test_process_evaluation_score_confidence_valid():
+ go_director = ModuleFactory().create_go_director_obj()
+
+ reporter = "test_reporter"
+ report_time = 1649445643
+ key_type = "ip"
+ key = "192.168.1.1"
+ evaluation = {"score": 0.5, "confidence": 0.8}
+ expected_result = (
+ "Data processing ok: reporter test_reporter, "
+ "report time 1649445643, key 192.168.1.1 (ip), "
+ "score 0.5, confidence 0.8"
+ )
+
+ with patch.object(go_director, "print") as mock_print, patch.object(
+ go_director.trustdb, "insert_new_go_report"
+ ) as mock_insert, patch.object(
+ go_director.db, "store_p2p_report"
+ ) as mock_store, patch.object(
+ go_director.db, "add_profile"
+ ) as mock_add_profile, patch.object(
+ go_director, "set_evidence_p2p_report"
+ ) as mock_set_evidence:
+ go_director.process_evaluation_score_confidence(
+ reporter, report_time, key_type, key, evaluation
+ )
+
+ mock_print.assert_called_with(expected_result, 2, 0)
+ mock_insert.assert_called_once()
+ mock_store.assert_called_once()
+ mock_add_profile.assert_called_once()
+ mock_set_evidence.assert_called_once()
+
+
+@pytest.mark.parametrize(
+ "data, expected_calls",
+ [
+ # Test case 1: Valid update with both IP and reliability
+ (
+ {
+ "peerid": "test_peer",
+ "ip": "192.168.1.1",
+ "reliability": 0.8,
+ "timestamp": 1649445643,
+ },
+ [
+ ("insert_go_reliability", ("test_peer", 0.8)),
+ ("insert_go_ip_pairing", ("test_peer", "192.168.1.1")),
+ ],
+ ),
+ # Test case 2: Update with only reliability
+ (
+ {"peerid": "test_peer", "reliability": 0.7},
+ [("insert_go_reliability", ("test_peer", 0.7))],
+ ),
+ # Test case 3: Update with only IP
+ (
+ {"peerid": "test_peer", "ip": "192.168.1.2"},
+ [("insert_go_ip_pairing", ("test_peer", "192.168.1.2"))],
+ ),
+ # Test case 4: Invalid IP address
+ ({"peerid": "test_peer", "ip": "invalid_ip"}, []),
+ ],
+)
+def test_process_go_update(data, expected_calls):
+ go_director = ModuleFactory().create_go_director_obj()
+
+ with patch.object(
+ go_director.trustdb, "insert_go_reliability"
+ ) as mock_insert_reliability, patch.object(
+ go_director.trustdb, "insert_go_ip_pairing"
+ ) as mock_insert_ip:
+
+ go_director.process_go_update(data)
+
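+ # collect (method_name, call_args) pairs from both trustdb mocks and compare them to expected_calls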
+ actual_calls = []
+ for call in mock_insert_reliability.call_args_list:
+ actual_calls.append(("insert_go_reliability", call[0]))
+ for call in mock_insert_ip.call_args_list:
+ actual_calls.append(("insert_go_ip_pairing", call[0]))
+
+ assert actual_calls == expected_calls
+
+
+def test_respond_to_message_request_with_info():
+ go_director = ModuleFactory().create_go_director_obj()
+ key = "192.168.1.1"
+ reporter = "test_reporter"
+ score = 0.5
+ confidence = 0.8
+
+ with patch(
+ "modules.p2ptrust.utils." "go_director.get_ip_info_from_slips",
+ return_value=(score, confidence),
+ ) as mock_get_info:
+ with patch(
+ "modules.p2ptrust." "utils.go_director." "send_evaluation_to_go"
+ ) as mock_send_evaluation:
+ go_director.respond_to_message_request(key, reporter)
+
+ mock_get_info.assert_called_once_with(key, go_director.db)
+
+ expected_print = (
+ f"[Slips -> The Network] Slips responded "
+ f"with info score={score} confidence={confidence} "
+ f"about IP: {key} to {reporter}."
+ )
+ go_director.print.assert_called_once_with(expected_print, 2, 0)
+
+ mock_send_evaluation.assert_called_once_with(
+ key,
+ score,
+ confidence,
+ reporter,
+ go_director.pygo_channel,
+ go_director.db,
+ )
+
+
+def test_respond_to_message_request_without_info():
+ go_director = ModuleFactory().create_go_director_obj()
+ key = "10.0.0.1"
+ reporter = "another_reporter"
+ score = None
+ confidence = None
+
+ with patch(
+ "modules.p2ptrust.utils." "go_director.get_ip_info_from_slips",
+ return_value=(score, confidence),
+ ) as mock_get_info:
+ with patch(
+ "modules.p2ptrust.utils." "go_director.send_evaluation_to_go"
+ ) as mock_send_evaluation:
+ go_director.respond_to_message_request(key, reporter)
+
+ mock_get_info.assert_called_once_with(key, go_director.db)
+
+ expected_print = (
+ f"[Slips -> The Network] Slips has no info about IP: {key}. "
+ f"Not responding to {reporter}"
+ )
+ go_director.print.assert_called_once_with(expected_print, 2, 0)
+
+ mock_send_evaluation.assert_not_called()
diff --git a/tests/test_horizontal_portscans.py b/tests/test_horizontal_portscans.py
index c4e4fe4e6..922e3faf2 100644
--- a/tests/test_horizontal_portscans.py
+++ b/tests/test_horizontal_portscans.py
@@ -22,12 +22,12 @@ def generate_random_ip():
return ".".join(str(random.randint(0, 255)) for _ in range(4))
-def enough_dstips_to_reach_the_threshold(mock_db):
+def enough_dstips_to_reach_the_threshold():
"""
returns conns to dport that are enough
to reach the minimum dports to trigger the first scan
"""
- module = ModuleFactory().create_horizontal_portscan_obj(mock_db)
+ module = ModuleFactory().create_horizontal_portscan_obj()
# get a random list of ints(ports) that are below the threshold
# Generate a random number between 0 and threshold
amount_of_dstips: int = random.randint(
@@ -56,7 +56,7 @@ def enough_dstips_to_reach_the_threshold(mock_db):
],
)
def test_check_if_enough_dstips_to_trigger_an_evidence(
- mock_db, prev_amount_of_dstips, cur_amount_of_dstips, expected_return_val
+ prev_amount_of_dstips, cur_amount_of_dstips, expected_return_val
):
"""
slips detects scans based on the number of current dports scanned to the
@@ -68,7 +68,7 @@ def test_check_if_enough_dstips_to_trigger_an_evidence(
timewindow = "timewindow0"
dport = 5555
- horizontal_ps = ModuleFactory().create_horizontal_portscan_obj(mock_db)
+ horizontal_ps = ModuleFactory().create_horizontal_portscan_obj()
key: str = horizontal_ps.get_twid_identifier(profileid, timewindow, dport)
horizontal_ps.cached_thresholds_per_tw[key] = prev_amount_of_dstips
@@ -79,12 +79,12 @@ def test_check_if_enough_dstips_to_trigger_an_evidence(
assert enough == expected_return_val
-def test_check_if_enough_dstips_to_trigger_an_evidence_no_cache(mock_db):
+def test_check_if_enough_dstips_to_trigger_an_evidence_no_cache():
"""
Test the check_if_enough_dstips_to_trigger_an_evidence
method when there is no cached threshold.
"""
- horizontal_ps = ModuleFactory().create_horizontal_portscan_obj(mock_db)
+ horizontal_ps = ModuleFactory().create_horizontal_portscan_obj()
profileid = "profile_1.1.1.1"
timewindow = "timewindow0"
dport = 5555
@@ -98,10 +98,8 @@ def test_check_if_enough_dstips_to_trigger_an_evidence_no_cache(mock_db):
assert enough is True
-def test_check_if_enough_dstips_to_trigger_an_evidence_less_than_minimum(
- mock_db,
-):
- horizontal_ps = ModuleFactory().create_horizontal_portscan_obj(mock_db)
+def test_check_if_enough_dstips_to_trigger_an_evidence_less_than_minimum():
+ horizontal_ps = ModuleFactory().create_horizontal_portscan_obj()
profileid = "profile_1.1.1.1"
timewindow = "timewindow0"
dport = 5555
@@ -115,12 +113,12 @@ def test_check_if_enough_dstips_to_trigger_an_evidence_less_than_minimum(
assert enough is False
-def not_enough_dstips_to_reach_the_threshold(mock_db):
+def not_enough_dstips_to_reach_the_threshold():
"""
returns conns to dport that are not enough
to reach the minimum dports to trigger the first scan
"""
- module = ModuleFactory().create_horizontal_portscan_obj(mock_db)
+ module = ModuleFactory().create_horizontal_portscan_obj()
# get a random list of ints(ports) that are below the threshold
# Generate a random number between 0 and threshold
amount_of_dstips: int = random.randint(
@@ -137,8 +135,8 @@ def not_enough_dstips_to_reach_the_threshold(mock_db):
return res
-def test_check_if_enough_dstips_to_trigger_an_evidence_equal_min_dips(mock_db):
- horizontal_ps = ModuleFactory().create_horizontal_portscan_obj(mock_db)
+def test_check_if_enough_dstips_to_trigger_an_evidence_equal_min_dips():
+ horizontal_ps = ModuleFactory().create_horizontal_portscan_obj()
profileid = "profile_1.1.1.1"
timewindow = "timewindow0"
dport = 80
@@ -158,19 +156,20 @@ def test_check_if_enough_dstips_to_trigger_an_evidence_equal_min_dips(mock_db):
],
)
def test_check_if_enough_dstips_to_trigger_an_evidence_min_dstips_threshold(
- get_test_conns, expected_return_val: bool, mock_db
+ get_test_conns,
+ expected_return_val: bool,
):
"""
test by mocking the connections returned from the database
"""
- horizontal_ps = ModuleFactory().create_horizontal_portscan_obj(mock_db)
+ horizontal_ps = ModuleFactory().create_horizontal_portscan_obj()
profileid = "profile_1.1.1.1"
timewindow = "timewindow0"
dport = 5555
- dports: dict = get_test_conns(mock_db)
- mock_db.get_data_from_profile_tw.return_value = dports
+ dports: dict = get_test_conns()
+ horizontal_ps.db.get_data_from_profile_tw.return_value = dports
cache_key = horizontal_ps.get_twid_identifier(profileid, timewindow, dport)
amount_of_dips = len(dports[dport]["dstips"])
@@ -183,8 +182,8 @@ def test_check_if_enough_dstips_to_trigger_an_evidence_min_dstips_threshold(
)
-def test_get_not_estab_dst_ports(mock_db):
- horizontal_ps = ModuleFactory().create_horizontal_portscan_obj(mock_db)
+def test_get_not_estab_dst_ports():
+ horizontal_ps = ModuleFactory().create_horizontal_portscan_obj()
profileid = "profile_1.1.1.1"
twid = "timewindow0"
protocol = "TCP"
@@ -199,7 +198,7 @@ def test_get_not_estab_dst_ports(mock_db):
}
},
}
- mock_db.get_data_from_profile_tw.return_value = mock_dports
+ horizontal_ps.db.get_data_from_profile_tw.return_value = mock_dports
dports = horizontal_ps.get_not_estab_dst_ports(
protocol, state, profileid, twid
@@ -207,15 +206,15 @@ def test_get_not_estab_dst_ports(mock_db):
assert dports == mock_dports
-def test_get_not_estab_dst_ports_missing_dstports(mock_db):
- horizontal_ps = ModuleFactory().create_horizontal_portscan_obj(mock_db)
+def test_get_not_estab_dst_ports_missing_dstports():
+ horizontal_ps = ModuleFactory().create_horizontal_portscan_obj()
profileid = "profile_1.1.1.1"
twid = "timewindow0"
protocol = "TCP"
state = "Not Established"
mock_dports = {80: {"dstips": {"8.8.8.8": {}}}}
- mock_db.get_data_from_profile_tw.return_value = mock_dports
+ horizontal_ps.db.get_data_from_profile_tw.return_value = mock_dports
dports = horizontal_ps.get_not_estab_dst_ports(
protocol, state, profileid, twid
@@ -223,18 +222,18 @@ def test_get_not_estab_dst_ports_missing_dstports(mock_db):
assert dports == mock_dports
-def test_get_uids_empty_dstips(mock_db):
+def test_get_uids_empty_dstips():
"""
Test the get_uids method with an empty dstips dictionary.
"""
- horizontal_ps = ModuleFactory().create_horizontal_portscan_obj(mock_db)
+ horizontal_ps = ModuleFactory().create_horizontal_portscan_obj()
dstips = {}
uids = horizontal_ps.get_uids(dstips)
assert uids == []
-def test_get_uids(mock_db):
- horizontal_ps = ModuleFactory().create_horizontal_portscan_obj(mock_db)
+def test_get_uids():
+ horizontal_ps = ModuleFactory().create_horizontal_portscan_obj()
dstips = {
"1.1.1.1": {"uid": ["uid1", "uid2"]},
"2.2.2.2": {"uid": ["uid3", "uid4", "uid5"]},
@@ -244,12 +243,12 @@ def test_get_uids(mock_db):
assert set(uids) == {"uid1", "uid2", "uid3", "uid4", "uid5"}
-def test_get_uids_duplicate(mock_db):
+def test_get_uids_duplicate():
"""
Test the get_uids method with a dstips dictionary that has
duplicate uids
"""
- horizontal_ps = ModuleFactory().create_horizontal_portscan_obj(mock_db)
+ horizontal_ps = ModuleFactory().create_horizontal_portscan_obj()
dstips = {
"1.1.1.1": {"uid": ["uid1", "uid2", "uid1"]},
"2.2.2.2": {"uid": ["uid3", "uid4", "uid5"]},
@@ -259,17 +258,17 @@ def test_get_uids_duplicate(mock_db):
assert set(uids) == {"uid1", "uid2", "uid3", "uid4", "uid5"}
-def test_get_not_estab_dst_ports_no_data(mock_db):
+def test_get_not_estab_dst_ports_no_data():
"""
Test the get_not_estab_dst_ports method when there is no data.
"""
- horizontal_ps = ModuleFactory().create_horizontal_portscan_obj(mock_db)
+ horizontal_ps = ModuleFactory().create_horizontal_portscan_obj()
profileid = "profile_1.1.1.1"
twid = "timewindow0"
protocol = "TCP"
state = "Not Established"
- mock_db.get_data_from_profile_tw.return_value = {}
+ horizontal_ps.db.get_data_from_profile_tw.return_value = {}
dports = horizontal_ps.get_not_estab_dst_ports(
protocol, state, profileid, twid
@@ -299,7 +298,7 @@ def test_get_packets_sent_empty_dstips():
assert pkts_sent == 0
-def test_get_packets_sent_invalid_values(mock_db):
+def test_get_packets_sent_invalid_values():
horizontal_ps = HorizontalPortscan(MagicMock())
dstips = {
"1.1.1.1": {"pkts": "invalid", "spkts": 50},
@@ -331,8 +330,8 @@ def test_get_cache_key_empty_dport():
assert cache_key is False
-def test_get_cache_key_none_dport(mock_db):
- horizontal_ps = ModuleFactory().create_horizontal_portscan_obj(mock_db)
+def test_get_cache_key_none_dport():
+ horizontal_ps = ModuleFactory().create_horizontal_portscan_obj()
profileid = "profile_1.1.1.1"
twid = "timewindow0"
dport = None
@@ -345,18 +344,18 @@ def test_get_cache_key_none_dport(mock_db):
"modules.network_discovery.horizontal_portscan.HorizontalPortscan.get_not_estab_dst_ports"
)
def test_check_broadcast_or_multicast_address(
- mock_get_not_estab_dst_ports, mock_db
+ mock_get_not_estab_dst_ports,
):
- mock_db.get_field_separator.return_value = "_"
- horizontal_ps = ModuleFactory().create_horizontal_portscan_obj(mock_db)
+ horizontal_ps = ModuleFactory().create_horizontal_portscan_obj()
+ horizontal_ps.db.get_field_separator.return_value = "_"
profileid = "profile_255.255.255.255"
twid = "timewindow0"
horizontal_ps.check(profileid, twid)
mock_get_not_estab_dst_ports.assert_not_called()
-def test_set_evidence_horizontal_portscan_empty_port_info(mock_db):
- horizontal_ps = ModuleFactory().create_horizontal_portscan_obj(mock_db)
+def test_set_evidence_horizontal_portscan_empty_port_info():
+ horizontal_ps = ModuleFactory().create_horizontal_portscan_obj()
evidence = {
"protocol": "TCP",
"profileid": "profile_1.1.1.1",
@@ -369,23 +368,23 @@ def test_set_evidence_horizontal_portscan_empty_port_info(mock_db):
"amount_of_dips": 10,
}
- mock_db.get_port_info.return_value = ""
- mock_db.set_evidence.return_value = None
+ horizontal_ps.db.get_port_info.return_value = ""
+ horizontal_ps.db.set_evidence.return_value = None
horizontal_ps.set_evidence_horizontal_portscan(evidence)
- mock_db.set_evidence.assert_called_once()
- call_args = mock_db.set_evidence.call_args[0][0]
+ horizontal_ps.db.set_evidence.assert_called_once()
+ call_args = horizontal_ps.db.set_evidence.call_args[0][0]
assert call_args.description.startswith(
"Horizontal port scan to port 80/TCP."
)
-def test_set_evidence_horizontal_portscan_no_uids(mock_db):
+def test_set_evidence_horizontal_portscan_no_uids():
"""
Test the set_evidence_horizontal_portscan method when there are no uids.
"""
- horizontal_ps = ModuleFactory().create_horizontal_portscan_obj(mock_db)
+ horizontal_ps = ModuleFactory().create_horizontal_portscan_obj()
evidence = {
"protocol": "TCP",
"profileid": "profile_1.1.1.1",
@@ -398,18 +397,18 @@ def test_set_evidence_horizontal_portscan_no_uids(mock_db):
"amount_of_dips": 10,
}
- mock_db.get_port_info.return_value = "HTTP"
- mock_db.set_evidence.return_value = None
+ horizontal_ps.db.get_port_info.return_value = "HTTP"
+ horizontal_ps.db.set_evidence.return_value = None
horizontal_ps.set_evidence_horizontal_portscan(evidence)
- mock_db.set_evidence.assert_called_once()
- call_args = mock_db.set_evidence.call_args[0][0]
+ horizontal_ps.db.set_evidence.assert_called_once()
+ call_args = horizontal_ps.db.set_evidence.call_args[0][0]
assert call_args.uid == []
-def test_set_evidence_horizontal_portscan(mock_db):
- horizontal_ps = ModuleFactory().create_horizontal_portscan_obj(mock_db)
+def test_set_evidence_horizontal_portscan():
+ horizontal_ps = ModuleFactory().create_horizontal_portscan_obj()
evidence = {
"protocol": "TCP",
"profileid": "profile_1.1.1.1",
@@ -422,13 +421,13 @@ def test_set_evidence_horizontal_portscan(mock_db):
"amount_of_dips": 10,
}
- mock_db.get_port_info.return_value = "HTTP"
- mock_db.set_evidence.return_value = None
+ horizontal_ps.db.get_port_info.return_value = "HTTP"
+ horizontal_ps.db.set_evidence.return_value = None
horizontal_ps.set_evidence_horizontal_portscan(evidence)
- mock_db.set_evidence.assert_called_once()
- call_args = mock_db.set_evidence.call_args[0][0]
+ horizontal_ps.db.set_evidence.assert_called_once()
+ call_args = horizontal_ps.db.set_evidence.call_args[0][0]
assert call_args.evidence_type == EvidenceType.HORIZONTAL_PORT_SCAN
assert call_args.attacker.value == "1.1.1.1"
assert call_args.confidence == 1
@@ -446,8 +445,8 @@ def test_set_evidence_horizontal_portscan(mock_db):
assert call_args.port == 80
-def test_set_evidence_horizontal_portscan_empty_uids(mock_db):
- horizontal_ps = ModuleFactory().create_horizontal_portscan_obj(mock_db)
+def test_set_evidence_horizontal_portscan_empty_uids():
+ horizontal_ps = ModuleFactory().create_horizontal_portscan_obj()
evidence = {
"protocol": "TCP",
"profileid": "profile_1.1.1.1",
@@ -459,11 +458,11 @@ def test_set_evidence_horizontal_portscan_empty_uids(mock_db):
"state": "Not Established",
"amount_of_dips": 10,
}
- mock_db.get_port_info.return_value = "HTTP"
- mock_db.set_evidence.return_value = None
+ horizontal_ps.db.get_port_info.return_value = "HTTP"
+ horizontal_ps.db.set_evidence.return_value = None
horizontal_ps.set_evidence_horizontal_portscan(evidence)
- assert mock_db.set_evidence.call_count == 1
- call_args = mock_db.set_evidence.call_args[0][0]
+ assert horizontal_ps.db.set_evidence.call_count == 1
+ call_args = horizontal_ps.db.set_evidence.call_args[0][0]
assert call_args.uid == []
@@ -476,19 +475,19 @@ def test_set_evidence_horizontal_portscan_empty_uids(mock_db):
("1.1.1.1", True),
],
)
-def test_is_valid_saddr(mock_db, ip, expected_val):
- mock_db.get_field_separator.return_value = "_"
- horizontal_ps = ModuleFactory().create_horizontal_portscan_obj(mock_db)
+def test_is_valid_saddr(ip, expected_val):
+ horizontal_ps = ModuleFactory().create_horizontal_portscan_obj()
+ horizontal_ps.db.get_field_separator.return_value = "_"
profileid = f"profile_{ip}"
assert horizontal_ps.is_valid_saddr(profileid) == expected_val
-def test_get_resolved_ips(mock_db):
- horizontal_ps = ModuleFactory().create_horizontal_portscan_obj(mock_db)
+def test_get_resolved_ips():
+ horizontal_ps = ModuleFactory().create_horizontal_portscan_obj()
dstips = ["1.1.1.1", "2.2.2.2", "3.3.3.3"]
- mock_db.get_dns_resolution.side_effect = [
+ horizontal_ps.db.get_dns_resolution.side_effect = [
{"domains": ["example.com"]},
{"domains": []},
{"domains": ["test.com", "another.com"]},
@@ -498,18 +497,18 @@ def test_get_resolved_ips(mock_db):
assert sorted(resolved_ips) == ["1.1.1.1", "3.3.3.3"]
-def test_get_resolved_ips_empty_list(mock_db):
- horizontal_ps = ModuleFactory().create_horizontal_portscan_obj(mock_db)
+def test_get_resolved_ips_empty_list():
+ horizontal_ps = ModuleFactory().create_horizontal_portscan_obj()
dstips = []
resolved_ips = horizontal_ps.get_resolved_ips(dstips)
assert resolved_ips == []
-def test_get_resolved_ips_invalid_ip(mock_db):
- horizontal_ps = ModuleFactory().create_horizontal_portscan_obj(mock_db)
+def test_get_resolved_ips_invalid_ip():
+ horizontal_ps = ModuleFactory().create_horizontal_portscan_obj()
dstips = ["1.1.1.1", "256.256.256.256", "3.3.3.3"]
- mock_db.get_dns_resolution.side_effect = [
+ horizontal_ps.db.get_dns_resolution.side_effect = [
{"domains": ["example.com"]},
{},
{"domains": ["test.com"]},
@@ -519,10 +518,10 @@ def test_get_resolved_ips_invalid_ip(mock_db):
assert sorted(resolved_ips) == ["1.1.1.1", "3.3.3.3"]
-def test_get_resolved_ips_mixed_list(mock_db):
- horizontal_ps = ModuleFactory().create_horizontal_portscan_obj(mock_db)
+def test_get_resolved_ips_mixed_list():
+ horizontal_ps = ModuleFactory().create_horizontal_portscan_obj()
dstips = ["1.1.1.1", "2.2.2.2", "3.3.3.3"]
- mock_db.get_dns_resolution.side_effect = [
+ horizontal_ps.db.get_dns_resolution.side_effect = [
{"domains": ["example.com"]},
{"domains": []},
{"domains": ["test.com"]},
@@ -531,9 +530,9 @@ def test_get_resolved_ips_mixed_list(mock_db):
assert sorted(resolved_ips) == ["1.1.1.1", "3.3.3.3"]
-def test_check_valid_ip(mock_db):
- mock_db.get_field_separator.return_value = "_"
- horizontal_ps = ModuleFactory().create_horizontal_portscan_obj(mock_db)
+def test_check_valid_ip():
+ horizontal_ps = ModuleFactory().create_horizontal_portscan_obj()
+ horizontal_ps.db.get_field_separator.return_value = "_"
profileid = "profile_10.0.0.1"
twid = "timewindow0"
@@ -542,15 +541,15 @@ def test_check_valid_ip(mock_db):
horizontal_ps.check(profileid, twid)
-def test_check_invalid_profileid(mock_db):
- horizontal_ps = ModuleFactory().create_horizontal_portscan_obj(mock_db)
+def test_check_invalid_profileid():
+ horizontal_ps = ModuleFactory().create_horizontal_portscan_obj()
profileid = None
twid = "timewindow0"
with pytest.raises(Exception):
horizontal_ps.check(profileid, twid)
-def test_is_valid_twid(mock_db):
- horizontal_ps = ModuleFactory().create_horizontal_portscan_obj(mock_db)
+def test_is_valid_twid():
+ horizontal_ps = ModuleFactory().create_horizontal_portscan_obj()
twid = ""
assert not horizontal_ps.is_valid_twid(twid)
diff --git a/tests/test_http_analyzer.py b/tests/test_http_analyzer.py
index b2c805cf4..583d32496 100644
--- a/tests/test_http_analyzer.py
+++ b/tests/test_http_analyzer.py
@@ -3,7 +3,6 @@
import json
from tests.module_factory import ModuleFactory
-import random
from unittest.mock import patch, MagicMock
from modules.http_analyzer.http_analyzer import utils
import pytest
@@ -21,16 +20,8 @@
)
-def get_random_MAC():
- return "02:00:00:%02x:%02x:%02x" % (
- random.randint(0, 255),
- random.randint(0, 255),
- random.randint(0, 255),
- )
-
-
-def test_check_suspicious_user_agents(mock_db):
- http_analyzer = ModuleFactory().create_http_analyzer_obj(mock_db)
+def test_check_suspicious_user_agents():
+ http_analyzer = ModuleFactory().create_http_analyzer_obj()
# create a flow with suspicious user agent
assert (
http_analyzer.check_suspicious_user_agents(
@@ -46,8 +37,8 @@ def test_check_suspicious_user_agents(mock_db):
)
-def test_check_multiple_google_connections(mock_db):
- http_analyzer = ModuleFactory().create_http_analyzer_obj(mock_db)
+def test_check_multiple_google_connections():
+ http_analyzer = ModuleFactory().create_http_analyzer_obj()
# {"ts":1635765765.435485,"uid":"C7mv0u4M1zqJBHydgj",
# "id.orig_h":"192.168.1.28","id.orig_p":52102,"id.resp_h":"216.58.198.78",
# "id.resp_p":80,"trans_depth":1,"method":"GET","host":"google.com","uri":"/",
@@ -66,16 +57,16 @@ def test_check_multiple_google_connections(mock_db):
assert found_detection is True
-def test_parsing_online_ua_info(mock_db, mocker):
+def test_parsing_online_ua_info(mocker):
"""
tests the parsing and processing of the ua found by the online query
"""
- http_analyzer = ModuleFactory().create_http_analyzer_obj(mock_db)
+ http_analyzer = ModuleFactory().create_http_analyzer_obj()
# use a different profile for this unit test to make
# sure we don't already have info about it in the db
profileid = "profile_192.168.99.99"
- mock_db.get_user_agent_from_profile.return_value = None
+ http_analyzer.db.get_user_agent_from_profile.return_value = None
# mock the function that gets info about the given ua from an online db
mock_requests = mocker.patch("requests.get")
mock_requests.return_value.status_code = 200
@@ -91,8 +82,8 @@ def test_parsing_online_ua_info(mock_db, mocker):
assert ua_info["browser"] == "Safari"
-def test_get_user_agent_info(mock_db, mocker):
- http_analyzer = ModuleFactory().create_http_analyzer_obj(mock_db)
+def test_get_user_agent_info(mocker):
+ http_analyzer = ModuleFactory().create_http_analyzer_obj()
# mock the function that gets info about the
# given ua from an online db: get_ua_info_online()
mock_requests = mocker.patch("requests.get")
@@ -103,8 +94,8 @@ def test_get_user_agent_info(mock_db, mocker):
"os_name":"OS X"
}"""
- mock_db.add_all_user_agent_to_profile.return_value = True
- mock_db.get_user_agent_from_profile.return_value = None
+ http_analyzer.db.add_all_user_agent_to_profile.return_value = True
+ http_analyzer.db.get_user_agent_from_profile.return_value = None
expected_ret_value = {
"browser": "Safari",
@@ -131,14 +122,14 @@ def test_get_user_agent_info(mock_db, mocker):
],
)
def test_check_incompatible_user_agent(
- mock_db, mac_vendor, user_agent, expected_result
+ mac_vendor, user_agent, expected_result
):
- http_analyzer = ModuleFactory().create_http_analyzer_obj(mock_db)
+ http_analyzer = ModuleFactory().create_http_analyzer_obj()
# Use a different profile for this unit test
profileid = "profile_192.168.77.254"
- mock_db.get_mac_vendor_from_profile.return_value = mac_vendor
- mock_db.get_user_agent_from_profile.return_value = user_agent
+ http_analyzer.db.get_mac_vendor_from_profile.return_value = mac_vendor
+ http_analyzer.db.get_user_agent_from_profile.return_value = user_agent
result = http_analyzer.check_incompatible_user_agent(
"google.com", "/images", timestamp, profileid, twid, uid
@@ -147,11 +138,11 @@ def test_check_incompatible_user_agent(
assert result is expected_result
-def test_extract_info_from_ua(mock_db):
- http_analyzer = ModuleFactory().create_http_analyzer_obj(mock_db)
+def test_extract_info_from_ua():
+ http_analyzer = ModuleFactory().create_http_analyzer_obj()
# use another profile, because the default
# one already has a ua in the db
- mock_db.get_user_agent_from_profile.return_value = None
+ http_analyzer.db.get_user_agent_from_profile.return_value = None
profileid = "profile_192.168.1.2"
server_bag_ua = "server-bag[macOS,11.5.1,20G80,MacBookAir10,1]"
expected_output = {
@@ -197,9 +188,9 @@ def test_extract_info_from_ua(mock_db):
],
)
def test_check_multiple_user_agents_in_a_row(
- mock_db, cached_ua, new_ua, expected_result
+ cached_ua, new_ua, expected_result
):
- http_analyzer = ModuleFactory().create_http_analyzer_obj(mock_db)
+ http_analyzer = ModuleFactory().create_http_analyzer_obj()
result = http_analyzer.check_multiple_user_agents_in_a_row(
cached_ua, new_ua, timestamp, profileid, twid, uid
)
@@ -221,13 +212,13 @@ def test_check_multiple_user_agents_in_a_row(
# Mixed executable and non-executable MIME types
],
)
-def test_detect_executable_mime_types(mock_db, mime_types, expected):
- http_analyzer = ModuleFactory().create_http_analyzer_obj(mock_db)
+def test_detect_executable_mime_types(mime_types, expected):
+ http_analyzer = ModuleFactory().create_http_analyzer_obj()
assert http_analyzer.detect_executable_mime_types(mime_types) is expected
-def test_set_evidence_http_traffic(mock_db, mocker):
- http_analyzer = ModuleFactory().create_http_analyzer_obj(mock_db)
+def test_set_evidence_http_traffic(mocker):
+ http_analyzer = ModuleFactory().create_http_analyzer_obj()
mocker.spy(http_analyzer.db, "set_evidence")
http_analyzer.set_evidence_http_traffic(
@@ -237,9 +228,11 @@ def test_set_evidence_http_traffic(mock_db, mocker):
http_analyzer.db.set_evidence.assert_called_once()
-def test_set_evidence_weird_http_method(mock_db, mocker):
- http_analyzer = ModuleFactory().create_http_analyzer_obj(mock_db)
- mock_db.get_ip_identification.return_value = "Some IP identification"
+def test_set_evidence_weird_http_method(mocker):
+ http_analyzer = ModuleFactory().create_http_analyzer_obj()
+ http_analyzer.db.get_ip_identification.return_value = (
+ "Some IP identification"
+ )
mocker.spy(http_analyzer.db, "set_evidence")
flow = {
@@ -254,9 +247,11 @@ def test_set_evidence_weird_http_method(mock_db, mocker):
http_analyzer.db.set_evidence.assert_called_once()
-def test_set_evidence_executable_mime_type(mock_db, mocker):
- http_analyzer = ModuleFactory().create_http_analyzer_obj(mock_db)
- mock_db.get_ip_identification.return_value = "Some IP identification"
+def test_set_evidence_executable_mime_type(mocker):
+ http_analyzer = ModuleFactory().create_http_analyzer_obj()
+ http_analyzer.db.get_ip_identification.return_value = (
+ "Some IP identification"
+ )
mocker.spy(http_analyzer.db, "set_evidence")
http_analyzer.set_evidence_executable_mime_type(
"application/x-msdownload", profileid, twid, uid, timestamp, "8.8.8.8"
@@ -265,9 +260,11 @@ def test_set_evidence_executable_mime_type(mock_db, mocker):
assert http_analyzer.db.set_evidence.call_count == 2
-def test_set_evidence_executable_mime_type_source_dest(mock_db, mocker):
- http_analyzer = ModuleFactory().create_http_analyzer_obj(mock_db)
- mock_db.get_ip_identification.return_value = "Some IP identification"
+def test_set_evidence_executable_mime_type_source_dest(mocker):
+ http_analyzer = ModuleFactory().create_http_analyzer_obj()
+ http_analyzer.db.get_ip_identification.return_value = (
+ "Some IP identification"
+ )
mocker.spy(http_analyzer.db, "set_evidence")
@@ -279,8 +276,8 @@ def test_set_evidence_executable_mime_type_source_dest(mock_db, mocker):
@pytest.mark.parametrize("config_value", [700])
-def test_read_configuration_valid(mock_db, mocker, config_value):
- http_analyzer = ModuleFactory().create_http_analyzer_obj(mock_db)
+def test_read_configuration_valid(mocker, config_value):
+ http_analyzer = ModuleFactory().create_http_analyzer_obj()
mock_conf = mocker.patch(
"slips_files.common.parsers.config_parser.ConfigParser"
)
@@ -306,10 +303,8 @@ def test_read_configuration_valid(mock_db, mocker, config_value):
),
],
)
-def test_check_weird_http_method(
- mock_db, mocker, flow_name, evidence_expected
-):
- http_analyzer = ModuleFactory().create_http_analyzer_obj(mock_db)
+def test_check_weird_http_method(mocker, flow_name, evidence_expected):
+ http_analyzer = ModuleFactory().create_http_analyzer_obj()
mocker.spy(http_analyzer, "set_evidence_weird_http_method")
msg = {
@@ -332,8 +327,8 @@ def test_check_weird_http_method(
http_analyzer.set_evidence_weird_http_method.assert_not_called()
-def test_pre_main(mock_db, mocker):
- http_analyzer = ModuleFactory().create_http_analyzer_obj(mock_db)
+def test_pre_main(mocker):
+ http_analyzer = ModuleFactory().create_http_analyzer_obj()
mocker.patch("slips_files.common.slips_utils.Utils.drop_root_privs")
http_analyzer.pre_main()
@@ -350,9 +345,9 @@ def test_pre_main(mock_db, mocker):
],
)
def test_check_multiple_empty_connections(
- mock_db, uri, request_body_len, expected_result
+ uri, request_body_len, expected_result
):
- http_analyzer = ModuleFactory().create_http_analyzer_obj(mock_db)
+ http_analyzer = ModuleFactory().create_http_analyzer_obj()
host = "google.com"
result = http_analyzer.check_multiple_empty_connections(
uid, host, uri, timestamp, request_body_len, profileid, twid
@@ -378,14 +373,16 @@ def test_check_multiple_empty_connections(
],
)
def test_check_pastebin_downloads(
- mock_db, url, response_body_len, method, expected_result
+ url, response_body_len, method, expected_result
):
- http_analyzer = ModuleFactory().create_http_analyzer_obj(mock_db)
+ http_analyzer = ModuleFactory().create_http_analyzer_obj()
if url != "pastebin.com":
- mock_db.get_ip_identification.return_value = "Not a Pastebin domain"
+ http_analyzer.db.get_ip_identification.return_value = (
+ "Not a Pastebin domain"
+ )
else:
- mock_db.get_ip_identification.return_value = "pastebin.com"
+ http_analyzer.db.get_ip_identification.return_value = "pastebin.com"
http_analyzer.pastebin_downloads_threshold = 1024
result = http_analyzer.check_pastebin_downloads(
@@ -407,7 +404,7 @@ def test_check_pastebin_downloads(
MagicMock(side_effect=requests.exceptions.ReadTimeout),
],
)
-def test_get_ua_info_online_error_cases(mock_db, mock_response):
- http_analyzer = ModuleFactory().create_http_analyzer_obj(mock_db)
+def test_get_ua_info_online_error_cases(mock_response):
+ http_analyzer = ModuleFactory().create_http_analyzer_obj()
with patch("requests.get", return_value=mock_response):
assert http_analyzer.get_ua_info_online(SAFARI_UA) is False
diff --git a/tests/test_inputProc.py b/tests/test_inputProc.py
index 91dd3f476..3d6be8ab3 100644
--- a/tests/test_inputProc.py
+++ b/tests/test_inputProc.py
@@ -1,10 +1,14 @@
import pytest
from tests.module_factory import ModuleFactory
-from unittest.mock import patch
-
+from unittest.mock import (
+ patch,
+ MagicMock,
+ Mock,
+)
import shutil
import os
import json
+import signal
@pytest.mark.parametrize(
@@ -12,15 +16,23 @@
# the pcaps here must have a conn.log when read by zeek
[("pcap", "dataset/test7-malicious.pcap")],
)
-def test_handle_pcap_and_interface(input_type, input_information, mock_db):
- # no need to test interfaces because in that case read_zeek_files runs in a loop and never returns
- input = ModuleFactory().create_input_obj(
- input_information, input_type, mock_db
- )
+def test_handle_pcap_and_interface(input_type, input_information):
+ # no need to test interfaces because in that case read_zeek_files runs
+ # in a loop and never returns
+ input = ModuleFactory().create_input_obj(input_information, input_type)
input.zeek_pid = "False"
input.is_zeek_tabs = False
- with patch.object(input, "get_flows_number", return_value=500):
+ input.start_observer = Mock()
+ input.read_zeek_files = Mock()
+ input.zeek_thread = Mock()
+ with (
+ patch.object(input, "get_flows_number", return_value=500),
+ patch("time.sleep"),
+ ):
assert input.handle_pcap_and_interface() is True
+ input.zeek_thread.start.assert_called_once()
+ input.read_zeek_files.assert_called_once()
+ input.start_observer.assert_called_once()
# delete the zeek logs created
shutil.rmtree(input.zeek_dir)
@@ -33,12 +45,19 @@ def test_handle_pcap_and_interface(input_type, input_information, mock_db):
("dataset/test9-mixed-zeek-dir/", True), # json
],
)
-def test_is_growing_zeek_dir(zeek_dir: str, is_tabs: bool, mock_db):
- input = ModuleFactory().create_input_obj(zeek_dir, "zeek_folder", mock_db)
- mock_db.get_all_zeek_files.return_value = [
+def test_read_zeek_folder(zeek_dir: str, is_tabs: bool):
+ input = ModuleFactory().create_input_obj(zeek_dir, "zeek_folder")
+ input.given_path = zeek_dir
+ input.testing = True
+ input.is_zeek_tabs = is_tabs
+ input.db.get_all_zeek_files.return_value = [
os.path.join(zeek_dir, "conn.log")
]
-
+ input.db.is_growing_zeek_dir.return_value = False
+ input.is_done_processing = Mock()
+ input.is_done_processing.return_value = True
+ input.start_observer = Mock()
+ input.start_observer.return_value = True
assert input.read_zeek_folder() is True
@@ -49,8 +68,8 @@ def test_is_growing_zeek_dir(zeek_dir: str, is_tabs: bool, mock_db):
("dataset/test9-mixed-zeek-dir/conn.log", False), # json
],
)
-def test_is_zeek_tabs_file(path: str, expected_val: bool, mock_db):
- input = ModuleFactory().create_input_obj(path, "zeek_folder", mock_db)
+def test_is_zeek_tabs_file(path: str, expected_val: bool):
+ input = ModuleFactory().create_input_obj(path, "zeek_folder")
assert input.is_zeek_tabs_file(path) == expected_val
@@ -63,9 +82,9 @@ def test_is_zeek_tabs_file(path: str, expected_val: bool, mock_db):
("dataset/test9-mixed-zeek-dir/x509.log", False), # json
],
)
-def test_handle_zeek_log_file(input_information, mock_db, expected_output):
+def test_handle_zeek_log_file(input_information, expected_output):
input = ModuleFactory().create_input_obj(
- input_information, "zeek_log_file", mock_db
+ input_information, "zeek_log_file"
)
assert input.handle_zeek_log_file() == expected_output
@@ -73,18 +92,16 @@ def test_handle_zeek_log_file(input_information, mock_db, expected_output):
@pytest.mark.parametrize(
"path, is_tabs, line_cached",
[
- # sllips shouldn't be able to cache teh first line as it's a comment
("dataset/test10-mixed-zeek-dir/conn.log", True, False),
("dataset/test9-mixed-zeek-dir/conn.log", False, True),
],
)
-def test_cache_nxt_line_in_file(
- path: str, is_tabs: str, line_cached: bool, mock_db
-):
+def test_cache_nxt_line_in_file(path: str, is_tabs: str, line_cached: bool):
"""
- :param line_cached: should slips cache the first line of this file or not
+    :param line_cached: whether slips should cache
+    the first line of this file or not
"""
- input = ModuleFactory().create_input_obj(path, "zeek_log_file", mock_db)
+ input = ModuleFactory().create_input_obj(path, "zeek_log_file")
input.cache_lines = {}
input.file_time = {}
input.is_zeek_tabs = is_tabs
@@ -92,7 +109,6 @@ def test_cache_nxt_line_in_file(
assert input.cache_nxt_line_in_file(path) == line_cached
if line_cached:
assert input.cache_lines[path]["type"] == path
- # make sure it did read 1 line from the file
assert input.cache_lines[path]["data"]
@@ -102,30 +118,43 @@ def test_cache_nxt_line_in_file(
(
"dataset/test10-mixed-zeek-dir/conn.log",
True,
- "1601998375.703087 ClqdMB11qLHjikB6bd 2001:718:2:1663:dc58:6d9:ef13:51a5 63580 2a00:1450:4014:80c::200a443 udp - 30.131973 6224 10110 SF - - 0 Dd 14 6896 15 10830 -",
+ "1601998375.703087 ClqdMB11qLHjikB6bd "
+ "2001:718:2:1663:dc58:6d9:ef13:51a5 63580 "
+ "2a00:1450:4014:80c::200a443 udp - "
+ "30.131973 6224 10110 SF - - "
+ "0 Dd 14 6896 15 10830 -",
1601998375.703087,
),
(
"dataset/test9-mixed-zeek-dir/conn.log",
False,
- '{"ts":271.102532,"uid":"CsYeNL1xflv3dW9hvb","id.orig_h":"10.0.2.15","id.orig_p":59393,'
- '"id.resp_h":"216.58.201.98","id.resp_p":443,"proto":"udp","duration":0.5936019999999758,"orig_bytes":5219,"resp_bytes":5685,"conn_state":"SF","missed_bytes":0,"history":"Dd","orig_pkts":9,"orig_ip_bytes":5471,"resp_pkts":10,"resp_ip_bytes":5965}',
+ '{"ts":271.102532,"uid":"CsYeNL1xflv3dW9hvb",'
+ '"id.orig_h":"10.0.2.15","id.orig_p":59393,'
+ '"id.resp_h":"216.58.201.98","id.resp_p":443,'
+ '"proto":"udp","duration":0.5936019999999758,'
+ '"orig_bytes":5219,"resp_bytes":5685,"conn_state":"SF",'
+ '"missed_bytes":0,"history":"Dd","orig_pkts":9,"orig_ip_bytes":5471,'
+ '"resp_pkts":10,"resp_ip_bytes":5965}',
271.102532,
),
- # this scenario is corrupted and should fail
(
"dataset/test9-mixed-zeek-dir/conn.log",
False,
- '{"ts":"corrupted","uid":"CsYeNL1xflv3dW9hvb","id.orig_h":"10.0.2.15","id.orig_p":59393,'
- '"id.resp_h":"216.58.201.98","id.resp_p":443,"proto":"udp","duration":0.5936019999999758,"orig_bytes":5219,"resp_bytes":5685,"conn_state":"SF","missed_bytes":0,"history":"Dd","orig_pkts":9,"orig_ip_bytes":5471,"resp_pkts":10,"resp_ip_bytes":5965}',
+ '{"ts":"corrupted","uid":"CsYeNL1xflv3dW9hvb",'
+ '"id.orig_h":"10.0.2.15","id.orig_p":59393,'
+ '"id.resp_h":"216.58.201.98","id.resp_p":443,'
+ '"proto":"udp","duration":0.5936019999999758,"orig_bytes":5219,'
+ '"resp_bytes":5685,"conn_state":"SF","missed_bytes":0,'
+ '"history":"Dd","orig_pkts":9,"orig_ip_bytes":5471,"resp_pkts":10,'
+ '"resp_ip_bytes":5965}',
(False, False),
),
],
)
def test_get_ts_from_line(
- path: str, is_tabs: str, zeek_line: str, expected_val: float, mock_db
+ path: str, is_tabs: str, zeek_line: str, expected_val: float
):
- input = ModuleFactory().create_input_obj(path, "zeek_log_file", mock_db)
+ input = ModuleFactory().create_input_obj(path, "zeek_log_file")
input.is_zeek_tabs = is_tabs
input.get_ts_from_line(zeek_line)
@@ -140,12 +169,11 @@ def test_get_ts_from_line(
],
)
def test_reached_timeout(
- last_updated_file_time, now, bro_timeout, expected_val, mock_db
+ last_updated_file_time, now, bro_timeout, expected_val
):
- input = ModuleFactory().create_input_obj("", "zeek_log_file", mock_db)
+ input = ModuleFactory().create_input_obj("", "zeek_log_file")
input.last_updated_file_time = last_updated_file_time
input.bro_timeout = bro_timeout
- # make it seem as we don't have cache lines anymore to be able to check the timeout
input.cache_lines = False
with patch("datetime.datetime") as dt:
dt.now.return_value = now
@@ -155,14 +183,14 @@ def test_reached_timeout(
@pytest.mark.skipif(
"nfdump" not in shutil.which("nfdump"), reason="nfdump is not installed"
)
-@pytest.mark.parametrize("path", [("dataset/test1-normal.nfdump")])
-def test_handle_nfdump(path, mock_db):
- input = ModuleFactory().create_input_obj(path, "nfdump", mock_db)
+@pytest.mark.parametrize("path", ["dataset/test1-normal.nfdump"])
+def test_handle_nfdump(path):
+ input = ModuleFactory().create_input_obj(path, "nfdump")
assert input.handle_nfdump() is True
-def test_get_earliest_line(mock_db):
- input = ModuleFactory().create_input_obj("", "zeek_log_file", mock_db)
+def test_get_earliest_line():
+ input = ModuleFactory().create_input_obj("", "zeek_log_file")
input.file_time = {
"software.log": 3,
"ssh.log": 2,
@@ -184,22 +212,6 @@ def test_get_earliest_line(mock_db):
assert input.get_earliest_line() == ("line1", "notice.log")
-@pytest.mark.parametrize(
- "path, is_tabs, expected_val",
- [
- ("dataset/test1-normal.nfdump", False, 4646),
- ("dataset/test9-mixed-zeek-dir/conn.log", False, 577),
- ("dataset/test10-mixed-zeek-dir/conn.log", True, 116),
- ],
-)
-def test_get_flows_number(
- path: str, is_tabs: bool, expected_val: int, mock_db
-):
- input = ModuleFactory().create_input_obj(path, "nfdump", mock_db)
- input.is_zeek_tabs = is_tabs
- assert input.get_flows_number(path) == expected_val
-
-
@pytest.mark.parametrize(
"input_type,input_information",
[
@@ -207,24 +219,18 @@ def test_get_flows_number(
("binetflow", "dataset/test5-mixed.binetflow"),
],
)
-# ('binetflow','dataset/test3-mixed.binetflow'),
-# ('binetflow','dataset/test4-malicious.binetflow'),
-def test_handle_binetflow(input_type, input_information, mock_db):
- input = ModuleFactory().create_input_obj(
- input_information, input_type, mock_db
- )
+def test_handle_binetflow(input_type, input_information):
+ input = ModuleFactory().create_input_obj(input_information, input_type)
with patch.object(input, "get_flows_number", return_value=5):
assert input.handle_binetflow() is True
@pytest.mark.parametrize(
"input_information",
- [("dataset/test6-malicious.suricata.json")],
+ ["dataset/test6-malicious.suricata.json"],
)
-def test_handle_suricata(input_information, mock_db):
- input = ModuleFactory().create_input_obj(
- input_information, "suricata", mock_db
- )
+def test_handle_suricata(input_information):
+ input = ModuleFactory().create_input_obj(input_information, "suricata")
assert input.handle_suricata() is True
@@ -233,40 +239,309 @@ def test_handle_suricata(input_information, mock_db):
[
(
"zeek",
- '{"ts":271.102532,"uid":"CsYeNL1xflv3dW9hvb","id.orig_h":"10.0.2.15","id.orig_p":59393,'
- '"id.resp_h":"216.58.201.98","id.resp_p":443,"proto":"udp","duration":0.5936019999999758,'
- '"orig_bytes":5219,"resp_bytes":5685,"conn_state":"SF","missed_bytes":0,"history":"Dd",'
- '"orig_pkts":9,"orig_ip_bytes":5471,"resp_pkts":10,"resp_ip_bytes":5965}',
+ '{"ts":271.102532,"uid":"CsYeNL1xflv3dW9hvb",'
+ '"id.orig_h":"10.0.2.15","id.orig_p":59393,'
+ '"id.resp_h":"216.58.201.98","id.resp_p":443,'
+ '"proto":"udp","duration":0.5936019999999758,'
+ '"orig_bytes":5219,"resp_bytes":5685,"conn_state":"SF",'
+ '"missed_bytes":0,"history":"Dd",'
+ '"orig_pkts":9,"orig_ip_bytes":5471,"resp_pkts":10,'
+ '"resp_ip_bytes":5965}',
),
(
"suricata",
- '{"timestamp":"2021-06-06T15:57:37.272281+0200","flow_id":2054715089912378,"event_type":"flow",'
- '"src_ip":"193.46.255.92","src_port":49569,"dest_ip":"192.168.1.129","dest_port":8014,'
- '"proto":"TCP","flow":{"pkts_toserver":2,"pkts_toclient":2,"bytes_toserver":120,"bytes_toclient":120,"start":"2021-06-07T15:45:48.950842+0200","end":"2021-06-07T15:45:48.951095+0200","age":0,"state":"closed","reason":"shutdown","alerted":false},"tcp":{"tcp_flags":"16","tcp_flags_ts":"02","tcp_flags_tc":"14","syn":true,"rst":true,"ack":true,"state":"closed"},"host":"stratosphere.org"}',
+ '{"timestamp":"2021-06-06T15:57:37.272281+0200",'
+ '"flow_id":2054715089912378,"event_type":"flow",'
+ '"src_ip":"193.46.255.92","src_port":49569,'
+ '"dest_ip":"192.168.1.129","dest_port":8014,'
+ '"proto":"TCP","flow":{"pkts_toserver":2,"pkts_toclient":2,'
+ '"bytes_toserver":120,"bytes_toclient":120,'
+ '"start":"2021-06-07T15:45:48.950842+0200",'
+ '"end":"2021-06-07T15:45:48.951095+0200",'
+ '"age":0,"state":"closed","reason":"shutdown",'
+ '"alerted":false},"tcp":{"tcp_flags":"16",'
+ '"tcp_flags_ts":"02","tcp_flags_tc":"14","syn":true,'
+ '"rst":true,"ack":true,"state":"closed"},"host":"stratosphere.org"}',
),
(
"argus",
- "2019/04/05 16:15:09.194268,0.031142,udp,10.8.0.69,8278, <->,8.8.8.8,53,CON,0,0,2,186,64,1,",
+ "2019/04/05 16:15:09.194268,0.031142,udp,10.8.0.69,8278, "
+ "<->,8.8.8.8,53,CON,0,0,2,186,64,1,",
),
],
)
-def test_read_from_stdin(line_type: str, line: str, mock_db):
- # slips supports reading zeek json conn.log only using stdin,
- # tabs aren't supported
+def test_read_from_stdin(line_type: str, line: str):
input = ModuleFactory().create_input_obj(
line_type,
"stdin",
- mock_db,
line_type=line_type,
)
with patch.object(input, "stdin", return_value=[line, "done\n"]):
- # this function will give the line to profiler
assert input.read_from_stdin()
line_sent: dict = input.profiler_queue.get()
- # in case it's a zeek line, it gets sent as a dict
expected_received_line = (
json.loads(line) if line_type == "zeek" else line
)
assert line_sent["line"]["data"] == expected_received_line
assert line_sent["line"]["line_type"] == line_type
assert line_sent["input_type"] == "stdin"
+
+
+@pytest.mark.parametrize(
+ "line, input_type, expected_line, expected_input_type",
+ [
+ # Testcase 1: Normal Zeek line
+ (
+ {"type": "zeek", "data": {"ts": 12345, "uid": "abcdef"}},
+ "pcap",
+ {"type": "zeek", "data": {"ts": 12345, "uid": "abcdef"}},
+ "pcap",
+ ),
+ # Testcase 2: Different line type
+ (
+ {
+ "type": "suricata",
+ "data": {
+ "timestamp": "2023-04-19T12:00:00.000000",
+ "flow_id": 12345,
+ },
+ },
+ "suricata",
+ {
+ "type": "suricata",
+ "data": {
+ "timestamp": "2023-04-19T12:00:00.000000",
+ "flow_id": 12345,
+ },
+ },
+ "suricata",
+ ),
+ ],
+)
+def test_give_profiler(line, input_type, expected_line, expected_input_type):
+ """Test that the give_profiler function correctly sends the given line to
+ the profiler queue."""
+ input_process = ModuleFactory().create_input_obj("", input_type)
+ input_process.total_flows = (
+ 1000 if expected_line.get("total_flows") else None
+ )
+ input_process.give_profiler(line)
+ line_sent = input_process.profiler_queue.get()
+ assert line_sent["line"] == expected_line
+ assert line_sent["input_type"] == expected_input_type
+
+
+@pytest.mark.parametrize(
+ "filepath, expected_result",
+ [ # Testcase 1: Supported file
+ ("path/to/conn.log", False),
+ # Testcase 2: Supported file
+ ("path/to/dns.log", False),
+ # Testcase 3: Supported file
+ ("path/to/http.log", False),
+ # Testcase 4: Supported file
+ ("path/to/ssl.log", False),
+ # Testcase 5: Supported file
+ ("path/to/ssh.log", False),
+ # Testcase 6: Supported file
+ ("path/to/dhcp.log", False),
+ # Testcase 7: Supported file
+ ("path/to/ftp.log", False),
+ # Testcase 8: Supported file
+ ("path/to/smtp.log", False),
+ # Testcase 9: Supported file
+ ("path/to/tunnel.log", False),
+ # Testcase 10: Supported file
+ ("path/to/notice.log", False),
+ # Testcase 11: Supported file
+ ("path/to/files.log", False),
+ # Testcase 12: Supported file
+ ("path/to/arp.log", False),
+ # Testcase 13: Supported file
+ ("path/to/software.log", False),
+ # Testcase 14: Supported file
+ ("path/to/weird.log", False),
+ # Testcase 15: Unsupported file
+ ("path/to/unsupported.log", True),
+ ],
+)
+def test_is_ignored_file(filepath, expected_result):
+ """
+ Test that the is_ignored_file method correctly
+ identifies ignored Zeek log files.
+ """
+ input_process = ModuleFactory().create_input_obj("", "zeek_log_file")
+ assert input_process.is_ignored_file(filepath) == expected_result
+
+
+def test_get_file_handle_existing_file():
+ """
+ Test that the get_file_handle method correctly
+ returns the file handle for an existing file.
+ """
+ input_process = ModuleFactory().create_input_obj("", "zeek_log_file")
+ filename = "test_file.log"
+ with open(filename, "w") as f:
+ f.write("test content")
+
+ file_handle = input_process.get_file_handle(filename)
+
+ assert file_handle is not False
+ assert file_handle.name == filename
+ os.remove(filename)
+
+
+def test_shutdown_gracefully_all_components_active():
+ """
+ Test shutdown_gracefully when all components (open files, zeek, remover thread) are active.
+ """
+ input_process = ModuleFactory().create_input_obj("", "zeek_log_file")
+ input_process.stop_observer = MagicMock(return_value=True)
+ input_process.stop_queues = MagicMock(return_value=True)
+ input_process.remover_thread = MagicMock()
+ input_process.remover_thread.start()
+ input_process.zeek_thread = MagicMock()
+ input_process.zeek_thread.start()
+ input_process.open_file_handlers = {"test_file.log": MagicMock()}
+ input_process.zeek_pid = os.getpid()
+
+ with patch("os.kill") as mock_kill:
+ assert input_process.shutdown_gracefully() is True
+ mock_kill.assert_called_once_with(
+ input_process.zeek_pid, signal.SIGKILL
+ )
+ assert input_process.open_file_handlers["test_file.log"].close.called
+
+
+def test_shutdown_gracefully_no_open_files():
+ """
+ Test shutdown_gracefully when there are no open files.
+ """
+ input_process = ModuleFactory().create_input_obj("", "zeek_log_file")
+ input_process.stop_observer = MagicMock(return_value=True)
+ input_process.stop_queues = MagicMock(return_value=True)
+ input_process.remover_thread = MagicMock()
+ input_process.remover_thread.start()
+ input_process.zeek_thread = MagicMock()
+ input_process.zeek_thread.start()
+ input_process.open_file_handlers = {}
+ input_process.zeek_pid = os.getpid()
+
+ with patch("os.kill") as mock_kill:
+ assert input_process.shutdown_gracefully() is True
+ mock_kill.assert_called_once_with(
+ input_process.zeek_pid, signal.SIGKILL
+ )
+
+
+def test_shutdown_gracefully_zeek_not_running():
+ """
+ Test shutdown_gracefully when Zeek is not running.
+ """
+ input_process = ModuleFactory().create_input_obj("", "zeek_log_file")
+ input_process.stop_observer = MagicMock(return_value=True)
+ input_process.stop_queues = MagicMock(return_value=True)
+ input_process.remover_thread = MagicMock()
+ input_process.remover_thread.start()
+ input_process.open_file_handlers = {"test_file.log": MagicMock()}
+ input_process.zeek_pid = os.getpid()
+
+ with patch("os.kill") as mock_kill:
+ assert input_process.shutdown_gracefully() is True
+ mock_kill.assert_called_once_with(
+ input_process.zeek_pid, signal.SIGKILL
+ )
+ assert input_process.open_file_handlers["test_file.log"].close.called
+
+
+def test_shutdown_gracefully_remover_thread_not_running():
+ """
+ Test shutdown_gracefully when the remover thread is not running.
+ """
+ input_process = ModuleFactory().create_input_obj("", "zeek_log_file")
+ input_process.stop_observer = MagicMock(return_value=True)
+ input_process.stop_queues = MagicMock(return_value=True)
+ input_process.zeek_thread = MagicMock()
+ input_process.zeek_thread.start()
+ input_process.open_file_handlers = {"test_file.log": MagicMock()}
+ input_process.zeek_pid = os.getpid()
+
+ with patch("os.kill") as mock_kill:
+ assert input_process.shutdown_gracefully() is True
+ mock_kill.assert_called_once_with(
+ input_process.zeek_pid, signal.SIGKILL
+ )
+ assert input_process.open_file_handlers["test_file.log"].close.called
+
+
+def test_close_all_handles():
+ """Test that the close_all_handles method closes all open file handles."""
+ input_process = ModuleFactory().create_input_obj("", "zeek_log_file")
+ mock_handle1 = MagicMock()
+ mock_handle2 = MagicMock()
+ input_process.open_file_handlers = {
+ "file1": mock_handle1,
+ "file2": mock_handle2,
+ }
+
+ input_process.close_all_handles()
+
+ mock_handle1.close.assert_called_once()
+ mock_handle2.close.assert_called_once()
+
+
+def test_shutdown_gracefully_no_zeek_pid():
+ """
+ Test shutdown_gracefully when the Zeek PID is not set.
+ """
+ input_process = ModuleFactory().create_input_obj("", "zeek_log_file")
+ input_process.stop_observer = MagicMock(return_value=True)
+ input_process.stop_queues = MagicMock(return_value=True)
+ input_process.remover_thread = MagicMock()
+ input_process.remover_thread.start()
+ input_process.zeek_thread = MagicMock()
+ input_process.zeek_thread.start()
+ input_process.open_file_handlers = {"test_file.log": MagicMock()}
+
+ with patch("os.kill") as mock_kill:
+ assert input_process.shutdown_gracefully() is True
+ mock_kill.assert_not_called()
+ assert input_process.open_file_handlers["test_file.log"].close.called
+
+
+def test_get_file_handle_non_existing_file():
+ """
+ Test that the get_file_handle method returns False for a non-existing file.
+ """
+ input_process = ModuleFactory().create_input_obj("", "zeek_log_file")
+ filename = "non_existing_file.log"
+ file_handle = input_process.get_file_handle(filename)
+ assert file_handle is False
+
+
+@pytest.mark.parametrize(
+ "data, expected_chunks",
+    [ # Testcase 1: String split into multiple chunks
+        (b"This is a test string.", [b"This is a", b" test str", b"ing."]),
+ # Testcase 2: String length is less than chunk size
+ (b"Hello", [b"Hello"]),
+ # Testcase 3: String length is more than chunk size
+ (
+ b"This is a longer string that exceeds the chunk size.",
+ [
+ b"This is a longer ",
+ b"string that exceed",
+ b"s the chunk size.",
+ ],
+ ),
+ ],
+)
+def test__make_gen(data, expected_chunks):
+ """
+ Test that the _make_gen function yields chunks of data from a file.
+ """
+ input_process = ModuleFactory().create_input_obj("", "zeek_log_file")
+ reader = MagicMock(side_effect=[*expected_chunks, b""])
+ gen = input_process._make_gen(reader)
+ for expected_chunk in expected_chunks:
+ assert next(gen) == expected_chunk
diff --git a/tests/test_ip_info.py b/tests/test_ip_info.py
index fe51e5940..c973a776d 100644
--- a/tests/test_ip_info.py
+++ b/tests/test_ip_info.py
@@ -2,66 +2,672 @@
from tests.module_factory import ModuleFactory
import maxminddb
+import pytest
+from unittest.mock import Mock, patch
+import json
+import requests
+import socket
+import subprocess
+from slips_files.core.evidence_structure.evidence import (
+ ThreatLevel,
+ Evidence,
+ Proto,
+ EvidenceType,
+ IoCType,
+ Direction,
+ IDEACategory,
+ Tag,
+)
-# ASN unit tests
-def test_get_asn_info_from_geolite(mock_db):
- """
- geolite is an offline db
- """
- ASN_info = ModuleFactory().create_asn_obj(mock_db)
- # check an ip that we know is in the db
- expected_asn_info = {"asn": {"number": "AS7018", "org": "ATT-INTERNET4"}}
- assert (
- ASN_info.get_asn_info_from_geolite("108.200.116.255")
- == expected_asn_info
+@pytest.mark.parametrize(
+ "ip_address, expected_geocountry",
+ [ # Testcase 1: Valid IP address
+ ("153.107.41.230", {"geocountry": "Australia"}),
+ # Testcase 2: Private IP address
+ ("192.168.1.1", {"geocountry": "Private"}),
+ # Testcase 3: Private IPv6 address
+ ("2001:db8::1", {"geocountry": "Private"}),
+ # Testcase 4: IP address not found in database
+ ("23.188.195.255", {"geocountry": "Unknown"}),
+ ],
+)
+def test_get_geocountry(ip_address, expected_geocountry):
+ ip_info = ModuleFactory().create_ip_info_obj()
+ ip_info.country_db = maxminddb.open_database(
+ "databases/GeoLite2-Country.mmdb"
)
- # test asn info not found in geolite
- assert ASN_info.get_asn_info_from_geolite("0.0.0.0") == {}
+ assert ip_info.get_geocountry(ip_address) == expected_geocountry
-# def test_cache_ip_range(mock_db):
-# # Patch the database object creation before it is instantiated
-# ASN_info = ModuleFactory().create_asn_obj(mock_db)
-# assert ASN_info.cache_ip_range("8.8.8.8") == {
-# "asn": {"number": "AS15169", "org": "GOOGLE, US"}
-# }
-#
+def test_get_vendor_from_database(mocker):
+ ip_info = ModuleFactory().create_ip_info_obj()
+ mac_addr = "08:00:27:7f:09:e1"
+ profileid = "profile_10.0.2.15"
+ db_vendor = "Database Vendor"
+ ip_info.db.get_mac_vendor_from_profile.return_value = db_vendor
-# GEOIP unit tests
-def test_get_geocountry(mock_db):
- ip_info = ModuleFactory().create_ip_info_obj(mock_db)
+ result = ip_info.get_vendor(mac_addr, profileid)
+ expected_result = True
+ assert result == expected_result
+ (ip_info.db.get_mac_vendor_from_profile.assert_called_once_with(profileid))
+ mocker.patch.object(ip_info, "get_vendor_offline").assert_not_called()
+ mocker.patch.object(ip_info, "get_vendor_online").assert_not_called()
- # open the db we'll be using for this test
- # ip_info.wait_for_dbs()
- ip_info.country_db = maxminddb.open_database(
- "databases/GeoLite2-Country.mmdb"
+
+def test_get_vendor_from_offline(
+ mocker,
+):
+ ip_info = ModuleFactory().create_ip_info_obj()
+ mac_addr = "08:00:27:7f:09:e2"
+ profileid = "profile_10.0.2.16"
+ offline_vendor = "Offline Vendor"
+
+ ip_info.db.get_mac_vendor_from_profile.return_value = None
+ mocker.patch.object(
+ ip_info, "get_vendor_offline", return_value=offline_vendor
)
- assert ip_info.get_geocountry("153.107.41.230") == {
- "geocountry": "Australia"
- }
- assert ip_info.get_geocountry("23.188.195.255") == {
- "geocountry": "Unknown"
+ result = ip_info.get_vendor(mac_addr, profileid)
+
+ assert result == {"MAC": mac_addr, "Vendor": offline_vendor}
+ (ip_info.db.get_mac_vendor_from_profile.assert_called_once_with(profileid))
+ (ip_info.get_vendor_offline.assert_called_once_with(mac_addr, profileid))
+ mocker.patch.object(ip_info, "get_vendor_online").assert_not_called()
+ ip_info.db.set_mac_vendor_to_profile.assert_called_once_with(
+ profileid, mac_addr, offline_vendor
+ )
+
+
+def test_get_vendor_from_online(
+ mocker,
+):
+ ip_info = ModuleFactory().create_ip_info_obj()
+ mac_addr = "08:00:27:7f:09:e3"
+ profileid = "profile_10.0.2.17"
+ online_vendor = "Online Vendor"
+
+ ip_info.db.get_mac_vendor_from_profile.return_value = None
+ mocker.patch.object(ip_info, "get_vendor_offline", return_value=None)
+ mocker.patch.object(
+ ip_info, "get_vendor_online", return_value=online_vendor
+ )
+
+ result = ip_info.get_vendor(mac_addr, profileid)
+
+ assert result == {"MAC": mac_addr, "Vendor": online_vendor}
+ (ip_info.db.get_mac_vendor_from_profile.assert_called_once_with(profileid))
+ (ip_info.get_vendor_offline.assert_called_once_with(mac_addr, profileid))
+ (ip_info.get_vendor_online.assert_called_once_with(mac_addr))
+ ip_info.db.set_mac_vendor_to_profile.assert_called_once_with(
+ profileid, mac_addr, online_vendor
+ )
+
+
+def test_get_vendor_not_found(
+ mocker,
+):
+ ip_info = ModuleFactory().create_ip_info_obj()
+ mac_addr = "08:00:27:7f:09:e4"
+ profileid = "profile_10.0.2.18"
+
+ ip_info.db.get_mac_vendor_from_profile.return_value = None
+ mocker.patch.object(ip_info, "get_vendor_offline", return_value=None)
+ mocker.patch.object(ip_info, "get_vendor_online", return_value=None)
+
+ result = ip_info.get_vendor(mac_addr, profileid)
+
+ assert result == {"MAC": mac_addr, "Vendor": "Unknown"}
+ (ip_info.db.get_mac_vendor_from_profile.assert_called_once_with(profileid))
+ (ip_info.get_vendor_offline.assert_called_once_with(mac_addr, profileid))
+ ip_info.get_vendor_online.assert_called_once_with(mac_addr)
+ ip_info.db.set_mac_vendor_to_profile.assert_not_called()
+
+
+def test_get_vendor_broadcast_mac(
+ mocker,
+):
+ ip_info = ModuleFactory().create_ip_info_obj()
+ mac_addr = "ff:ff:ff:ff:ff:ff"
+ profileid = "profile_10.0.2.19"
+
+ result = ip_info.get_vendor(mac_addr, profileid)
+
+ assert result is False
+ ip_info.db.get_mac_vendor_from_profile.assert_not_called()
+ mocker.patch.object(ip_info, "get_vendor_offline").assert_not_called()
+ mocker.patch.object(ip_info, "get_vendor_online").assert_not_called()
+ ip_info.db.set_mac_vendor_to_profile.assert_not_called()
+
+
+def test_get_age_no_creation_date():
+ domain = "example.com"
+ ip_info = ModuleFactory().create_ip_info_obj()
+ ip_info.db.get_domain_data.return_value = None
+ with patch("whois.query") as mock_whois:
+ mock_whois.return_value = Mock(creation_date=None)
+
+ result = ip_info.get_age(domain)
+
+ assert result is False
+ ip_info.db.set_info_for_domains.assert_not_called()
+
+
+def test_get_age_invalid_tld():
+ domain = "example.invalid"
+ ip_info = ModuleFactory().create_ip_info_obj()
+ result = ip_info.get_age(domain)
+
+ assert result is False
+ ip_info.db.get_domain_data.assert_not_called()
+ ip_info.db.set_info_for_domains.assert_not_called()
+
+
+def test_get_age_cached_data():
+ domain = "cached.com"
+ cached_age = 100
+ ip_info = ModuleFactory().create_ip_info_obj()
+ ip_info.db.get_domain_data.return_value = {"Age": cached_age}
+
+ result = ip_info.get_age(domain)
+
+ assert result is False
+ ip_info.db.set_info_for_domains.assert_not_called()
+
+
+@pytest.mark.parametrize("domain", ["example.arpa", "example.local"])
+def test_get_age_special_domains(domain):
+ ip_info = ModuleFactory().create_ip_info_obj()
+ result = ip_info.get_age(domain)
+
+ assert result is False
+ ip_info.db.get_domain_data.assert_not_called()
+ ip_info.db.set_info_for_domains.assert_not_called()
+
+
+def test_get_rdns_valid_ip():
+ ip_info = ModuleFactory().create_ip_info_obj()
+ ip_address = "8.8.8.8"
+ expected_rdns = {"reverse_dns": "dns.google"}
+
+ with patch("socket.gethostbyaddr") as mock_gethostbyaddr, patch(
+ "socket.inet_pton"
+ ) as mock_inet_pton:
+
+ mock_gethostbyaddr.return_value = ("dns.google", [], ["8.8.8.8"])
+ mock_inet_pton.side_effect = socket.error
+
+ result = ip_info.get_rdns(ip_address)
+ assert result == expected_rdns
+
+ mock_gethostbyaddr.assert_called_once_with(ip_address)
+ mock_inet_pton.assert_called_once()
+ ip_info.db.set_ip_info.assert_called_once_with(
+ ip_address, expected_rdns
+ )
+
+
+def test_get_rdns_invalid_ip():
+ ip_info = ModuleFactory().create_ip_info_obj()
+ ip_address = "invalid_ip"
+
+ with patch("socket.gethostbyaddr") as mock_gethostbyaddr:
+ mock_gethostbyaddr.side_effect = socket.gaierror
+
+ result = ip_info.get_rdns(ip_address)
+ assert result is False
+
+ mock_gethostbyaddr.assert_called_once_with(ip_address)
+
+
+def test_get_rdns_no_reverse_dns():
+ ip_info = ModuleFactory().create_ip_info_obj()
+ ip_address = "1.1.1.1"
+
+ with patch("socket.gethostbyaddr") as mock_gethostbyaddr, patch(
+ "socket.inet_pton"
+ ) as mock_inet_pton:
+
+ mock_gethostbyaddr.return_value = ("1.1.1.1", [], ["1.1.1.1"])
+ mock_inet_pton.return_value = b"\x01\x01\x01\x01"
+
+ result = ip_info.get_rdns(ip_address)
+ assert result is False
+
+ mock_gethostbyaddr.assert_called_once_with(ip_address)
+ mock_inet_pton.assert_called_once()
+
+
+def test_get_rdns_localhost():
+ ip_info = ModuleFactory().create_ip_info_obj()
+ ip_address = "127.0.0.1"
+ expected_rdns = {"reverse_dns": "localhost.localdomain"}
+
+ with patch("socket.gethostbyaddr") as mock_gethostbyaddr, patch(
+ "socket.inet_pton"
+ ) as mock_inet_pton:
+
+ mock_gethostbyaddr.return_value = (
+ "localhost.localdomain",
+ [],
+ ["127.0.0.1"],
+ )
+ mock_inet_pton.side_effect = socket.error
+
+ result = ip_info.get_rdns(ip_address)
+ assert result == expected_rdns
+
+ mock_gethostbyaddr.assert_called_once_with(ip_address)
+ mock_inet_pton.assert_called_once()
+ ip_info.db.set_ip_info.assert_called_once_with(
+ ip_address, expected_rdns
+ )
+
+
+def test_set_evidence_malicious_jarm_hash(
+ mocker,
+):
+ ip_info = ModuleFactory().create_ip_info_obj()
+ flow = {
+ "dport": 443,
+ "daddr": "192.168.1.100",
+ "saddr": "192.168.1.10",
+ "starttime": 1625097600,
+ "proto": "tcp",
+ "uid": "CuTCcR1Bbp9Je7LVqa",
}
+ twid = "timewindow1"
+ ip_info.db.get_port_info.return_value = "https"
+ ip_info.db.get_ip_identification.return_value = "Known malicious server"
+ mock_set_evidence = mocker.patch.object(ip_info.db, "set_evidence")
+ ip_info.set_evidence_malicious_jarm_hash(flow, twid)
+ assert mock_set_evidence.call_count == 2
+ dst_evidence = mock_set_evidence.call_args_list[0][0][0]
+ assert isinstance(dst_evidence, Evidence)
+ assert dst_evidence.evidence_type == EvidenceType.MALICIOUS_JARM
+ assert dst_evidence.attacker.direction == Direction.DST
+ assert dst_evidence.attacker.attacker_type == IoCType.IP
+ assert dst_evidence.attacker.value == "192.168.1.100"
+ assert dst_evidence.threat_level == ThreatLevel.MEDIUM
+ assert dst_evidence.confidence == 0.7
+ assert "192.168.1.100" in dst_evidence.description
+ assert "port: 443/tcp (HTTPS)" in dst_evidence.description
+ assert "Known malicious server" in dst_evidence.description
+ assert dst_evidence.profile.ip == "192.168.1.100"
+ assert dst_evidence.timewindow.number == 1
+ assert dst_evidence.uid == ["CuTCcR1Bbp9Je7LVqa"]
+ assert dst_evidence.timestamp == 1625097600
+ assert dst_evidence.category == IDEACategory.ANOMALY_TRAFFIC
+ assert dst_evidence.proto == Proto.TCP
+ assert dst_evidence.port == 443
+ assert dst_evidence.source_target_tag == Tag.MALWARE
+ src_evidence = mock_set_evidence.call_args_list[1][0][0]
+ assert isinstance(src_evidence, Evidence)
+ assert src_evidence.evidence_type == EvidenceType.MALICIOUS_JARM
+ assert src_evidence.attacker.direction == Direction.SRC
+ assert src_evidence.attacker.attacker_type == IoCType.IP
+ assert src_evidence.attacker.value == "192.168.1.10"
+ assert src_evidence.threat_level == ThreatLevel.LOW
+ assert src_evidence.confidence == 0.7
+ assert "192.168.1.100" in src_evidence.description
+ assert "port: 443/tcp (HTTPS)" in src_evidence.description
+ assert "Known malicious server" in src_evidence.description
+ assert src_evidence.profile.ip == "192.168.1.10"
+ assert src_evidence.timewindow.number == 1
+ assert src_evidence.uid == ["CuTCcR1Bbp9Je7LVqa"]
+ assert src_evidence.timestamp == 1625097600
+ assert src_evidence.category == IDEACategory.ANOMALY_TRAFFIC
+ assert src_evidence.proto == Proto.TCP
+ assert src_evidence.port == 443
+ assert src_evidence.source_target_tag == Tag.MALWARE
-def test_get_vendor(mocker, mock_db):
- # make sure the mac db is download so that wai_for_dbs doesn't wait forever :'D
- ip_info = ModuleFactory().create_ip_info_obj(mock_db)
- profileid = "profile_10.0.2.15"
- mac_addr = "08:00:27:7f:09:e1"
+@pytest.mark.parametrize(
+ "status_code, response_text, expected_vendor, " "mock_side_effect",
+ [
+ (
+ 200,
+ "Valid Vendor",
+ "Valid Vendor",
+ None,
+ ),
+ (
+ 204,
+ "",
+ False,
+ None,
+ ),
+ (
+ None,
+ None,
+ False,
+ requests.exceptions.ReadTimeout(),
+ ),
+ (
+ None,
+ None,
+ False,
+ requests.exceptions.ConnectionError(),
+ ),
+ (
+ None,
+ None,
+ False,
+ json.decoder.JSONDecodeError("Msg", "Doc", 0),
+ ),
+ ],
+)
+def test_get_vendor_online(
+ mocker,
+ status_code,
+ response_text,
+ expected_vendor,
+ mock_side_effect,
+):
+ ip_info = ModuleFactory().create_ip_info_obj()
+ mock_response = Mock(status_code=status_code, text=response_text)
+ mock_requests = mocker.patch(
+ "requests.get",
+ return_value=mock_response,
+ side_effect=mock_side_effect,
+ )
+
+ vendor = ip_info.get_vendor_online("00:11:22:33:44:55")
+
+ assert vendor == expected_vendor
+ mock_requests.assert_called_once_with(
+ "https://api.macvendors.com/00:11:22:33:44:55", timeout=5
+ )
+
+
+def test_shutdown_gracefully(
+ mocker,
+):
+ ip_info = ModuleFactory().create_ip_info_obj()
+
+ mock_asn_db = mocker.Mock()
+ mock_country_db = mocker.Mock()
+ mock_mac_db = mocker.Mock()
+
+ ip_info.asn_db = mock_asn_db
+ ip_info.country_db = mock_country_db
+ ip_info.mac_db = mock_mac_db
+
+ ip_info.shutdown_gracefully()
+ mock_asn_db.close.assert_called_once()
+ mock_country_db.close.assert_called_once()
+ mock_mac_db.close.assert_called_once()
+
+
+@pytest.mark.parametrize(
+ "platform_system, subprocess_output, expected_ip",
+ [
+ # Testcase 1: MacOS (Darwin) with valid output
+ ("Darwin", b"gateway: 192.168.1.1", "192.168.1.1"),
+ # Testcase 2: Linux with valid output
+ ("Linux", b"default via 10.0.0.1 dev eth0", "10.0.0.1"),
+ # Testcase 3: MacOS with invalid output
+ ("Darwin", b"No default gateway", False),
+ # Testcase 4: Unsupported OS
+ ("Windows", b"", False),
+ ],
+)
+def test_get_gateway_ip(
+ mocker, platform_system, subprocess_output, expected_ip
+):
+ ip_info = ModuleFactory().create_ip_info_obj()
+ mocker.patch("platform.system", return_value=platform_system)
+ mocker.patch("subprocess.check_output", return_value=subprocess_output)
+ mocker.patch("sys.argv", ["-i", "eth0"])
+ result = ip_info.get_gateway_ip()
+ assert result == expected_ip
+
+
+@pytest.mark.parametrize(
+ "ip, is_multicast, cached_info, expected_calls",
+ [
+ # Testcase 1: Valid IP, not multicast, no cached info
+ (
+ "192.168.1.1",
+ False,
+ {},
+ {"get_geocountry": 1, "get_asn": 1, "get_rdns": 1},
+ ),
+ # Testcase 2: Valid IP, multicast
+ ("224.0.0.1", True, {}, {}),
+ # Testcase 3: Valid IP, not multicast,
+ # with cached geocountry
+ (
+ "10.0.0.1",
+ False,
+ {"geocountry": "USA"},
+ {"get_asn": 1, "get_rdns": 1},
+ ),
+ ],
+)
+def test_handle_new_ip(mocker, ip, is_multicast, cached_info, expected_calls):
+ ip_info = ModuleFactory().create_ip_info_obj()
+
+ mock_ip_address = mocker.patch("ipaddress.ip_address")
+ mock_ip_address.return_value.is_multicast = is_multicast
+
+ ip_info.db.get_ip_info.return_value = cached_info
+
+ mock_get_geocountry = mocker.patch.object(ip_info, "get_geocountry")
+ mock_get_asn = mocker.patch.object(ip_info.asn, "get_asn")
+ mock_get_rdns = mocker.patch.object(ip_info, "get_rdns")
+
+ mocker.patch.object(ip_info.asn, "update_asn", return_value=True)
+ ip_info.handle_new_ip(ip)
+ assert mock_get_geocountry.call_count == expected_calls.get(
+ "get_geocountry", 0
+ )
+ assert mock_get_asn.call_count == expected_calls.get("get_asn", 0)
+ assert mock_get_rdns.call_count == expected_calls.get("get_rdns", 0)
+
+
+def test_check_if_we_have_pending_mac_queries_with_mac_db(
+ mocker,
+):
+ ip_info = ModuleFactory().create_ip_info_obj()
+ ip_info.mac_db = Mock()
+ ip_info.pending_mac_queries = Mock()
+ ip_info.pending_mac_queries.empty.side_effect = [False, False, True]
+ ip_info.pending_mac_queries.get.side_effect = [
+ ("00:11:22:33:44:55", "profile_1"),
+ ("AA:BB:CC:DD:EE:FF", "profile_2"),
+ Exception("Empty queue"),
+ ]
+ mock_get_vendor = mocker.patch.object(ip_info, "get_vendor")
+ ip_info.check_if_we_have_pending_mac_queries()
+ assert mock_get_vendor.call_count == 2
+ mock_get_vendor.assert_any_call("00:11:22:33:44:55", "profile_1")
+ mock_get_vendor.assert_any_call("AA:BB:CC:DD:EE:FF", "profile_2")
+
+
+def test_check_if_we_have_pending_mac_queries_empty_queue(
+ mocker,
+):
+ ip_info = ModuleFactory().create_ip_info_obj()
+ ip_info.mac_db = Mock()
+ ip_info.pending_mac_queries = Mock()
+ ip_info.pending_mac_queries.empty.return_value = True
+ mock_get_vendor = mocker.patch.object(ip_info, "get_vendor")
+ ip_info.check_if_we_have_pending_mac_queries()
+ mock_get_vendor.assert_not_called()
+
+
+def test_get_gateway_MAC_cached():
+ ip_info = ModuleFactory().create_ip_info_obj()
+ gw_ip = "192.168.1.1"
+ cached_mac = "00:11:22:33:44:55"
+
+ ip_info.db.get_mac_addr_from_profile.return_value = cached_mac
+
+ result = ip_info.get_gateway_MAC(gw_ip)
+
+ assert result == cached_mac
+ ip_info.db.get_mac_addr_from_profile.assert_called_once_with(
+ f"profile_{gw_ip}"
+ )
+ (ip_info.db.set_default_gateway.assert_called_once_with("MAC", cached_mac))
+
+
+def test_get_gateway_MAC_arp_command(
+ mocker,
+):
+ ip_info = ModuleFactory().create_ip_info_obj()
+ gw_ip = "172.16.0.1"
+    arp_output = "? (172.16.0.1) at 11:22:33:44:55:66 [ether] on eth0"
+ expected_mac = "11:22:33:44:55:66"
+
+ ip_info.db.get_mac_addr_from_profile.return_value = None
+ ip_info.db.is_growing_zeek_dir.return_value = True
+
+ mocker.patch("sys.argv", ["-i", "eth0"])
+
+ mock_subprocess_run = mocker.patch("subprocess.run")
+ mock_subprocess_run.side_effect = [
+ subprocess.CalledProcessError(1, "cmd"),
+ Mock(stdout=arp_output),
+ ]
+
+ result = ip_info.get_gateway_MAC(gw_ip)
- # # mock the online vendor
- # if the vendor isn't found offline, this mocker will run instead of get_vendor_online
- mock_requests = mocker.patch("requests.get")
- mock_requests.return_value.status_code = 200
- mock_requests.return_value.text = "PCS Systemtechnik GmbH"
- mock_db.get_mac_vendor_from_profile.return_value = False
+ assert result == expected_mac
+ assert mock_subprocess_run.call_count == 2
+ mock_subprocess_run.assert_any_call(
+ ["arp", "-an"], capture_output=True, check=True, text=True
+ )
+ (
+ ip_info.db.set_default_gateway.assert_called_once_with(
+ "MAC", expected_mac
+ )
+ )
+
+
+def test_get_gateway_MAC_not_found(
+ mocker,
+):
+ ip_info = ModuleFactory().create_ip_info_obj()
+ gw_ip = "192.168.0.1"
+
+ ip_info.db.get_mac_addr_from_profile.return_value = None
+ ip_info.db.is_growing_zeek_dir.return_value = True
+
+ mocker.patch("sys.argv", ["-i", "eth0"])
+
+ mock_subprocess_run = mocker.patch("subprocess.run")
+ mock_subprocess_run.side_effect = subprocess.CalledProcessError(1, "cmd")
+
+ result = ip_info.get_gateway_MAC(gw_ip)
+
+ assert result is None
+ assert mock_subprocess_run.call_count == 2
+ ip_info.db.set_default_gateway.assert_not_called()
+
+
+def test_get_gateway_MAC_not_on_interface(
+ mocker,
+):
+ ip_info = ModuleFactory().create_ip_info_obj()
+ gw_ip = "192.168.1.1"
+
+ ip_info.db.get_mac_addr_from_profile.return_value = None
+ ip_info.db.is_growing_zeek_dir.return_value = False
+
+ mocker.patch("sys.argv", [])
+
+ mock_subprocess_run = mocker.patch("subprocess.run")
+
+ result = ip_info.get_gateway_MAC(gw_ip)
+
+ assert result is None
+ mock_subprocess_run.assert_not_called()
+ ip_info.db.set_default_gateway.assert_not_called()
+
+
+def test_get_gateway_MAC_ip_command_success(
+ mocker,
+):
+ ip_info = ModuleFactory().create_ip_info_obj()
+ gw_ip = "10.0.0.1"
+    ip_output = "10.0.0.1 dev eth0 lladdr aa:bb:cc:dd:ee:ff REACHABLE"
+ expected_mac = "aa:bb:cc:dd:ee:ff"
+
+ ip_info.db.get_mac_addr_from_profile.return_value = None
+ ip_info.db.is_growing_zeek_dir.return_value = True
+
+ mocker.patch("sys.argv", ["-i", "eth0"])
+
+ mock_subprocess_run = mocker.patch("subprocess.run")
+ mock_subprocess_run.return_value = Mock(stdout=ip_output)
+
+ result = ip_info.get_gateway_MAC(gw_ip)
+
+ assert result == expected_mac
+ mock_subprocess_run.assert_called_once_with(
+ ["ip", "neigh", "show", gw_ip],
+ capture_output=True,
+ check=True,
+ text=True,
+ )
+ (
+ ip_info.db.set_default_gateway.assert_called_once_with(
+ "MAC", expected_mac
+ )
+ )
+
+
+def test_get_gateway_MAC_ip_command_failure(
+ mocker,
+):
+ ip_info = ModuleFactory().create_ip_info_obj()
+ gw_ip = "172.16.0.1"
+
+ ip_info.db.get_mac_addr_from_profile.return_value = None
+ ip_info.db.is_growing_zeek_dir.return_value = True
+
+ mocker.patch("sys.argv", ["-i", "eth0"])
+
+ mock_subprocess_run = mocker.patch("subprocess.run")
+ mock_subprocess_run.side_effect = [
+ subprocess.CalledProcessError(1, "cmd"),
+ subprocess.CalledProcessError(1, "cmd"),
+ ]
+
+ result = ip_info.get_gateway_MAC(gw_ip)
+
+ assert result is None
+ assert mock_subprocess_run.call_count == 2
+ mock_subprocess_run.assert_any_call(
+ ["ip", "neigh", "show", gw_ip],
+ capture_output=True,
+ check=True,
+ text=True,
+ )
+ mock_subprocess_run.assert_any_call(
+ ["arp", "-an"], capture_output=True, check=True, text=True
+ )
+ ip_info.db.set_default_gateway.assert_not_called()
- # tries to get vendor either online or from our offline db
- mac_info = ip_info.get_vendor(mac_addr, profileid)
- assert mac_info is not False
- assert mac_info["Vendor"].lower() == "Pcs Systemtechnik GmbH".lower()
+@pytest.mark.parametrize(
+ "ip_address, expected_family",
+ [
+ # Testcase 1: IPv4 address
+ ("192.168.1.1", socket.AF_INET),
+ # Testcase 2: IPv6 address
+ ("2001:db8::1", socket.AF_INET6),
+ # Testcase 3: Another IPv4 address
+ ("10.0.0.1", socket.AF_INET),
+ # Testcase 4: Another IPv6 address
+ ("::1", socket.AF_INET6),
+ # Testcase 5: IPv4-mapped IPv6 address
+ ("::ffff:192.0.2.1", socket.AF_INET6),
+ ],
+)
+def test_get_ip_family(ip_address, expected_family):
+ ip_info = ModuleFactory().create_ip_info_obj()
+ assert ip_info.get_ip_family(ip_address) == expected_family
diff --git a/tests/test_leak_detector.py b/tests/test_leak_detector.py
index fa7131e8a..0fe2ce8a7 100644
--- a/tests/test_leak_detector.py
+++ b/tests/test_leak_detector.py
@@ -25,14 +25,14 @@ def test_is_yara_installed(
"""Test that the is_yara_installed method correctly identifies if Yara is installed."""
mock_os_system.return_value = return_code
- leak_detector = ModuleFactory().create_leak_detector_obj(mock_db)
+ leak_detector = ModuleFactory().create_leak_detector_obj()
assert leak_detector.is_yara_installed() == expected_result
@patch("os.mkdir")
@patch("shutil.rmtree")
def test_delete_compiled_rules(mock_rmtree, mock_mkdir, mock_db):
- leak_detector = ModuleFactory().create_leak_detector_obj(mock_db)
+ leak_detector = ModuleFactory().create_leak_detector_obj()
leak_detector.delete_compiled_rules()
mock_rmtree.assert_called_once_with(leak_detector.compiled_yara_rules_path)
mock_mkdir.assert_called_once_with(leak_detector.compiled_yara_rules_path)
@@ -58,7 +58,7 @@ def test_pre_main(
expected_find_matches_call,
):
"""Tests the pre_main method."""
- leak_detector = ModuleFactory().create_leak_detector_obj(mock_db)
+ leak_detector = ModuleFactory().create_leak_detector_obj()
leak_detector.bin_found = yara_installed
leak_detector.compile_and_save_rules = MagicMock(
return_value=compile_rules_success
@@ -70,7 +70,7 @@ def test_pre_main(
def test_main(mock_db):
- leak_detector = ModuleFactory().create_leak_detector_obj(mock_db)
+ leak_detector = ModuleFactory().create_leak_detector_obj()
result = leak_detector.main()
assert result == 1
@@ -114,7 +114,7 @@ def test_fix_json_packet(
else lambda x: mock_json_loads_return
)
- leak_detector = ModuleFactory().create_leak_detector_obj(mock_db)
+ leak_detector = ModuleFactory().create_leak_detector_obj()
result = leak_detector.fix_json_packet(input_json)
assert result == expected_output
@@ -155,7 +155,7 @@ def test_find_matches(
):
"""Tests the find_matches method of LeakDetector."""
- leak_detector = ModuleFactory().create_leak_detector_obj(mock_db)
+ leak_detector = ModuleFactory().create_leak_detector_obj()
mock_listdir.return_value = listdir_return
mock_popen.return_value.communicate.return_value = popen_communicate_return
@@ -220,7 +220,7 @@ def test_get_packet_info(
):
"""Tests the get_packet_info method of LeakDetector."""
- leak_detector = ModuleFactory().create_leak_detector_obj(mock_db)
+ leak_detector = ModuleFactory().create_leak_detector_obj()
leak_detector.fix_json_packet = MagicMock()
with patch(
"builtins.open", mock_open(read_data=pcap_data)
@@ -295,7 +295,7 @@ def test_set_evidence_yara_match(
db_get_tw_of_ts_return,
expected_call_count,
):
- leak_detector = ModuleFactory().create_leak_detector_obj(mock_db)
+ leak_detector = ModuleFactory().create_leak_detector_obj()
leak_detector.get_packet_info = MagicMock(
return_value=get_packet_info_return
)
diff --git a/tests/test_main.py b/tests/test_main.py
index 9e368c3a4..dd3f1b281 100644
--- a/tests/test_main.py
+++ b/tests/test_main.py
@@ -16,9 +16,9 @@
],
)
def test_handle_flows_from_stdin_valid_input(
- input_information, mock_db, expected_input_type, expected_line_type
+ input_information, expected_input_type, expected_line_type
):
- main = ModuleFactory().create_main_obj(mock_db)
+ main = ModuleFactory().create_main_obj()
main.mode = "interactive"
input_type, line_type = main.handle_flows_from_stdin(input_information)
@@ -26,8 +26,8 @@ def test_handle_flows_from_stdin_valid_input(
assert line_type == expected_line_type
-def test_handle_flows_from_stdin_invalid_input(mock_db):
- main = ModuleFactory().create_main_obj(mock_db)
+def test_handle_flows_from_stdin_invalid_input():
+ main = ModuleFactory().create_main_obj()
main.mode = "interactive"
with pytest.raises(SystemExit):
@@ -50,8 +50,8 @@ def test_handle_flows_from_stdin_invalid_input(mock_db):
({"input_module": False, "growing": False}, "nfdump", False),
],
)
-def test_is_total_flows_unknown(args, mock_db, input_type, expected_result):
- main = ModuleFactory().create_main_obj(mock_db)
+def test_is_total_flows_unknown(args, input_type, expected_result):
+ main = ModuleFactory().create_main_obj()
main.args = MagicMock(**args)
main.input_type = input_type
@@ -67,12 +67,11 @@ def test_is_total_flows_unknown(args, mock_db, input_type, expected_result):
],
)
def test_cpu_profiler_release_enabled(
- mock_db,
cpu_profiler_multiprocess,
expected_stop_calls,
expected_print_calls,
):
- main = ModuleFactory().create_main_obj(mock_db)
+ main = ModuleFactory().create_main_obj()
main.cpuProfilerEnabled = True
main.cpuProfilerMultiprocess = cpu_profiler_multiprocess
@@ -84,15 +83,15 @@ def test_cpu_profiler_release_enabled(
assert main.cpuProfiler.print.call_count == expected_print_calls
-def test_cpu_profiler_release_disabled(mock_db):
- main = ModuleFactory().create_main_obj(mock_db)
+def test_cpu_profiler_release_disabled():
+ main = ModuleFactory().create_main_obj()
main.cpuProfilerEnabled = False
main.cpu_profiler_release()
assert not hasattr(main, "memoryProfiler")
-def test_memory_profiler_release_enabled(mock_db):
- main = ModuleFactory().create_main_obj(mock_db)
+def test_memory_profiler_release_enabled():
+ main = ModuleFactory().create_main_obj()
main.memoryProfilerEnabled = True
main.memoryProfiler = MagicMock()
@@ -101,8 +100,8 @@ def test_memory_profiler_release_enabled(mock_db):
main.memoryProfiler.stop.assert_called_once()
-def test_memory_profiler_release_disabled(mock_db):
- main = ModuleFactory().create_main_obj(mock_db)
+def test_memory_profiler_release_disabled():
+ main = ModuleFactory().create_main_obj()
main.memoryProfilerEnabled = False
main.memory_profiler_release()
@@ -120,8 +119,8 @@ def test_memory_profiler_release_disabled(mock_db):
("daemonized", 10, 0),
],
)
-def test_update_stats(mock_db, mode, time_diff, expected_calls):
- main = ModuleFactory().create_main_obj(mock_db)
+def test_update_stats(mode, time_diff, expected_calls):
+ main = ModuleFactory().create_main_obj()
main.mode = mode
main.last_updated_stats_time = datetime.now() - timedelta(
seconds=time_diff
@@ -149,14 +148,13 @@ def test_update_stats(mock_db, mode, time_diff, expected_calls):
],
)
def test_update_host_ip(
- mock_db,
is_interface,
host_ip,
modified_profiles,
expected_calls,
expected_result,
):
- main = ModuleFactory().create_main_obj(mock_db)
+ main = ModuleFactory().create_main_obj()
main.is_interface = is_interface
main.db = MagicMock()
main.db.set_host_ip = MagicMock()
@@ -183,7 +181,6 @@ def test_update_host_ip(
],
)
def test_setup_print_levels(
- mock_db,
args_verbose,
conf_verbose,
args_debug,
@@ -191,7 +188,7 @@ def test_setup_print_levels(
expected_verbose,
expected_debug,
):
- main = ModuleFactory().create_main_obj(mock_db)
+ main = ModuleFactory().create_main_obj()
main.args = MagicMock(verbose=args_verbose, debug=args_debug)
main.conf = MagicMock()
main.conf.verbose.return_value = conf_verbose
@@ -211,8 +208,8 @@ def test_setup_print_levels(
("daemonized", MagicMock(), "daemonized", MagicMock),
],
)
-def test_set_mode(mock_db, mode, daemon, expected_mode, expected_daemon_type):
- main = ModuleFactory().create_main_obj(mock_db)
+def test_set_mode(mode, daemon, expected_mode, expected_daemon_type):
+ main = ModuleFactory().create_main_obj()
main.set_mode(mode, daemon)
assert main.mode == expected_mode
assert isinstance(main.daemon, expected_daemon_type)
@@ -226,8 +223,8 @@ def test_set_mode(mock_db, mode, daemon, expected_mode, expected_daemon_type):
("Another log", "Another log\n"),
],
)
-def test_log(mock_db, txt, expected_content):
- main = ModuleFactory().create_main_obj(mock_db)
+def test_log(txt, expected_content):
+ main = ModuleFactory().create_main_obj()
main.daemon = MagicMock()
main.daemon.stdout = "test_stdout.log"
@@ -270,9 +267,9 @@ def test_log(mock_db, txt, expected_content):
],
)
def test_print(
- mock_db, text, verbose, debug, log_to_logfiles_only, expected_notification
+ text, verbose, debug, log_to_logfiles_only, expected_notification
):
- main = ModuleFactory().create_main_obj(mock_db)
+ main = ModuleFactory().create_main_obj()
main.name = "Main"
with patch.object(main, "notify_observers") as mock_notify:
@@ -301,10 +298,8 @@ def test_print(
),
],
)
-def test_get_input_file_type(
- mock_db, given_path, cmd_result, expected_input_type
-):
- main = ModuleFactory().create_main_obj(mock_db)
+def test_get_input_file_type(given_path, cmd_result, expected_input_type):
+ main = ModuleFactory().create_main_obj()
with (
patch("subprocess.run") as mock_run,
@@ -332,8 +327,8 @@ def test_get_input_file_type(
("path/to/input.pcap", "output/input"),
],
)
-def test_save_the_db(mock_db, input_information, expected_filepath):
- main = ModuleFactory().create_main_obj(mock_db)
+def test_save_the_db(input_information, expected_filepath):
+ main = ModuleFactory().create_main_obj()
main.input_information = input_information
main.args = MagicMock()
main.args.output = "output"
@@ -355,10 +350,8 @@ def test_save_the_db(mock_db, input_information, expected_filepath):
("binetflow", False, False),
],
)
-def test_was_running_zeek(
- mock_db, input_type, is_growing_zeek_dir, expected_result
-):
- main = ModuleFactory().create_main_obj(mock_db)
+def test_was_running_zeek(input_type, is_growing_zeek_dir, expected_result):
+ main = ModuleFactory().create_main_obj()
main.db = MagicMock()
main.db.get_input_type.return_value = input_type
main.db.is_growing_zeek_dir.return_value = is_growing_zeek_dir
@@ -366,8 +359,8 @@ def test_was_running_zeek(
assert main.was_running_zeek() == expected_result
-def test_delete_zeek_files_enabled(mock_db):
- main = ModuleFactory().create_main_obj(mock_db)
+def test_delete_zeek_files_enabled():
+ main = ModuleFactory().create_main_obj()
main.conf = MagicMock()
main.conf.delete_zeek_files.return_value = True
main.zeek_dir = "zeek_dir"
@@ -377,8 +370,8 @@ def test_delete_zeek_files_enabled(mock_db):
mock_rmtree.assert_called_once_with("zeek_dir")
-def test_delete_zeek_files_disabled(mock_db):
- main = ModuleFactory().create_main_obj(mock_db)
+def test_delete_zeek_files_disabled():
+ main = ModuleFactory().create_main_obj()
main.conf = MagicMock()
main.conf.delete_zeek_files.return_value = False
main.zeek_dir = "zeek_dir"
@@ -388,8 +381,8 @@ def test_delete_zeek_files_disabled(mock_db):
mock_rmtree.assert_not_called()
-def test_get_slips_version(mock_db):
- main = ModuleFactory().create_main_obj(mock_db)
+def test_get_slips_version():
+ main = ModuleFactory().create_main_obj()
version_content = "1.2.3"
with patch(
@@ -401,8 +394,8 @@ def test_get_slips_version(mock_db):
assert result == version_content
-def test_check_zeek_or_bro_zeek_found(mock_db):
- main = ModuleFactory().create_main_obj(mock_db)
+def test_check_zeek_or_bro_zeek_found():
+ main = ModuleFactory().create_main_obj()
main.input_type = "pcap"
with patch("shutil.which") as mock_which:
@@ -412,8 +405,8 @@ def test_check_zeek_or_bro_zeek_found(mock_db):
assert result == "zeek"
-def test_check_zeek_or_bro_bro_found(mock_db):
- main = ModuleFactory().create_main_obj(mock_db)
+def test_check_zeek_or_bro_bro_found():
+ main = ModuleFactory().create_main_obj()
main.input_type = "pcap"
with patch("shutil.which") as mock_which:
@@ -423,8 +416,8 @@ def test_check_zeek_or_bro_bro_found(mock_db):
assert result == "bro"
-def test_check_zeek_or_bro_not_needed(mock_db):
- main = ModuleFactory().create_main_obj(mock_db)
+def test_check_zeek_or_bro_not_needed():
+ main = ModuleFactory().create_main_obj()
main.input_type = "file"
result = main.check_zeek_or_bro()
@@ -432,8 +425,8 @@ def test_check_zeek_or_bro_not_needed(mock_db):
assert result == expected_result
-def test_check_zeek_or_bro_not_found(mock_db):
- main = ModuleFactory().create_main_obj(mock_db)
+def test_check_zeek_or_bro_not_found():
+ main = ModuleFactory().create_main_obj()
main.input_type = "pcap"
with (
@@ -456,8 +449,8 @@ def test_check_zeek_or_bro_not_found(mock_db):
(False, "zeek_files_inputfile/"),
],
)
-def test_prepare_zeek_output_dir(mock_db, store_in_output, expected_dir):
- main = ModuleFactory().create_main_obj(mock_db)
+def test_prepare_zeek_output_dir(store_in_output, expected_dir):
+ main = ModuleFactory().create_main_obj()
main.input_information = "/path/to/inputfile.pcap"
main.args = Mock()
main.args.output = "output"
@@ -470,8 +463,8 @@ def test_prepare_zeek_output_dir(mock_db, store_in_output, expected_dir):
assert main.zeek_dir == expected_dir
-def test_terminate_slips_interactive(mock_db):
- main = ModuleFactory().create_main_obj(mock_db)
+def test_terminate_slips_interactive():
+ main = ModuleFactory().create_main_obj()
main.mode = "interactive"
main.conf = MagicMock()
main.conf.get_cpu_profiler_enable.return_value = False
@@ -482,8 +475,8 @@ def test_terminate_slips_interactive(mock_db):
mock_exit.assert_called_once_with(0)
-def test_terminate_slips_daemonized(mock_db):
- main = ModuleFactory().create_main_obj(mock_db)
+def test_terminate_slips_daemonized():
+ main = ModuleFactory().create_main_obj()
main.mode = "daemonized"
main.daemon = MagicMock()
main.conf = MagicMock()
@@ -496,8 +489,8 @@ def test_terminate_slips_daemonized(mock_db):
mock_exit.assert_called_once_with(0)
-def test_terminate_slips_cpu_profiler_enabled(mock_db):
- main = ModuleFactory().create_main_obj(mock_db)
+def test_terminate_slips_cpu_profiler_enabled():
+ main = ModuleFactory().create_main_obj()
main.mode = "interactive"
main.conf = MagicMock()
main.conf.get_cpu_profiler_enable.return_value = True
@@ -508,24 +501,8 @@ def test_terminate_slips_cpu_profiler_enabled(mock_db):
mock_exit.assert_not_called()
-def test_print_version_no_git(mock_db):
- main = ModuleFactory().create_main_obj(mock_db)
- main.version = "1.0.0"
-
- with (
- patch(
- "slips_files.common.slips_utils.utils.get_branch_info"
- ) as mock_get_branch_info,
- patch("builtins.print") as mock_print,
- patch("slips_files.common.style.green", lambda x: x),
- ):
- mock_get_branch_info.return_value = False
- main.print_version()
- mock_print.assert_called_once_with("Slips. Version 1.0.0")
-
-
-def test_prepare_output_dir_with_o_flag(mock_db):
- main = ModuleFactory().create_main_obj(mock_db)
+def test_prepare_output_dir_with_o_flag():
+ main = ModuleFactory().create_main_obj()
main.args = MagicMock()
main.args.output = "custom_output_dir"
main.args.testing = False
@@ -560,9 +537,9 @@ def test_prepare_output_dir_with_o_flag(mock_db):
],
)
def test_prepare_output_dir_testing_mode(
- mock_db, testing, filename, expected_call_count
+ testing, filename, expected_call_count
):
- main = ModuleFactory().create_main_obj(mock_db)
+ main = ModuleFactory().create_main_obj()
main.args = MagicMock()
main.args.output = "test_output"
main.args.testing = testing
diff --git a/tests/test_markov_chain.py b/tests/test_markov_chain.py
new file mode 100644
index 000000000..53bd34ee5
--- /dev/null
+++ b/tests/test_markov_chain.py
@@ -0,0 +1,125 @@
+import pytest
+from tests.module_factory import ModuleFactory
+from slips_files.common.markov_chains import (
+ maximum_likelihood_probabilities,
+ Matrix,
+)
+import math
+
+
+@pytest.mark.parametrize(
+ "init_vector",
+ [
+ # testcase1: basic two-state vector
+ ({"A": 0.3, "B": 0.7}),
+ # testcase2: three-state vector
+ ({"X": 0.5, "Y": 0.2, "Z": 0.3}),
+ # testcase3: empty vector
+ ({}),
+ ],
+)
+def test_set_init_vector(init_vector):
+ matrix = ModuleFactory().create_markov_chain_obj()
+ matrix.set_init_vector(init_vector)
+ assert hasattr(matrix, "init_vector")
+ assert matrix.init_vector == init_vector
+
+
+@pytest.mark.parametrize(
+ "init_vector",
+ [
+ # testcase1: basic two-state vector
+ ({"A": 0.3, "B": 0.7}),
+ # testcase2: three-state vector
+ ({"X": 0.5, "Y": 0.2, "Z": 0.3}),
+ # testcase3: empty vector
+ ({}),
+ ],
+)
+def test_get_init_vector(init_vector):
+ matrix = ModuleFactory().create_markov_chain_obj()
+ matrix.set_init_vector(init_vector)
+ retrieved_vector = matrix.get_init_vector()
+ assert retrieved_vector == init_vector
+
+
+def test_get_init_vector_before_setting():
+ matrix = ModuleFactory().create_markov_chain_obj()
+ with pytest.raises(AttributeError):
+ matrix.get_init_vector()
+
+
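+# walk_probability() is expected to return the sum of the log-probabilities of
+# every consecutive transition in the given state list, -inf when a transition
+# is missing from the matrix, and 0 when there is nothing to walk.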
+@pytest.mark.parametrize(
+ "matrix_data, states, expected_prob",
+ [
+ # testcase1: simple chain
+ (
+ {("A", "B"): 0.7, ("B", "A"): 0.3},
+ ["A", "B", "A"],
+ math.log(0.7) + math.log(0.3),
+ ),
+ # testcase2: longer chain
+ (
+ {("X", "Y"): 0.5, ("Y", "Z"): 0.6, ("Z", "X"): 0.4},
+ ["X", "Y", "Z", "X"],
+ math.log(0.5) + math.log(0.6) + math.log(0.4),
+ ),
+ # testcase3: self-loop
+ ({("A", "A"): 1.0}, ["A", "A", "A"], math.log(1.0) + math.log(1.0)),
+ # testcase4: non-existent transition
+ ({("A", "B"): 0.5}, ["C", "D"], float("-inf")),
+ # testcase5: single state (no transition)
+ ({}, ["A"], 0),
+ ],
+)
+def test_walk_probability(matrix_data, states, expected_prob):
+ matrix = ModuleFactory().create_markov_chain_obj()
+ matrix.update(matrix_data)
+ prob = matrix.walk_probability(states)
+ assert math.isclose(prob, expected_prob)
+
+
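+# maximum_likelihood_probabilities() is expected to estimate the init vector
+# from how often each state appears as the source of a transition, and the
+# matrix from the observed transition frequencies (all 1.0 in these chains).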
+@pytest.mark.parametrize(
+ "states, order, " "expected_init_vector, " "expected_matrix",
+ [
+ # testcase1: cyclic-chain
+ (
+ ["X", "Y", "Z", "X", "Y", "Z"],
+ 1,
+ {"X": 0.4, "Y": 0.4, "Z": 0.2},
+ {("X", "Y"): 1.0, ("Y", "Z"): 1.0, ("Z", "X"): 1.0},
+ ),
+ # testcase2: self-loop
+ (
+ ["A", "A", "A", "A"],
+ 1,
+ {"A": 1.0},
+ {("A", "A"): 1.0},
+ ),
+ # testcase3: empty chain
+ (
+ [],
+ 1,
+ {},
+ {},
+ ),
+ ],
+)
+def test_maximum_likelihood_probabilities(
+ states, order, expected_init_vector, expected_matrix
+):
+ init_vector, matrix = maximum_likelihood_probabilities(states, order)
+
+ assert isinstance(matrix, Matrix)
+ for key, value in expected_init_vector.items():
+ assert key in init_vector
+ assert math.isclose(init_vector[key], value, rel_tol=1e-9)
+ for key, value in expected_matrix.items():
+ assert key in matrix
+ assert math.isclose(matrix[key], value, rel_tol=1e-9)
+
+ assert len(matrix) == len(expected_matrix)
+ matrix_init_vector = matrix.get_init_vector()
+ for key, value in expected_init_vector.items():
+ assert key in matrix_init_vector
+ assert math.isclose(matrix_init_vector[key], value, rel_tol=1e-9)
diff --git a/tests/test_network_discovery.py b/tests/test_network_discovery.py
index 9bedf8dd8..a3e9b774b 100644
--- a/tests/test_network_discovery.py
+++ b/tests/test_network_discovery.py
@@ -1,5 +1,7 @@
import pytest
-from unittest.mock import patch
+from unittest.mock import (
+ Mock,
+)
from slips_files.core.evidence_structure.evidence import (
Victim,
EvidenceType,
@@ -45,8 +47,6 @@
],
)
def test_check_icmp_sweep_valid_scans(
- mocker,
- mock_db,
msg,
note,
profileid,
@@ -55,19 +55,16 @@ def test_check_icmp_sweep_valid_scans(
timestamp,
expected_evidence_type,
):
- network_discovery = ModuleFactory().create_network_discovery_obj(mock_db)
-
- mock_set_evidence = mocker.patch.object(
- network_discovery.db, "set_evidence"
- )
+ network_discovery = ModuleFactory().create_network_discovery_obj()
+ network_discovery.db.set_evidence = Mock()
network_discovery.check_icmp_sweep(
msg, note, profileid, uid, twid, timestamp
)
- assert mock_set_evidence.call_count == 1
+ assert network_discovery.db.set_evidence.call_count == 1
- called_evidence = mock_set_evidence.call_args[0][0]
+ called_evidence = network_discovery.db.set_evidence.call_args[0][0]
assert called_evidence.evidence_type == expected_evidence_type
assert called_evidence.attacker.value == profileid.split("_")[1]
assert called_evidence.profile.ip == profileid.split("_")[1]
@@ -78,12 +75,10 @@ def test_check_icmp_sweep_valid_scans(
assert called_evidence.timestamp == timestamp
-def test_check_icmp_sweep_unsupported_scan(mocker, mock_db):
- network_discovery = ModuleFactory().create_network_discovery_obj(mock_db)
+def test_check_icmp_sweep_unsupported_scan():
+ network_discovery = ModuleFactory().create_network_discovery_obj()
- mock_set_evidence = mocker.patch.object(
- network_discovery.db, "set_evidence"
- )
+ network_discovery.db.set_evidence = Mock()
network_discovery.check_icmp_sweep(
msg="Some other scan detected on 20 hosts",
@@ -94,7 +89,7 @@ def test_check_icmp_sweep_unsupported_scan(mocker, mock_db):
timestamp="2023-04-01 15:00:00",
)
- assert mock_set_evidence.call_count == 0
+ assert network_discovery.db.set_evidence.call_count == 0
@pytest.mark.parametrize(
@@ -139,35 +134,31 @@ def test_check_icmp_sweep_unsupported_scan(mocker, mock_db):
],
)
def test_check_dhcp_scan_no_evidence(
- mocker,
- mock_db,
flow_info,
existing_dhcp_flows,
expected_set_dhcp_flow_calls,
expected_get_dhcp_flows_calls,
):
- network_discovery = ModuleFactory().create_network_discovery_obj(mock_db)
+ network_discovery = ModuleFactory().create_network_discovery_obj()
network_discovery.minimum_requested_addrs = 4
- mock_get_dhcp_flows = mocker.patch.object(
- network_discovery.db,
- "get_dhcp_flows",
- return_value=existing_dhcp_flows,
- )
- mock_set_dhcp_flow = mocker.patch.object(
- network_discovery.db, "set_dhcp_flow"
- )
- mock_set_evidence = mocker.patch.object(
- network_discovery.db, "set_evidence"
- )
-
+ network_discovery.db.get_dhcp_flows = Mock()
+ network_discovery.db.get_dhcp_flows.return_value = existing_dhcp_flows
+ network_discovery.db.set_dhcp_flow = Mock()
+ network_discovery.db.set_evidence = Mock()
network_discovery.check_dhcp_scan(flow_info)
- assert mock_get_dhcp_flows.call_count == expected_get_dhcp_flows_calls
- assert mock_set_dhcp_flow.call_count == expected_set_dhcp_flow_calls
- assert mock_set_evidence.call_count == 0
+ assert (
+ network_discovery.db.get_dhcp_flows.call_count
+ == expected_get_dhcp_flows_calls
+ )
+ assert (
+ network_discovery.db.set_dhcp_flow.call_count
+ == expected_set_dhcp_flow_calls
+ )
+ assert network_discovery.db.set_evidence.call_count == 0
- mock_set_dhcp_flow.assert_called_with(
+ network_discovery.db.set_dhcp_flow.assert_called_with(
flow_info["profileid"],
flow_info["twid"],
flow_info["flow"]["requested_addr"],
@@ -175,7 +166,7 @@ def test_check_dhcp_scan_no_evidence(
)
-def test_check_dhcp_scan_with_evidence(mocker, mock_db):
+def test_check_dhcp_scan_with_evidence():
flow_info = {
"flow": {
"requested_addr": "192.168.1.104",
@@ -192,35 +183,27 @@ def test_check_dhcp_scan_with_evidence(mocker, mock_db):
"192.168.1.103": ["uid7890"],
}
- network_discovery = ModuleFactory().create_network_discovery_obj(mock_db)
+ network_discovery = ModuleFactory().create_network_discovery_obj()
network_discovery.minimum_requested_addrs = 4
- mock_get_dhcp_flows = mocker.patch.object(
- network_discovery.db,
- "get_dhcp_flows",
- return_value=existing_dhcp_flows,
- )
- mock_set_dhcp_flow = mocker.patch.object(
- network_discovery.db, "set_dhcp_flow"
- )
- mock_set_evidence = mocker.patch.object(
- network_discovery.db, "set_evidence"
- )
-
+ network_discovery.db.get_dhcp_flows = Mock()
+ network_discovery.db.get_dhcp_flows.return_value = existing_dhcp_flows
+ network_discovery.db.set_dhcp_flow = Mock()
+ network_discovery.db.set_evidence = Mock()
network_discovery.check_dhcp_scan(flow_info)
- assert mock_get_dhcp_flows.call_count == 2
- assert mock_set_dhcp_flow.call_count == 1
- assert mock_set_evidence.call_count == 1
+ assert network_discovery.db.get_dhcp_flows.call_count == 2
+ assert network_discovery.db.set_dhcp_flow.call_count == 1
+ assert network_discovery.db.set_evidence.call_count == 1
- mock_set_dhcp_flow.assert_called_with(
+ network_discovery.db.set_dhcp_flow.assert_called_with(
flow_info["profileid"],
flow_info["twid"],
flow_info["flow"]["requested_addr"],
flow_info["flow"]["uids"],
)
- called_evidence = mock_set_evidence.call_args[0][0]
+ called_evidence = network_discovery.db.set_evidence.call_args[0][0]
assert called_evidence.evidence_type == EvidenceType.DHCP_SCAN
assert (
called_evidence.attacker.value == flow_info["profileid"].split("_")[-1]
@@ -299,8 +282,6 @@ def test_check_dhcp_scan_with_evidence(mocker, mock_db):
],
)
def test_set_evidence_icmpscan(
- mocker,
- mock_db,
number_of_scanned_ips,
timestamp,
pkts_sent,
@@ -313,12 +294,8 @@ def test_set_evidence_icmpscan(
expected_description,
expected_victim,
):
- network_discovery = ModuleFactory().create_network_discovery_obj(mock_db)
-
- mock_set_evidence = mocker.patch.object(
- network_discovery.db, "set_evidence"
- )
-
+ network_discovery = ModuleFactory().create_network_discovery_obj()
+ network_discovery.db.set_evidence = Mock()
network_discovery.set_evidence_icmpscan(
number_of_scanned_ips,
timestamp,
@@ -331,9 +308,9 @@ def test_set_evidence_icmpscan(
scanned_ip,
)
- assert mock_set_evidence.call_count == 1
+ assert network_discovery.db.set_evidence.call_count == 1
- called_evidence = mock_set_evidence.call_args[0][0]
+ called_evidence = network_discovery.db.set_evidence.call_args[0][0]
assert called_evidence.evidence_type == attack
assert called_evidence.attacker.value == profileid.split("_")[1]
assert called_evidence.profile.ip == profileid.split("_")[1]
@@ -383,8 +360,6 @@ def test_set_evidence_icmpscan(
],
)
def test_set_evidence_dhcp_scan(
- mocker,
- mock_db,
timestamp,
profileid,
twid,
@@ -392,19 +367,15 @@ def test_set_evidence_dhcp_scan(
number_of_requested_addrs,
expected_description,
):
- network_discovery = ModuleFactory().create_network_discovery_obj(mock_db)
-
- mock_set_evidence = mocker.patch.object(
- network_discovery.db, "set_evidence"
- )
-
+ network_discovery = ModuleFactory().create_network_discovery_obj()
+ network_discovery.db.set_evidence = Mock()
network_discovery.set_evidence_dhcp_scan(
timestamp, profileid, twid, uids, number_of_requested_addrs
)
- assert mock_set_evidence.call_count == 1
+ assert network_discovery.db.set_evidence.call_count == 1
- called_evidence = mock_set_evidence.call_args[0][0]
+ called_evidence = network_discovery.db.set_evidence.call_args[0][0]
assert called_evidence.evidence_type == EvidenceType.DHCP_SCAN
assert called_evidence.attacker.value == profileid.split("_")[-1]
assert called_evidence.profile.ip == profileid.split("_")[-1]
@@ -542,39 +513,29 @@ def test_set_evidence_dhcp_scan(
],
)
def test_check_icmp_scan(
- mocker,
- mock_db,
profileid,
twid,
sports,
expected_set_evidence_calls,
expected_cache_det_thresholds,
):
- with patch(
- "modules.network_discovery.network_discovery."
- "NetworkDiscovery.__init__",
- return_value=None,
- ):
- network_discovery = ModuleFactory().create_network_discovery_obj(
- mock_db
- )
- network_discovery.pingscan_minimum_flows = 5
- network_discovery.pingscan_minimum_scanned_ips = 5
- network_discovery.cache_det_thresholds = {}
- mock_get_data_from_profile_tw = mocker.patch.object(
- network_discovery.db,
- "get_data_from_profile_tw",
- return_value=sports,
- )
- mock_set_evidence = mocker.patch.object(
- network_discovery.db, "set_evidence"
- )
-
- network_discovery.check_icmp_scan(profileid, twid)
-
- assert mock_get_data_from_profile_tw.call_count == 1
- assert mock_set_evidence.call_count == expected_set_evidence_calls
- assert (
- network_discovery.cache_det_thresholds
- == expected_cache_det_thresholds
- )
+ network_discovery = ModuleFactory().create_network_discovery_obj()
+ network_discovery.pingscan_minimum_flows = 5
+ network_discovery.pingscan_minimum_scanned_ips = 5
+ network_discovery.cache_det_thresholds = {}
+
+ network_discovery.db.get_data_from_profile_tw = Mock()
+ network_discovery.db.get_data_from_profile_tw.return_value = sports
+
+ network_discovery.db.set_evidence = Mock()
+
+ network_discovery.check_icmp_scan(profileid, twid)
+
+ assert network_discovery.db.get_data_from_profile_tw.call_count == 1
+ assert (
+ network_discovery.db.set_evidence.call_count
+ == expected_set_evidence_calls
+ )
+ assert (
+ network_discovery.cache_det_thresholds == expected_cache_det_thresholds
+ )
diff --git a/tests/test_notice.py b/tests/test_notice.py
index 35affb260..f06aa2eac 100644
--- a/tests/test_notice.py
+++ b/tests/test_notice.py
@@ -1,5 +1,7 @@
"""Unit test for modules/flowalerts/notice.py"""
+from unittest.mock import Mock
+
from tests.module_factory import ModuleFactory
import json
import pytest
@@ -29,25 +31,14 @@
),
],
)
-def test_check_vertical_portscan(mock_db, mocker, flow, expected_call_count):
- notice = ModuleFactory().create_notice_analyzer_obj(mock_db)
- mock_vertical_portscan = mocker.patch.object(
- notice.set_evidence, "vertical_portscan"
- )
-
+def test_check_vertical_portscan(flow, expected_call_count):
+ notice = ModuleFactory().create_notice_analyzer_obj()
+ notice.set_evidence.vertical_portscan = Mock()
notice.check_vertical_portscan(flow, "test_uid", "test_twid")
- assert mock_vertical_portscan.call_count == expected_call_count
- expected_calls = [
- mocker.call(
- flow["msg"],
- flow.get("scanning_ip", ""),
- flow["stime"],
- "test_twid",
- "test_uid",
- )
- ] * expected_call_count
- mock_vertical_portscan.assert_has_calls(expected_calls)
+ assert (
+ notice.set_evidence.vertical_portscan.call_count == expected_call_count
+ )
@pytest.mark.parametrize(
@@ -73,8 +64,8 @@ def test_check_vertical_portscan(mock_db, mocker, flow, expected_call_count):
),
],
)
-def test_check_horizontal_portscan(mock_db, mocker, flow, expected_call_count):
- notice = ModuleFactory().create_notice_analyzer_obj(mock_db)
+def test_check_horizontal_portscan(mocker, flow, expected_call_count):
+ notice = ModuleFactory().create_notice_analyzer_obj()
mock_horizontal_portscan = mocker.patch.object(
notice.set_evidence, "horizontal_portscan"
)
@@ -119,8 +110,8 @@ def test_check_horizontal_portscan(mock_db, mocker, flow, expected_call_count):
),
],
)
-def test_check_password_guessing(mock_db, mocker, flow, expected_call_count):
- notice = ModuleFactory().create_notice_analyzer_obj(mock_db)
+def test_check_password_guessing(mocker, flow, expected_call_count):
+ notice = ModuleFactory().create_notice_analyzer_obj()
mock_pw_guessing = mocker.patch.object(notice.set_evidence, "pw_guessing")
notice.check_password_guessing(flow, "test_uid", "test_twid")
@@ -159,22 +150,15 @@ def test_check_password_guessing(mock_db, mocker, flow, expected_call_count):
True,
{"vertical": 1, "horizontal": 1, "password": 1},
),
- # Test case 2: No message
- (None, False, {"vertical": 0, "horizontal": 0, "password": 0}),
],
)
-def test_analyze(mock_db, mocker, msg, expected_result, expected_call_counts):
- notice = ModuleFactory().create_notice_analyzer_obj(mock_db)
- mock_get_msg = mocker.patch.object(
- notice.flowalerts, "get_msg", return_value=msg
- )
+def test_analyze(mocker, msg, expected_result, expected_call_counts):
+ notice = ModuleFactory().create_notice_analyzer_obj()
mock_vertical = mocker.patch.object(notice, "check_vertical_portscan")
mock_horizontal = mocker.patch.object(notice, "check_horizontal_portscan")
mock_password = mocker.patch.object(notice, "check_password_guessing")
-
- result = notice.analyze()
-
- mock_get_msg.assert_called_once_with("new_notice")
+ msg.update({"channel": "new_notice"})
+ result = notice.analyze(msg)
assert mock_vertical.call_count == expected_call_counts["vertical"]
assert mock_horizontal.call_count == expected_call_counts["horizontal"]
assert mock_password.call_count == expected_call_counts["password"]
diff --git a/tests/test_notify.py b/tests/test_notify.py
new file mode 100644
index 000000000..1c9c28f23
--- /dev/null
+++ b/tests/test_notify.py
@@ -0,0 +1,103 @@
+from unittest.mock import patch, MagicMock
+import pytest
+from tests.module_factory import ModuleFactory
+
+
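+# os.system() returns the raw wait status on Unix, so an exit code of 1
+# (the notify-send binary exists but the call failed) maps to 256, while the
+# shell's "command not found" code 127 maps to 32512.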
+@pytest.mark.parametrize(
+ "returncode, expected_result",
+ [
+ # Testcase 1: notify-send is installed
+ (256, True),
+ # Testcase 2: notify-send is not installed
+ (32512, False),
+ # Testcase 3: Other return code (potentially an error)
+ (1, False),
+ ],
+)
+def test_is_notify_send_installed(returncode, expected_result):
+ with patch("os.system") as mock_system:
+ mock_system.return_value = returncode
+ notify = ModuleFactory().create_notify_obj()
+ result = notify.is_notify_send_installed()
+ assert result == expected_result
+
+
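+# When Slips runs as root on Linux, the notify command is expected to be
+# prefixed with "sudo -u <logged-in user>" plus that user's DISPLAY and DBUS
+# session bus address so the popup reaches the user's graphical session.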
+@pytest.mark.parametrize(
+ "system, euid, environ, who_output, " "users, pwd_output, expected_cmd",
+ [
+ # Testcase 1: Non-Linux system
+ ("Darwin", 0, {}, "", [], None, "notify-send -t 5000 "),
+ # Testcase 2: Linux, non-root user
+ ("Linux", 1000, {}, "", [], None, "notify-send -t 5000 "),
+ # Testcase 3: Linux, root user, 'who' command successful
+ (
+ "Linux",
+ 0,
+ {"DISPLAY": ":0"},
+ "testuser tty1 2023-07-25 10:00 (:0)",
+ [],
+ MagicMock(pw_uid=1000),
+ "sudo -u testuser DISPLAY=:0 DBUS_SESSION_BUS_ADDRESS="
+ "unix:path=/run/user/1000/bus notify-send -t 5000 ",
+ ),
+ ],
+)
+def test_setup_notifications(
+ system, euid, environ, who_output, users, pwd_output, expected_cmd
+):
+ with patch("platform.system", return_value=system), patch(
+ "os.geteuid", return_value=euid
+ ), patch(
+ "psutil.Process", return_value=MagicMock(environ=lambda: environ)
+ ), patch(
+ "os.popen", return_value=MagicMock(read=lambda: who_output)
+ ), patch(
+ "psutil.users", return_value=users
+ ), patch(
+ "pwd.getpwnam", return_value=pwd_output
+ ):
+
+ notify = ModuleFactory().create_notify_obj()
+ notify.setup_notifications()
+ assert notify.notify_cmd == expected_cmd
+
+
+@pytest.mark.parametrize(
+ "system, notify_cmd, alert, expected_partial_command",
+ [
+ # Testcase 1: Linux system
+ (
+ "Linux",
+ "notify-send -t 5000 ",
+ "Test alert",
+ '"Slips" "Test alert"',
+ ),
+ # Testcase 2: macOS (Darwin) system
+ (
+ "Darwin",
+ "",
+ "Test alert",
+            'display notification "Test alert" with title "Slips"',
+ ),
+ # Testcase 3: Linux system with custom notify command
+ ("Linux", "custom_notify_cmd ", "Test alert", '"Slips" "Test alert"'),
+ ],
+)
+def test_show_popup(system, notify_cmd, alert, expected_partial_command):
+ with patch("platform.system", return_value=system), patch(
+ "os.system"
+ ) as mock_system:
+
+ notify = ModuleFactory().create_notify_obj()
+ notify.notify_cmd = notify_cmd
+ mock_system.reset_mock()
+
+ notify.show_popup(alert)
+ print(f"Calls to os.system: {mock_system.call_args_list}")
+ assert any(
+ expected_partial_command in str(call)
+ for call in mock_system.call_args_list
+ ), (
+ f"Expected command containing '{expected_partial_command}' "
+ f"not found in calls to os.system"
+ )
diff --git a/tests/test_output.py b/tests/test_output.py
new file mode 100644
index 000000000..e7b5ec5e6
--- /dev/null
+++ b/tests/test_output.py
@@ -0,0 +1,513 @@
+from unittest.mock import MagicMock, mock_open, patch, call as mockedcall
+import pytest
+from tests.module_factory import ModuleFactory
+from pathlib import Path
+import sys
+from io import StringIO
+
+
+@pytest.mark.parametrize(
+ "msg, expected_log_content",
+ [
+ (
+ # Testcase1:Regular message logging
+ {"from": "sender", "txt": "message_text"},
+ "formatted_datetime [sender] message_text\n",
+ ),
+ (
+ # Testcase2:Empty message handling
+ {"from": "sender", "txt": ""},
+ "formatted_datetime [sender] \n",
+ ),
+ (
+ # Testcase3:Message with special characters
+ {"from": "sender", "txt": "Message with !@#$%^&*()_+=-`~"},
+ "formatted_datetime [sender] Message with !@#$%^&*()_+=-`~\n",
+ ),
+ ],
+)
+@patch("slips_files.common.slips_utils.Utils.convert_format")
+def test_log_line(mock_convert_format, msg, expected_log_content):
+ """Test that the log_line method logs the correct message
+ to the slips.log file."""
+ mock_convert_format.return_value = "formatted_datetime"
+
+ output = ModuleFactory().create_output_obj()
+ output.slips_logfile = "path/to/slips.log"
+
+ with patch("builtins.open", mock_open()) as mock_file:
+ output.log_line(msg)
+
+ mock_file.assert_called_once_with("path/to/slips.log", "a")
+ handle = mock_file()
+ handle.write.assert_called_once_with(expected_log_content)
+
+
+def test_print_no_pbar():
+ """Test printing when has_pbar is False."""
+ output = ModuleFactory().create_output_obj()
+ output.has_pbar = False
+ output.tell_pbar = MagicMock()
+ sender = "SenderName"
+ txt = "This is a test message."
+
+ with patch("builtins.print") as mock_print:
+ output.print(sender, txt)
+
+ (mock_print.assert_called_once_with(f"[{sender}] {txt}", end="\n"))
+ output.tell_pbar.assert_not_called()
+
+
+def test_print_pbar_finished():
+ """Test printing when pbar is finished."""
+ output = ModuleFactory().create_output_obj()
+ output.has_pbar = True
+ output.pbar_finished = MagicMock()
+ output.pbar_finished.is_set.return_value = True
+ output.tell_pbar = MagicMock()
+ sender = "SenderName"
+ txt = "This is a test message."
+
+ with patch("builtins.print") as mock_print:
+ output.print(sender, txt)
+
+ (mock_print.assert_called_once_with(f"[{sender}] {txt}", end="\n"))
+ output.tell_pbar.assert_not_called()
+
+
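+# While the progress bar is active, printed lines are expected to be forwarded
+# to the pbar process through tell_pbar() instead of being written to stdout.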
+def test_print_pbar_active_with_sender():
+ """Test printing with active pbar and a sender."""
+ output = ModuleFactory().create_output_obj()
+ output.has_pbar = True
+ output.pbar_finished = MagicMock()
+ output.pbar_finished.is_set.return_value = False
+ output.tell_pbar = MagicMock()
+ sender = "SenderName"
+ txt = "This is a test message."
+
+ with patch("builtins.print") as mock_print:
+ output.print(sender, txt)
+
+ (
+ output.tell_pbar.assert_called_once_with(
+ {"event": "print", "txt": f"[{sender}] {txt}"}
+ )
+ )
+ mock_print.assert_not_called()
+
+
+def test_print_pbar_active_no_sender():
+ """Test printing with active pbar and no sender."""
+ output = ModuleFactory().create_output_obj()
+ output.has_pbar = True
+ output.pbar_finished = MagicMock()
+ output.pbar_finished.is_set.return_value = False
+ output.tell_pbar = MagicMock()
+ sender = ""
+ txt = "This is a message with no sender."
+
+ with patch("builtins.print") as mock_print:
+ output.print(sender, txt)
+
+ (output.tell_pbar.assert_called_once_with({"event": "print", "txt": txt}))
+ mock_print.assert_not_called()
+
+
+def test_handle_printing_stats_pbar_not_finished():
+ """Test when pbar is not finished, stats should be sent to pbar."""
+ output = ModuleFactory().create_output_obj()
+ output.has_pbar = True
+ output.pbar_finished = MagicMock()
+ output.pbar_finished.is_set.return_value = False
+ output.tell_pbar = MagicMock()
+ stats = "Analyzed IPs: 10"
+
+ output.handle_printing_stats(stats)
+
+ output.tell_pbar.assert_called_once_with(
+ {"event": "update_stats", "stats": stats}
+ )
+
+
+def test_handle_printing_stats_pbar_finished():
+ """Test when pbar is finished, stats should be printed directly."""
+ output = ModuleFactory().create_output_obj()
+ output.has_pbar = True
+ output.pbar_finished = MagicMock()
+ output.pbar_finished.is_set.return_value = True
+
+ original_stdout = sys.stdout
+ captured_output = StringIO()
+ sys.stdout = captured_output
+
+ stats = "Analyzed IPs: 20"
+ output.handle_printing_stats(stats)
+
+ sys.stdout = original_stdout
+ assert captured_output.getvalue().strip() == stats
+
+
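+# enough_verbose()/enough_debug() are expected to accept a level only when it
+# is at least 1 and does not exceed the configured (and maximum) level.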
+@pytest.mark.parametrize(
+ "output_verbose, input_verbose, expected_result",
+ [ # Testcase1: Input verbose less than output verbose
+ (2, 1, True),
+ # Testcase2: Input verbose equal to output verbose
+ (2, 2, True),
+ # Testcase3: Input verbose greater than output verbose
+ (2, 3, False),
+ # Testcase4: Input verbose is 0
+ (1, 0, False),
+ # Testcase5: Input verbose is negative
+ (1, -1, False),
+ # Testcase6: Input verbose greater than the maximum
+ (3, 4, False),
+ ],
+)
+def test_enough_verbose(output_verbose, input_verbose, expected_result):
+ """Test that the enough_verbose method returns the correct result."""
+ output = ModuleFactory().create_output_obj()
+ output.verbose = output_verbose
+
+ assert output.enough_verbose(input_verbose) == expected_result
+
+
+@pytest.mark.parametrize(
+ "output_debug, input_debug, expected_result",
+ [ # Testcase1: Input debug less than output debug
+ (2, 1, True),
+ # Testcase2: Input debug equal to output debug
+ (2, 2, True),
+ # Testcase3: Input debug greater than output debug
+ (2, 3, False),
+ # Testcase4: Input debug is 0
+ (1, 0, False),
+ # Testcase5: Input debug is negative
+ (1, -1, False),
+ # Testcase6: Input debug greater than the maximum
+ (3, 4, False),
+ ],
+)
+def test_enough_debug(output_debug, input_debug, expected_result):
+ """Test that the enough_debug method returns the correct result."""
+ output = ModuleFactory().create_output_obj()
+ output.debug = output_debug
+
+ assert output.enough_debug(input_debug) == expected_result
+
+
+@patch("slips_files.core.output.Output.print")
+@patch("slips_files.core.output.Output.log_line")
+@patch("slips_files.core.output.Output.log_error")
+def test_output_line_all_outputs(mock_log_error, mock_log_line, mock_print):
+ output = ModuleFactory().create_output_obj()
+ output.verbose = 2
+ output.debug = 2
+
+ msg = {
+ "from": "SenderName",
+ "txt": "Normal message",
+ "verbose": 2,
+ "debug": 1,
+ }
+
+ output.output_line(msg)
+
+ mock_print.assert_called_with(msg["from"], msg["txt"])
+ mock_log_line.assert_called_with(msg)
+ mock_log_error.assert_called_with(msg)
+
+
+@patch("slips_files.core.output.Output.print")
+@patch("slips_files.core.output.Output.log_line")
+@patch("slips_files.core.output.Output.log_error")
+def test_output_line_no_outputs(mock_log_error, mock_log_line, mock_print):
+ """
+ Test that output_line doesn't print or log when the provided
+ verbose level (3) is higher than the module's verbose level (2).
+ """
+ output = ModuleFactory().create_output_obj()
+ output.verbose = 2
+ output.debug = 2
+
+ msg = {
+ "from": "SenderName",
+ "txt": "High verbose message",
+ "verbose": 3,
+ "debug": 0,
+ }
+
+ output.output_line(msg)
+
+ mock_print.assert_not_called()
+ mock_log_line.assert_not_called()
+ mock_log_error.assert_not_called()
+
+
+@patch("slips_files.core.output.Output.print")
+@patch("slips_files.core.output.Output.log_line")
+@patch("slips_files.core.output.Output.log_error")
+def test_output_line_no_error_log(mock_log_error, mock_log_line, mock_print):
+ output = ModuleFactory().create_output_obj()
+ output.verbose = 2
+ output.debug = 2
+
+ msg = {
+ "from": "SenderName",
+ "txt": "Non-error debug message",
+ "verbose": 1,
+ "debug": 2,
+ }
+
+ output.output_line(msg)
+
+ mock_print.assert_called_with(msg["from"], msg["txt"])
+ mock_log_line.assert_called_with(msg)
+ mock_log_error.assert_not_called()
+
+
+@pytest.mark.parametrize(
+ "is_set_return_value, expected_result",
+ [ # Testcase 1: pbar_finished is set
+ (True, True),
+ # Testcase 2: pbar_finished is not set
+ (False, False),
+ ],
+)
+def test_is_pbar_finished(is_set_return_value, expected_result):
+ """Test that the is_pbar_finished method returns the correct result."""
+ output = ModuleFactory().create_output_obj()
+ output.pbar_finished = MagicMock()
+ output.pbar_finished.is_set.return_value = is_set_return_value
+
+ assert output.is_pbar_finished() == expected_result
+
+
+@pytest.mark.parametrize(
+ "msg, expected_forward_progress_bar_calls, " "expected_output_line_calls",
+ [
+ ( # Testcase 1: msg contains 'bar' key with 'init'
+ {"bar": "init", "bar_info": {"total_flows": 1000}},
+ [{"bar": "init", "bar_info": {"total_flows": 1000}}],
+ [],
+ ),
+ ( # Testcase 2: msg contains 'bar' key with 'update'
+ {"bar": "update"},
+ [{"bar": "update"}],
+ [],
+ ),
+ ( # Testcase 3: msg does not contain 'bar' key
+ {"from": "SenderName", "txt": "This is a test message."},
+ [],
+ [{"from": "SenderName", "txt": "This is a test message."}],
+ ),
+ ( # Testcase 4: Empty message
+ {},
+ [],
+ [{}],
+ ),
+ ],
+)
+def test_update(
+ msg, expected_forward_progress_bar_calls, expected_output_line_calls
+):
+ """Test that the update method handles
+ different cases correctly."""
+ output = ModuleFactory().create_output_obj()
+
+ output.forward_progress_bar_msgs = MagicMock()
+ output.output_line = MagicMock()
+
+ output.update(msg)
+
+ assert output.forward_progress_bar_msgs.call_count == len(
+ expected_forward_progress_bar_calls
+ )
+ for call in expected_forward_progress_bar_calls:
+ output.forward_progress_bar_msgs.assert_any_call(call)
+
+ assert output.output_line.call_count == len(expected_output_line_calls)
+ for call in expected_output_line_calls:
+ output.output_line.assert_any_call(call)
+
+
+def test_update_log_to_logfiles_only():
+ """Test that the update method handles
+ log_to_logfiles_only correctly."""
+ output = ModuleFactory().create_output_obj()
+ output.log_line = MagicMock()
+
+ msg = {
+ "from": "SenderName",
+ "txt": "Log only message",
+ "log_to_logfiles_only": True,
+ }
+ output.update(msg)
+
+ output.log_line.assert_called_once_with(msg)
+
+
+@pytest.mark.parametrize(
+ "msg, expected_call",
+ [
+ ( # Testcase 1: Initialization message
+ {"bar": "init", "bar_info": {"total_flows": 1000}},
+ {"event": "init", "total_flows": 1000},
+ ),
+ ( # Testcase 2: Update message
+ {"bar": "update"},
+ {"event": "update_bar"},
+ ),
+ ],
+)
+def test_forward_progress_bar_msgs_valid(msg, expected_call):
+ """Test valid progress bar messages."""
+ output = ModuleFactory().create_output_obj()
+ output.tell_pbar = MagicMock()
+ output.is_pbar_finished = MagicMock(return_value=False)
+
+ output.forward_progress_bar_msgs(msg)
+
+ output.tell_pbar.assert_called_once_with(expected_call)
+
+
+def test_forward_progress_bar_msgs_update_finished():
+ """Test update message when progress bar is finished."""
+ output = ModuleFactory().create_output_obj()
+ output.tell_pbar = MagicMock()
+ output.is_pbar_finished = MagicMock(return_value=True)
+
+ output.forward_progress_bar_msgs({"bar": "update"})
+
+ output.tell_pbar.assert_not_called()
+
+
+def test_forward_progress_bar_msgs_unknown_bar():
+ """Test message with unknown 'bar' value."""
+ output = ModuleFactory().create_output_obj()
+ output.tell_pbar = MagicMock()
+
+ output.forward_progress_bar_msgs({"bar": "unknown"})
+
+ output.tell_pbar.assert_not_called()
+
+
+def test_tell_pbar():
+ """Test that tell_pbar sends the message through the pipe."""
+ output = ModuleFactory().create_output_obj()
+ output.pbar_sender_pipe = MagicMock()
+
+ msg = {"event": "update", "progress": 50}
+ output.tell_pbar(msg)
+
+ output.pbar_sender_pipe.send.assert_called_once_with(msg)
+
+
+def test_tell_pbar_empty_message():
+ """Test that tell_pbar handles empty messages correctly."""
+ output = ModuleFactory().create_output_obj()
+ output.pbar_sender_pipe = MagicMock()
+
+ msg = {}
+ output.tell_pbar(msg)
+
+ output.pbar_sender_pipe.send.assert_called_once_with(msg)
+
+
+def test_tell_pbar_none_message():
+ """Test that tell_pbar handles None messages correctly."""
+ output = ModuleFactory().create_output_obj()
+ output.pbar_sender_pipe = MagicMock()
+
+ msg = None
+ output.tell_pbar(msg)
+
+ output.pbar_sender_pipe.send.assert_called_once_with(msg)
+
+
+def test_tell_pbar_large_message():
+ """Test that tell_pbar can handle large messages."""
+ output = ModuleFactory().create_output_obj()
+ output.pbar_sender_pipe = MagicMock()
+
+ msg = {"event": "update", "data": "x" * 1000000}
+ output.tell_pbar(msg)
+
+ output.pbar_sender_pipe.send.assert_called_once_with(msg)
+
+
+def test_tell_pbar_multiple_calls():
+ """Test that tell_pbar works correctly
+ for multiple consecutive calls."""
+ output = ModuleFactory().create_output_obj()
+ output.pbar_sender_pipe = MagicMock()
+
+ msgs = [
+ {"event": "init"},
+ {"event": "update", "progress": 25},
+ {"event": "update", "progress": 50},
+ {"event": "update", "progress": 75},
+ {"event": "finish"},
+ ]
+
+ for msg in msgs:
+ output.tell_pbar(msg)
+
+ assert output.pbar_sender_pipe.send.call_count == len(msgs)
+ for msg in msgs:
+ output.pbar_sender_pipe.send.assert_any_call(msg)
+
+
+def test_create_logfile_existing():
+ output = ModuleFactory().create_output_obj()
+ path = "/existing/path/file.log"
+
+ with patch("builtins.open", mock_open()) as mocked_open:
+ with patch.object(Path, "mkdir") as mock_mkdir:
+ output.create_logfile(path)
+
+ mocked_open.assert_called_once_with(path, "a")
+ mock_mkdir.assert_not_called()
+ mocked_open().close.assert_called_once()
+
+
+def test_create_logfile_new():
+ output = ModuleFactory().create_output_obj()
+ path = "/new/path/newfile.log"
+ mock_file_error = mock_open()
+ mock_file_error.side_effect = FileNotFoundError
+ mock_file_success = mock_open()
+
+ open_mocks = [mock_file_error, mock_file_success]
+
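+    # the first open() raises FileNotFoundError (missing parent directory);
+    # after create_logfile() creates the directory, the second open() succeeds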
+ def side_effect(*args, **kwargs):
+ return open_mocks.pop(0)()
+
+ with patch("builtins.open", side_effect=side_effect) as mocked_open:
+ with patch.object(Path, "mkdir") as mock_mkdir:
+ with patch("os.path.dirname", return_value="/new/path"):
+ output.create_logfile(path)
+
+ assert mocked_open.call_count == 2
+ mocked_open.assert_has_calls(
+ [mockedcall(path, "a"), mockedcall(path, "w")]
+ )
+ mock_mkdir.assert_called_once_with(parents=True, exist_ok=True)
+ mock_file_success().close.assert_called_once()
+
+
+def test_create_logfile_permission_error():
+ output = ModuleFactory().create_output_obj()
+ path = "/root/restricted.log"
+
+ with patch("builtins.open", side_effect=PermissionError):
+ with pytest.raises(PermissionError):
+ output.create_logfile(path)
+
+
+def test_create_logfile_disk_full():
+ output = ModuleFactory().create_output_obj()
+ path = "/mnt/full_disk/file.log"
+
+ with patch("builtins.open", side_effect=IOError):
+ with pytest.raises(IOError):
+ output.create_logfile(path)
diff --git a/tests/test_profiler.py b/tests/test_profiler.py
index cdd025cc9..e4efcdf95 100644
--- a/tests/test_profiler.py
+++ b/tests/test_profiler.py
@@ -1,4 +1,5 @@
"""Unit test for slips_files/core/performance_profiler.py"""
+
from unittest.mock import Mock
from tests.module_factory import ModuleFactory
@@ -9,7 +10,7 @@
from slips_files.core.profiler import SUPPORTED_INPUT_TYPES, SEPARATORS
from slips_files.core.flows.zeek import Conn
import ipaddress
-from unittest.mock import Mock, patch
+from unittest.mock import patch
import queue
@@ -17,8 +18,12 @@
"file,input_type,expected_value",
[("dataset/test6-malicious.suricata.json", "suricata", "suricata")],
)
-def test_define_separator_suricata(file, input_type, expected_value, mock_db):
- profilerProcess = ModuleFactory().create_profiler_obj(mock_db)
+def test_define_separator_suricata(
+ file,
+ input_type,
+ expected_value,
+):
+ profiler = ModuleFactory().create_profiler_obj()
with open(file) as f:
while True:
sample_flow = f.readline().replace("\n", "")
@@ -29,7 +34,7 @@ def test_define_separator_suricata(file, input_type, expected_value, mock_db):
sample_flow = {
"data": sample_flow,
}
- profiler_detected_type: str = profilerProcess.define_separator(
+ profiler_detected_type: str = profiler.define_separator(
sample_flow, input_type
)
assert profiler_detected_type == expected_value
@@ -39,8 +44,12 @@ def test_define_separator_suricata(file, input_type, expected_value, mock_db):
"file,input_type,expected_value",
[("dataset/test10-mixed-zeek-dir/conn.log", "zeek_log_file", "zeek-tabs")],
)
-def test_define_separator_zeek_tab(file, input_type, expected_value, mock_db):
- profilerProcess = ModuleFactory().create_profiler_obj(mock_db)
+def test_define_separator_zeek_tab(
+ file,
+ input_type,
+ expected_value,
+):
+ profiler = ModuleFactory().create_profiler_obj()
with open(file) as f:
while True:
sample_flow = f.readline().replace("\n", "")
@@ -51,7 +60,7 @@ def test_define_separator_zeek_tab(file, input_type, expected_value, mock_db):
sample_flow = {
"data": sample_flow,
}
- profiler_detected_type: str = profilerProcess.define_separator(
+ profiler_detected_type: str = profiler.define_separator(
sample_flow, input_type
)
assert profiler_detected_type == expected_value
@@ -61,12 +70,16 @@ def test_define_separator_zeek_tab(file, input_type, expected_value, mock_db):
"file, input_type,expected_value",
[("dataset/test9-mixed-zeek-dir/conn.log", "zeek_log_file", "zeek")],
)
-def test_define_separator_zeek_dict(file, input_type, expected_value, mock_db):
+def test_define_separator_zeek_dict(
+ file,
+ input_type,
+ expected_value,
+):
"""
:param input_type: as determined by slips.py
"""
- profilerProcess = ModuleFactory().create_profiler_obj(mock_db)
+ profiler = ModuleFactory().create_profiler_obj()
with open(file) as f:
sample_flow = f.readline().replace("\n", "")
@@ -74,14 +87,16 @@ def test_define_separator_zeek_dict(file, input_type, expected_value, mock_db):
sample_flow = {
"data": sample_flow,
}
- profiler_detected_type: str = profilerProcess.define_separator(
+ profiler_detected_type: str = profiler.define_separator(
sample_flow, input_type
)
assert profiler_detected_type == expected_value
@pytest.mark.parametrize("nfdump_file", [("dataset/test1-normal.nfdump")])
-def test_define_separator_nfdump(nfdump_file, mock_db):
+def test_define_separator_nfdump(
+ nfdump_file,
+):
# nfdump files aren't text files so we need to process them first
command = f"nfdump -b -N -o csv -q -r {nfdump_file}"
# Execute command
@@ -98,11 +113,11 @@ def test_define_separator_nfdump(nfdump_file, mock_db):
else:
break
- profilerProcess = ModuleFactory().create_profiler_obj(mock_db)
+ profiler = ModuleFactory().create_profiler_obj()
sample_flow = {
"data": nfdump_line,
}
- profiler_detected_type: str = profilerProcess.define_separator(
+ profiler_detected_type: str = profiler.define_separator(
sample_flow, input_type
)
assert profiler_detected_type == "nfdump"
@@ -133,10 +148,10 @@ def test_define_separator_nfdump(nfdump_file, mock_db):
# line = f.readline()
# if line.startswith('#fields'):
# break
-# profilerProcess = ModuleFactory().create_profiler_obj(mock_db)
+# profiler = ModuleFactory().create_profiler_obj()
# line = {'data': line}
-# profilerProcess.separator = separator
-# assert profilerProcess.define_columns(line) == expected_value
+# profiler.separator = separator
+# assert profiler.define_columns(line) == expected_value
# pcaps are treated as zeek files in slips, no need to test twice
@@ -153,8 +168,11 @@ def test_define_separator_nfdump(nfdump_file, mock_db):
# ('dataset/test9-mixed-zeek-dir/files.log', 'files.log'),
],
)
-def test_process_line(file, flow_type, mock_db):
- profiler = ModuleFactory().create_profiler_obj(mock_db)
+def test_process_line(
+ file,
+ flow_type,
+):
+ profiler = ModuleFactory().create_profiler_obj()
profiler.symbol = Mock()
# we're testing another functionality here
profiler.whitelist.is_whitelisted_flow = do_nothing
@@ -191,8 +209,8 @@ def test_process_line(file, flow_type, mock_db):
assert added_flow is not None
-def test_get_rev_profile(mock_db):
- profiler = ModuleFactory().create_profiler_obj(mock_db)
+def test_get_rev_profile():
+ profiler = ModuleFactory().create_profiler_obj()
profiler.flow = Conn(
"1.0",
"1234",
@@ -212,80 +230,103 @@ def test_get_rev_profile(mock_db):
"Established",
"",
)
- mock_db.get_profileid_from_ip.return_value = None
- mock_db.get_timewindow.return_value = "timewindow1"
+ profiler.db.get_profileid_from_ip.return_value = None
+ profiler.db.get_timewindow.return_value = "timewindow1"
assert profiler.get_rev_profile() == ("profile_8.8.8.8", "timewindow1")
-def test_get_rev_profile_no_daddr(flow, mock_db):
- profiler = ModuleFactory().create_profiler_obj(mock_db)
+def test_get_rev_profile_no_daddr(
+ flow,
+):
+ profiler = ModuleFactory().create_profiler_obj()
profiler.flow = flow
profiler.flow.daddr = None
profiler.daddr_as_obj = None
assert profiler.get_rev_profile() == (False, False)
-def test_get_rev_profile_existing_profileid(mock_db):
- profiler = ModuleFactory().create_profiler_obj(mock_db)
+
+def test_get_rev_profile_existing_profileid():
+ profiler = ModuleFactory().create_profiler_obj()
profiler.flow = Conn(
- '1.0',
- '1234',
- '192.168.1.1',
- '8.8.8.8',
- 5,
- 'TCP',
- 'dhcp',
- 80,88,
- 20,20,
- 20,20,
- '','',
- 'Established',''
- )
- mock_db.get_profileid_from_ip.return_value = "existing_profile"
- mock_db.get_timewindow.return_value = "existing_timewindow"
- assert profiler.get_rev_profile() == ("existing_profile", "existing_timewindow")
-
-
-def test_get_rev_profile_no_timewindow(mock_db):
- profiler = ModuleFactory().create_profiler_obj(mock_db)
+ "1.0",
+ "1234",
+ "192.168.1.1",
+ "8.8.8.8",
+ 5,
+ "TCP",
+ "dhcp",
+ 80,
+ 88,
+ 20,
+ 20,
+ 20,
+ 20,
+ "",
+ "",
+ "Established",
+ "",
+ )
+ profiler.db.get_profileid_from_ip.return_value = "existing_profile"
+ profiler.db.get_timewindow.return_value = "existing_timewindow"
+ assert profiler.get_rev_profile() == (
+ "existing_profile",
+ "existing_timewindow",
+ )
+
+
+def test_get_rev_profile_no_timewindow():
+ profiler = ModuleFactory().create_profiler_obj()
profiler.flow = Conn(
- '1.0',
- '1234',
- '192.168.1.1',
- '8.8.8.8',
- 5,
- 'TCP',
- 'dhcp',
- 80, 88,
- 20, 20,
- 20, 20,
- '', '',
- 'Established', ''
- )
- mock_db.get_profileid_from_ip.return_value = "profile_8.8.8.8"
- mock_db.get_timewindow.return_value = None
+ "1.0",
+ "1234",
+ "192.168.1.1",
+ "8.8.8.8",
+ 5,
+ "TCP",
+ "dhcp",
+ 80,
+ 88,
+ 20,
+ 20,
+ 20,
+ 20,
+ "",
+ "",
+ "Established",
+ "",
+ )
+ profiler.db.get_profileid_from_ip.return_value = "profile_8.8.8.8"
+ profiler.db.get_timewindow.return_value = None
profile_id, tw_id = profiler.get_rev_profile()
assert profile_id == "profile_8.8.8.8"
assert tw_id is None
-
-def test_define_separator_direct_support(mock_db):
- profiler = ModuleFactory().create_profiler_obj(mock_db)
- sample_flow = {'data': 'some_data'}
- input_type = 'nfdump'
+
+
+def test_define_separator_direct_support():
+ profiler = ModuleFactory().create_profiler_obj()
+ sample_flow = {"data": "some_data"}
+ input_type = "nfdump"
separator = profiler.define_separator(sample_flow, input_type)
- assert separator == 'nfdump'
-
-
-@pytest.mark.parametrize('client_ips, expected_private_ips', [
- (['192.168.1.1', '10.0.0.1'], ['192.168.1.1', '10.0.0.1']),
- (['8.8.8.8', '1.1.1.1'], []),
- (['192.168.1.1', '8.8.8.8'], ['192.168.1.1']),
-])
-def test_get_private_client_ips(client_ips, expected_private_ips, mock_db, monkeypatch):
- profiler = ModuleFactory().create_profiler_obj(mock_db)
+ assert separator == "nfdump"
+
+
+@pytest.mark.parametrize(
+ "client_ips, expected_private_ips",
+ [
+ (["192.168.1.1", "10.0.0.1"], ["192.168.1.1", "10.0.0.1"]),
+ (["8.8.8.8", "1.1.1.1"], []),
+ (["192.168.1.1", "8.8.8.8"], ["192.168.1.1"]),
+ ],
+)
+def test_get_private_client_ips(client_ips, expected_private_ips, monkeypatch):
+ profiler = ModuleFactory().create_profiler_obj()
profiler.client_ips = client_ips
- with patch('slips_files.core.profiler.utils.is_private_ip') as mock_is_private_ip:
+ with patch(
+ "slips_files.core.profiler.utils.is_private_ip"
+ ) as mock_is_private_ip:
+
def is_private_ip(ip):
ip_obj = ipaddress.ip_address(ip)
return ipaddress.ip_address(ip_obj).is_private
@@ -294,30 +335,40 @@ def is_private_ip(ip):
private_ips = profiler.get_private_client_ips()
assert set(private_ips) == set(expected_private_ips)
-
-def test_convert_starttime_to_epoch(mock_db):
- profiler = ModuleFactory().create_profiler_obj(mock_db)
+
+
+def test_convert_starttime_to_epoch():
+ profiler = ModuleFactory().create_profiler_obj()
profiler.flow = Mock()
profiler.flow.starttime = "2023-04-04 12:00:00"
- with patch('slips_files.core.profiler.utils.convert_format') as mock_convert_format:
- mock_convert_format.return_value = 1680604800
+ with patch(
+ "slips_files.core.profiler.utils.convert_format"
+ ) as mock_convert_format:
+ mock_convert_format.return_value = 1680604800
profiler.convert_starttime_to_epoch()
- mock_convert_format.assert_called_once_with("2023-04-04 12:00:00", "unixtimestamp")
- assert profiler.flow.starttime == 1680604800
-
-def test_convert_starttime_to_epoch_invalid_format(mock_db, monkeypatch):
- profiler = ModuleFactory().create_profiler_obj(mock_db)
+ mock_convert_format.assert_called_once_with(
+ "2023-04-04 12:00:00", "unixtimestamp"
+ )
+ assert profiler.flow.starttime == 1680604800
+
+
+def test_convert_starttime_to_epoch_invalid_format(monkeypatch):
+ profiler = ModuleFactory().create_profiler_obj()
profiler.flow = Mock()
profiler.flow.starttime = "not a real time"
- monkeypatch.setattr('slips_files.core.profiler.utils.convert_format', Mock(side_effect=ValueError))
+ monkeypatch.setattr(
+ "slips_files.core.profiler.utils.convert_format",
+ Mock(side_effect=ValueError),
+ )
profiler.convert_starttime_to_epoch()
- assert profiler.flow.starttime == "not a real time"
-
-def test_should_set_localnet(mock_db):
- profiler = ModuleFactory().create_profiler_obj(mock_db)
+ assert profiler.flow.starttime == "not a real time"
+
+
+def test_should_set_localnet():
+ profiler = ModuleFactory().create_profiler_obj()
profiler.flow = Mock()
profiler.flow.saddr = "192.168.1.1"
@@ -330,15 +381,17 @@ def test_should_set_localnet(mock_db):
profiler.is_localnet_set = False
profiler.flow.saddr = "8.8.8.8"
assert profiler.should_set_localnet() is False
-
-def test_should_set_localnet_already_set(mock_db):
- profiler = ModuleFactory().create_profiler_obj(mock_db)
- profiler.is_localnet_set = True
+
+
+def test_should_set_localnet_already_set():
+ profiler = ModuleFactory().create_profiler_obj()
+ profiler.is_localnet_set = True
result = profiler.should_set_localnet()
- assert result is False
+ assert result is False
+
-def test_check_for_stop_msg(mock_db, monkeypatch):
- profiler = ModuleFactory().create_profiler_obj(mock_db)
+def test_check_for_stop_msg(monkeypatch):
+ profiler = ModuleFactory().create_profiler_obj()
monkeypatch.setattr(profiler, "shutdown_gracefully", Mock())
monkeypatch.setattr(profiler, "is_done_processing", Mock())
assert profiler.check_for_stop_msg("stop") is True
@@ -346,33 +399,40 @@ def test_check_for_stop_msg(mock_db, monkeypatch):
profiler.is_done_processing.assert_called_once()
assert profiler.check_for_stop_msg("not_stop") is False
-
-def test_pre_main(mock_db, monkeypatch):
- profiler = ModuleFactory().create_profiler_obj(mock_db)
+
+
+def test_pre_main(monkeypatch):
+ profiler = ModuleFactory().create_profiler_obj()
with monkeypatch.context() as m:
mock_drop_root_privs = Mock()
- m.setattr("slips_files.core.profiler.utils.drop_root_privs", mock_drop_root_privs)
+ m.setattr(
+ "slips_files.core.profiler.utils.drop_root_privs",
+ mock_drop_root_privs,
+ )
profiler.pre_main()
mock_drop_root_privs.assert_called_once()
-
-def test_main(mock_db, monkeypatch):
- profiler = ModuleFactory().create_profiler_obj(mock_db)
+
+
+def test_main_stop_msg_received():
+ profiler = ModuleFactory().create_profiler_obj()
+ profiler.should_stop = Mock(side_effect=[False, True])
+
profiler.profiler_queue = Mock(spec=queue.Queue)
- profiler.profiler_queue.get.side_effect = ["stop"]
- profiler.check_for_stop_msg = Mock(return_value=True)
+ profiler.profiler_queue.get.return_value = ["stop"]
+
+ stopped = profiler.main()
+ assert stopped
- profiler.main()
- profiler.check_for_stop_msg.assert_called()
-
-
def mock_print(*args, **kwargs):
pass
-def test_is_done_processing(mock_db, monkeypatch):
- profiler = ModuleFactory().create_profiler_obj(mock_db)
+
+def test_is_done_processing(monkeypatch):
+ profiler = ModuleFactory().create_profiler_obj()
profiler.done_processing = Mock()
profiler.is_profiler_done_event = Mock()
@@ -382,32 +442,34 @@ def test_is_done_processing(mock_db, monkeypatch):
profiler.done_processing.release.assert_called_once()
profiler.is_profiler_done_event.set.assert_called_once()
-
+
+
@patch("slips_files.core.profiler.Profiler.add_flow_to_profile")
@patch("slips_files.core.profiler.Profiler.handle_setting_local_net")
-def test_main_flow_processing(mock_handle_setting_local_net, mock_add_flow_to_profile, mock_db, monkeypatch):
- profiler = ModuleFactory().create_profiler_obj(mock_db)
+def test_main(mock_handle_setting_local_net, mock_add_flow_to_profile):
+ profiler = ModuleFactory().create_profiler_obj()
profiler.profiler_queue = Mock(spec=queue.Queue)
profiler.profiler_queue.get.side_effect = [
{"line": "sample_line", "input_type": "zeek", "total_flows": 100},
- "stop"
+ "stop",
]
- profiler.check_for_stop_msg = Mock(side_effect=[False, True])
- monkeypatch.setattr(profiler, "define_separator", Mock(return_value="zeek"))
- profiler.input = None
- monkeypatch.setattr(profiler, "input", Mock())
+ profiler.should_stop = Mock(side_effect=[False, True])
+ profiler.db.define_separator = Mock()
+ profiler.db.define_separator.return_value = "zeek"
+ profiler.input = Mock()
profiler.input.process_line = Mock(return_value="sample_flow")
profiler.main()
mock_add_flow_to_profile.assert_called_once()
mock_handle_setting_local_net.assert_called_once()
-
-
+
@patch("slips_files.core.profiler.ConfigParser")
-def test_read_configuration(mock_config_parser, mock_db):
- profiler = ModuleFactory().create_profiler_obj(mock_db)
+def test_read_configuration(
+ mock_config_parser,
+):
+ profiler = ModuleFactory().create_profiler_obj()
mock_conf = mock_config_parser.return_value
mock_conf.whitelist_path.return_value = "path/to/whitelist"
@@ -424,23 +486,25 @@ def test_read_configuration(mock_config_parser, mock_db):
assert profiler.analysis_direction == "all"
assert profiler.label == "malicious"
assert profiler.width == 1.0
- assert profiler.client_ips == ["192.168.1.1", "10.0.0.1"]
+ assert profiler.client_ips == ["192.168.1.1", "10.0.0.1"]
-
-def test_add_flow_to_profile_unsupported_flow(mock_db):
- profiler = ModuleFactory().create_profiler_obj(mock_db)
+def test_add_flow_to_profile_unsupported_flow():
+ profiler = ModuleFactory().create_profiler_obj()
profiler.flow = Mock()
profiler.flow.type_ = "unsupported"
profiler.flow_parser = Mock()
profiler.flow_parser.is_supported_flow.return_value = False
-
+
result = profiler.add_flow_to_profile()
- assert result is False
-
+ assert result is False
+
+
@patch("slips_files.core.profiler.FlowHandler")
-def test_store_features_going_out(mock_flow_handler, mock_db):
- profiler = ModuleFactory().create_profiler_obj(mock_db)
+def test_store_features_going_out(
+ mock_flow_handler,
+):
+ profiler = ModuleFactory().create_profiler_obj()
profiler.flow = Mock()
profiler.flow.type_ = "conn"
profiler.flow_parser = mock_flow_handler.return_value
@@ -450,137 +514,152 @@ def test_store_features_going_out(mock_flow_handler, mock_db):
profiler.store_features_going_out()
profiler.flow_parser.handle_conn.assert_called_once()
-
-def test_store_features_going_in_non_conn_flow(mock_db):
- profiler = ModuleFactory().create_profiler_obj(mock_db)
- profiler.flow = Mock(type_='dns', saddr='192.168.1.1', dport=53, proto='UDP')
- profiler.saddr_as_obj = ipaddress.ip_address('192.168.1.1')
- profileid = 'profile_test_dns'
- twid = 'tw_test_dns'
+
+
+def test_store_features_going_in_non_conn_flow():
+ profiler = ModuleFactory().create_profiler_obj()
+ profiler.flow = Mock(
+ type_="dns", saddr="192.168.1.1", dport=53, proto="UDP"
+ )
+ profiler.saddr_as_obj = ipaddress.ip_address("192.168.1.1")
+ profileid = "profile_test_dns"
+ twid = "tw_test_dns"
profiler.store_features_going_in(profileid, twid)
- mock_db.add_tuple.assert_not_called()
- mock_db.add_ips.assert_not_called()
- mock_db.add_port.assert_not_called()
- mock_db.add_flow.assert_not_called()
- mock_db.mark_profile_tw_as_modified.assert_not_called()
-
-def test_store_features_going_out_unsupported_type(mock_db):
- profiler = ModuleFactory().create_profiler_obj(mock_db)
+ profiler.db.add_tuple.assert_not_called()
+ profiler.db.add_ips.assert_not_called()
+ profiler.db.add_port.assert_not_called()
+ profiler.db.add_flow.assert_not_called()
+ profiler.db.mark_profile_tw_as_modified.assert_not_called()
+
+
+def test_store_features_going_out_unsupported_type():
+ profiler = ModuleFactory().create_profiler_obj()
profiler.flow = Mock()
profiler.flow.type_ = "unsupported_type"
profiler.flow_parser = Mock()
result = profiler.store_features_going_out()
profiler.flow_parser.handle_conn.assert_not_called()
- assert result is False
-
-def test_handle_in_flows_valid_daddr(mock_db):
- profiler = ModuleFactory().create_profiler_obj(mock_db)
- profiler.flow = Mock(type_='conn', daddr='8.8.8.8')
- profiler.get_rev_profile = Mock(return_value=('rev_profile', 'rev_twid'))
+ assert result is False
+
+
+def test_handle_in_flows_valid_daddr():
+ profiler = ModuleFactory().create_profiler_obj()
+ profiler.flow = Mock(type_="conn", daddr="8.8.8.8")
+ profiler.get_rev_profile = Mock(return_value=("rev_profile", "rev_twid"))
profiler.store_features_going_in = Mock()
profiler.handle_in_flows()
profiler.get_rev_profile.assert_called_once()
- profiler.store_features_going_in.assert_called_once_with('rev_profile', 'rev_twid')
+ profiler.store_features_going_in.assert_called_once_with(
+ "rev_profile", "rev_twid"
+ )
-
-def test_shutdown_gracefully(mock_db, monkeypatch):
- profiler = ModuleFactory().create_profiler_obj(mock_db)
+
+def test_shutdown_gracefully(monkeypatch):
+ profiler = ModuleFactory().create_profiler_obj()
profiler.rec_lines = 100
monkeypatch.setattr(profiler, "print", Mock())
profiler.shutdown_gracefully()
- profiler.print.assert_called_with("Stopping. Total lines read: 100", log_to_logfiles_only=True)
-
-def test_init_pbar(mock_db):
- profiler = ModuleFactory().create_profiler_obj(mock_db)
+ profiler.print.assert_called_with(
+ "Stopping. Total lines read: 100", log_to_logfiles_only=True
+ )
+
+
+def test_init_pbar():
+ profiler = ModuleFactory().create_profiler_obj()
profiler.notify_observers = Mock()
total_flows = 500
profiler.init_pbar(total_flows)
- profiler.notify_observers.assert_called_once_with({
- 'bar': 'init',
- 'bar_info': {
- 'input_type': profiler.input_type,
- 'total_flows': total_flows
+ profiler.notify_observers.assert_called_once_with(
+ {
+ "bar": "init",
+ "bar_info": {
+ "input_type": profiler.input_type,
+ "total_flows": total_flows,
+ },
}
- })
+ )
assert profiler.supported_pbar is True
-def test_get_local_net_from_flow(mock_db, monkeypatch):
- profiler = ModuleFactory().create_profiler_obj(mock_db)
+
+def test_get_local_net_from_flow(monkeypatch):
+ profiler = ModuleFactory().create_profiler_obj()
profiler.flow = Mock()
- profiler.flow.saddr = '10.0.0.1'
+ profiler.flow.saddr = "10.0.0.1"
profiler.client_ips = []
local_net = profiler.get_local_net()
- assert local_net == '10.0.0.0/8'
-
+ assert local_net == "10.0.0.0/8"
-@pytest.mark.parametrize('client_ips, expected_cidr', [
- (['192.168.1.1'], '192.168.0.0/16'),
- (['172.16.0.1'], '172.16.0.0/12'),
- ([], '192.168.0.0/16')
-])
-def test_get_local_net(client_ips, expected_cidr, mock_db, monkeypatch):
- profiler = ModuleFactory().create_profiler_obj(mock_db)
+@pytest.mark.parametrize(
+ "client_ips, expected_cidr",
+ [
+ (["192.168.1.1"], "192.168.0.0/16"),
+ (["172.16.0.1"], "172.16.0.0/12"),
+ ([], "192.168.0.0/16"),
+ ],
+)
+def test_get_local_net(client_ips, expected_cidr, monkeypatch):
+ profiler = ModuleFactory().create_profiler_obj()
profiler.client_ips = client_ips
profiler.flow = Mock()
- profiler.flow.saddr = '192.168.1.1'
+ profiler.flow.saddr = "192.168.1.1"
local_net = profiler.get_local_net()
- assert local_net == expected_cidr
-
-
-def test_handle_setting_local_net_when_already_set(mock_db):
- profiler = ModuleFactory().create_profiler_obj(mock_db)
- profiler.is_localnet_set = True
+ assert local_net == expected_cidr
+
+
+def test_handle_setting_local_net_when_already_set():
+ profiler = ModuleFactory().create_profiler_obj()
+ profiler.is_localnet_set = True
profiler.handle_setting_local_net()
- mock_db.set_local_network.assert_not_called()
-
-def test_handle_setting_local_net(mock_db, monkeypatch):
- profiler = ModuleFactory().create_profiler_obj(mock_db)
+ profiler.db.set_local_network.assert_not_called()
+
+
+def test_handle_setting_local_net(monkeypatch):
+ profiler = ModuleFactory().create_profiler_obj()
profiler.flow = Mock()
profiler.flow.saddr = "192.168.1.1"
- monkeypatch.setattr(profiler, "should_set_localnet", Mock(return_value=True))
+ monkeypatch.setattr(
+ profiler, "should_set_localnet", Mock(return_value=True)
+ )
- monkeypatch.setattr(profiler, "get_local_net", Mock(return_value="192.168.1.0/24"))
+ monkeypatch.setattr(
+ profiler, "get_local_net", Mock(return_value="192.168.1.0/24")
+ )
profiler.handle_setting_local_net()
- profiler.db.set_local_network.assert_called_once_with("192.168.1.0/24")
-
-def test_notify_observers_no_observers(mock_db):
- profiler = ModuleFactory().create_profiler_obj(mock_db)
- test_msg = {'action': 'test'}
+ profiler.db.set_local_network.assert_called_once_with("192.168.1.0/24")
+
+
+def test_notify_observers_no_observers():
+ profiler = ModuleFactory().create_profiler_obj()
+ test_msg = {"action": "test"}
try:
profiler.notify_observers(test_msg)
except Exception as e:
pytest.fail(f"Unexpected error occurred: {e}")
-
-def test_notify_observers(mock_db):
- profiler = ModuleFactory().create_profiler_obj(mock_db)
+
+
+def test_notify_observers():
+ profiler = ModuleFactory().create_profiler_obj()
observer_mock = Mock()
profiler.observers.append(observer_mock)
- test_msg = {'test': 'message'}
+ test_msg = {"test": "message"}
profiler.notify_observers(test_msg)
observer_mock.update.assert_called_once_with(test_msg)
-
-def test_notify_observers_with_correct_message(mock_db):
+
+
+def test_notify_observers_with_correct_message():
observer_mock = Mock()
- profiler = ModuleFactory().create_profiler_obj(mock_db)
+ profiler = ModuleFactory().create_profiler_obj()
profiler.observers.append(observer_mock)
- test_msg = {'action': 'test_action'}
+ test_msg = {"action": "test_action"}
profiler.notify_observers(test_msg)
observer_mock.update.assert_called_once_with(test_msg)
-
-
-def test_should_stop_false(mock_db):
- profiler = ModuleFactory().create_profiler_obj(mock_db)
- profiler.some_condition = False
- assert profiler.should_stop() is False
-
-
diff --git a/tests/test_progress_bar.py b/tests/test_progress_bar.py
new file mode 100644
index 000000000..df5ff9400
--- /dev/null
+++ b/tests/test_progress_bar.py
@@ -0,0 +1,207 @@
+import pytest
+from unittest.mock import Mock, patch
+from multiprocessing import Event
+from tests.module_factory import ModuleFactory
+
+
+@pytest.mark.parametrize(
+ "initial_value, update_count, expected_final_value, total_flows",
+ [
+ # testcase1: Normal update
+ (0, 1, 1, 100),
+ # testcase2: Multiple updates
+ (50, 5, 55, 100),
+ ],
+)
+def test_update_bar_normal(
+ initial_value, update_count, expected_final_value, total_flows
+):
+ pbar = ModuleFactory().create_progress_bar_obj()
+ pbar.slips_mode = "interactive"
+ pbar.total_flows = total_flows
+ pbar.pbar_finished = Event()
+
+ mock_progress_bar = Mock()
+ mock_progress_bar.n = initial_value
+ pbar.progress_bar = mock_progress_bar
+
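+    # emulate tqdm's update(): each call advances the mocked bar's counter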
+ def update_side_effect(amount):
+ mock_progress_bar.n += amount
+
+ mock_progress_bar.update.side_effect = update_side_effect
+
+ for _ in range(update_count):
+ pbar.update_bar()
+
+ assert mock_progress_bar.update.call_count == update_count
+ assert mock_progress_bar.n == expected_final_value
+
+
+def test_update_bar_termination():
+ pbar = ModuleFactory().create_progress_bar_obj()
+ pbar.slips_mode = "normal"
+ pbar.total_flows = 100
+ pbar.pbar_finished = Event()
+
+ mock_progress_bar = Mock()
+ mock_progress_bar.n = 99
+ pbar.progress_bar = mock_progress_bar
+
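+    # the mocked bar is one flow short of total_flows, so a single
+    # update_bar() call should complete it and trigger terminate()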
+ def update_side_effect(amount):
+ mock_progress_bar.n += amount
+
+ mock_progress_bar.update.side_effect = update_side_effect
+
+ with patch.object(pbar, "terminate") as mock_terminate:
+ pbar.update_bar()
+
+ assert mock_progress_bar.update.call_count == 1
+ assert mock_progress_bar.n == 100
+ mock_terminate.assert_called_once()
+
+
+def test_update_bar_no_progress_bar():
+ pbar = ModuleFactory().create_progress_bar_obj()
+ pbar.slips_mode = "normal"
+
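+    # update_bar() should handle a missing progress_bar attribute
+    # without raising AttributeError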
+ assert not hasattr(pbar, "progress_bar")
+
+ try:
+ pbar.update_bar()
+ except AttributeError:
+ pytest.fail("update_bar() raised AttributeError unexpectedly")
+
+ assert not hasattr(pbar, "progress_bar")
+
+
+def test_update_bar_daemonized_mode():
+ pbar = ModuleFactory().create_progress_bar_obj()
+ pbar.slips_mode = "daemonized"
+ pbar.progress_bar = Mock()
+
+ pbar.update_bar()
+
+ pbar.progress_bar.update.assert_not_called()
+
+
+@pytest.mark.parametrize(
+ "msg, expected_output",
+ [
+ # testcase1: Normal message
+ ({"txt": "Test message"}, "Test message"),
+ # testcase2: Empty message
+ ({"txt": ""}, ""),
+ # testcase3: Message with special characters
+ (
+ {"txt": "Test\nmessage\twith\rspecial\fcharacters"},
+ "Test\nmessage\twith\rspecial\fcharacters",
+ ),
+ ],
+)
+def test_print_to_cli(
+ msg,
+ expected_output,
+):
+ pbar = ModuleFactory().create_progress_bar_obj()
+
+ with patch("tqdm.auto.tqdm.write") as mock_write:
+ pbar.print_to_cli(msg)
+
+ mock_write.assert_called_once_with(expected_output)
+
+
+@pytest.mark.parametrize(
+ "msg, expected_stats",
+ [
+ # testcase1: Normal stats
+ ({"stats": "Processing: 50%"}, "Processing: 50%"),
+ # testcase2: Empty stats
+ ({"stats": ""}, ""),
+ # testcase3: Stats with special characters
+ ({"stats": "CPU: 80%\nRAM: 4GB"}, "CPU: 80%\nRAM: 4GB"),
+ ],
+)
+def test_update_stats(
+ msg,
+ expected_stats,
+):
+ pbar = ModuleFactory().create_progress_bar_obj()
+ mock_progress_bar = Mock()
+ pbar.progress_bar = mock_progress_bar
+
+ pbar.update_stats(msg)
+
+    mock_progress_bar.set_postfix_str.assert_called_once_with(
+        expected_stats, refresh=True
+    )
+
+
+def test_shutdown_gracefully_event_not_set():
+ pbar = ModuleFactory().create_progress_bar_obj()
+ pbar.pbar_finished = Event()
+
+ pbar.shutdown_gracefully()
+
+ assert pbar.pbar_finished.is_set()
+
+
+def test_shutdown_gracefully_event_already_set():
+ pbar = ModuleFactory().create_progress_bar_obj()
+ pbar.pbar_finished = Event()
+ pbar.pbar_finished.set()
+
+ pbar.shutdown_gracefully()
+
+ assert pbar.pbar_finished.is_set()
+
+
+def test_remove_stats():
+ pbar = ModuleFactory().create_progress_bar_obj()
+ mock_progress_bar = Mock()
+ pbar.progress_bar = mock_progress_bar
+
+ pbar.remove_stats()
+
+    mock_progress_bar.set_postfix_str.assert_called_once_with(
+        "", refresh=True
+    )
+
+
+@pytest.mark.parametrize(
+ "total_flows, current_n",
+ [
+ # testcase1: Normal case
+ (100, 100),
+ # testcase2: Edge case - zero flows
+ (0, 0),
+ # testcase3: Large number of flows
+ (1000000, 1000000),
+ ],
+)
+def test_terminate(
+ total_flows,
+ current_n,
+):
+ pbar = ModuleFactory().create_progress_bar_obj()
+ pbar.total_flows = total_flows
+ pbar.pbar_finished = Event()
+
+ mock_progress_bar = Mock()
+ mock_progress_bar.n = current_n
+ pbar.progress_bar = mock_progress_bar
+
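+    # terminate() should remove the stats postfix, print the final
+    # message via tqdm.write, and set the pbar_finished event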
+ with patch.object(pbar, "remove_stats") as mock_remove_stats, patch(
+ "tqdm.auto.tqdm.write"
+ ) as mock_write:
+ pbar.terminate()
+
+ mock_remove_stats.assert_called_once()
+ mock_write.assert_called_once_with(
+ "Profiler is done reading all flows. "
+ "Slips is now processing them."
+ )
+ assert pbar.pbar_finished.is_set()
diff --git a/tests/test_set_evidence.py b/tests/test_set_evidence.py
index 161e0d92d..e6c23c83b 100644
--- a/tests/test_set_evidence.py
+++ b/tests/test_set_evidence.py
@@ -50,7 +50,6 @@
],
)
def test_young_domain(
- mock_db,
domain,
age,
stime,
@@ -61,8 +60,8 @@ def test_young_domain(
expected_call_count,
):
"""Testing the young_domain method."""
- set_evidence_helper = ModuleFactory().create_set_evidence_helper(mock_db)
- set_evidence_helper.young_domain(
+ set_ev = ModuleFactory().create_set_evidence_helper()
+ set_ev.young_domain(
domain=domain,
age=age,
stime=stime,
@@ -72,7 +71,7 @@ def test_young_domain(
answers=answers,
)
- assert mock_db.set_evidence.call_count == expected_call_count
+ assert set_ev.db.set_evidence.call_count == expected_call_count
@pytest.mark.parametrize(
@@ -118,11 +117,12 @@ def test_young_domain(
],
)
def test_multiple_ssh_versions(
- mock_db, cached_versions, current_versions, role, expected_description
+ cached_versions, current_versions, role, expected_description
):
- """Test cases for multiple_ssh_versions with different versions, roles, and edge cases."""
- set_evidence_helper = ModuleFactory().create_set_evidence_helper(mock_db)
- set_evidence_helper.multiple_ssh_versions(
+ """Test cases for multiple_ssh_versions with different versions,
+ roles, and edge cases."""
+ set_ev = ModuleFactory().create_set_evidence_helper()
+ set_ev.multiple_ssh_versions(
srcip="192.168.0.1",
cached_versions=cached_versions,
current_versions=current_versions,
@@ -133,11 +133,11 @@ def test_multiple_ssh_versions(
)
if expected_description is None:
- assert mock_db.set_evidence.call_count == 0
+ assert set_ev.db.set_evidence.call_count == 0
return
- assert mock_db.set_evidence.call_count == 1
- args, _ = mock_db.set_evidence.call_args
+ assert set_ev.db.set_evidence.call_count == 1
+ args, _ = set_ev.db.set_evidence.call_args
evidence = args[0]
assert evidence.evidence_type == EvidenceType.MULTIPLE_SSH_VERSIONS
assert evidence.attacker.value == "192.168.0.1"
@@ -195,7 +195,6 @@ def test_multiple_ssh_versions(
],
)
def test_different_localnet_usage(
- mock_db,
daddr,
portproto,
ip_outside_localnet,
@@ -209,11 +208,11 @@ def test_different_localnet_usage(
- dst IP outside localnet using ARP
- dst IP outside localnet using port
"""
- set_evidence_helper = ModuleFactory().create_set_evidence_helper(mock_db)
- mock_db.get_local_network.return_value = "192.168.0.0/16"
+ set_ev = ModuleFactory().create_set_evidence_helper()
+ set_ev.db.get_local_network.return_value = "192.168.0.0/16"
start_time = datetime.datetime(2023, 5, 6, 12, 0, 0)
- mock_db.get_slips_start_time.return_value = start_time.timestamp()
- set_evidence_helper.different_localnet_usage(
+ set_ev.db.get_slips_start_time.return_value = start_time.timestamp()
+ set_ev.different_localnet_usage(
daddr=daddr,
portproto=portproto,
profileid="profile_192.168.0.1",
@@ -223,8 +222,8 @@ def test_different_localnet_usage(
ip_outside_localnet=ip_outside_localnet,
)
- assert mock_db.set_evidence.call_count == 1
- args, _ = mock_db.set_evidence.call_args
+ assert set_ev.db.set_evidence.call_count == 1
+ args, _ = set_ev.db.set_evidence.call_args
evidence = args[0]
assert evidence.evidence_type == EvidenceType.DIFFERENT_LOCALNET
assert evidence.attacker.direction == expected_attacker_direction
@@ -237,10 +236,10 @@ def test_different_localnet_usage(
assert evidence.description == expected_description
-def test_device_changing_ips(mock_db):
+def test_device_changing_ips():
"""Testing the device_changing_ips method."""
- set_evidence_helper = ModuleFactory().create_set_evidence_helper(mock_db)
- set_evidence_helper.device_changing_ips(
+ set_ev = ModuleFactory().create_set_evidence_helper()
+ set_ev.device_changing_ips(
smac="00:11:22:33:44:55",
old_ip="10.0.0.1",
profileid="profile_192.168.0.1",
@@ -249,8 +248,8 @@ def test_device_changing_ips(mock_db):
timestamp="2023-05-06T12:00:00Z",
)
- assert mock_db.set_evidence.call_count == 1
- args, _ = mock_db.set_evidence.call_args
+ assert set_ev.db.set_evidence.call_count == 1
+ args, _ = set_ev.db.set_evidence.call_args
evidence = args[0]
assert evidence.evidence_type == EvidenceType.DEVICE_CHANGING_IP
assert evidence.attacker.value == "192.168.0.1"
@@ -261,10 +260,10 @@ def test_device_changing_ips(mock_db):
assert evidence.uid == ["unique_id"]
-def test_non_ssl_port_443_conn(mock_db):
+def test_non_ssl_port_443_conn():
"""Testing the non_ssl_port_443_conn method."""
- set_evidence_helper = ModuleFactory().create_set_evidence_helper(mock_db)
- set_evidence_helper.non_ssl_port_443_conn(
+ set_ev = ModuleFactory().create_set_evidence_helper()
+ set_ev.non_ssl_port_443_conn(
daddr="10.0.0.1",
profileid="profile_192.168.0.1",
timestamp="2023-05-06T12:00:00Z",
@@ -272,8 +271,8 @@ def test_non_ssl_port_443_conn(mock_db):
uid="unique_id",
)
- assert mock_db.set_evidence.call_count == 1
- args, _ = mock_db.set_evidence.call_args
+ assert set_ev.db.set_evidence.call_count == 1
+ args, _ = set_ev.db.set_evidence.call_args
evidence = args[0]
assert evidence.evidence_type == EvidenceType.NON_SSL_PORT_443_CONNECTION
assert evidence.attacker.value == "192.168.0.1"
@@ -311,11 +310,11 @@ def test_non_ssl_port_443_conn(mock_db):
),
],
)
-def test_incompatible_cn(mock_db, org, daddr, expected_description):
+def test_incompatible_cn(org, daddr, expected_description):
"""Testing the incompatible_CN method."""
- mock_db.get_ip_identification.return_value = "- Some Information -"
- set_evidence_helper = ModuleFactory().create_set_evidence_helper(mock_db)
- set_evidence_helper.incompatible_cn(
+ set_ev = ModuleFactory().create_set_evidence_helper()
+ set_ev.db.get_ip_identification.return_value = "- Some Information -"
+ set_ev.incompatible_cn(
org=org,
timestamp="2023-05-06T12:00:00Z",
daddr=daddr,
@@ -324,8 +323,8 @@ def test_incompatible_cn(mock_db, org, daddr, expected_description):
uid="unique_id",
)
- assert mock_db.set_evidence.call_count == 1
- args, _ = mock_db.set_evidence.call_args
+ assert set_ev.db.set_evidence.call_count == 1
+ args, _ = set_ev.db.set_evidence.call_args
evidence = args[0]
assert evidence.evidence_type == EvidenceType.INCOMPATIBLE_CN
assert evidence.attacker.value == "192.168.0.1"
@@ -351,10 +350,10 @@ def test_incompatible_cn(mock_db, org, daddr, expected_description):
(300, 3.00),
],
)
-def test_dga(mock_db, nxdomains, expected_confidence):
+def test_dga(nxdomains, expected_confidence):
"""Testing the DGA method with different nxdomains values."""
- set_evidence_helper = ModuleFactory().create_set_evidence_helper(mock_db)
- set_evidence_helper.dga(
+ set_ev = ModuleFactory().create_set_evidence_helper()
+ set_ev.dga(
nxdomains=nxdomains,
stime="2023-05-06T12:00:00Z",
profileid="profile_192.168.0.1",
@@ -362,8 +361,8 @@ def test_dga(mock_db, nxdomains, expected_confidence):
uid=["unique_id"],
)
- assert mock_db.set_evidence.call_count == 1
- args, _ = mock_db.set_evidence.call_args
+ assert set_ev.db.set_evidence.call_count == 1
+ args, _ = set_ev.db.set_evidence.call_args
evidence = args[0]
assert evidence.evidence_type == EvidenceType.DGA_NXDOMAINS
assert evidence.attacker.value == "192.168.0.1"
@@ -388,12 +387,10 @@ def test_dga(mock_db, nxdomains, expected_confidence):
(2.5 * 1024 * 1024, 2.62144),
],
)
-def test_pastebin_download(
- mock_db, bytes_downloaded, expected_response_body_len
-):
+def test_pastebin_download(bytes_downloaded, expected_response_body_len):
"""Testing the pastebin_download method."""
- set_evidence_helper = ModuleFactory().create_set_evidence_helper(mock_db)
- result = set_evidence_helper.pastebin_download(
+ set_ev = ModuleFactory().create_set_evidence_helper()
+ result = set_ev.pastebin_download(
bytes_downloaded=bytes_downloaded,
timestamp="2023-05-06T12:00:00Z",
profileid="profile_192.168.0.1",
@@ -402,8 +399,8 @@ def test_pastebin_download(
)
assert result is True
- assert mock_db.set_evidence.call_count == 1
- args, _ = mock_db.set_evidence.call_args
+ assert set_ev.db.set_evidence.call_count == 1
+ args, _ = set_ev.db.set_evidence.call_args
evidence = args[0]
assert evidence.evidence_type == EvidenceType.PASTEBIN_DOWNLOAD
assert evidence.attacker.value == "192.168.0.1"
@@ -446,7 +443,6 @@ def test_pastebin_download(
],
)
def test_dns_without_conn(
- mock_db,
domain,
timestamp,
profileid,
@@ -456,8 +452,8 @@ def test_dns_without_conn(
expected_victim,
):
"""Testing the dns_without_conn method."""
- set_evidence_helper = ModuleFactory().create_set_evidence_helper(mock_db)
- set_evidence_helper.dns_without_conn(
+ set_ev = ModuleFactory().create_set_evidence_helper()
+ set_ev.dns_without_conn(
domain=domain,
timestamp=timestamp,
profileid=profileid,
@@ -465,8 +461,8 @@ def test_dns_without_conn(
uid=uid,
)
- assert mock_db.set_evidence.call_count == 1
- args, _ = mock_db.set_evidence.call_args
+ assert set_ev.db.set_evidence.call_count == 1
+ args, _ = set_ev.db.set_evidence.call_args
evidence = args[0]
assert evidence.evidence_type == EvidenceType.DNS_WITHOUT_CONNECTION
assert evidence.attacker.value == expected_attacker
@@ -479,10 +475,10 @@ def test_dns_without_conn(
assert evidence.confidence == 0.8
-def test_dns_arpa_scan(mock_db):
+def test_dns_arpa_scan():
"""Testing the dns_arpa_scan method."""
- set_evidence_helper = ModuleFactory().create_set_evidence_helper(mock_db)
- result = set_evidence_helper.dns_arpa_scan(
+ set_ev = ModuleFactory().create_set_evidence_helper()
+ result = set_ev.dns_arpa_scan(
arpa_scan_threshold=150,
stime="2023-05-06T12:00:00Z",
profileid="profile_192.168.0.1",
@@ -491,8 +487,8 @@ def test_dns_arpa_scan(mock_db):
)
assert result is True
- assert mock_db.set_evidence.call_count == 1
- args, _ = mock_db.set_evidence.call_args
+ assert set_ev.db.set_evidence.call_count == 1
+ args, _ = set_ev.db.set_evidence.call_args
evidence = args[0]
assert evidence.evidence_type == EvidenceType.DNS_ARPA_SCAN
assert evidence.attacker.value == "192.168.0.1"
@@ -513,14 +509,14 @@ def test_dns_arpa_scan(mock_db):
(6, 0.8),
],
)
-def test_conn_without_dns(mock_db, time_difference_hours, expected_confidence):
+def test_conn_without_dns(time_difference_hours, expected_confidence):
"""Testing the conn_without_dns method, including time-based confidence adjustment."""
start_time = datetime.datetime.now() - datetime.timedelta(
hours=time_difference_hours
)
- mock_db.get_slips_start_time.return_value = start_time.timestamp()
- set_evidence_helper = ModuleFactory().create_set_evidence_helper(mock_db)
- set_evidence_helper.conn_without_dns(
+ set_ev = ModuleFactory().create_set_evidence_helper()
+ set_ev.db.get_slips_start_time.return_value = start_time.timestamp()
+ set_ev.conn_without_dns(
daddr="10.0.0.1",
timestamp=(start_time + datetime.timedelta(minutes=15)).strftime(
"%Y-%m-%dT%H:%M:%SZ"
@@ -530,8 +526,8 @@ def test_conn_without_dns(mock_db, time_difference_hours, expected_confidence):
uid="unique_id",
)
- assert mock_db.set_evidence.call_count == 1
- args, _ = mock_db.set_evidence.call_args
+ assert set_ev.db.set_evidence.call_count == 1
+ args, _ = set_ev.db.set_evidence.call_args
evidence = args[0]
assert evidence.confidence == expected_confidence
@@ -565,11 +561,11 @@ def test_conn_without_dns(mock_db, time_difference_hours, expected_confidence):
),
],
)
-def test_unknown_port(mock_db, daddr, dport, proto, expected_description):
+def test_unknown_port(daddr, dport, proto, expected_description):
"""Testing the unknown_port method."""
- mock_db.get_ip_identification.return_value = ""
- set_evidence_helper = ModuleFactory().create_set_evidence_helper(mock_db)
- set_evidence_helper.unknown_port(
+ set_ev = ModuleFactory().create_set_evidence_helper()
+ set_ev.db.get_ip_identification.return_value = ""
+ set_ev.unknown_port(
daddr=daddr,
dport=dport,
proto=proto,
@@ -579,8 +575,8 @@ def test_unknown_port(mock_db, daddr, dport, proto, expected_description):
uid="unique_id",
)
- assert mock_db.set_evidence.call_count == 1
- args, _ = mock_db.set_evidence.call_args
+ assert set_ev.db.set_evidence.call_count == 1
+ args, _ = set_ev.db.set_evidence.call_args
evidence = args[0]
assert evidence.evidence_type == EvidenceType.UNKNOWN_PORT
assert evidence.attacker.value == "192.168.0.1"
@@ -593,10 +589,10 @@ def test_unknown_port(mock_db, daddr, dport, proto, expected_description):
assert evidence.description == expected_description
-def test_pw_guessing(mock_db):
+def test_pw_guessing():
"""Testing the pw_guessing method."""
- set_evidence_helper = ModuleFactory().create_set_evidence_helper(mock_db)
- set_evidence_helper.pw_guessing(
+ set_ev = ModuleFactory().create_set_evidence_helper()
+ set_ev.pw_guessing(
msg="192.168.0.1 appears to be guessing "
"SSH passwords (seen in 30 connections)",
timestamp="2023-05-06T12:00:00Z",
@@ -605,8 +601,8 @@ def test_pw_guessing(mock_db):
by="detection_model",
)
- assert mock_db.set_evidence.call_count == 1
- args, _ = mock_db.set_evidence.call_args
+ assert set_ev.db.set_evidence.call_count == 1
+ args, _ = set_ev.db.set_evidence.call_args
evidence = args[0]
assert evidence.evidence_type == EvidenceType.PASSWORD_GUESSING
assert evidence.attacker.value == "192.168.0.1"
@@ -632,10 +628,10 @@ def test_pw_guessing(mock_db):
("Seen at least 1000 unique hosts scanned on 53/tcp", 1000),
],
)
-def test_horizontal_portscan(mock_db, msg, expected_conn_count):
+def test_horizontal_portscan(msg, expected_conn_count):
"""Testing the horizontal_portscan method."""
- set_evidence_helper = ModuleFactory().create_set_evidence_helper(mock_db)
- set_evidence_helper.horizontal_portscan(
+ set_ev = ModuleFactory().create_set_evidence_helper()
+ set_ev.horizontal_portscan(
msg=msg,
timestamp="2023-05-06T12:00:00Z",
profileid="profile_192.168.0.1",
@@ -643,8 +639,8 @@ def test_horizontal_portscan(mock_db, msg, expected_conn_count):
uid="unique_id",
)
- assert mock_db.set_evidence.call_count == 1
- args, _ = mock_db.set_evidence.call_args
+ assert set_ev.db.set_evidence.call_count == 1
+ args, _ = set_ev.db.set_evidence.call_args
evidence = args[0]
assert evidence.evidence_type == EvidenceType.HORIZONTAL_PORT_SCAN
assert evidence.attacker.value == "192.168.0.1"
@@ -690,12 +686,10 @@ def test_horizontal_portscan(mock_db, msg, expected_conn_count):
),
],
)
-def test_conn_to_private_ip(
- mock_db, proto, daddr, dport, expected_description
-):
+def test_conn_to_private_ip(proto, daddr, dport, expected_description):
"""Testing the conn_to_private_ip method."""
- set_evidence_helper = ModuleFactory().create_set_evidence_helper(mock_db)
- set_evidence_helper.conn_to_private_ip(
+ set_ev = ModuleFactory().create_set_evidence_helper()
+ set_ev.conn_to_private_ip(
proto=proto,
daddr=daddr,
dport=dport,
@@ -705,8 +699,8 @@ def test_conn_to_private_ip(
timestamp="2023-05-06T12:00:00Z",
)
- assert mock_db.set_evidence.call_count == 1
- args, _ = mock_db.set_evidence.call_args
+ assert set_ev.db.set_evidence.call_count == 1
+ args, _ = set_ev.db.set_evidence.call_args
evidence = args[0]
assert evidence.evidence_type == EvidenceType.CONNECTION_TO_PRIVATE_IP
assert evidence.attacker.value == "192.168.0.1"
@@ -719,7 +713,7 @@ def test_conn_to_private_ip(
assert evidence.description == expected_description
-def test_gre_tunnel(mock_db):
+def test_gre_tunnel():
"""Testing the GRE_tunnel method."""
tunnel_info = {
"profileid": "profile_192.168.0.1",
@@ -731,11 +725,11 @@ def test_gre_tunnel(mock_db):
"uid": "unique_id",
},
}
- set_evidence_helper = ModuleFactory().create_set_evidence_helper(mock_db)
- set_evidence_helper.GRE_tunnel(tunnel_info)
+ set_ev = ModuleFactory().create_set_evidence_helper()
+ set_ev.GRE_tunnel(tunnel_info)
- assert mock_db.set_evidence.call_count == 1
- args, _ = mock_db.set_evidence.call_args
+ assert set_ev.db.set_evidence.call_count == 1
+ args, _ = set_ev.db.set_evidence.call_args
evidence = args[0]
assert evidence.evidence_type == EvidenceType.GRE_TUNNEL
assert evidence.attacker.value == "192.168.0.1"
@@ -793,12 +787,11 @@ def test_gre_tunnel(mock_db):
],
)
def test_ssh_successful(
- mock_db, twid, saddr, daddr, size, uid, timestamp, by, expected_description
+ twid, saddr, daddr, size, uid, timestamp, by, expected_description
):
- """Testing the ssh_successful method."""
- mock_db.get_ip_identification.return_value = ""
- set_evidence_helper = ModuleFactory().create_set_evidence_helper(mock_db)
- set_evidence_helper.ssh_successful(
+    """Testing the ssh_successful method."""
+    set_ev = ModuleFactory().create_set_evidence_helper()
+ set_ev.db.get_ip_identification.return_value = ""
+ set_ev.ssh_successful(
twid=twid,
saddr=saddr,
daddr=daddr,
@@ -808,8 +801,8 @@ def test_ssh_successful(
by=by,
)
- assert mock_db.set_evidence.call_count == 1
- args, _ = mock_db.set_evidence.call_args
+ assert set_ev.db.set_evidence.call_count == 1
+ args, _ = set_ev.db.set_evidence.call_args
evidence = args[0]
assert evidence.evidence_type == EvidenceType.SSH_SUCCESSFUL
assert evidence.attacker.value == saddr
@@ -823,10 +816,10 @@ def test_ssh_successful(
assert evidence.description == expected_description
-def test_long_connection(mock_db):
+def test_long_connection():
"""Testing the long_connection method."""
- set_evidence_helper = ModuleFactory().create_set_evidence_helper(mock_db)
- set_evidence_helper.long_connection(
+ set_ev = ModuleFactory().create_set_evidence_helper()
+ set_ev.long_connection(
daddr="10.0.0.1",
duration=7200,
profileid="profile_192.168.0.1",
@@ -835,8 +828,8 @@ def test_long_connection(mock_db):
timestamp="2023-05-06T12:00:00Z",
)
- assert mock_db.set_evidence.call_count == 1
- args, _ = mock_db.set_evidence.call_args
+ assert set_ev.db.set_evidence.call_count == 1
+ args, _ = set_ev.db.set_evidence.call_args
evidence = args[0]
assert evidence.evidence_type == EvidenceType.LONG_CONNECTION
assert evidence.attacker.value == "192.168.0.1"
@@ -894,7 +887,6 @@ def test_long_connection(mock_db):
],
)
def test_self_signed_certificates(
- mock_db,
profileid,
daddr,
uid,
@@ -906,8 +898,8 @@ def test_self_signed_certificates(
expected_profile_ip_2,
):
"""Testing the self_signed_certificates method."""
- set_evidence_helper = ModuleFactory().create_set_evidence_helper(mock_db)
- set_evidence_helper.self_signed_certificates(
+ set_ev = ModuleFactory().create_set_evidence_helper()
+ set_ev.self_signed_certificates(
profileid=profileid,
twid="timewindow1",
daddr=daddr,
@@ -916,8 +908,8 @@ def test_self_signed_certificates(
server_name=server_name,
)
- assert mock_db.set_evidence.call_count == 2
- call_args, _ = mock_db.set_evidence.call_args_list[0]
+ assert set_ev.db.set_evidence.call_count == 2
+ call_args, _ = set_ev.db.set_evidence.call_args_list[0]
evidence = call_args[0]
assert evidence.evidence_type == EvidenceType.SELF_SIGNED_CERTIFICATE
assert evidence.attacker.value == expected_attacker_ip_1
@@ -926,7 +918,7 @@ def test_self_signed_certificates(
assert evidence.category == IDEACategory.ANOMALY_BEHAVIOUR
assert evidence.profile.ip == expected_profile_ip_1
assert evidence.timewindow.number == 1
- call_args, _ = mock_db.set_evidence.call_args_list[1]
+ call_args, _ = set_ev.db.set_evidence.call_args_list[1]
evidence = call_args[0]
assert evidence.evidence_type == EvidenceType.SELF_SIGNED_CERTIFICATE
assert evidence.attacker.value == expected_attacker_ip_2
@@ -937,10 +929,10 @@ def test_self_signed_certificates(
assert evidence.timewindow.number == 1
-def test_multiple_reconnection_attempts(mock_db):
+def test_multiple_reconnection_attempts():
"""Testing the multiple_reconnection_attempts method."""
- set_evidence_helper = ModuleFactory().create_set_evidence_helper(mock_db)
- set_evidence_helper.multiple_reconnection_attempts(
+ set_ev = ModuleFactory().create_set_evidence_helper()
+ set_ev.multiple_reconnection_attempts(
profileid="profile_192.168.0.1",
twid="timewindow2",
daddr="10.0.0.1",
@@ -949,8 +941,8 @@ def test_multiple_reconnection_attempts(mock_db):
reconnections=10,
)
- assert mock_db.set_evidence.call_count == 1
- args, _ = mock_db.set_evidence.call_args
+ assert set_ev.db.set_evidence.call_count == 1
+ args, _ = set_ev.db.set_evidence.call_args
evidence = args[0]
assert (
evidence.evidence_type == EvidenceType.MULTIPLE_RECONNECTION_ATTEMPTS
@@ -989,7 +981,6 @@ def test_multiple_reconnection_attempts(mock_db):
],
)
def test_connection_to_multiple_ports(
- mock_db,
profileid,
attacker,
victim,
@@ -999,8 +990,8 @@ def test_connection_to_multiple_ports(
"""Testing the connection_to_multiple_ports method with parametrization.
This test verifies the correct direction and profile_ip based on the input parameters.
"""
- set_evidence_helper = ModuleFactory().create_set_evidence_helper(mock_db)
- set_evidence_helper.connection_to_multiple_ports(
+ set_ev = ModuleFactory().create_set_evidence_helper()
+ set_ev.connection_to_multiple_ports(
profileid=profileid,
twid="timewindow3",
uid=["unique_id"],
@@ -1010,8 +1001,8 @@ def test_connection_to_multiple_ports(
attacker=attacker,
)
- assert mock_db.set_evidence.call_count == 1
- args, _ = mock_db.set_evidence.call_args
+ assert set_ev.db.set_evidence.call_count == 1
+ args, _ = set_ev.db.set_evidence.call_args
evidence = args[0]
assert evidence.evidence_type == EvidenceType.CONNECTION_TO_MULTIPLE_PORTS
assert evidence.attacker.direction == expected_attacker_direction
@@ -1052,11 +1043,11 @@ def test_connection_to_multiple_ports(
],
)
def test_suspicious_dns_answer(
- mock_db, query, answer, entropy, daddr, profileid, twid, stime, uid
+ query, answer, entropy, daddr, profileid, twid, stime, uid
):
"""Testing the suspicious_dns_answer method."""
- set_evidence_helper = ModuleFactory().create_set_evidence_helper(mock_db)
- set_evidence_helper.suspicious_dns_answer(
+ set_ev = ModuleFactory().create_set_evidence_helper()
+ set_ev.suspicious_dns_answer(
query=query,
answer=answer,
entropy=entropy,
@@ -1067,8 +1058,8 @@ def test_suspicious_dns_answer(
uid=uid,
)
- assert mock_db.set_evidence.call_count == 2
- args, _ = mock_db.set_evidence.call_args_list[0]
+ assert set_ev.db.set_evidence.call_count == 2
+ args, _ = set_ev.db.set_evidence.call_args_list[0]
evidence = args[0]
assert evidence.evidence_type == EvidenceType.HIGH_ENTROPY_DNS_ANSWER
assert evidence.attacker.value == daddr
@@ -1078,7 +1069,7 @@ def test_suspicious_dns_answer(
assert evidence.profile.ip == daddr
assert evidence.timewindow.number == int(twid.replace("timewindow", ""))
assert evidence.uid == [uid]
- args, _ = mock_db.set_evidence.call_args_list[1]
+ args, _ = set_ev.db.set_evidence.call_args_list[1]
evidence = args[0]
assert evidence.evidence_type == EvidenceType.HIGH_ENTROPY_DNS_ANSWER
assert evidence.attacker.value == profileid.split("_")[-1]
@@ -1109,10 +1100,10 @@ def test_suspicious_dns_answer(
("test.com", "", "The DNS query test.com was resolved to "),
],
)
-def test_invalid_dns_answer(mock_db, query, answer, expected_description):
+def test_invalid_dns_answer(query, answer, expected_description):
"""Testing the invalid_dns_answer method."""
- set_evidence_helper = ModuleFactory().create_set_evidence_helper(mock_db)
- set_evidence_helper.invalid_dns_answer(
+ set_ev = ModuleFactory().create_set_evidence_helper()
+ set_ev.invalid_dns_answer(
query=query,
answer=answer,
profileid="profile_192.168.0.1",
@@ -1121,8 +1112,8 @@ def test_invalid_dns_answer(mock_db, query, answer, expected_description):
uid="unique_id",
)
- assert mock_db.set_evidence.call_count == 1
- args, _ = mock_db.set_evidence.call_args
+ assert set_ev.db.set_evidence.call_count == 1
+ args, _ = set_ev.db.set_evidence.call_args
evidence = args[0]
assert evidence.evidence_type == EvidenceType.INVALID_DNS_RESOLUTION
assert evidence.attacker.value == "192.168.0.1"
@@ -1158,7 +1149,6 @@ def test_invalid_dns_answer(mock_db, query, answer, expected_description):
],
)
def test_for_port_0_connection(
- mock_db,
profileid,
attacker,
victim,
@@ -1167,8 +1157,8 @@ def test_for_port_0_connection(
victim_direction,
):
"""Testing the for_port_0_connection method."""
- set_evidence_helper = ModuleFactory().create_set_evidence_helper(mock_db)
- set_evidence_helper.for_port_0_connection(
+ set_ev = ModuleFactory().create_set_evidence_helper()
+ set_ev.for_port_0_connection(
saddr="192.168.0.1",
daddr="10.0.0.1",
sport=12345,
@@ -1181,8 +1171,8 @@ def test_for_port_0_connection(
attacker=attacker,
)
- assert mock_db.set_evidence.call_count == 1
- args, _ = mock_db.set_evidence.call_args
+ assert set_ev.db.set_evidence.call_count == 1
+ args, _ = set_ev.db.set_evidence.call_args
evidence = args[0]
assert evidence.evidence_type == EvidenceType.PORT_0_CONNECTION
assert evidence.attacker.value == attacker
@@ -1206,7 +1196,7 @@ def test_for_port_0_connection(
("192.168.0.1", ThreatLevel.LOW, "192.168.0.1"),
],
)
-def test_malicious_ja3s(mock_db, attacker_ip, threat_level, profile_ip):
+def test_malicious_ja3s(attacker_ip, threat_level, profile_ip):
"""Testing the malicious_ja3s method."""
malicious_ja3_dict = {
"ja3_hash_1": '{"threat_level": "high", '
@@ -1214,8 +1204,8 @@ def test_malicious_ja3s(mock_db, attacker_ip, threat_level, profile_ip):
"ja3_hash_2": '{"threat_level": "medium", '
'"description": "Suspicious activity", "tags": "suspicious"}',
}
- set_evidence_helper = ModuleFactory().create_set_evidence_helper(mock_db)
- set_evidence_helper.malicious_ja3s(
+ set_ev = ModuleFactory().create_set_evidence_helper()
+ set_ev.malicious_ja3s(
malicious_ja3_dict=malicious_ja3_dict,
twid="timewindow9",
uid="unique_id",
@@ -1225,8 +1215,8 @@ def test_malicious_ja3s(mock_db, attacker_ip, threat_level, profile_ip):
ja3="ja3_hash_1",
)
- assert mock_db.set_evidence.call_count == 2
- call_args_list = mock_db.set_evidence.call_args_list
+ assert set_ev.db.set_evidence.call_count == 2
+ call_args_list = set_ev.db.set_evidence.call_args_list
assert any(
args[0].attacker.value == attacker_ip
and args[0].threat_level == threat_level
@@ -1252,7 +1242,7 @@ def test_malicious_ja3s(mock_db, attacker_ip, threat_level, profile_ip):
),
],
)
-def test_malicious_ja3(mock_db, attacker_ip, threat_level, description, tags):
+def test_malicious_ja3(attacker_ip, threat_level, description, tags):
"""Testing the malicious_ja3 method."""
malicious_ja3_dict = {
"ja3_hash_8": '{"threat_level": "high", '
@@ -1262,9 +1252,9 @@ def test_malicious_ja3(mock_db, attacker_ip, threat_level, description, tags):
'"description": "Suspicious activity", '
'"tags": ""}',
}
- mock_db.get_ip_identification.return_value = ""
- set_evidence_helper = ModuleFactory().create_set_evidence_helper(mock_db)
- set_evidence_helper.malicious_ja3(
+ set_ev = ModuleFactory().create_set_evidence_helper()
+ set_ev.db.get_ip_identification.return_value = ""
+ set_ev.malicious_ja3(
malicious_ja3_dict=malicious_ja3_dict,
twid="timewindow10",
uid="unique_id",
@@ -1274,8 +1264,8 @@ def test_malicious_ja3(mock_db, attacker_ip, threat_level, description, tags):
ja3=f"ja3_hash_{int(threat_level.value * 10)}",
)
- assert mock_db.set_evidence.call_count == 1
- args, _ = mock_db.set_evidence.call_args
+ assert set_ev.db.set_evidence.call_count == 1
+ args, _ = set_ev.db.set_evidence.call_args
evidence = args[0]
assert evidence.evidence_type == EvidenceType.MALICIOUS_JA3
assert evidence.attacker.value == attacker_ip
@@ -1303,10 +1293,10 @@ def test_malicious_ja3(mock_db, attacker_ip, threat_level, description, tags):
("10.0.0.1", ThreatLevel.HIGH, "10.0.0.1"),
],
)
-def test_data_exfiltration(mock_db, attacker_ip, threat_level, profile_ip):
+def test_data_exfiltration(attacker_ip, threat_level, profile_ip):
"""Testing the data_exfiltration method."""
- set_evidence_helper = ModuleFactory().create_set_evidence_helper(mock_db)
- set_evidence_helper.data_exfiltration(
+ set_ev = ModuleFactory().create_set_evidence_helper()
+ set_ev.data_exfiltration(
daddr="10.0.0.1",
src_mbs=100.0,
profileid="profile_192.168.0.1",
@@ -1315,14 +1305,14 @@ def test_data_exfiltration(mock_db, attacker_ip, threat_level, profile_ip):
timestamp="2023-05-06T12:00:00Z",
)
- assert mock_db.set_evidence.call_count == 2
+ assert set_ev.db.set_evidence.call_count == 2
- call_args_1, _ = mock_db.set_evidence.call_args_list[0]
+ call_args_1, _ = set_ev.db.set_evidence.call_args_list[0]
evidence_1 = call_args_1[0]
assert evidence_1.attacker.value == "192.168.0.1"
assert evidence_1.threat_level == ThreatLevel.INFO
assert evidence_1.profile.ip == "192.168.0.1"
- call_args_2, _ = mock_db.set_evidence.call_args_list[1]
+ call_args_2, _ = set_ev.db.set_evidence.call_args_list[1]
evidence_2 = call_args_2[0]
assert evidence_2.attacker.value == "10.0.0.1"
assert evidence_2.threat_level == ThreatLevel.HIGH
@@ -1350,10 +1340,10 @@ def test_data_exfiltration(mock_db, attacker_ip, threat_level, profile_ip):
),
],
)
-def test_bad_smtp_login(mock_db, saddr, daddr, stime, twid, uid):
+def test_bad_smtp_login(saddr, daddr, stime, twid, uid):
"""Testing the bad_smtp_login method."""
- set_evidence_helper = ModuleFactory().create_set_evidence_helper(mock_db)
- set_evidence_helper.bad_smtp_login(
+ set_ev = ModuleFactory().create_set_evidence_helper()
+ set_ev.bad_smtp_login(
saddr=saddr,
daddr=daddr,
stime=stime,
@@ -1361,8 +1351,8 @@ def test_bad_smtp_login(mock_db, saddr, daddr, stime, twid, uid):
uid=uid,
)
- assert mock_db.set_evidence.call_count == 1
- args, _ = mock_db.set_evidence.call_args
+ assert set_ev.db.set_evidence.call_count == 1
+ args, _ = set_ev.db.set_evidence.call_args
evidence = args[0]
assert evidence.evidence_type == EvidenceType.BAD_SMTP_LOGIN
assert evidence.attacker.value == saddr
@@ -1401,17 +1391,17 @@ def test_bad_smtp_login(mock_db, saddr, daddr, stime, twid, uid):
),
],
)
-def test_smtp_bruteforce(mock_db, flow, twid, uid, smtp_bruteforce_threshold):
+def test_smtp_bruteforce(flow, twid, uid, smtp_bruteforce_threshold):
"""Testing the smtp_bruteforce method."""
- set_evidence_helper = ModuleFactory().create_set_evidence_helper(mock_db)
- set_evidence_helper.smtp_bruteforce(
+ set_ev = ModuleFactory().create_set_evidence_helper()
+ set_ev.smtp_bruteforce(
flow=flow,
twid=twid,
uid=uid,
smtp_bruteforce_threshold=smtp_bruteforce_threshold,
)
- assert mock_db.set_evidence.call_count == 1
- args, _ = mock_db.set_evidence.call_args
+ assert set_ev.db.set_evidence.call_count == 1
+ args, _ = set_ev.db.set_evidence.call_args
evidence = args[0]
assert evidence.evidence_type == EvidenceType.SMTP_LOGIN_BRUTEFORCE
assert evidence.attacker.value == flow["saddr"]
@@ -1469,7 +1459,6 @@ def test_smtp_bruteforce(mock_db, flow, twid, uid, smtp_bruteforce_threshold):
],
)
def test_malicious_ssl(
- mock_db,
ssl_info,
ssl_info_from_db,
expected_threat_levels,
@@ -1477,13 +1466,13 @@ def test_malicious_ssl(
):
"""Testing the malicious_ssl method with parametrization
and mocking for get_ip_identification."""
- mock_db.get_ip_identification.return_value = ""
- set_evidence_helper = ModuleFactory().create_set_evidence_helper(mock_db)
- set_evidence_helper.malicious_ssl(ssl_info, ssl_info_from_db)
+ set_ev = ModuleFactory().create_set_evidence_helper()
+ set_ev.db.get_ip_identification.return_value = ""
+ set_ev.malicious_ssl(ssl_info, ssl_info_from_db)
- assert mock_db.set_evidence.call_count == 2
- for i, (args, _) in enumerate(mock_db.set_evidence.call_args_list):
+ assert set_ev.db.set_evidence.call_count == 2
+ for i, (args, _) in enumerate(set_ev.db.set_evidence.call_args_list):
evidence = args[0]
assert evidence.threat_level == expected_threat_levels[i]
assert evidence.description == expected_descriptions[i]
@@ -1498,10 +1487,10 @@ def test_malicious_ssl(
("192.168.0.1", "8.8.8.8", "8.8.8.8"),
],
)
-def test_doh(mock_db, attacker_ip, victim_ip, profile_ip):
+def test_doh(attacker_ip, victim_ip, profile_ip):
"""Testing the doh method."""
- set_evidence_helper = ModuleFactory().create_set_evidence_helper(mock_db)
- set_evidence_helper.doh(
+ set_ev = ModuleFactory().create_set_evidence_helper()
+ set_ev.doh(
daddr=attacker_ip,
profileid=f"profile_{profile_ip}",
twid="timewindow1",
@@ -1509,8 +1498,8 @@ def test_doh(mock_db, attacker_ip, victim_ip, profile_ip):
uid="unique_id",
)
- assert mock_db.set_evidence.call_count == 1
- args, _ = mock_db.set_evidence.call_args
+ assert set_ev.db.set_evidence.call_count == 1
+ args, _ = set_ev.db.set_evidence.call_args
evidence = args[0]
assert evidence.evidence_type == EvidenceType.DIFFERENT_LOCALNET
assert evidence.attacker.value == attacker_ip
@@ -1522,10 +1511,10 @@ def test_doh(mock_db, attacker_ip, victim_ip, profile_ip):
assert evidence.uid == ["unique_id"]
-def test_non_http_port_80_conn(mock_db):
+def test_non_http_port_80_conn():
"""Testing the non_http_port_80_conn method."""
- set_evidence_helper = ModuleFactory().create_set_evidence_helper(mock_db)
- set_evidence_helper.non_http_port_80_conn(
+ set_ev = ModuleFactory().create_set_evidence_helper()
+ set_ev.non_http_port_80_conn(
daddr="10.0.0.1",
profileid="profile_192.168.0.1",
timestamp="2023-05-06T12:00:00Z",
@@ -1533,8 +1522,8 @@ def test_non_http_port_80_conn(mock_db):
uid="unique_id",
)
- assert mock_db.set_evidence.call_count == 2
- args, _ = mock_db.set_evidence.call_args_list[0]
+ assert set_ev.db.set_evidence.call_count == 2
+ args, _ = set_ev.db.set_evidence.call_args_list[0]
evidence = args[0]
assert evidence.evidence_type == EvidenceType.NON_HTTP_PORT_80_CONNECTION
assert evidence.attacker.value == "192.168.0.1"
@@ -1545,7 +1534,7 @@ def test_non_http_port_80_conn(mock_db):
assert evidence.timewindow.number == 2
assert evidence.uid == ["unique_id"]
- args, _ = mock_db.set_evidence.call_args_list[1]
+ args, _ = set_ev.db.set_evidence.call_args_list[1]
evidence = args[0]
assert evidence.evidence_type == EvidenceType.NON_HTTP_PORT_80_CONNECTION
assert evidence.attacker.value == "10.0.0.1"
@@ -1557,10 +1546,10 @@ def test_non_http_port_80_conn(mock_db):
assert evidence.uid == ["unique_id"]
-def test_vertical_portscan(mock_db):
+def test_vertical_portscan():
"""Testing the vertical_portscan method."""
- set_evidence_helper = ModuleFactory().create_set_evidence_helper(mock_db)
- set_evidence_helper.vertical_portscan(
+ set_ev = ModuleFactory().create_set_evidence_helper()
+ set_ev.vertical_portscan(
msg="192.168.0.1 has scanned at least 60 unique ports of host 192.168.0.2 in",
scanning_ip="192.168.0.1",
timestamp="2023-05-06T12:00:00Z",
@@ -1568,8 +1557,8 @@ def test_vertical_portscan(mock_db):
uid="unique_id",
)
- assert mock_db.set_evidence.call_count == 1
- args, _ = mock_db.set_evidence.call_args
+ assert set_ev.db.set_evidence.call_count == 1
+ args, _ = set_ev.db.set_evidence.call_args
evidence = args[0]
assert evidence.evidence_type == EvidenceType.VERTICAL_PORT_SCAN
assert evidence.attacker.value == "192.168.0.1"
diff --git a/tests/test_slips.py b/tests/test_slips.py
index 8f517ffa1..b8fa8ed0a 100644
--- a/tests/test_slips.py
+++ b/tests/test_slips.py
@@ -1,7 +1,6 @@
"""Unit test for ../slips.py"""
from tests.module_factory import ModuleFactory
-from ..slips import *
def test_load_modules():
@@ -47,6 +46,6 @@ def test_load_modules():
def test_clear_redis_cache_database():
- main = ModuleFactory().create_main_obj("test.pcap")
+ main = ModuleFactory().create_main_obj()
redis_manager = ModuleFactory().create_redis_manager_obj(main)
- assert redis_manager.clear_redis_cache_database() == True
+ assert redis_manager.clear_redis_cache_database()
diff --git a/tests/test_smtp.py b/tests/test_smtp.py
index e733f9ed9..9b90a82bd 100644
--- a/tests/test_smtp.py
+++ b/tests/test_smtp.py
@@ -27,9 +27,9 @@
([timestamp, timestamp + 6, timestamp + 11], 0),
],
)
-def test_check_smtp_bruteforce(mock_db, timestamps, expected_call_count):
+def test_check_smtp_bruteforce(timestamps, expected_call_count):
"""Tests the check_smtp_bruteforce method of the SMTP class."""
- smtp = ModuleFactory().create_smtp_analyzer_obj(mock_db)
+ smtp = ModuleFactory().create_smtp_analyzer_obj()
mock_set_evidence = MagicMock()
smtp.set_evidence.smtp_bruteforce = mock_set_evidence
@@ -80,29 +80,20 @@ def test_check_smtp_bruteforce(mock_db, timestamps, expected_call_count):
),
],
)
-def test_analyze_with_valid_message(mock_db, msg_data, expected_check_args):
- """Tests the analyze method of the SMTP class when a valid message is received."""
- smtp = ModuleFactory().create_smtp_analyzer_obj(mock_db)
+def test_analyze_with_valid_message(msg_data, expected_check_args):
+ """Tests the analyze method of the SMTP class when
+ a valid message is received."""
+ smtp = ModuleFactory().create_smtp_analyzer_obj()
smtp.check_smtp_bruteforce = MagicMock()
- mock_flowalerts = MagicMock()
- smtp.flowalerts = mock_flowalerts
- mock_flowalerts.get_msg.return_value = {"data": json.dumps(msg_data)}
-
- smtp.analyze()
-
- smtp.flowalerts.get_msg.assert_called_once_with("new_smtp")
+ msg = {"channel": "new_smtp", "data": json.dumps(msg_data)}
+ smtp.analyze(msg)
smtp.check_smtp_bruteforce.assert_called_once_with(*expected_check_args)
-def test_analyze_with_no_message(mock_db):
- """Tests the analyze method of the SMTP class when no message is received."""
- smtp = ModuleFactory().create_smtp_analyzer_obj(mock_db)
+def test_analyze_with_no_message():
+ """Tests the analyze method of the SMTP class when no message
+ is received."""
+ smtp = ModuleFactory().create_smtp_analyzer_obj()
smtp.check_smtp_bruteforce = MagicMock()
- mock_flowalerts = MagicMock()
- smtp.flowalerts = mock_flowalerts
- mock_flowalerts.get_msg.return_value = None
-
- smtp.analyze()
-
- smtp.flowalerts.get_msg.assert_called_once_with("new_smtp")
+ smtp.analyze({})
smtp.check_smtp_bruteforce.assert_not_called()
diff --git a/tests/test_software.py b/tests/test_software.py
index d6938609f..97dc772a3 100644
--- a/tests/test_software.py
+++ b/tests/test_software.py
@@ -115,11 +115,9 @@
),
],
)
-def test_check_multiple_ssh_versions(
- mock_db, cached_software, flow, expected_result
-):
- software = ModuleFactory().create_software_analyzer_obj(mock_db)
- mock_db.get_software_from_profile.return_value = cached_software
+def test_check_multiple_ssh_versions(cached_software, flow, expected_result):
+ software = ModuleFactory().create_software_analyzer_obj()
+ software.db.get_software_from_profile.return_value = cached_software
assert (
software.check_multiple_ssh_versions(flow, "timewindow1")
is expected_result
@@ -165,83 +163,17 @@ def test_check_multiple_ssh_versions(
),
],
)
-def test_analyze_version_change_detected(mock_db, msg_data):
- software = ModuleFactory().create_software_analyzer_obj(mock_db)
- software.flowalerts = MagicMock()
- software.set_evidence = MagicMock()
- mock_db.get_software_from_profile.return_value = {
- "SSH::CLIENT": {
- "version-major": 8,
- "version-minor": 1,
- "uid": "YTYwNjBiMjIxZDkzOWYyYTc4",
- },
- "SSH::SERVER": {
- "version-major": 8,
- "version-minor": 1,
- "uid": "some_other_uid",
- },
- }
- msg = {"data": json.dumps(msg_data)}
- software.flowalerts.get_msg.return_value = msg
+def test_analyze_version_change_detected(msg_data):
+ software = ModuleFactory().create_software_analyzer_obj()
+ software.check_multiple_ssh_versions = MagicMock()
+ msg = {"channel": "new_software", "data": json.dumps(msg_data)}
+ software.analyze(msg)
- software.analyze()
+ assert software.check_multiple_ssh_versions.call_count == 2
- software.set_evidence.multiple_ssh_versions.assert_called()
-
-@pytest.mark.parametrize(
- "msg_data, expected_msg",
- [
- # Testcase1: No version change detected
- (
- {
- "sw_flow": {
- "starttime": 1632302619.444328,
- "uid": "M2VhNTA3ZmZiYjU3OGMxMzJk",
- "saddr": "192.168.1.247",
- "daddr": "192.168.1.50",
- "software": "SSH::CLIENT",
- "unparsed_version": "OpenSSH_8.1",
- "version_major": 8,
- "version_minor": 1,
- "type_": "software",
- },
- "twid": "timewindow1",
- },
- {
- "data": '{"sw_flow": {"starttime": 1632302619.444328,'
- ' "uid": "M2VhNTA3ZmZiYjU3OGMxMzJk", '
- '"saddr": "192.168.1.247", "daddr": "192.168.1.50", '
- '"software": "SSH::CLIENT", '
- '"unparsed_version": "OpenSSH_8.1", '
- '"version_major": 8, '
- '"version_minor": 1, "type_": "software"}, '
- '"twid": "timewindow1"}'
- },
- ),
- # Testcase2: No message in queue
- (None, None),
- ],
-)
-def test_analyze_no_version_change(mock_db, msg_data, expected_msg):
- software = ModuleFactory().create_software_analyzer_obj(mock_db)
- software.flowalerts = MagicMock()
- software.set_evidence = MagicMock()
- mock_db.get_software_from_profile.return_value = {
- "SSH::CLIENT": {
- "version-major": 8,
- "version-minor": 1,
- "uid": "YTYwNjBiMjIxZDkzOWYyYTc4",
- },
- "SSH::SERVER": {
- "version-major": 8,
- "version-minor": 1,
- "uid": "some_other_uid",
- },
- }
-
- software.flowalerts.get_msg.return_value = expected_msg
-
- software.analyze()
-
- software.set_evidence.multiple_ssh_versions.assert_not_called()
+def test_analyze_no_version_change():
+ software = ModuleFactory().create_software_analyzer_obj()
+ software.check_multiple_ssh_versions = MagicMock()
+ software.analyze({})
+ software.check_multiple_ssh_versions.assert_not_called()
diff --git a/tests/test_ssh.py b/tests/test_ssh.py
index 62ad6c619..12da8e6a7 100644
--- a/tests/test_ssh.py
+++ b/tests/test_ssh.py
@@ -35,9 +35,9 @@
],
)
def test_check_successful_ssh(
- mocker, mock_db, auth_success, expected_called_zeek, expected_called_slips
+ mocker, auth_success, expected_called_zeek, expected_called_slips
):
- ssh = ModuleFactory().create_ssh_analyzer_obj(mock_db)
+ ssh = ModuleFactory().create_ssh_analyzer_obj()
mock_detect_zeek = mocker.patch(
"modules.flowalerts.ssh.SSH.detect_successful_ssh_by_zeek"
)
@@ -62,10 +62,8 @@ def test_check_successful_ssh(
("F", True),
],
)
-def test_check_ssh_password_guessing(
- mock_db, auth_success, expected_call_count
-):
- ssh = ModuleFactory().create_ssh_analyzer_obj(mock_db)
+def test_check_ssh_password_guessing(auth_success, expected_call_count):
+ ssh = ModuleFactory().create_ssh_analyzer_obj()
mock_set_evidence = MagicMock()
ssh.set_evidence.pw_guessing = mock_set_evidence
for i in range(ssh.pw_guessing_threshold):
@@ -77,17 +75,19 @@ def test_check_ssh_password_guessing(
@patch("slips_files.common.parsers.config_parser.ConfigParser")
-def test_read_configuration(mock_config_parser, mock_db):
+def test_read_configuration(
+ mock_config_parser,
+):
"""Test the read_configuration method."""
mock_parser = mock_config_parser.return_value
mock_parser.ssh_succesful_detection_threshold.return_value = 12345
- ssh = ModuleFactory().create_ssh_analyzer_obj(mock_db)
+ ssh = ModuleFactory().create_ssh_analyzer_obj()
ssh.read_configuration()
assert ssh.ssh_succesful_detection_threshold == 4290
-def test_detect_successful_ssh_by_slips(mock_db):
- ssh = ModuleFactory().create_ssh_analyzer_obj(mock_db)
+def test_detect_successful_ssh_by_slips():
+ ssh = ModuleFactory().create_ssh_analyzer_obj()
ssh.ssh_succesful_detection_threshold = 1000
mock_db_return = {
@@ -121,8 +121,8 @@ def test_detect_successful_ssh_by_slips(mock_db):
assert "test_uid" not in ssh.connections_checked_in_ssh_timer_thread
-def test_detect_successful_ssh_by_zeek(mock_db):
- ssh = ModuleFactory().create_ssh_analyzer_obj(mock_db)
+def test_detect_successful_ssh_by_zeek():
+ ssh = ModuleFactory().create_ssh_analyzer_obj()
uid = "test_uid"
timestamp = "1234567890"
@@ -154,8 +154,8 @@ def test_detect_successful_ssh_by_zeek(mock_db):
ssh.db.search_tws_for_flow.assert_called_once_with(profileid, twid, uid)
-def test_detect_successful_ssh_by_zeek_flow_exists_auth_success(mock_db):
- ssh = ModuleFactory().create_ssh_analyzer_obj(mock_db)
+def test_detect_successful_ssh_by_zeek_flow_exists_auth_success():
+ ssh = ModuleFactory().create_ssh_analyzer_obj()
mock_flow = {
"test_uid": json.dumps(
@@ -190,8 +190,8 @@ def test_detect_successful_ssh_by_zeek_flow_exists_auth_success(mock_db):
assert "test_uid" not in ssh.connections_checked_in_ssh_timer_thread
-def test_detect_successful_ssh_by_zeek_flow_exists_auth_fail(mock_db):
- ssh = ModuleFactory().create_ssh_analyzer_obj(mock_db)
+def test_detect_successful_ssh_by_zeek_flow_exists_auth_fail():
+ ssh = ModuleFactory().create_ssh_analyzer_obj()
mock_flow = {
"test_uid": json.dumps(
@@ -226,23 +226,22 @@ def test_detect_successful_ssh_by_zeek_flow_exists_auth_fail(mock_db):
assert "test_uid" not in ssh.connections_checked_in_ssh_timer_thread
-def test_analyze_no_message(mock_db):
- ssh = ModuleFactory().create_ssh_analyzer_obj(mock_db)
+def test_analyze_no_message():
+ ssh = ModuleFactory().create_ssh_analyzer_obj()
ssh.flowalerts = MagicMock()
ssh.flowalerts.get_msg.return_value = None
ssh.check_successful_ssh = MagicMock()
ssh.check_ssh_password_guessing = MagicMock()
- ssh.analyze()
+ ssh.analyze({})
ssh.check_successful_ssh.assert_not_called()
ssh.check_ssh_password_guessing.assert_not_called()
@pytest.mark.parametrize("auth_success", ["true", "false"])
-def test_analyze_with_message(mock_db, auth_success):
- ssh = ModuleFactory().create_ssh_analyzer_obj(mock_db)
- ssh.flowalerts = MagicMock()
+def test_analyze_with_message(auth_success):
+ ssh = ModuleFactory().create_ssh_analyzer_obj()
ssh.check_successful_ssh = MagicMock()
ssh.check_ssh_password_guessing = MagicMock()
@@ -257,9 +256,8 @@ def test_analyze_with_message(mock_db, auth_success):
"twid": twid,
"flow": json.dumps(flow_data),
}
- ssh.flowalerts.get_msg.return_value = {"data": json.dumps(msg_data)}
- ssh.analyze()
+ ssh.analyze({"channel": "new_ssh", "data": json.dumps(msg_data)})
ssh.check_successful_ssh.assert_called_once_with(
uid, timestamp, profileid, twid, auth_success
diff --git a/tests/test_ssl.py b/tests/test_ssl.py
index dca00f8c2..7aebe7bbd 100644
--- a/tests/test_ssl.py
+++ b/tests/test_ssl.py
@@ -1,10 +1,10 @@
"""Unit test for modules/flowalerts/ssl.py"""
-from tests.module_factory import ModuleFactory
from unittest.mock import Mock
+from tests.module_factory import ModuleFactory
+
import json
-from queue import Queue
import pytest
# dummy params used for testing
@@ -15,94 +15,92 @@
daddr = "192.168.1.2"
-@pytest.mark.parametrize(
- "test_flows, mock_get_flow_responses, "
- "expected_check_calls, final_queue_size",
- [
- # Test Case 1: Single flow, found in conn.log
- (
- [
- {
- "daddr": "192.168.1.2",
- "server_name": "example.com",
- "uid": "flow1",
- "ts": 1234,
- "profileid": "profile1",
- "twid": "tw1",
- }
- ],
- [{"flow1": json.dumps({"starttime": 1234, "uid": "flow1"})}],
- 1,
- 0,
- ),
- # Test Case 2: Single flow, not found in conn.log
- (
- [
- {
- "daddr": "192.168.1.2",
- "server_name": "example.com",
- "uid": "flow1",
- "ts": 1234,
- "profileid": "profile1",
- "twid": "tw1",
- }
- ],
- [{}],
- 0,
- 1,
- ),
- # Test Case 3: Multiple flows, one found, one not found
- (
- [
- {
- "daddr": "192.168.1.2",
- "server_name": "example.com",
- "uid": "flow1",
- "ts": 1234,
- "profileid": "profile1",
- "twid": "tw1",
- },
- {
- "daddr": "10.0.0.1",
- "server_name": "another.com",
- "uid": "flow2",
- "ts": 5678,
- "profileid": "profile2",
- "twid": "tw2",
- },
- ],
- [{"flow1": json.dumps({"starttime": 1234, "uid": "flow1"})}, {}],
- 1,
- 1,
- ),
- ],
-)
-def test_wait_for_ssl_flows_to_appear_in_connlog(
- mocker,
- mock_db,
- test_flows,
- mock_get_flow_responses,
- expected_check_calls,
- final_queue_size,
-):
- ssl = ModuleFactory().create_ssl_analyzer_obj(mock_db)
- ssl.pending_ssl_flows = Queue()
-
- mock_get_flow = mocker.patch.object(ssl.db, "get_flow")
- mock_check_pastebin = mocker.patch.object(ssl, "check_pastebin_download")
- mock_sleep = mocker.patch("time.sleep")
-
- for flow in test_flows:
- ssl.pending_ssl_flows.put(tuple(flow.values()))
-
- mock_get_flow.side_effect = mock_get_flow_responses
- ssl.flowalerts.should_stop = Mock()
- ssl.flowalerts.should_stop.side_effect = [False, True]
-
- ssl.wait_for_ssl_flows_to_appear_in_connlog()
-
- assert mock_check_pastebin.call_count == expected_check_calls
- assert ssl.pending_ssl_flows.qsize() == final_queue_size
+# @pytest.mark.parametrize(
+# "test_flows, mock_get_flow_responses, "
+# "expected_check_calls, final_queue_size",
+# [
+# # Test Case 1: Single flow, found in conn.log
+# (
+# [
+# {
+# "daddr": "192.168.1.2",
+# "server_name": "example.com",
+# "uid": "flow1",
+# "ts": 1234,
+# "profileid": "profile1",
+# "twid": "tw1",
+# }
+# ],
+# [{"flow1": json.dumps({"starttime": 1234, "uid": "flow1"})}],
+# 1,
+# 0,
+# ),
+# # Test Case 2: Single flow, not found in conn.log
+# (
+# [
+# {
+# "daddr": "192.168.1.2",
+# "server_name": "example.com",
+# "uid": "flow1",
+# "ts": 1234,
+# "profileid": "profile1",
+# "twid": "tw1",
+# }
+# ],
+# [{}],
+# 0,
+# 1,
+# ),
+# # Test Case 3: Multiple flows, one found, one not found
+# (
+# [
+# {
+# "daddr": "192.168.1.2",
+# "server_name": "example.com",
+# "uid": "flow1",
+# "ts": 1234,
+# "profileid": "profile1",
+# "twid": "tw1",
+# },
+# {
+# "daddr": "10.0.0.1",
+# "server_name": "another.com",
+# "uid": "flow2",
+# "ts": 5678,
+# "profileid": "profile2",
+# "twid": "tw2",
+# },
+# ],
+# [{"flow1": json.dumps({"starttime": 1234, "uid": "flow1"})}, {}],
+# 1,
+# 1,
+# ),
+# ],
+# )
+# def test_wait_for_ssl_flows_to_appear_in_connlog(
+# mocker,
+#
+# test_flows,
+# mock_get_flow_responses,
+# expected_check_calls,
+# final_queue_size,
+# ):
+# ssl = ModuleFactory().create_ssl_analyzer_obj()
+# ssl.pending_ssl_flows = Queue()
+#
+# mock_get_flow = mocker.patch.object(ssl.db, "get_flow")
+# mock_check_pastebin = mocker.patch.object(ssl, "check_pastebin_download")
+# for flow in test_flows:
+# ssl.pending_ssl_flows.put(tuple(flow.values()))
+#
+# mock_get_flow.side_effect = mock_get_flow_responses
+# ssl.flowalerts.should_stop = Mock()
+# ssl.flowalerts.should_stop.side_effect = [False, True]
+#
+# ssl.wait_for_ssl_flows_to_appear_in_connlog()
+#
+# assert mock_check_pastebin.call_count == expected_check_calls
+# assert ssl.pending_ssl_flows.qsize() == final_queue_size
@pytest.mark.parametrize(
@@ -136,8 +134,8 @@ def test_wait_for_ssl_flows_to_appear_in_connlog(
),
],
)
-def test_check_self_signed_certs(mocker, mock_db, test_input, expected):
- ssl = ModuleFactory().create_ssl_analyzer_obj(mock_db)
+def test_check_self_signed_certs(mocker, test_input, expected):
+ ssl = ModuleFactory().create_ssl_analyzer_obj()
mock_set_evidence = mocker.patch(
"modules.flowalerts.set_evidence."
"SetEvidnceHelper.self_signed_certificates"
@@ -163,13 +161,12 @@ def test_check_self_signed_certs(mocker, mock_db, test_input, expected):
)
def test_detect_malicious_ja3(
mocker,
- mock_db,
test_ja3,
test_ja3s,
expected_ja3_calls,
expected_ja3s_calls,
):
- ssl = ModuleFactory().create_ssl_analyzer_obj(mock_db)
+ ssl = ModuleFactory().create_ssl_analyzer_obj()
mock_set_evidence_ja3 = mocker.patch(
"modules.flowalerts.set_evidence.SetEvidnceHelper.malicious_ja3"
)
@@ -179,7 +176,7 @@ def test_detect_malicious_ja3(
saddr = "192.168.1.1"
- mock_db.get_ja3_in_IoC.return_value = {
+ ssl.db.get_ja3_in_IoC.return_value = {
"malicious_ja3": "Malicious JA3",
"malicious_ja3s": "Malicious JA3S",
}
@@ -202,17 +199,17 @@ def test_detect_malicious_ja3(
(False, 0),
],
)
-def test_detect_doh(mocker, mock_db, test_is_doh, expected_calls):
- ssl = ModuleFactory().create_ssl_analyzer_obj(mock_db)
+def test_detect_doh(mocker, test_is_doh, expected_calls):
+ ssl = ModuleFactory().create_ssl_analyzer_obj()
mock_set_evidence_doh = mocker.patch(
"modules.flowalerts.set_evidence.SetEvidnceHelper.doh"
)
- mock_db_set_ip_info = mocker.patch.object(ssl.db, "set_ip_info")
+ ssl.db.set_ip_info = Mock()
ssl.detect_doh(test_is_doh, daddr, profileid, twid, timestamp, uid)
assert mock_set_evidence_doh.call_count == expected_calls
- assert mock_db_set_ip_info.call_count == expected_calls
+ assert ssl.db.set_ip_info.call_count == expected_calls
@pytest.mark.parametrize(
@@ -230,12 +227,11 @@ def test_detect_doh(mocker, mock_db, test_is_doh, expected_calls):
)
def test_check_pastebin_download(
mocker,
- mock_db,
test_server_name,
test_downloaded_bytes,
expected_call_count,
):
- ssl = ModuleFactory().create_ssl_analyzer_obj(mock_db)
+ ssl = ModuleFactory().create_ssl_analyzer_obj()
ssl.pastebin_downloads_threshold = 12000
mock_set_evidence = mocker.patch(
"modules.flowalerts.set_evidence." "SetEvidnceHelper.pastebin_download"
@@ -267,18 +263,16 @@ def test_check_pastebin_download(
),
],
)
-def test_detect_incompatible_cn(mocker, mock_db, issuer, expected_call_count):
- ssl = ModuleFactory().create_ssl_analyzer_obj(mock_db)
+def test_detect_incompatible_cn(mocker, issuer, expected_call_count):
+ ssl = ModuleFactory().create_ssl_analyzer_obj()
mock_set_evidence = mocker.patch(
"modules.flowalerts.set_evidence." "SetEvidnceHelper.incompatible_cn"
)
- (mock_db.whitelist.organization_whitelist.is_ip_in_org).return_value = (
+ (ssl.db.whitelist.organization_whitelist.is_ip_in_org).return_value = False
+ (ssl.db.whitelist.organization_whitelist.is_domain_in_org).return_value = (
False
)
- (
- mock_db.whitelist.organization_whitelist.is_domain_in_org
- ).return_value = False
ssl.detect_incompatible_cn(
daddr, "example.com", issuer, profileid, twid, uid, timestamp
@@ -407,10 +401,8 @@ def test_detect_incompatible_cn(mocker, mock_db, issuer, expected_call_count):
),
],
)
-def test_check_non_ssl_port_443_conns(
- mocker, mock_db, test_input, expected_call_count
-):
- ssl = ModuleFactory().create_ssl_analyzer_obj(mock_db)
+def test_check_non_ssl_port_443_conns(mocker, test_input, expected_call_count):
+ ssl = ModuleFactory().create_ssl_analyzer_obj()
mock_set_evidence = mocker.patch(
"modules.flowalerts.set_evidence."
"SetEvidnceHelper.non_ssl_port_443_conn"
@@ -419,27 +411,11 @@ def test_check_non_ssl_port_443_conns(
assert mock_set_evidence.call_count == expected_call_count
-def test_analyze(mocker, mock_db):
- ssl = ModuleFactory().create_ssl_analyzer_obj(mock_db)
- mock_pending_ssl_flows_put = mocker.patch.object(
- ssl.pending_ssl_flows, "put"
- )
- mock_check_self_signed_certs = mocker.patch.object(
- ssl, "check_self_signed_certs"
- )
- mock_detect_malicious_ja3 = mocker.patch.object(
- ssl, "detect_malicious_ja3"
- )
- mock_detect_incompatible_cn = mocker.patch.object(
- ssl, "detect_incompatible_cn"
- )
- mock_detect_doh = mocker.patch.object(ssl, "detect_doh")
- mock_check_non_ssl_port_443_conns = mocker.patch.object(
- ssl, "check_non_ssl_port_443_conns"
- )
-
- ssl_flow_msg = {
- "data": json.dumps(
+@pytest.mark.parametrize(
+ "channel, msg_data",
+ [
+ (
+ "new_ssl",
{
"flow": json.dumps(
{
@@ -456,37 +432,33 @@ def test_analyze(mocker, mock_db):
),
"profileid": "profile_192.168.1.1",
"twid": "timewindow1",
- }
- )
- }
- new_flow_msg = {
- "data": json.dumps(
- {
- "profileid": "profile_192.168.1.1",
- "twid": "timewindow1",
- "stime": 1635765895.037696,
- "flow": json.dumps(
- {
- "test_uid": json.dumps(
- {
- "daddr": "192.168.1.2",
- "state": "Established",
- "dport": 443,
- "proto": "tcp",
- "allbytes": 1024,
- "appproto": "http",
- }
- )
- }
- ),
- }
- )
- }
+ },
+ ),
+ ],
+)
+def test_analyze_new_ssl_msg(
+ mocker,
+ channel,
+ msg_data,
+):
+ ssl = ModuleFactory().create_ssl_analyzer_obj()
+ mock_pending_ssl_flows_put = mocker.patch.object(
+ ssl.pending_ssl_flows, "put"
+ )
+ mock_check_self_signed_certs = mocker.patch.object(
+ ssl, "check_self_signed_certs"
+ )
+ mock_detect_malicious_ja3 = mocker.patch.object(
+ ssl, "detect_malicious_ja3"
+ )
+ mock_detect_incompatible_cn = mocker.patch.object(
+ ssl, "detect_incompatible_cn"
+ )
+ mock_detect_doh = mocker.patch.object(ssl, "detect_doh")
- ssl.flowalerts.get_msg = mocker.Mock(side_effect=[ssl_flow_msg, None])
- ssl.get_msg = mocker.Mock(return_value=new_flow_msg)
+ msg = {"channel": channel, "data": json.dumps(msg_data)}
- ssl.analyze()
+ ssl.analyze(msg)
mock_pending_ssl_flows_put.assert_called_once_with(
(
@@ -538,6 +510,43 @@ def test_analyze(mocker, mock_db):
"test_uid",
)
+
+@pytest.mark.parametrize(
+ "channel, msg_data",
+ [
+ (
+ "new_flow",
+ {
+ "profileid": "profile_192.168.1.1",
+ "twid": "timewindow1",
+ "stime": 1635765895.037696,
+ "flow": json.dumps(
+ {
+ "test_uid": json.dumps(
+ {
+ "daddr": "192.168.1.2",
+ "state": "Established",
+ "dport": 443,
+ "proto": "tcp",
+ "allbytes": 1024,
+ "appproto": "http",
+ }
+ )
+ }
+ ),
+ },
+ )
+ ],
+)
+def test_analyze_new_flow_msg(mocker, channel, msg_data):
+ ssl = ModuleFactory().create_ssl_analyzer_obj()
+ mock_check_non_ssl_port_443_conns = mocker.patch.object(
+ ssl, "check_non_ssl_port_443_conns"
+ )
+ msg = {"channel": channel, "data": json.dumps(msg_data)}
+
+ ssl.analyze(msg)
+
mock_check_non_ssl_port_443_conns.assert_called_once()
call_arg = mock_check_non_ssl_port_443_conns.call_args[0][0]
assert isinstance(call_arg, dict)
@@ -545,14 +554,12 @@ def test_analyze(mocker, mock_db):
assert call_arg["twid"] == "timewindow1"
assert call_arg["stime"] == 1635765895.037696
assert "flow" in call_arg
- flow_data = json.loads(call_arg["flow"])
- assert "test_uid" in flow_data
- assert json.loads(flow_data["test_uid"])["dport"] == 443
- assert json.loads(flow_data["test_uid"])["appproto"] == "http"
-def test_analyze_no_messages(mocker, mock_db):
- ssl = ModuleFactory().create_ssl_analyzer_obj(mock_db)
+def test_analyze_no_messages(mocker):
+ ssl = ModuleFactory().create_ssl_analyzer_obj()
mock_pending_ssl_flows_put = mocker.patch.object(
ssl.pending_ssl_flows, "put"
@@ -571,10 +578,7 @@ def test_analyze_no_messages(mocker, mock_db):
ssl, "check_non_ssl_port_443_conns"
)
- ssl.flowalerts.get_msg = mocker.Mock(return_value=None)
- ssl.get_msg = mocker.Mock(return_value=None)
-
- ssl.analyze()
+ ssl.analyze({})
mock_pending_ssl_flows_put.assert_not_called()
mock_check_self_signed_certs.assert_not_called()
diff --git a/tests/test_threat_intelligence.py b/tests/test_threat_intelligence.py
index 4e799d94b..f5601108a 100644
--- a/tests/test_threat_intelligence.py
+++ b/tests/test_threat_intelligence.py
@@ -9,55 +9,44 @@
from slips_files.core.evidence_structure.evidence import ThreatLevel
-def test_parse_local_ti_file(mock_db):
+def test_parse_local_ti_file():
"""
Test parsing of a local threat intelligence file.
Ensures that the `parse_local_ti_file` method successfully parses known threat
intelligence entries from "own_malicious_iocs.csv" and properly integrates
them into the system.
-
- Args:
- mock_db: A fixture or mock representing the database to prevent actual
- database modifications during testing.
"""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
+ threatintel = ModuleFactory().create_threatintel_obj()
local_ti_files_dir = threatintel.path_to_local_ti_files
local_ti_file = os.path.join(local_ti_files_dir, "own_malicious_iocs.csv")
assert threatintel.parse_local_ti_file(local_ti_file) is True
-def test_parse_ja3_file(mock_db):
+def test_parse_ja3_file():
"""
Test parsing of a JA3 hash file.
Validates that the `parse_ja3_file` method can accurately process and store
entries from "own_malicious_JA3.csv" containing JA3 hashes and associated
threat levels and descriptions.
-
- Args:
- mock_db: A mock database object to intercept database calls for isolation.
"""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
+ threatintel = ModuleFactory().create_threatintel_obj()
local_ja3_file_dir = threatintel.path_to_local_ti_files
local_ja3_file = os.path.join(local_ja3_file_dir, "own_malicious_JA3.csv")
assert threatintel.parse_ja3_file(local_ja3_file) is True
-def test_parse_jarm_file(mock_db):
+def test_parse_jarm_file():
"""
Test parsing of a JARM hash file.
Confirms that the `parse_jarm_file` method is capable of interpreting and storing
data from "own_malicious_JARM.csv", which includes JARM hashes along with their
threat assessments and descriptions.
-
- Args:
- mock_db: A mock database object used to verify interactions without affecting
- real data.
"""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
+ threatintel = ModuleFactory().create_threatintel_obj()
local_jarm_file_dir = threatintel.path_to_local_ti_files
local_jarm_file = os.path.join(
local_jarm_file_dir, "own_malicious_JARM.csv"
@@ -75,7 +64,7 @@ def test_parse_jarm_file(mock_db):
],
)
def test_check_local_ti_files_for_update(
- current_hash, old_hash, expected_return, mocker, mock_db
+ current_hash, old_hash, expected_return, mocker
):
"""
Test the logic for updating local threat intelligence files based on hash comparison.
@@ -90,10 +79,9 @@ def test_check_local_ti_files_for_update(
old_hash: The previously stored hash value for comparison.
expected_return: The expected outcome of the comparison (new hash or False).
mocker: The pytest-mock mocker object for patching dependencies.
- mock_db: A mock database object for simulating database interactions.
"""
# since this is a clear db, then we should update the local ti file
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
+ threatintel = ModuleFactory().create_threatintel_obj()
own_malicious_iocs = os.path.join(
threatintel.path_to_local_ti_files, "own_malicious_iocs.csv"
)
@@ -104,21 +92,23 @@ def test_check_local_ti_files_for_update(
mock_hash.return_value = current_hash
- mock_db.get_TI_file_info.return_value = {"hash": old_hash}
+ threatintel.db.get_TI_file_info.return_value = {"hash": old_hash}
- # the test asserts return value of should_update_local_tii_file matches expected_return
- # for each scenario. This method should return new hash if an update is needed or False if not
+    # the test asserts the return value of should_update_local_ti_file
+    # matches expected_return for each scenario. This method should
+    # return the new hash if an update is needed or False if not
assert (
threatintel.should_update_local_ti_file(own_malicious_iocs)
== expected_return
)
-def test_create_circl_lu_session(mock_db):
+def test_create_circl_lu_session():
"""
Test the creation of a session for Circl.lu API requests.
"""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
+ threatintel = ModuleFactory().create_threatintel_obj()
threatintel.create_circl_lu_session()
assert threatintel.circl_session.verify is True
assert threatintel.circl_session.headers == {"accept": "application/json"}
@@ -130,9 +120,14 @@ def test_create_circl_lu_session(mock_db):
# Test case 1: Both IPv4 and IPv6 ranges
(
{
- "192.168.1.0/24": '{"description": "Example range", "source": "local_file", "threat_level": "high"}',
- "10.0.0.0/16": '{"description": "Another range", "source": "remote_feed", "threat_level": "medium"}',
- "2001:db8::/64": '{"description": "IPv6 range", "source": "custom", "threat_level": "low"}',
+ "192.168.1.0/24": '{"description": "Example range",'
+ ' "source": "local_file", '
+ '"threat_level": "high"}',
+ "10.0.0.0/16": '{"description": "Another range", '
+ '"source": "remote_feed",'
+ ' "threat_level": "medium"}',
+ "2001:db8::/64": '{"description": "IPv6 range", '
+ '"source": "custom", "threat_level": "low"}',
},
{"192": ["192.168.1.0/24"], "10": ["10.0.0.0/16"]},
{"2001": ["2001:db8::/64"]},
@@ -140,8 +135,10 @@ def test_create_circl_lu_session(mock_db):
# Test case 2: Only IPv4 ranges
(
{
- "172.17.0.0/16": '{"description": "Example range", "source": "local_file", "threat_level": "high"}',
- "10.0.0.0/8": '{"description": "Another range", "source": "remote_feed", "threat_level": "medium"}',
+ "172.17.0.0/16": '{"description": "Example range", "source":'
+ ' "local_file", "threat_level": "high"}',
+ "10.0.0.0/8": '{"description": "Another range", "source": '
+ '"remote_feed", "threat_level": "medium"}',
},
{"172": ["172.17.0.0/16"], "10": ["10.0.0.0/8"]},
{},
@@ -149,8 +146,12 @@ def test_create_circl_lu_session(mock_db):
# Test case 3: Only IPv6 ranges
(
{
- "2001:0db8:0:0:0:0:0:0/32": '{"description": "Example range", "source": "local_file", "threat_level": "high"}',
- "2002:c0a8:0:1::/64": '{"description": "Another range", "source": "remote_feed", "threat_level": "medium"}',
+ "2001:0db8:0:0:0:0:0:0/32": '{"description": "Example range",'
+ ' "source": "local_file",'
+ ' "threat_level": "high"}',
+ "2002:c0a8:0:1::/64": '{"description": "Another range", '
+ '"source": "remote_feed",'
+ ' "threat_level": "medium"}',
},
{},
{
@@ -161,14 +162,14 @@ def test_create_circl_lu_session(mock_db):
],
)
def test_get_malicious_ip_ranges(
- mock_db, mock_ip_ranges, expected_ipv4_ranges, expected_ipv6_ranges
+ mock_ip_ranges, expected_ipv4_ranges, expected_ipv6_ranges
):
"""
Test the retrieval and caching of malicious IP ranges from the database.
This test covers both IPv4 and IPv6 range scenarios.
"""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
- mock_db.get_malicious_ip_ranges.return_value = mock_ip_ranges
+ threatintel = ModuleFactory().create_threatintel_obj()
+ threatintel.db.get_malicious_ip_ranges.return_value = mock_ip_ranges
threatintel.get_malicious_ip_ranges()
assert threatintel.cached_ipv4_ranges == expected_ipv4_ranges
@@ -242,7 +243,6 @@ def test_get_malicious_ip_ranges(
],
)
def test_set_evidence_malicious_asn(
- mock_db,
daddr,
uid,
timestamp,
@@ -255,8 +255,10 @@ def test_set_evidence_malicious_asn(
"""
Test `set_evidence_malicious_asn` for setting evidence of malicious ASN interactions.
"""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
- mock_db.get_ip_identification.return_value = " (Organization: Example Org)"
+ threatintel = ModuleFactory().create_threatintel_obj()
+ threatintel.db.get_ip_identification.return_value = (
+ " (Organization: Example Org)"
+ )
threatintel.set_evidence_malicious_asn(
daddr=daddr,
uid=uid,
@@ -267,7 +269,7 @@ def test_set_evidence_malicious_asn(
asn_info=asn_info,
is_dns_response=is_dns_response,
)
- mock_db.set_evidence.assert_called()
+ threatintel.db.set_evidence.assert_called()
@pytest.mark.parametrize(
@@ -324,7 +326,6 @@ def test_set_evidence_malicious_asn(
],
)
def test_set_evidence_malicious_ip(
- mock_db,
ip,
uid,
daddr,
@@ -338,8 +339,10 @@ def test_set_evidence_malicious_ip(
"""
Test `set_evidence_malicious_ip` for recording evidence of traffic with malicious IPs.
"""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
- mock_db.get_ip_identification.return_value = " (Organization: Example Org)"
+ threatintel = ModuleFactory().create_threatintel_obj()
+ threatintel.db.get_ip_identification.return_value = (
+ " (Organization: Example Org)"
+ )
threatintel.set_evidence_malicious_ip(
ip=ip,
uid=uid,
@@ -350,7 +353,7 @@ def test_set_evidence_malicious_ip(
twid=twid,
ip_state=ip_state,
)
- assert mock_db.set_evidence.call_count == expected_call_count
+ assert threatintel.db.set_evidence.call_count == expected_call_count
@pytest.mark.parametrize(
@@ -363,9 +366,9 @@ def test_set_evidence_malicious_ip(
("invalid", False),
],
)
-def test_is_valid_threat_level(threat_level, expected, mock_db):
+def test_is_valid_threat_level(threat_level, expected):
"""Test `is_valid_threat_level` for recognizing valid threat levels."""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
+ threatintel = ModuleFactory().create_threatintel_obj()
assert threatintel.is_valid_threat_level(threat_level) is expected
@@ -377,9 +380,9 @@ def test_is_valid_threat_level(threat_level, expected, mock_db):
("ICMP", "srcip", False),
],
)
-def test_is_outgoing_icmp_packet(protocol, ip_address, expected, mock_db):
+def test_is_outgoing_icmp_packet(protocol, ip_address, expected):
"""Test `is_outgoing_icmp_packet` for identifying outbound ICMP packets."""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
+ threatintel = ModuleFactory().create_threatintel_obj()
assert (
threatintel.is_outgoing_icmp_packet(protocol, ip_address) is expected
)
@@ -410,15 +413,15 @@ def test_is_outgoing_icmp_packet(protocol, ip_address, expected, mock_db):
],
)
def test_delete_old_source_ips_with_deletions(
- mock_db, mock_ioc_data, file_to_delete, expected_deleted_ips
+ mock_ioc_data, file_to_delete, expected_deleted_ips
):
"""
Test `__delete_old_source_ips` when there are IPs to delete.
"""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
- mock_db.get_IPs_in_IoC.return_value = mock_ioc_data
+ threatintel = ModuleFactory().create_threatintel_obj()
+ threatintel.db.get_IPs_in_IoC.return_value = mock_ioc_data
threatintel._ThreatIntel__delete_old_source_ips(file_to_delete)
- mock_db.delete_ips_from_IoC_ips.assert_called_once_with(
+ threatintel.db.delete_ips_from_IoC_ips.assert_called_once_with(
expected_deleted_ips
)
@@ -437,16 +440,14 @@ def test_delete_old_source_ips_with_deletions(
({}, "old_file.txt"),
],
)
-def test_delete_old_source_ips_no_deletions(
- mock_db, mock_ioc_data, file_to_delete
-):
+def test_delete_old_source_ips_no_deletions(mock_ioc_data, file_to_delete):
"""
Test `__delete_old_source_ips` when there are no IPs to delete.
"""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
- mock_db.get_IPs_in_IoC.return_value = mock_ioc_data
+ threatintel = ModuleFactory().create_threatintel_obj()
+ threatintel.db.get_IPs_in_IoC.return_value = mock_ioc_data
threatintel._ThreatIntel__delete_old_source_ips(file_to_delete)
- mock_db.delete_ips_from_IoC_ips.assert_not_called()
+ threatintel.db.delete_ips_from_IoC_ips.assert_not_called()
@pytest.mark.parametrize(
@@ -483,16 +484,19 @@ def test_delete_old_source_ips_no_deletions(
],
)
def test_delete_old_source_domains(
- mock_db, domains_in_ioc, file_to_delete, expected_calls
+ domains_in_ioc, file_to_delete, expected_calls
):
"""
Test the `__delete_old_source_domains` method
for removing outdated domain IoCs.
"""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
- mock_db.get_Domains_in_IoC.return_value = domains_in_ioc
+ threatintel = ModuleFactory().create_threatintel_obj()
+ threatintel.db.get_Domains_in_IoC.return_value = domains_in_ioc
threatintel._ThreatIntel__delete_old_source_domains(file_to_delete)
- assert mock_db.delete_domains_from_IoC_domains.call_count == expected_calls
+ assert (
+ threatintel.db.delete_domains_from_IoC_domains.call_count
+ == expected_calls
+ )
@pytest.mark.parametrize(
@@ -551,7 +555,6 @@ def test_delete_old_source_domains(
],
)
def test_delete_old_source_data_from_database(
- mock_db,
data_file,
mock_ips_ioc,
mock_domains_ioc,
@@ -563,18 +566,19 @@ def test_delete_old_source_data_from_database(
method for removing both
outdated IP and domain IoCs.
"""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
+ threatintel = ModuleFactory().create_threatintel_obj()
- mock_db.get_IPs_in_IoC.return_value = mock_ips_ioc
- mock_db.get_Domains_in_IoC.return_value = mock_domains_ioc
+ threatintel.db.get_IPs_in_IoC.return_value = mock_ips_ioc
+ threatintel.db.get_Domains_in_IoC.return_value = mock_domains_ioc
threatintel._ThreatIntel__delete_old_source_data_from_database(data_file)
assert (
- mock_db.delete_ips_from_IoC_ips.call_count == expected_delete_ips_calls
+ threatintel.db.delete_ips_from_IoC_ips.call_count
+ == expected_delete_ips_calls
)
assert (
- mock_db.delete_domains_from_IoC_domains.call_count
+ threatintel.db.delete_domains_from_IoC_domains.call_count
== expected_delete_domains_calls
)
@@ -592,13 +596,12 @@ def test_should_update_local_ti_file(
old_hash,
expected_return,
mocker,
- mock_db,
):
"""
Test the logic for updating local threat
intelligence files based on hash comparison.
"""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
+ threatintel = ModuleFactory().create_threatintel_obj()
own_malicious_iocs = os.path.join(
threatintel.path_to_local_ti_files, "own_malicious_iocs.csv"
)
@@ -606,7 +609,7 @@ def test_should_update_local_ti_file(
"slips_files.common.slips_utils.Utils.get_sha256_hash"
)
mock_hash.return_value = current_hash
- mock_db.get_TI_file_info.return_value = {"hash": old_hash}
+ threatintel.db.get_TI_file_info.return_value = {"hash": old_hash}
assert (
threatintel.should_update_local_ti_file(own_malicious_iocs)
@@ -652,23 +655,23 @@ def test_should_update_local_ti_file(
],
)
def test_spamhaus_success(
- mocker, mock_db, ip_address, mock_resolver_return, expected_result
+ mocker, ip_address, mock_resolver_return, expected_result
):
"""
Test the `spamhaus` method for successful Spamhaus DNSBL queries.
"""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
+ threatintel = ModuleFactory().create_threatintel_obj()
mock_resolver = mocker.patch("dns.resolver.resolve")
mock_resolver.return_value = mock_resolver_return
result = threatintel.spamhaus(ip_address)
assert result == expected_result
-def test_spamhaus_dns_error(mocker, mock_db):
+def test_spamhaus_dns_error(mocker):
"""
Test the `spamhaus` method's handling of DNS resolution errors.
"""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
+ threatintel = ModuleFactory().create_threatintel_obj()
mock_resolver = mocker.patch("dns.resolver.resolve")
mock_resolver.side_effect = Exception("DNS resolution error")
result = threatintel.spamhaus("13.14.15.16")
@@ -683,9 +686,9 @@ def test_spamhaus_dns_error(mocker, mock_db):
("malicious.com", None),
],
)
-def test_is_ignored_domain(domain, expected, mock_db):
+def test_is_ignored_domain(domain, expected):
"""Test `is_ignored_domain` for filtering out irrelevant domains."""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
+ threatintel = ModuleFactory().create_threatintel_obj()
assert threatintel.is_ignored_domain(domain) is expected
@@ -745,7 +748,6 @@ def test_is_ignored_domain(domain, expected, mock_db):
],
)
def test_set_evidence_malicious_hash(
- mock_db,
file_info,
expected_description,
expected_threat_level,
@@ -756,10 +758,12 @@ def test_set_evidence_malicious_hash(
covering different threat levels,
confidence scores, and blacklist sources.
"""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
- mock_db.get_ip_identification.return_value = " (Organization: Example Org)"
+ threatintel = ModuleFactory().create_threatintel_obj()
+ threatintel.db.get_ip_identification.return_value = (
+ " (Organization: Example Org)"
+ )
threatintel.set_evidence_malicious_hash(file_info)
- call_args = mock_db.set_evidence.call_args_list
+ call_args = threatintel.db.set_evidence.call_args_list
for call in call_args:
evidence = call[0][0]
assert expected_description in evidence.description
@@ -786,13 +790,13 @@ def test_set_evidence_malicious_hash(
],
)
def test_search_online_for_hash(
- mocker, mock_db, circl_lu_return, urlhaus_lookup_return, expected_result
+ mocker, circl_lu_return, urlhaus_lookup_return, expected_result
):
"""
Test `search_online_for_hash` for querying
online threat intelligence sources.
"""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
+ threatintel = ModuleFactory().create_threatintel_obj()
mock_circl_lu = mocker.patch.object(threatintel, "circl_lu")
mock_urlhaus_lookup = mocker.patch.object(
threatintel.urlhaus, "urlhaus_lookup"
@@ -821,12 +825,10 @@ def test_search_online_for_hash(
("10.0.0.1", None, False),
],
)
-def test_search_offline_for_ip(
- mock_db, ip_address, mock_return_value, expected_result
-):
+def test_search_offline_for_ip(ip_address, mock_return_value, expected_result):
"""Test `search_offline_for_ip` for querying local threat intelligence data."""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
- mock_db.search_IP_in_IoC.return_value = mock_return_value
+ threatintel = ModuleFactory().create_threatintel_obj()
+ threatintel.db.search_IP_in_IoC.return_value = mock_return_value
result = threatintel.search_offline_for_ip(ip_address)
assert result == expected_result
@@ -840,11 +842,11 @@ def test_search_offline_for_ip(
)
@patch("modules.threat_intelligence.threat_intelligence.ThreatIntel.spamhaus")
def test_search_online_for_ip(
- mock_spamhaus, ip_address, mock_return_value, expected_result, mock_db
+ mock_spamhaus, ip_address, mock_return_value, expected_result
):
"""Test `search_online_for_ip` for querying online threat intelligence sources."""
mock_spamhaus.return_value = mock_return_value
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
+ threatintel = ModuleFactory().create_threatintel_obj()
result = threatintel.search_online_for_ip(ip_address)
assert result == expected_result
@@ -867,12 +869,11 @@ def test_search_online_for_ip(
],
)
def test_ip_belongs_to_blacklisted_range(
- mocker, mock_db, ip, ip_type, in_blacklist, expected_result
+ mocker, ip, ip_type, in_blacklist, expected_result
):
"""Test `ip_belongs_to_blacklisted_range`
for checking malicious IP ranges."""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
- mock_db = mocker.patch.object(threatintel, "db")
+ threatintel = ModuleFactory().create_threatintel_obj()
first_octet = str(
ipaddress.ip_address(ip).exploded.split("/")[0].split(".")[0]
if ip_type == "ipv4"
@@ -890,7 +891,7 @@ def test_ip_belongs_to_blacklisted_range(
threatintel.cached_ipv6_ranges = (
{first_octet: [range_value]} if ip_type == "ipv6" else {}
)
- mock_db.get_malicious_ip_ranges.return_value = (
+ threatintel.db.get_malicious_ip_ranges.return_value = (
{
range_value: '{"description": "Bad range", "source": "Example Source", "threat_level": "high"}'
}
@@ -922,11 +923,11 @@ def test_ip_belongs_to_blacklisted_range(
],
)
def test_search_online_for_url(
- mocker, mock_db, url, mock_return_value, expected_result
+ mocker, url, mock_return_value, expected_result
):
"""Test `search_online_for_url` for
querying online threat intelligence sources."""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
+ threatintel = ModuleFactory().create_threatintel_obj()
mock_urlhaus_lookup = mocker.patch.object(
threatintel.urlhaus, "urlhaus_lookup"
)
@@ -947,18 +948,19 @@ def test_search_online_for_url(
],
)
def test_search_offline_for_domain(
- mocker, mock_db, domain, mock_return_value, expected_result
+ mocker, domain, mock_return_value, expected_result
):
"""Test `search_offline_for_domain` for checking domain blacklisting."""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
- mock_db = mocker.patch.object(threatintel, "db")
- mock_db.is_domain_malicious.return_value = mock_return_value
+ threatintel = ModuleFactory().create_threatintel_obj()
+
+ threatintel.db.is_domain_malicious.return_value = mock_return_value
result = threatintel.search_offline_for_domain(domain)
assert result == expected_result
@pytest.mark.parametrize(
- "cname, dns_query, is_subdomain, cname_info, expected_call_count, expected_confidence, expected_description",
+ "cname, dns_query, is_subdomain, cname_info, "
+ "expected_call_count, expected_confidence, expected_description",
[
# Test Case 1: Malicious CNAME, not a subdomain
(
@@ -997,7 +999,6 @@ def test_search_offline_for_domain(
],
)
def test_set_evidence_malicious_cname_in_dns_response(
- mock_db,
cname,
dns_query,
is_subdomain,
@@ -1008,7 +1009,7 @@ def test_set_evidence_malicious_cname_in_dns_response(
):
"""Test `set_evidence_malicious_cname_in_dns_response`
for recording evidence of malicious CNAMEs."""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
+ threatintel = ModuleFactory().create_threatintel_obj()
threatintel.set_evidence_malicious_cname_in_dns_response(
cname=cname,
@@ -1021,17 +1022,17 @@ def test_set_evidence_malicious_cname_in_dns_response(
twid="timewindow1",
)
- assert mock_db.set_evidence.call_count == expected_call_count
+ assert threatintel.db.set_evidence.call_count == expected_call_count
if expected_call_count > 0:
- call_args = mock_db.set_evidence.call_args[0][0]
+ call_args = threatintel.db.set_evidence.call_args[0][0]
assert call_args.description == expected_description
assert call_args.confidence == expected_confidence
-def test_pre_main(mocker, mock_db):
+def test_pre_main(mocker):
"""Test `pre_main` for initializing the module."""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
+ threatintel = ModuleFactory().create_threatintel_obj()
mocker.patch.object(threatintel, "update_local_file")
threatintel.pre_main()
assert threatintel.update_local_file.call_count == 3
@@ -1056,11 +1057,11 @@ def test_pre_main(mocker, mock_db):
("192.168.1.1", "ICMP", "srcip", True),
],
)
-def test_should_lookup(ip, protocol, ip_state, expected_result, mock_db):
+def test_should_lookup(ip, protocol, ip_state, expected_result):
"""
Test `should_lookup` for various IP addresses, protocols, and states.
"""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
+ threatintel = ModuleFactory().create_threatintel_obj()
assert threatintel.should_lookup(ip, protocol, ip_state) == expected_result
@@ -1079,14 +1080,14 @@ def test_should_lookup(ip, protocol, ip_state, expected_result, mock_db):
],
)
def test_is_malicious_cname(
- mocker, mock_db, cname, is_domain_malicious_return, expected_result
+ mocker, cname, is_domain_malicious_return, expected_result
):
"""
Test `is_malicious_cname` for various CNAME scenarios.
"""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
- mock_db = mocker.patch.object(threatintel, "db")
- mock_db.is_domain_malicious.return_value = (
+ threatintel = ModuleFactory().create_threatintel_obj()
+
+ threatintel.db.is_domain_malicious.return_value = (
is_domain_malicious_return,
False,
)
@@ -1109,11 +1110,11 @@ def test_is_malicious_cname(
"another_ignored.com",
],
)
-def test_is_malicious_cname_ignored_cname(mocker, mock_db, cname):
+def test_is_malicious_cname_ignored_cname(mocker, cname):
"""
Test `is_malicious_cname` for ignored CNAME scenarios.
"""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
+ threatintel = ModuleFactory().create_threatintel_obj()
mocker.patch.object(threatintel, "is_ignored_domain", return_value=True)
result = threatintel.is_malicious_cname(
@@ -1145,11 +1146,9 @@ def test_is_malicious_cname_ignored_cname(mocker, mock_db, cname):
(None, None, False),
],
)
-def test_is_malicious_ip(
- offline_result, online_result, expected_result, mock_db
-):
+def test_is_malicious_ip(offline_result, online_result, expected_result):
"""Test `is_malicious_ip` for checking IP blacklisting."""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
+ threatintel = ModuleFactory().create_threatintel_obj()
with patch(
"modules.threat_intelligence.threat_intelligence.ThreatIntel.search_offline_for_ip",
return_value=offline_result,
@@ -1176,12 +1175,12 @@ def test_is_malicious_ip(
("safe.com", None, False),
],
)
-def test_is_malicious_domain(domain, result, is_malicious, mocker, mock_db):
+def test_is_malicious_domain(domain, result, is_malicious, mocker):
"""
Test `is_malicious_domain` for identifying
and recording evidence of malicious domains.
"""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
+ threatintel = ModuleFactory().create_threatintel_obj()
mock_search_offline_for_domain = mocker.patch.object(
threatintel, "search_offline_for_domain"
)
@@ -1221,14 +1220,14 @@ def test_is_malicious_domain(domain, result, is_malicious, mocker, mock_db):
],
)
def test_is_malicious_hash(
- mocker, mock_db, search_online_result, expected_set_evidence_call
+ mocker, search_online_result, expected_set_evidence_call
):
"""
Test `is_malicious_hash` for identifying and
recording evidence of malicious file hashes.
"""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
- mock_db = mocker.patch.object(threatintel, "db")
+ threatintel = ModuleFactory().create_threatintel_obj()
+
mock_search_online_for_hash = mocker.patch.object(
threatintel, "search_online_for_hash"
)
@@ -1249,7 +1248,7 @@ def test_is_malicious_hash(
mock_search_online_for_hash.return_value = search_online_result
threatintel.is_malicious_hash(flow_info)
- assert mock_db.set_evidence.called == expected_set_evidence_call
+ assert threatintel.db.set_evidence.called == expected_set_evidence_call
@pytest.mark.parametrize(
@@ -1259,12 +1258,12 @@ def test_is_malicious_hash(
("http://safe.com", None, False),
],
)
-def test_is_malicious_url(url, result, is_malicious, mocker, mock_db):
+def test_is_malicious_url(url, result, is_malicious, mocker):
"""
Test `is_malicious_url` for correctly handling
both malicious and non-malicious URLs.
"""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
+ threatintel = ModuleFactory().create_threatintel_obj()
mock_search_online_for_url = mocker.patch.object(
threatintel, "search_online_for_url"
)
@@ -1323,12 +1322,12 @@ def test_is_malicious_url(url, result, is_malicious, mocker, mock_db):
),
],
)
-def test_main_domain_lookup(mocker, mock_db, msg_data, expected_call):
+def test_main_domain_lookup(mocker, msg_data, expected_call):
"""
Test the `main` function's handling of domain name lookups,
covering scenarios with DNS responses and direct domain queries.
"""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
+ threatintel = ModuleFactory().create_threatintel_obj()
mock_call = mocker.patch.object(threatintel, expected_call)
mock_get_msg = mocker.patch.object(threatintel, "get_msg")
mock_get_msg.return_value = {"data": json.dumps(msg_data)}
@@ -1338,22 +1337,22 @@ def test_main_domain_lookup(mocker, mock_db, msg_data, expected_call):
mock_call.assert_called_once()
-def test_main_empty_message(mocker, mock_db):
+def test_main_empty_message(mocker):
"""
Test the `main` function's behavior when receiving an empty message,
"""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
+ threatintel = ModuleFactory().create_threatintel_obj()
mock_get_msg = mocker.patch.object(threatintel, "get_msg")
mock_get_msg.return_value = None
threatintel.main()
-def test_main_file_hash_lookup(mocker, mock_db):
+def test_main_file_hash_lookup(mocker):
"""
Test the `main` function's handling of file hash lookups,
verifying it calls the appropriate malicious hash checks.
"""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
+ threatintel = ModuleFactory().create_threatintel_obj()
mock_is_malicious_hash = mocker.patch.object(
threatintel, "is_malicious_hash"
@@ -1411,13 +1410,11 @@ def test_main_file_hash_lookup(mocker, mock_db):
),
],
)
-def test_circl_lu(
- mocker, mock_db, status_code, response_text, expected_result
-):
+def test_circl_lu(mocker, status_code, response_text, expected_result):
"""
Test the `circl_lu` method for various Circl.lu API responses.
"""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
+ threatintel = ModuleFactory().create_threatintel_obj()
mock_session = mocker.patch.object(threatintel, "circl_session")
flow_info = {"flow": {"md5": "1234567890abcdef1234567890abcdef"}}
mock_response = MagicMock()
@@ -1441,12 +1438,12 @@ def test_circl_lu(
("25", 0.75),
],
)
-def test_calculate_threat_level(circl_trust, expected_threat_level, mock_db):
+def test_calculate_threat_level(circl_trust, expected_threat_level):
"""
Test `calculate_threat_level` for accurately converting
Circl.lu trust scores to threat levels.
"""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
+ threatintel = ModuleFactory().create_threatintel_obj()
assert (
threatintel.calculate_threat_level(circl_trust)
== expected_threat_level
@@ -1463,12 +1460,12 @@ def test_calculate_threat_level(circl_trust, expected_threat_level, mock_db):
("blacklist1 blacklist2 blacklist3", 1),
],
)
-def test_calculate_confidence(blacklists, expected_confidence, mock_db):
+def test_calculate_confidence(blacklists, expected_confidence):
"""
Test `calculate_confidence` to ensure it properly assigns confidence
scores based on the number of blacklists flagging a file.
"""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
+ threatintel = ModuleFactory().create_threatintel_obj()
assert threatintel.calculate_confidence(blacklists) == expected_confidence
@@ -1511,7 +1508,6 @@ def test_calculate_confidence(blacklists, expected_confidence, mock_db):
)
def test_main_ip_lookup(
mocker,
- mock_db,
ip_address,
is_malicious,
should_lookup_return,
@@ -1520,7 +1516,7 @@ def test_main_ip_lookup(
"""
Test the main function's handling of IP address lookups.
"""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
+ threatintel = ModuleFactory().create_threatintel_obj()
mock_is_malicious_ip = mocker.patch.object(
threatintel, "is_malicious_ip", return_value=is_malicious
)
@@ -1574,13 +1570,13 @@ def test_main_ip_lookup(
],
)
def test_update_local_file_parse_function(
- filename, expected_parse_function, mocker, mock_db
+ filename, expected_parse_function, mocker
):
"""
Test `update_local_file` to ensure the correct parsing function
is called based on the filename.
"""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
+ threatintel = ModuleFactory().create_threatintel_obj()
mocker.patch.object(
threatintel, "should_update_local_ti_file", return_value="new_hash"
)
@@ -1614,23 +1610,23 @@ def test_update_local_file_parse_function(
],
)
def test_ip_has_blacklisted_asn(
- mock_db, ip_address, asn, asn_info, expected_call_count
+ ip_address, asn, asn_info, expected_call_count
):
"""
Test `ip_has_blacklisted_asn` for both blacklisted and
non-blacklisted ASNs.
"""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
+ threatintel = ModuleFactory().create_threatintel_obj()
uid = "test_uid"
timestamp = "2023-10-26 10:00:00"
profileid = "profile_127.0.0.1"
twid = "timewindow1"
- mock_db.get_ip_info.return_value = {"asn": {"number": asn}}
- mock_db.is_blacklisted_ASN.return_value = asn_info
+ threatintel.db.get_ip_info.return_value = {"asn": {"number": asn}}
+ threatintel.db.is_blacklisted_ASN.return_value = asn_info
threatintel.ip_has_blacklisted_asn(
ip_address, uid, timestamp, profileid, twid
)
- assert mock_db.set_evidence.call_count == expected_call_count
+ assert threatintel.db.set_evidence.call_count == expected_call_count
@pytest.mark.parametrize(
@@ -1695,7 +1691,6 @@ def test_ip_has_blacklisted_asn(
],
)
def test_set_evidence_malicious_domain(
- mock_db,
domain,
uid,
timestamp,
@@ -1708,8 +1703,8 @@ def test_set_evidence_malicious_domain(
"""
Test `set_evidence_malicious_domain` with various scenarios.
"""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
- mock_db.get_domain_resolution.return_value = (
+ threatintel = ModuleFactory().create_threatintel_obj()
+ threatintel.db.get_domain_resolution.return_value = (
["192.168.1.1"] if domain != "noresolve.com" else []
)
@@ -1723,7 +1718,7 @@ def test_set_evidence_malicious_domain(
twid=twid,
)
- assert mock_db.set_evidence.call_count == expected_evidence_count
+ assert threatintel.db.set_evidence.call_count == expected_evidence_count
@pytest.mark.parametrize(
@@ -1793,7 +1788,6 @@ def test_set_evidence_malicious_domain(
],
)
def test_set_evidence_malicious_ip_in_dns_response(
- mock_db,
ip,
uid,
timestamp,
@@ -1808,8 +1802,10 @@ def test_set_evidence_malicious_ip_in_dns_response(
Test `set_evidence_malicious_ip_in_dns_response` for recording evidence of
malicious IPs received in DNS responses.
"""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
- mock_db.get_ip_identification.return_value = " (Organization: Example Org)"
+ threatintel = ModuleFactory().create_threatintel_obj()
+ threatintel.db.get_ip_identification.return_value = (
+ " (Organization: Example Org)"
+ )
threatintel.set_evidence_malicious_ip_in_dns_response(
ip=ip,
uid=uid,
@@ -1819,22 +1815,22 @@ def test_set_evidence_malicious_ip_in_dns_response(
profileid=profileid,
twid=twid,
)
- call_args_list = mock_db.set_evidence.call_args_list
+ call_args_list = threatintel.db.set_evidence.call_args_list
for call_args in call_args_list:
evidence = call_args[0][0]
assert expected_description in evidence.description
assert evidence.threat_level == expected_threat_level
- assert mock_db.set_ip_info.call_count == 1
- assert mock_db.set_malicious_ip.call_count == 1
+ assert threatintel.db.set_ip_info.call_count == 1
+ assert threatintel.db.set_malicious_ip.call_count == 1
-def test_read_configuration(mocker, mock_db):
+def test_read_configuration(mocker):
"""
Test `__read_configuration` to verify it correctly
reads configuration settings.
"""
- threatintel = ModuleFactory().create_threatintel_obj(mock_db)
+ threatintel = ModuleFactory().create_threatintel_obj()
with patch(
"modules.threat_intelligence.threat_intelligence.ConfigParser"
) as MockConfigParser:
diff --git a/tests/test_trustdb.py b/tests/test_trustdb.py
new file mode 100644
index 000000000..4b1988245
--- /dev/null
+++ b/tests/test_trustdb.py
@@ -0,0 +1,599 @@
+import pytest
+from unittest.mock import (
+ patch,
+ call,
+ MagicMock,
+ Mock,
+)
+from tests.module_factory import ModuleFactory
+import datetime
+import time
+
+
+@pytest.mark.parametrize(
+ "existing_tables",
+ [
+ # Testcase 1: All tables exist
+ (
+ [
+ "opinion_cache",
+ "slips_reputation",
+ "go_reliability",
+ "peer_ips",
+ "reports",
+ ]
+ ),
+ # Testcase 2: Some tables missing
+ (["slips_reputation", "peer_ips"]),
+ # Testcase 3: No tables exist
+ ([]),
+ ],
+)
+def test_delete_tables(existing_tables):
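+    """Test `delete_tables` for dropping all trust DB tables."""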
+ trust_db = ModuleFactory().create_trust_db_obj()
+ trust_db.conn.execute = Mock()
+ trust_db.conn.execute.side_effect = lambda query: (
+ None if query.startswith("DROP TABLE") else ["table"]
+ )
+ trust_db.conn.fetchall = Mock()
+ trust_db.conn.fetchall.return_value = existing_tables
+
+ expected_calls = [
+ call("DROP TABLE IF EXISTS opinion_cache;"),
+ call("DROP TABLE IF EXISTS slips_reputation;"),
+ call("DROP TABLE IF EXISTS go_reliability;"),
+ call("DROP TABLE IF EXISTS peer_ips;"),
+ call("DROP TABLE IF EXISTS reports;"),
+ ]
+
+ trust_db.delete_tables()
+ assert trust_db.conn.execute.call_args_list == expected_calls
+
+
+@pytest.mark.parametrize(
+ "key_type, reported_key, fetchone_result, expected_result",
+ [
+ # Testcase 1: Cache hit
+ (
+ "ip",
+ "192.168.1.1",
+ (0.8, 0.9, 0.7, 1678886400),
+ (0.8, 0.9, 0.7, 1678886400),
+ ),
+ # Testcase 2: Cache miss
+ (
+ "peerid",
+ "some_peer_id",
+ None,
+ (None, None, None, None),
+ ),
+ ],
+)
+def test_get_cached_network_opinion(
+ key_type,
+ reported_key,
+ fetchone_result,
+ expected_result,
+):
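+    """Test `get_cached_network_opinion` for cache hits and misses."""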
+ trust_db = ModuleFactory().create_trust_db_obj()
+ trust_db.conn.execute = Mock()
+ trust_db.conn.execute.return_value.fetchone.return_value = fetchone_result
+ result = trust_db.get_cached_network_opinion(key_type, reported_key)
+ assert result == expected_result
+
+
+@pytest.mark.parametrize(
+ "key_type, reported_key, score, confidence, "
+ "network_score, expected_query, expected_params",
+ [ # Test Case 1: Update IP reputation in cache
+ (
+ "ip",
+ "192.168.1.1",
+ 0.8,
+ 0.9,
+ 0.7,
+ "REPLACE INTO opinion_cache (key_type, reported_key, score, "
+ "confidence, network_score, "
+ "update_time)VALUES (?, ?, ?, ?, ?, strftime('%s','now'));",
+ ("ip", "192.168.1.1", 0.8, 0.9, 0.7),
+ ),
+ # Test Case 2: Update Peer ID reputation in cache
+ (
+ "peerid",
+ "some_peer_id",
+ 0.5,
+ 0.6,
+ 0.4,
+ "REPLACE INTO opinion_cache (key_type, reported_key, score, "
+ "confidence, network_score, "
+ "update_time)VALUES (?, ?, ?, ?, ?, strftime('%s','now'));",
+ ("peerid", "some_peer_id", 0.5, 0.6, 0.4),
+ ),
+ ],
+)
+def test_update_cached_network_opinion(
+ key_type,
+ reported_key,
+ score,
+ confidence,
+ network_score,
+ expected_query,
+ expected_params,
+):
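+    """Test `update_cached_network_opinion` for writing to the opinion cache."""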
+ trust_db = ModuleFactory().create_trust_db_obj()
+ trust_db.conn.execute = Mock()
+ trust_db.conn.commit = Mock()
+ trust_db.update_cached_network_opinion(
+ key_type, reported_key, score, confidence, network_score
+ )
+ trust_db.conn.execute.assert_called_once_with(
+ expected_query, expected_params
+ )
+ trust_db.conn.commit.assert_called_once()
+
+
+@pytest.mark.parametrize(
+ "reports, expected_calls",
+ [
+ (
+ # Testcase 1: Single report
+ [
+ (
+ "reporter_1",
+ "ip",
+ "192.168.1.1",
+ 0.5,
+ 0.8,
+ 1678886400, # Fixed timestamp
+ )
+ ],
+ [
+ call(
+ "INSERT INTO reports "
+ "(reporter_peerid, key_type, reported_key, "
+ "score, confidence, update_time) "
+ "VALUES (?, ?, ?, ?, ?, ?)",
+ [
+ (
+ "reporter_1",
+ "ip",
+ "192.168.1.1",
+ 0.5,
+ 0.8,
+ 1678886400,
+ )
+ ],
+ )
+ ],
+ ),
+ (
+ # Testcase 2: Multiple reports
+ [
+ (
+ "reporter_1",
+ "ip",
+ "192.168.1.1",
+ 0.5,
+ 0.8,
+ 1678886400,
+ ),
+ (
+ "reporter_2",
+ "peerid",
+ "another_peer",
+ 0.3,
+ 0.6,
+ 1678886500,
+ ),
+ ],
+ [
+ call(
+ "INSERT INTO reports "
+ "(reporter_peerid, key_type, reported_key, "
+ "score, confidence, update_time) "
+ "VALUES (?, ?, ?, ?, ?, ?)",
+ [
+ (
+ "reporter_1",
+ "ip",
+ "192.168.1.1",
+ 0.5,
+ 0.8,
+ 1678886400,
+ ),
+ (
+ "reporter_2",
+ "peerid",
+ "another_peer",
+ 0.3,
+ 0.6,
+ 1678886500,
+ ),
+ ],
+ )
+ ],
+ ),
+ ],
+)
+def test_insert_new_go_data(reports, expected_calls):
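+    """Test `insert_new_go_data` for bulk-inserting received reports."""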
+ trust_db = ModuleFactory().create_trust_db_obj()
+ trust_db.conn.executemany = Mock()
+ trust_db.insert_new_go_data(reports)
+ trust_db.conn.executemany.assert_has_calls(expected_calls)
+ assert trust_db.conn.executemany.call_count == len(expected_calls)
+
+
+@pytest.mark.parametrize(
+ "peerid, ip, timestamp, expected_params",
+ [ # Testcase 1: Using provided timestamp
+ (
+ "peer_123",
+ "192.168.1.20",
+ 1678887000,
+ ("192.168.1.20", "peer_123", 1678887000),
+ ),
+ # Testcase 2: Using current time as timestamp
+ (
+ "another_peer",
+ "10.0.0.5",
+ datetime.datetime(2024, 7, 24, 20, 26, 35),
+ (
+ "10.0.0.5",
+ "another_peer",
+ datetime.datetime(2024, 7, 24, 20, 26, 35),
+ ),
+ ),
+ ],
+)
+def test_insert_go_ip_pairing(peerid, ip, timestamp, expected_params):
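+    """Test `insert_go_ip_pairing` for storing peer ID to IP mappings."""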
+ trust_db = ModuleFactory().create_trust_db_obj()
+ trust_db.conn.execute = Mock()
+ trust_db.conn.commit = Mock()
+ trust_db.insert_go_ip_pairing(peerid, ip, timestamp)
+ trust_db.conn.execute.assert_called_once_with(
+ "INSERT INTO peer_ips (ipaddress, peerid, "
+ "update_time) VALUES (?, ?, ?);",
+ expected_params,
+ )
+ trust_db.conn.commit.assert_called_once()
+
+
+@pytest.mark.parametrize(
+ "ip, score, confidence, timestamp, expected_timestamp",
+ [
+ # Testcase 1: Using provided timestamp
+ ("192.168.1.10", 0.85, 0.95, 1678886400, 1678886400),
+ # Testcase 2: Using current time as timestamp
+ ("10.0.0.1", 0.6, 0.7, None, 1234),
+ ],
+)
+def test_insert_slips_score(
+ ip, score, confidence, timestamp, expected_timestamp
+):
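+    """Test `insert_slips_score` with and without an explicit timestamp."""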
+ trust_db = ModuleFactory().create_trust_db_obj()
+ with patch.object(time, "time", return_value=time.time()) as mock_time:
+ trust_db.insert_slips_score(ip, score, confidence, timestamp)
+ expected_params = (
+ ip,
+ score,
+ confidence,
+ timestamp or mock_time.return_value,
+ )
+
+ trust_db.conn.execute.assert_called_once_with(
+ "INSERT INTO slips_reputation (ipaddress, score, confidence, "
+ "update_time) VALUES (?, ?, ?, ?);",
+ expected_params,
+ )
+ trust_db.conn.commit.assert_called_once()
+
+
+@pytest.mark.parametrize(
+ "peerid, reliability, timestamp, expected_timestamp",
+ [
+ # Testcase 1: Using provided timestamp
+ ("peer_123", 0.92, 1678887000, 1678887000),
+ # Testcase 2: Using current time as timestamp
+ ("another_peer", 0.55, None, datetime.datetime.now()),
+ ],
+)
+def test_insert_go_reliability(
+ peerid, reliability, timestamp, expected_timestamp
+):
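+    """Test `insert_go_reliability` with and without an explicit timestamp."""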
+ trust_db = ModuleFactory().create_trust_db_obj()
+ with patch.object(
+ datetime, "datetime", wraps=datetime.datetime
+ ) as mock_datetime:
+ mock_datetime.now.return_value = expected_timestamp
+ trust_db.insert_go_reliability(peerid, reliability, timestamp)
+
+ expected_params = (
+ peerid,
+ reliability,
+ timestamp or expected_timestamp,
+ )
+
+ trust_db.conn.execute.assert_called_once_with(
+ "INSERT INTO go_reliability (peerid, reliability, "
+ "update_time) VALUES (?, ?, ?);",
+ expected_params,
+ )
+ trust_db.conn.commit.assert_called_once()
+
+
+@pytest.mark.parametrize(
+ "peerid, fetchone_result, expected_result",
+ [
+ # Testcase 1: IP found for peerid
+ (
+ "peer_123",
+ (1678887000, "192.168.1.20"),
+ (1678887000, "192.168.1.20"),
+ ),
+ # Testcase 2: No IP found for peerid
+ ("unknown_peer", None, (False, False)),
+ ],
+)
+def test_get_ip_of_peer(peerid, fetchone_result, expected_result):
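+    """Test `get_ip_of_peer` for known and unknown peer IDs."""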
+ trust_db = ModuleFactory().create_trust_db_obj()
+ trust_db.conn.execute = Mock()
+ trust_db.conn.execute.return_value.fetchone.return_value = fetchone_result
+ result = trust_db.get_ip_of_peer(peerid)
+ assert result == expected_result
+
+
+def test_create_tables():
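+    """Test `create_tables` for issuing all CREATE TABLE statements."""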
+ trust_db = ModuleFactory().create_trust_db_obj()
+
+ expected_calls = [
+ call(
+ "CREATE TABLE IF NOT EXISTS slips_reputation ("
+ "id INTEGER PRIMARY KEY NOT NULL, "
+ "ipaddress TEXT NOT NULL, "
+ "score REAL NOT NULL, "
+ "confidence REAL NOT NULL, "
+ "update_time REAL NOT NULL);"
+ ),
+ call(
+ "CREATE TABLE IF NOT EXISTS go_reliability ("
+ "id INTEGER PRIMARY KEY NOT NULL, "
+ "peerid TEXT NOT NULL, "
+ "reliability REAL NOT NULL, "
+ "update_time REAL NOT NULL);"
+ ),
+ call(
+ "CREATE TABLE IF NOT EXISTS peer_ips ("
+ "id INTEGER PRIMARY KEY NOT NULL, "
+ "ipaddress TEXT NOT NULL, "
+ "peerid TEXT NOT NULL, "
+ "update_time REAL NOT NULL);"
+ ),
+ call(
+ "CREATE TABLE IF NOT EXISTS reports ("
+ "id INTEGER PRIMARY KEY NOT NULL, "
+ "reporter_peerid TEXT NOT NULL, "
+ "key_type TEXT NOT NULL, "
+ "reported_key TEXT NOT NULL, "
+ "score REAL NOT NULL, "
+ "confidence REAL NOT NULL, "
+ "update_time REAL NOT NULL);"
+ ),
+ call(
+ "CREATE TABLE IF NOT EXISTS opinion_cache ("
+ "key_type TEXT NOT NULL, "
+ "reported_key TEXT NOT NULL PRIMARY KEY, "
+ "score REAL NOT NULL, "
+ "confidence REAL NOT NULL, "
+ "network_score REAL NOT NULL, "
+ "update_time DATE NOT NULL);"
+ ),
+ ]
+ trust_db.conn.execute = Mock()
+ trust_db.create_tables()
+ trust_db.conn.execute.assert_has_calls(expected_calls, any_order=True)
+
+
+@pytest.mark.parametrize(
+ "reporter_peerid, key_type, reported_key, score, confidence, "
+ "timestamp, expected_query, expected_params",
+ [
+ # Testcase 1: Using provided timestamp
+ (
+ "peer_123",
+ "ip",
+ "192.168.1.1",
+ 0.8,
+ 0.9,
+ 1678887000,
+ "INSERT INTO reports (reporter_peerid, key_type, reported_key, "
+ "score, confidence, update_time) VALUES (?, ?, ?, ?, ?, ?)",
+ ("peer_123", "ip", "192.168.1.1", 0.8, 0.9, 1678887000),
+ ),
+ # Testcase 2: Using current time as timestamp
+ (
+ "another_peer",
+ "peerid",
+ "target_peer",
+ 0.6,
+ 0.7,
+ None,
+ "INSERT INTO reports (reporter_peerid, key_type, reported_key, "
+ "score, confidence, update_time) VALUES (?, ?, ?, ?, ?, ?)",
+ ("another_peer", "peerid", "target_peer", 0.6, 0.7, 1678887000.0),
+ ),
+ ],
+)
+def test_insert_new_go_report(
+ reporter_peerid,
+ key_type,
+ reported_key,
+ score,
+ confidence,
+ timestamp,
+ expected_query,
+ expected_params,
+):
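+    """Test `insert_new_go_report` with provided and current timestamps."""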
+ trust_db = ModuleFactory().create_trust_db_obj()
+ trust_db.conn.execute = Mock()
+ trust_db.conn.commit = Mock()
+ with patch("time.time", return_value=1678887000.0):
+ trust_db.insert_new_go_report(
+ reporter_peerid,
+ key_type,
+ reported_key,
+ score,
+ confidence,
+ timestamp,
+ )
+ trust_db.conn.execute.assert_called_once()
+ actual_query, actual_params = trust_db.conn.execute.call_args[0]
+ assert actual_query == expected_query
+ assert actual_params[:-1] == expected_params[:-1]
+ assert isinstance(actual_params[-1], (float, int))
+ assert abs(actual_params[-1] - expected_params[-1]) < 0.001
+ trust_db.conn.commit.assert_called_once()
+
+
+@pytest.mark.parametrize(
+ "ipaddress, expected_reports",
+ [
+ # Testcase 1: No reports for the IP
+ ("192.168.1.1", []),
+ # Testcase 2: One report
+ (
+ "192.168.1.1",
+ [
+ (
+ "reporter_1",
+ 1678886400,
+ 0.5,
+ 0.8,
+ "192.168.1.1",
+ )
+ ],
+ ),
+ # Testcase 3: Multiple reports
+ (
+ "192.168.1.1",
+ [
+ (
+ "reporter_1",
+ 1678886400,
+ 0.5,
+ 0.8,
+ "192.168.1.1",
+ ),
+ (
+ "reporter_2",
+ 1678886500,
+ 0.3,
+ 0.6,
+ "192.168.1.1",
+ ),
+ ],
+ ),
+ ],
+)
+def test_get_reports_for_ip(ipaddress, expected_reports):
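+    """Test `get_reports_for_ip` for returning stored reports."""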
+ trust_db = ModuleFactory().create_trust_db_obj()
+ trust_db.conn.execute = Mock()
+ trust_db.conn.execute.return_value.fetchall.return_value = expected_reports
+ reports = trust_db.get_reports_for_ip(ipaddress)
+ assert reports == expected_reports
+
+
+@pytest.mark.parametrize(
+ "reporter_peerid, expected_reliability",
+ [
+ # Testcase 1: Reliability found for reporter
+ ("reporter_1", 0.7),
+ # Testcase 2: No reliability found for reporter
+ ("unknown_reporter", None),
+ ],
+)
+def test_get_reporter_reliability(reporter_peerid, expected_reliability):
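+    """Test `get_reporter_reliability` for known and unknown reporters."""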
+ trust_db = ModuleFactory().create_trust_db_obj()
+ trust_db.conn.execute.return_value.fetchone.return_value = (
+ expected_reliability,
+ )
+ reliability = trust_db.get_reporter_reliability(reporter_peerid)
+ assert reliability == expected_reliability
+
+
+@pytest.mark.parametrize(
+ "reporter_ipaddress, expected_score, expected_confidence",
+ [
+ # Testcase 1: Reputation found for reporter
+ ("192.168.1.2", 0.6, 0.9),
+ # Testcase 2: No reputation found for reporter
+ ("unknown_ip", None, None),
+ ],
+)
+def test_get_reporter_reputation(
+ reporter_ipaddress, expected_score, expected_confidence
+):
+ trust_db = ModuleFactory().create_trust_db_obj()
+ trust_db.conn.execute.return_value.fetchone.return_value = (
+ expected_score,
+ expected_confidence,
+ )
+ score, confidence = trust_db.get_reporter_reputation(reporter_ipaddress)
+ assert score == expected_score
+ assert confidence == expected_confidence
+
+
+@pytest.mark.parametrize(
+ "reporter_peerid, report_timestamp, fetchone_result, expected_ip",
+ [
+ # Testcase 1: IP found for reporter at report time
+ ("reporter_1", 1678886450, (1678886400, "192.168.1.2"), "192.168.1.2"),
+ # Testcase 2: No IP found for reporter at report time
+ ("reporter_2", 1678886550, None, None),
+ ],
+)
+def test_get_reporter_ip(
+ reporter_peerid, report_timestamp, fetchone_result, expected_ip
+):
+ trust_db = ModuleFactory().create_trust_db_obj()
+ trust_db.conn.execute = Mock()
+ trust_db.conn.execute.return_value.fetchone.return_value = fetchone_result
+ ip = trust_db.get_reporter_ip(reporter_peerid, report_timestamp)
+ assert ip == expected_ip
+
+
+@pytest.mark.parametrize(
+ "ipaddress, reports, expected_result",
+ [
+ # Testcase 1: No reports for the IP
+ ("192.168.1.1", [], []),
+ # Testcase 2: One report with valid reporter data
+ (
+ "192.168.1.1",
+ [("reporter_1", 1678886400, 0.5, 0.8, "192.168.1.1")],
+ [(0.5, 0.8, 0.7, 0.6, 0.9)],
+ ),
+ # Testcase 3: Multiple reports with valid reporter data
+ (
+ "192.168.1.1",
+ [
+ ("reporter_1", 1678886400, 0.5, 0.8, "192.168.1.1"),
+ ("reporter_2", 1678886500, 0.3, 0.6, "192.168.1.1"),
+ ],
+ [(0.5, 0.8, 0.7, 0.6, 0.9), (0.3, 0.6, 0.8, 0.4, 0.7)],
+ ),
+ ],
+)
+def test_get_opinion_on_ip(ipaddress, reports, expected_result):
+ trust_db = ModuleFactory().create_trust_db_obj()
+
+ trust_db.get_reports_for_ip = MagicMock(return_value=reports)
+ trust_db.get_reporter_ip = MagicMock(
+ side_effect=["192.168.1.2", "192.168.1.3", "192.168.1.2"]
+ )
+ trust_db.get_reporter_reliability = MagicMock(side_effect=[0.7, 0.8, 0.7])
+ trust_db.get_reporter_reputation = MagicMock(
+ side_effect=[(0.6, 0.9), (0.4, 0.7), (0.6, 0.9)]
+ )
+
+ result = trust_db.get_opinion_on_ip(ipaddress)
+ assert result == expected_result
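
# A minimal, self-contained sketch of the mocking pattern the trustdb tests
# above rely on: swap the sqlite3 connection for a Mock so no SQL runs, and
# pin time.time() so the auto-generated timestamp is predictable. MiniTrustDB
# and insert_report() are hypothetical stand-ins, not the real Slips TrustDB.
import sqlite3
import time
from unittest.mock import Mock, patch


class MiniTrustDB:
    def __init__(self, path=":memory:"):
        self.conn = sqlite3.connect(path)

    def insert_report(self, reporter, key_type, key, score, confidence, ts=None):
        if ts is None:
            ts = time.time()
        self.conn.execute(
            "INSERT INTO reports (reporter_peerid, key_type, reported_key, "
            "score, confidence, update_time) VALUES (?, ?, ?, ?, ?, ?)",
            (reporter, key_type, key, score, confidence, ts),
        )
        self.conn.commit()


def test_insert_report_uses_patched_clock():
    db = MiniTrustDB()
    db.conn = Mock()  # no real SQL statements are executed
    with patch("time.time", return_value=1678887000.0):
        db.insert_report("peer_123", "ip", "192.168.1.1", 0.8, 0.9)
    _query, params = db.conn.execute.call_args[0]
    assert params[-1] == 1678887000.0  # timestamp came from the patched clock
    db.conn.commit.assert_called_once()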
diff --git a/tests/test_tunnel.py b/tests/test_tunnel.py
index 3030460cc..b0f40d37c 100644
--- a/tests/test_tunnel.py
+++ b/tests/test_tunnel.py
@@ -1,5 +1,7 @@
"""Unit test for modules/flowalerts/tunnel.py"""
+from unittest.mock import Mock
+
from tests.module_factory import ModuleFactory
import json
import pytest
@@ -17,12 +19,12 @@
({"flow": {"tunnel_type": "Invalid"}}, False),
],
)
-def test_check_GRE_tunnel(mocker, mock_db, test_input, expected_call_count):
+def test_check_GRE_tunnel(mocker, test_input, expected_call_count):
"""
Tests the check_gre_tunnel function for various
tunnel types and checks if the evidence is set correctly.
"""
- tunnel = ModuleFactory().create_tunnel_analyzer_obj(mock_db)
+ tunnel = ModuleFactory().create_tunnel_analyzer_obj()
mock_set_evidence = mocker.patch(
"modules.flowalerts.set_evidence.SetEvidnceHelper.GRE_tunnel"
)
@@ -30,28 +32,26 @@ def test_check_GRE_tunnel(mocker, mock_db, test_input, expected_call_count):
assert mock_set_evidence.call_count == expected_call_count
-def test_analyze_with_message(mocker, mock_db):
+def test_analyze_with_message(mocker):
"""Tests analyze when flowalerts.get_msg returns data."""
- test_msg_data = {"flow": {"tunnel_type": "Tunnel::GRE"}}
+ msg = {
+ "channel": "new_tunnel",
+ "data": json.dumps({"tunnel_type": "Tunnel::GRE"}),
+ }
expected_check_gre_call_count = 1
- tunnel = ModuleFactory().create_tunnel_analyzer_obj(mock_db)
- mocker.patch.object(
- tunnel.flowalerts,
- "get_msg",
- return_value={"data": json.dumps(test_msg_data)},
- )
- mock_check_gre_tunnel = mocker.patch.object(tunnel, "check_gre_tunnel")
- tunnel.analyze()
- assert mock_check_gre_tunnel.call_count == expected_check_gre_call_count
-
+ tunnel = ModuleFactory().create_tunnel_analyzer_obj()
+ tunnel.flowalerts.get_msg = Mock(return_value=msg)
+ tunnel.check_gre_tunnel = Mock()
+ tunnel.analyze(msg)
+ assert tunnel.check_gre_tunnel.call_count == expected_check_gre_call_count
-def test_analyze_without_message(mocker, mock_db):
- """Tests analyze when flowalerts.get_msg returns None."""
- tunnel = ModuleFactory().create_tunnel_analyzer_obj(mock_db)
- mocker.patch.object(tunnel.flowalerts, "get_msg", return_value=None)
+def test_analyze_without_message(mocker):
+ tunnel = ModuleFactory().create_tunnel_analyzer_obj()
mock_check_gre_tunnel = mocker.patch.object(tunnel, "check_gre_tunnel")
- tunnel.analyze()
+ tunnel.analyze(None)
assert mock_check_gre_tunnel.call_count == 0
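
# A small, self-contained illustration of the test style used above: analyze()
# now receives the pubsub message directly, so the test just hands it a dict
# and checks dispatch. MiniTunnelAnalyzer is a hypothetical stand-in for the
# Slips tunnel analyzer, kept only to show the pattern.
import json
from unittest.mock import Mock


class MiniTunnelAnalyzer:
    def analyze(self, msg):
        if not msg:
            return False
        flow = json.loads(msg["data"])
        if flow.get("tunnel_type") == "Tunnel::GRE":
            self.check_gre_tunnel(flow)
        return True

    def check_gre_tunnel(self, flow):
        """Placeholder; the real detection logic lives in Slips."""


def test_analyze_dispatches_on_gre_message():
    analyzer = MiniTunnelAnalyzer()
    analyzer.check_gre_tunnel = Mock()
    msg = {
        "channel": "new_tunnel",
        "data": json.dumps({"tunnel_type": "Tunnel::GRE"}),
    }
    assert analyzer.analyze(msg) is True
    analyzer.check_gre_tunnel.assert_called_once()
    assert analyzer.analyze(None) is False  # no message, nothing dispatched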
diff --git a/tests/test_update_file_manager.py b/tests/test_update_file_manager.py
index cb7e745c9..58b71776f 100644
--- a/tests/test_update_file_manager.py
+++ b/tests/test_update_file_manager.py
@@ -8,21 +8,21 @@
from unittest.mock import Mock, mock_open, patch
-def test_check_if_update_based_on_update_period(mock_db):
- mock_db.get_TI_file_info.return_value = {"time": float("inf")}
- update_manager = ModuleFactory().create_update_manager_obj(mock_db)
+def test_check_if_update_based_on_update_period():
+ update_manager = ModuleFactory().create_update_manager_obj()
+ update_manager.db.get_TI_file_info.return_value = {"time": float("inf")}
url = "abc.com/x"
# update period hasn't passed
assert update_manager.check_if_update(url, float("inf")) is False
-def test_check_if_update_based_on_e_tag(mocker, mock_db):
- update_manager = ModuleFactory().create_update_manager_obj(mock_db)
+def test_check_if_update_based_on_e_tag(mocker):
+ update_manager = ModuleFactory().create_update_manager_obj()
# period passed, etag same
etag = "1234"
url = "google.com/images"
- mock_db.get_TI_file_info.return_value = {"e-tag": etag}
+ update_manager.db.get_TI_file_info.return_value = {"e-tag": etag}
mock_requests = mocker.patch("requests.get")
mock_requests.return_value.status_code = 200
@@ -33,7 +33,7 @@ def test_check_if_update_based_on_e_tag(mocker, mock_db):
# period passed, etag different
etag = "1111"
url = "google.com/images"
- mock_db.get_TI_file_info.return_value = {"e-tag": etag}
+ update_manager.db.get_TI_file_info.return_value = {"e-tag": etag}
mock_requests = mocker.patch("requests.get")
mock_requests.return_value.status_code = 200
mock_requests.return_value.headers = {"ETag": "2222"}
@@ -41,13 +41,16 @@ def test_check_if_update_based_on_e_tag(mocker, mock_db):
assert update_manager.check_if_update(url, float("-inf")) is True
-def test_check_if_update_based_on_last_modified(database, mocker, mock_db):
- update_manager = ModuleFactory().create_update_manager_obj(mock_db)
+def test_check_if_update_based_on_last_modified(
+ database,
+ mocker,
+):
+ update_manager = ModuleFactory().create_update_manager_obj()
# period passed, no etag, last modified the same
url = "google.com/photos"
- mock_db.get_TI_file_info.return_value = {"Last-Modified": 10.0}
+ update_manager.db.get_TI_file_info.return_value = {"Last-Modified": 10.0}
mock_requests = mocker.patch("requests.get")
mock_requests.return_value.status_code = 200
mock_requests.return_value.headers = {"Last-Modified": 10.0}
@@ -58,7 +61,7 @@ def test_check_if_update_based_on_last_modified(database, mocker, mock_db):
# period passed, no etag, last modified changed
url = "google.com/photos"
- mock_db.get_TI_file_info.return_value = {"Last-Modified": 10}
+ update_manager.db.get_TI_file_info.return_value = {"Last-Modified": 10}
mock_requests = mocker.patch("requests.get")
mock_requests.return_value.status_code = 200
mock_requests.return_value.headers = {"Last-Modified": 11}
@@ -78,19 +81,19 @@ def test_check_if_update_based_on_last_modified(database, mocker, mock_db):
],
)
def test_check_if_update_local_file(
- mocker, mock_db, new_hash, old_hash, expected_result
+ mocker, new_hash, old_hash, expected_result
):
"""
Test if check_if_update_local_file() correctly detects
if we should update a local file based on the file hash.
"""
- update_manager = ModuleFactory().create_update_manager_obj(mock_db)
+ update_manager = ModuleFactory().create_update_manager_obj()
mocker.patch(
"slips_files.common.slips_utils.Utils.get_sha256_hash",
return_value=new_hash,
)
- mock_db.get_TI_file_info.return_value = {"hash": old_hash}
+ update_manager.db.get_TI_file_info.return_value = {"hash": old_hash}
file_path = "path/to/my/file.txt"
assert (
@@ -135,9 +138,9 @@ def test_check_if_update_local_file(
("", {}),
],
)
-def test_get_feed_details(mocker, mock_db, mock_data, expected_feeds):
+def test_get_feed_details(mocker, mock_data, expected_feeds):
"""Test get_feed_details with different file contents."""
- update_manager = ModuleFactory().create_update_manager_obj(mock_db)
+ update_manager = ModuleFactory().create_update_manager_obj()
mock_feeds_file = mock_open(read_data=mock_data)
mocker.patch("builtins.open", mock_feeds_file)
feeds = update_manager.get_feed_details("path/to/feeds")
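
# Several tests in this file feed file contents to the code under test with
# unittest.mock.mock_open instead of touching the filesystem. A self-contained
# example of that pattern; read_first_line() is a hypothetical helper used
# only for illustration, not Slips code.
from unittest.mock import mock_open, patch


def read_first_line(path):
    with open(path) as f:
        return f.readline().strip()


def test_read_first_line_with_mock_open():
    fake_file = mock_open(read_data="https://example.com/feed.txt,medium,['tag1']\n")
    with patch("builtins.open", fake_file):
        first = read_first_line("path/to/feeds")
    assert first == "https://example.com/feed.txt,medium,['tag1']"
    fake_file.assert_called_once_with("path/to/feeds")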
@@ -171,22 +174,24 @@ def test_get_feed_details(mocker, mock_db, mock_data, expected_feeds):
),
],
)
-def test_log(mock_db, message, expected_call_args):
+def test_log(message, expected_call_args):
"""Test the log function with different message types."""
mock_observer = Mock()
- update_manager = ModuleFactory().create_update_manager_obj(mock_db)
+ update_manager = ModuleFactory().create_update_manager_obj()
update_manager.notify_observers = mock_observer
update_manager.log(message)
mock_observer.assert_called_once_with(expected_call_args)
-def test_download_file(mocker, mock_db):
+def test_download_file(mocker):
"""Test download_file with a successful request."""
url = "https://example.com/file.txt"
mock_requests = mocker.patch("requests.get")
mock_requests.return_value.status_code = 200
mock_requests.return_value.text = "file content"
- update_manager = ModuleFactory().create_update_manager_obj(mock_db)
+ update_manager = ModuleFactory().create_update_manager_obj()
response = update_manager.download_file(url)
mock_requests.assert_called_once_with(url, timeout=5)
@@ -226,13 +231,13 @@ def test_download_file(mocker, mock_db):
),
],
)
-def test_read_ports_info(mocker, mock_db, tmp_path, test_data, expected_calls):
+def test_read_ports_info(mocker, tmp_path, test_data, expected_calls):
"""Test read_ports_info with different file contents."""
- update_manager = ModuleFactory().create_update_manager_obj(mock_db)
+ update_manager = ModuleFactory().create_update_manager_obj()
mocker.patch("builtins.open", mock_open(read_data=test_data))
update_manager.read_ports_info(str(tmp_path / "ports_info.csv"))
for call in expected_calls:
- mock_db.set_organization_of_port.assert_any_call(*call)
+ update_manager.db.set_organization_of_port.assert_any_call(*call)
@pytest.mark.parametrize(
@@ -256,23 +261,25 @@ def test_read_ports_info(mocker, mock_db, tmp_path, test_data, expected_calls):
],
)
def test_update_local_file(
- mocker, mock_db, tmp_path, test_data, file_name, expected_db_call
+ mocker, tmp_path, test_data, file_name, expected_db_call
):
"""Test update_local_file with a valid file."""
- update_manager = ModuleFactory().create_update_manager_obj(mock_db)
+ update_manager = ModuleFactory().create_update_manager_obj()
update_manager.new_hash = "test_hash"
mocker.patch("builtins.open", mock_open(read_data=test_data))
result = update_manager.update_local_file(str(tmp_path / file_name))
- mock_db.set_TI_file_info.assert_called_once_with(
+ update_manager.db.set_TI_file_info.assert_called_once_with(
str(tmp_path / file_name), {"hash": "test_hash"}
)
assert result is True
-def test_check_if_update_online_whitelist_download_updated(mocker, mock_db):
+def test_check_if_update_online_whitelist_download_updated(mocker):
"""Update period passed, download succeeds."""
- mock_db.get_TI_file_info.return_value = {"time": 0}
- update_manager = ModuleFactory().create_update_manager_obj(mock_db)
+ update_manager = ModuleFactory().create_update_manager_obj()
+ update_manager.db.get_TI_file_info.return_value = {"time": 0}
update_manager.online_whitelist = "https://example.com/whitelist.txt"
update_manager.download_file = Mock(return_value=Mock(status_code=200))
@@ -280,20 +287,20 @@ def test_check_if_update_online_whitelist_download_updated(mocker, mock_db):
result = update_manager.check_if_update_online_whitelist()
assert result is True
- mock_db.set_TI_file_info.assert_called_once_with(
+ update_manager.db.set_TI_file_info.assert_called_once_with(
"tranco_whitelist", {"time": mocker.ANY}
)
assert "tranco_whitelist" in update_manager.responses
-def test_check_if_update_online_whitelist_not_updated(mock_db):
+def test_check_if_update_online_whitelist_not_updated():
"""Update period hasn't passed - no update needed."""
- update_manager = ModuleFactory().create_update_manager_obj(mock_db)
+ update_manager = ModuleFactory().create_update_manager_obj()
update_manager.online_whitelist = "https://example.com/whitelist.txt"
- mock_db.get_TI_file_info.return_value = {"time": time.time()}
+ update_manager.db.get_TI_file_info.return_value = {"time": time.time()}
result = update_manager.check_if_update_online_whitelist()
assert result is False
- mock_db.set_TI_file_info.assert_not_called()
+ update_manager.db.set_TI_file_info.assert_not_called()
@pytest.mark.parametrize(
@@ -308,13 +315,13 @@ def test_check_if_update_online_whitelist_not_updated(mock_db):
({}, False),
],
)
-def test_get_last_modified(mocker, mock_db, headers, expected_last_modified):
+def test_get_last_modified(mocker, headers, expected_last_modified):
"""
Test get_last_modified() with different scenarios:
"""
mock_response = mocker.Mock()
mock_response.headers = headers
- update_manager = ModuleFactory().create_update_manager_obj(mock_db)
+ update_manager = ModuleFactory().create_update_manager_obj()
assert (
update_manager.get_last_modified(mock_response)
== expected_last_modified
@@ -333,21 +340,21 @@ def test_get_last_modified(mocker, mock_db, headers, expected_last_modified):
({}, False),
],
)
-def test_get_e_tag(mocker, mock_db, headers, expected_etag):
+def test_get_e_tag(mocker, headers, expected_etag):
"""
Test get_e_tag() with different scenarios:
"""
- update_manager = ModuleFactory().create_update_manager_obj(mock_db)
+ update_manager = ModuleFactory().create_update_manager_obj()
mock_response = mocker.Mock()
mock_response.headers = headers
assert update_manager.get_e_tag(mock_response) == expected_etag
-def test_write_file_to_disk(mocker, mock_db, tmp_path):
+def test_write_file_to_disk(mocker, tmp_path):
"""
Test write_file_to_disk() by writing content to a temporary file.
"""
- update_manager = ModuleFactory().create_update_manager_obj(mock_db)
+ update_manager = ModuleFactory().create_update_manager_obj()
mock_response = mocker.Mock()
mock_response.text = "test content"
file_path = tmp_path / "test_file.txt"
@@ -358,10 +365,10 @@ def test_write_file_to_disk(mocker, mock_db, tmp_path):
assert f.read() == "test content"
-def test_delete_old_source_ips(mock_db):
+def test_delete_old_source_ips():
"""Test delete_old_source_IPs."""
- update_manager = ModuleFactory().create_update_manager_obj(mock_db)
- mock_db.get_IPs_in_IoC.return_value = {
+ update_manager = ModuleFactory().create_update_manager_obj()
+ update_manager.db.get_IPs_in_IoC.return_value = {
"1.2.3.4": json.dumps(
{"description": "old IP", "source": "old_file.txt"}
),
@@ -370,13 +377,17 @@ def test_delete_old_source_ips(mock_db):
),
}
update_manager.delete_old_source_IPs("old_file.txt")
- (mock_db.delete_ips_from_IoC_ips.assert_called_once_with(["1.2.3.4"]))
+    update_manager.db.delete_ips_from_IoC_ips.assert_called_once_with(
+        ["1.2.3.4"]
+    )
-def test_delete_old_source_domains(mock_db):
+def test_delete_old_source_domains():
"""Test delete_old_source_Domains."""
- update_manager = ModuleFactory().create_update_manager_obj(mock_db)
- mock_db.get_Domains_in_IoC.return_value = {
+ update_manager = ModuleFactory().create_update_manager_obj()
+ update_manager.db.get_Domains_in_IoC.return_value = {
"olddomain.com": json.dumps(
{"description": "old domain", "source": "old_file.txt"}
),
@@ -387,12 +398,14 @@ def test_delete_old_source_domains(mock_db):
update_manager.delete_old_source_Domains("old_file.txt")
-def test_update_riskiq_feed(mocker, mock_db):
+def test_update_riskiq_feed(mocker):
"""
Test update_riskiq_feed with a
successful request and valid data.
"""
- update_manager = ModuleFactory().create_update_manager_obj(mock_db)
+ update_manager = ModuleFactory().create_update_manager_obj()
update_manager.riskiq_email = "test@example.com"
update_manager.riskiq_key = "test_key"
mock_response = mocker.Mock()
@@ -403,7 +416,7 @@ def test_update_riskiq_feed(mocker, mock_db):
}
mocker.patch("requests.get", return_value=mock_response)
result = update_manager.update_riskiq_feed()
- mock_db.add_domains_to_IoC.assert_called_once_with(
+ update_manager.db.add_domains_to_IoC.assert_called_once_with(
{
"malicious.com": json.dumps(
{
@@ -413,17 +426,19 @@ def test_update_riskiq_feed(mocker, mock_db):
)
}
)
- mock_db.set_TI_file_info.assert_called_once_with(
+ update_manager.db.set_TI_file_info.assert_called_once_with(
"riskiq_domains", {"time": mocker.ANY}
)
assert result is True
-def test_update_riskiq_feed_invalid_api_key(mocker, mock_db):
+def test_update_riskiq_feed_invalid_api_key(mocker):
"""
Test when RiskIQ API returns an error (e.g., invalid API key)
"""
- update_manager = ModuleFactory().create_update_manager_obj(mock_db)
+ update_manager = ModuleFactory().create_update_manager_obj()
update_manager.riskiq_email = "test@example.com"
update_manager.riskiq_key = "invalid_key"
mock_response = mocker.Mock()
@@ -432,13 +447,15 @@ def test_update_riskiq_feed_invalid_api_key(mocker, mock_db):
result = update_manager.update_riskiq_feed()
assert result is False
- mock_db.add_domains_to_IoC.assert_not_called()
- mock_db.set_TI_file_info.assert_not_called()
+ update_manager.db.add_domains_to_IoC.assert_not_called()
+ update_manager.db.set_TI_file_info.assert_not_called()
-def test_update_riskiq_feed_request_exception(mocker, mock_db):
+def test_update_riskiq_feed_request_exception(mocker):
"""Test when there's an error during the request to RiskIQ."""
- update_manager = ModuleFactory().create_update_manager_obj(mock_db)
+ update_manager = ModuleFactory().create_update_manager_obj()
update_manager.riskiq_email = "test@example.com"
update_manager.riskiq_key = "test_key"
mocker.patch(
@@ -448,16 +465,16 @@ def test_update_riskiq_feed_request_exception(mocker, mock_db):
result = update_manager.update_riskiq_feed()
assert result is False
- mock_db.add_domains_to_IoC.assert_not_called()
- mock_db.set_TI_file_info.assert_not_called()
+ update_manager.db.add_domains_to_IoC.assert_not_called()
+ update_manager.db.set_TI_file_info.assert_not_called()
-def test_delete_old_source_data_from_database(mock_db):
+def test_delete_old_source_data_from_database():
"""
Test delete_old_source_data_from_database for deleting old IPs and domains.
"""
- update_manager = ModuleFactory().create_update_manager_obj(mock_db)
- mock_db.get_IPs_in_IoC.return_value = {
+ update_manager = ModuleFactory().create_update_manager_obj()
+ update_manager.db.get_IPs_in_IoC.return_value = {
"1.2.3.4": json.dumps(
{"description": "old IP", "source": "old_file.txt"}
),
@@ -465,7 +482,7 @@ def test_delete_old_source_data_from_database(mock_db):
{"description": "new IP", "source": "new_file.txt"}
),
}
- mock_db.get_Domains_in_IoC.return_value = {
+ update_manager.db.get_Domains_in_IoC.return_value = {
"olddomain.com": json.dumps(
{"description": "old domain", "source": "old_file.txt"}
),
@@ -474,8 +491,10 @@ def test_delete_old_source_data_from_database(mock_db):
),
}
update_manager.delete_old_source_data_from_database("old_file.txt")
- mock_db.delete_ips_from_IoC_ips.assert_called_once_with(["1.2.3.4"])
- mock_db.delete_domains_from_IoC_domains.assert_called_once_with(
+ update_manager.db.delete_ips_from_IoC_ips.assert_called_once_with(
+ ["1.2.3.4"]
+ )
+ update_manager.db.delete_domains_from_IoC_domains.assert_called_once_with(
["olddomain.com"]
)
@@ -491,13 +510,11 @@ def test_delete_old_source_data_from_database(mock_db):
("#,ip,date", None),
],
)
-def test_get_description_column_index(
- mock_db, header, expected_description_column
-):
+def test_get_description_column_index(header, expected_description_column):
"""
Test get_description_column() with different header formats.
"""
- update_manager = ModuleFactory().create_update_manager_obj(mock_db)
+ update_manager = ModuleFactory().create_update_manager_obj()
description_column = update_manager.get_description_column_index(header)
assert description_column == expected_description_column
@@ -516,11 +533,11 @@ def test_get_description_column_index(
("1.2.3.4,Test description", None),
],
)
-def test_is_ignored_line(mock_db, line, expected_result):
+def test_is_ignored_line(line, expected_result):
"""
Test is_ignored_line() with different line types.
"""
- update_manager = ModuleFactory().create_update_manager_obj(mock_db)
+ update_manager = ModuleFactory().create_update_manager_obj()
assert update_manager.is_ignored_line(line) is expected_result
@@ -546,7 +563,7 @@ def test_parse_line(
"""
Test parse_line() with different line formats.
"""
- update_manager = ModuleFactory().create_update_manager_obj(Mock())
+ update_manager = ModuleFactory().create_update_manager_obj()
amount_of_columns, line_fields, sep = (
update_manager.get_feed_fields_and_sep(line, "")
)
@@ -565,11 +582,11 @@ def test_parse_line(
(["invalid_data", "Test description"], "Error"),
],
)
-def test_get_data_column(mock_db, line_fields, expected_data_column):
+def test_get_data_column(line_fields, expected_data_column):
"""
Test get_data_column with different input scenarios:
"""
- update_manager = ModuleFactory().create_update_manager_obj(mock_db)
+ update_manager = ModuleFactory().create_update_manager_obj()
amount_of_columns = 2
file_path = "test_file.txt"
data_column = update_manager.get_data_column(
@@ -616,7 +633,6 @@ def test_get_data_column(mock_db, line_fields, expected_data_column):
],
)
def test_extract_ioc_from_line(
- mock_db,
line,
line_fields,
separator,
@@ -629,7 +645,7 @@ def test_extract_ioc_from_line(
"""
Test extract_ioc_from_line with different scenarios:
"""
- update_manager = ModuleFactory().create_update_manager_obj(mock_db)
+ update_manager = ModuleFactory().create_update_manager_obj()
data, description = update_manager.extract_ioc_from_line(
line,
line_fields,
@@ -642,9 +658,9 @@ def test_extract_ioc_from_line(
assert description == expected_description
-def test_add_to_ip_ctr_new_ip(mock_db):
+def test_add_to_ip_ctr_new_ip():
"""Test add_to_ip_ctr with a new IP address."""
- update_manager = ModuleFactory().create_update_manager_obj(mock_db)
+ update_manager = ModuleFactory().create_update_manager_obj()
ip = "1.2.3.4"
blacklist = "test_blacklist.txt"
update_manager.add_to_ip_ctr(ip, blacklist)
@@ -655,12 +671,14 @@ def test_add_to_ip_ctr_new_ip(mock_db):
@patch("os.path.getsize", return_value=10)
-def test_parse_ti_feed_valid_data(mocker, mock_db):
+def test_parse_ti_feed_valid_data(mocker):
"""
Test parse_ti_feed with valid data
containing both IP and domain.
"""
- update_manager = ModuleFactory().create_update_manager_obj(mock_db)
+ update_manager = ModuleFactory().create_update_manager_obj()
update_manager.url_feeds = {
"https://example.com/test.txt": {
"threat_level": "low",
@@ -674,7 +692,7 @@ def test_parse_ti_feed_valid_data(mocker, mock_db):
result = update_manager.parse_ti_feed(
"https://example.com/test.txt", "test.txt"
)
- mock_db.add_ips_to_IoC.assert_any_call(
+ update_manager.db.add_ips_to_IoC.assert_any_call(
{
"1.2.3.4": '{"description": "Test description", '
'"source": "test.txt", '
@@ -682,7 +700,7 @@ def test_parse_ti_feed_valid_data(mocker, mock_db):
'"tags": ["tag3"]}'
}
)
- mock_db.add_domains_to_IoC.assert_any_call(
+ update_manager.db.add_domains_to_IoC.assert_any_call(
{
"example.com": '{"description": "Another description",'
' "source": "test.txt",'
@@ -693,9 +711,9 @@ def test_parse_ti_feed_valid_data(mocker, mock_db):
assert result is True
-def test_parse_ti_feed_invalid_data(mocker, mock_db, tmp_path):
+def test_parse_ti_feed_invalid_data(mocker, tmp_path):
"""Test parse_ti_feed with invalid data."""
- update_manager = ModuleFactory().create_update_manager_obj(mock_db)
+ update_manager = ModuleFactory().create_update_manager_obj()
update_manager.url_feeds = {
"https://example.com/invalid.txt": {
"threat_level": "low",
@@ -709,8 +727,8 @@ def test_parse_ti_feed_invalid_data(mocker, mock_db, tmp_path):
result = update_manager.parse_ti_feed(
"https://example.com/invalid.txt", str(tmp_path / "invalid.txt")
)
- mock_db.add_ips_to_IoC.assert_not_called()
- mock_db.add_domains_to_IoC.assert_not_called()
+ update_manager.db.add_ips_to_IoC.assert_not_called()
+ update_manager.db.add_domains_to_IoC.assert_not_called()
assert result is False
@@ -723,12 +741,12 @@ def test_parse_ti_feed_invalid_data(mocker, mock_db, tmp_path):
],
)
def test_check_if_update_org(
- mocker, mock_db, file_content, cached_hash, expected_result
+ mocker, file_content, cached_hash, expected_result
):
"""Test check_if_update_org with different file and cache scenarios."""
- update_manager = ModuleFactory().create_update_manager_obj(mock_db)
+ update_manager = ModuleFactory().create_update_manager_obj()
- mock_db.get_TI_file_info.return_value = {"hash": cached_hash}
+ update_manager.db.get_TI_file_info.return_value = {"hash": cached_hash}
mocker.patch(
"slips_files.common." "slips_utils.Utils.get_sha256_hash",
return_value=hash(file_content.encode()),
@@ -747,13 +765,11 @@ def test_check_if_update_org(
(500, False, 0),
],
)
-def test_update_mac_db(
- mocker, mock_db, status_code, expected_result, db_call_count
-):
+def test_update_mac_db(mocker, status_code, expected_result, db_call_count):
"""
Test update_mac_db with different response status codes.
"""
- update_manager = ModuleFactory().create_update_manager_obj(mock_db)
+ update_manager = ModuleFactory().create_update_manager_obj()
update_manager.mac_db_link = "https://example.com/mac_db.json"
mock_response = mocker.Mock()
@@ -766,14 +782,16 @@ def test_update_mac_db(
result = update_manager.update_mac_db()
assert result is expected_result
- assert mock_db.set_TI_file_info.call_count == db_call_count
+ assert update_manager.db.set_TI_file_info.call_count == db_call_count
-def test_shutdown_gracefully(mocker, mock_db):
+def test_shutdown_gracefully(mocker):
"""
Test shutdown_gracefully to ensure timers are canceled.
"""
- update_manager = ModuleFactory().create_update_manager_obj(mock_db)
+ update_manager = ModuleFactory().create_update_manager_obj()
update_manager.timer_manager = mocker.Mock()
update_manager.mac_db_update_manager = mocker.Mock()
update_manager.online_whitelist_update_timer = mocker.Mock()
@@ -813,11 +831,11 @@ def test_shutdown_gracefully(mocker, mock_db):
),
],
)
-def test_print_duplicate_ip_summary(capsys, mock_db, ips_ctr, expected_output):
+def test_print_duplicate_ip_summary(capsys, ips_ctr, expected_output):
"""
Test print_duplicate_ip_summary with different IP repetition scenarios.
"""
- update_manager = ModuleFactory().create_update_manager_obj(mock_db)
+ update_manager = ModuleFactory().create_update_manager_obj()
update_manager.ips_ctr = ips_ctr
update_manager.first_time_reading_files = True
update_manager.print_duplicate_ip_summary()
@@ -825,11 +843,11 @@ def test_print_duplicate_ip_summary(capsys, mock_db, ips_ctr, expected_output):
assert captured.out == expected_output
-def test_parse_ssl_feed_valid_data(mocker, mock_db, tmp_path):
+def test_parse_ssl_feed_valid_data(mocker, tmp_path):
"""
Test parse_ssl_feed with valid data containing multiple SSL fingerprints.
"""
- update_manager = ModuleFactory().create_update_manager_obj(mock_db)
+ update_manager = ModuleFactory().create_update_manager_obj()
update_manager.ssl_feeds = {
"https://example.com/test_ssl_feed.csv": {
"threat_level": "medium",
@@ -845,7 +863,7 @@ def test_parse_ssl_feed_valid_data(mocker, mock_db, tmp_path):
str(tmp_path / "test_ssl_feed.csv"),
)
- mock_db.add_ssl_sha1_to_IoC.assert_called_once_with(
+ update_manager.db.add_ssl_sha1_to_IoC.assert_called_once_with(
{
"aaabbbcccdddeeeeffff00001111222233334444": json.dumps(
{
@@ -860,11 +878,11 @@ def test_parse_ssl_feed_valid_data(mocker, mock_db, tmp_path):
assert result is True
-def test_parse_ssl_feed_no_valid_fingerprints(mocker, mock_db, tmp_path):
+def test_parse_ssl_feed_no_valid_fingerprints(mocker, tmp_path):
"""
Test parse_ssl_feed with a file that doesn't contain any valid SSL fingerprints.
"""
- update_manager = ModuleFactory().create_update_manager_obj(mock_db)
+ update_manager = ModuleFactory().create_update_manager_obj()
update_manager.ssl_feeds = {
"https://example.com/test_ssl_feed.csv": {
"threat_level": "medium",
@@ -880,5 +898,5 @@ def test_parse_ssl_feed_no_valid_fingerprints(mocker, mock_db, tmp_path):
str(tmp_path / "test_ssl_feed.csv"),
)
- mock_db.add_ssl_sha1_to_IoC.assert_not_called()
+ update_manager.db.add_ssl_sha1_to_IoC.assert_not_called()
assert result is False
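
# The check_if_update tests above revolve around an ETag / Last-Modified style
# "should we re-download this feed?" decision. The helper below is a
# simplified, hypothetical version of that logic (not the real UpdateManager),
# shown only to make the cached-header flow explicit.
import requests


def needs_update(url, cached_info, timeout=5):
    """Return True when the remote feed looks newer than the cached copy."""
    try:
        response = requests.get(url, timeout=timeout)
    except requests.exceptions.RequestException:
        return False
    if response.status_code != 200:
        return False
    etag = response.headers.get("ETag")
    if etag is not None:
        return etag != cached_info.get("e-tag")
    last_modified = response.headers.get("Last-Modified")
    if last_modified is not None:
        return last_modified != cached_info.get("Last-Modified")
    # no caching headers at all: be conservative and re-download
    return True


def test_needs_update_same_etag(monkeypatch):
    class FakeResponse:
        status_code = 200
        headers = {"ETag": "1234"}

    monkeypatch.setattr(requests, "get", lambda *args, **kwargs: FakeResponse())
    assert needs_update("https://example.com/feed.txt", {"e-tag": "1234"}) is False
    assert needs_update("https://example.com/feed.txt", {"e-tag": "9999"}) is True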
diff --git a/tests/test_urlhaus.py b/tests/test_urlhaus.py
index 32b91ab06..d3ca79d2a 100644
--- a/tests/test_urlhaus.py
+++ b/tests/test_urlhaus.py
@@ -30,16 +30,14 @@
],
)
@patch("modules.threat_intelligence.urlhaus.requests.session")
-def test_make_urlhaus_request(
- mock_response, mock_db, to_lookup, uri, status_code
-):
+def test_make_urlhaus_request(mock_response, to_lookup, uri, status_code):
"""Test successful requests to the make_urlhaus_request function."""
mock_response_instance = Mock()
mock_response.return_value = mock_response_instance
mock_response = Mock(status_code=status_code)
mock_response_instance.post.return_value = mock_response
- urlhaus = ModuleFactory().create_urlhaus_obj(mock_db)
+ urlhaus = ModuleFactory().create_urlhaus_obj()
urlhaus.urlhaus_session = mock_response_instance
response = urlhaus.make_urlhaus_request(to_lookup)
@@ -59,9 +57,7 @@ def test_make_urlhaus_request(
],
)
@patch("modules.threat_intelligence.urlhaus.requests.session")
-def test_make_urlhaus_request_connection_error(
- mock_response, mock_db, to_lookup
-):
+def test_make_urlhaus_request_connection_error(mock_response, to_lookup):
"""Test the ConnectionError handling in make_urlhaus_request."""
mock_response_instance = Mock()
mock_response.return_value = mock_response_instance
@@ -69,7 +65,7 @@ def test_make_urlhaus_request_connection_error(
requests.exceptions.ConnectionError
)
- urlhaus = ModuleFactory().create_urlhaus_obj(mock_db)
+ urlhaus = ModuleFactory().create_urlhaus_obj()
urlhaus.urlhaus_session = mock_response_instance
response = urlhaus.make_urlhaus_request(to_lookup)
@@ -77,11 +73,13 @@ def test_make_urlhaus_request_connection_error(
@patch("modules.threat_intelligence.urlhaus.requests.session")
-def test_create_urlhaus_session(mock_response, mock_db):
+def test_create_urlhaus_session(mock_response):
"""Verifies session creation with successful setup."""
mock_response_instance = Mock()
mock_response.return_value = mock_response_instance
- urlhaus = ModuleFactory().create_urlhaus_obj(mock_db)
+ urlhaus = ModuleFactory().create_urlhaus_obj()
mock_response.assert_called_once()
assert urlhaus.urlhaus_session == mock_response_instance
assert urlhaus.urlhaus_session.verify is True
@@ -166,12 +164,12 @@ def test_create_urlhaus_session(mock_response, mock_db):
)
@patch.object(URLhaus, "make_urlhaus_request")
def test_parse_urlhaus_responses(
- mock_request, mock_db, ioc_type, mock_response, expected_result, ioc_value
+ mock_request, ioc_type, mock_response, expected_result, ioc_value
):
"""Test parsing responses from URLhaus for different IOC types."""
mock_request.return_value.status_code = 200
mock_request.return_value.text = json.dumps(mock_response)
- urlhaus = ModuleFactory().create_urlhaus_obj(mock_db)
+ urlhaus = ModuleFactory().create_urlhaus_obj()
parsing_functions = {
"url": urlhaus.parse_urlhaus_url_response,
"md5_hash": urlhaus.parse_urlhaus_md5_response,
@@ -277,7 +275,6 @@ def test_parse_urlhaus_responses(
@patch("modules.threat_intelligence.urlhaus.URLhaus.make_urlhaus_request")
def test_urlhaus_lookup(
mock_request,
- mock_db,
ioc,
type_of_ioc,
expected_result,
@@ -291,13 +288,15 @@ def test_urlhaus_lookup(
mock_response.status_code = mock_status_code
mock_response.text = json.dumps(mock_response_data)
mock_request.return_value = mock_response
- urlhaus = ModuleFactory().create_urlhaus_obj(mock_db)
+ urlhaus = ModuleFactory().create_urlhaus_obj()
result = urlhaus.urlhaus_lookup(ioc, type_of_ioc)
assert result == expected_result
@patch("modules.threat_intelligence.urlhaus.URLhaus.make_urlhaus_request")
-def test_urlhaus_lookup_json_decode_error(mock_request, mock_db):
+def test_urlhaus_lookup_json_decode_error(mock_request):
"""
Test the case when the response from the API is not valid JSON.
"""
@@ -306,7 +305,7 @@ def test_urlhaus_lookup_json_decode_error(mock_request, mock_db):
mock_response.text = "Invalid JSON"
mock_request.return_value = mock_response
- urlhaus = ModuleFactory().create_urlhaus_obj(mock_db)
+ urlhaus = ModuleFactory().create_urlhaus_obj()
result = urlhaus.urlhaus_lookup("https://example.com", "url")
assert result is None
@@ -328,8 +327,8 @@ def test_urlhaus_lookup_json_decode_error(mock_request, mock_db):
),
],
)
-def test_get_threat_level(mock_db, url_info, expected_threat_level):
- urlhaus = ModuleFactory().create_urlhaus_obj(mock_db)
+def test_get_threat_level(url_info, expected_threat_level):
+ urlhaus = ModuleFactory().create_urlhaus_obj()
assert urlhaus.get_threat_level(url_info) == expected_threat_level
@@ -363,7 +362,8 @@ def test_get_threat_level(mock_db, url_info, expected_threat_level):
],
),
(
- # Testcase 2: Some fields missing in flow, threat level from VT NOT available
+ # Testcase 2: Some fields missing in flow, threat level from VT
+ # NOT available
{
"flow": {
"daddr": "8.8.8.8",
@@ -385,17 +385,17 @@ def test_get_threat_level(mock_db, url_info, expected_threat_level):
],
)
def test_set_evidence_malicious_hash(
- mock_db, file_info, expected_threat_level, expected_description_snippets
+ file_info, expected_threat_level, expected_description_snippets
):
"""
Test the `set_evidence_malicious_hash`.
"""
- urlhaus = ModuleFactory().create_urlhaus_obj(mock_db)
+ urlhaus = ModuleFactory().create_urlhaus_obj()
urlhaus.set_evidence_malicious_hash(file_info)
- assert mock_db.set_evidence.call_count == 2
+ assert urlhaus.db.set_evidence.call_count == 2
- for call_args in mock_db.set_evidence.call_args_list:
+ for call_args in urlhaus.db.set_evidence.call_args_list:
evidence = call_args[0][0]
assert evidence.threat_level == expected_threat_level
for snippet in expected_description_snippets:
@@ -429,13 +429,13 @@ def test_set_evidence_malicious_hash(
],
)
def test_set_evidence_malicious_url(
- mock_db, url_info, expected_threat_level, expected_description
+ url_info, expected_threat_level, expected_description
):
"""
Tests the set_evidence_malicious_url method
with different URL info inputs.
"""
- urlhaus = ModuleFactory().create_urlhaus_obj(mock_db)
+ urlhaus = ModuleFactory().create_urlhaus_obj()
daddr = "1.2.3.4"
uid = "1234"
timestamp = "2023-11-01 12:00:00"
@@ -446,8 +446,8 @@ def test_set_evidence_malicious_url(
daddr, url_info, uid, timestamp, profileid, twid
)
- assert mock_db.set_evidence.call_count == 2
- call_args_list = mock_db.set_evidence.call_args_list
+ assert urlhaus.db.set_evidence.call_count == 2
+ call_args_list = urlhaus.db.set_evidence.call_args_list
evidence_objects = [args[0][0] for args in call_args_list]
for evidence in evidence_objects:
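
# The URLhaus tests patch requests.session so the module under test talks to a
# Mock instead of the network. A stand-alone version of that pattern follows;
# MiniUrlhaus is a hypothetical stand-in for the Slips URLhaus class, and the
# endpoint URL is only illustrative.
import requests
from unittest.mock import Mock, patch


class MiniUrlhaus:
    URL = "https://urlhaus-api.abuse.ch/v1/url/"

    def __init__(self):
        self.urlhaus_session = requests.session()
        self.urlhaus_session.verify = True

    def lookup(self, url):
        try:
            response = self.urlhaus_session.post(
                self.URL, data={"url": url}, timeout=5
            )
        except requests.exceptions.ConnectionError:
            return None
        return response if response.status_code == 200 else None


@patch("requests.session")
def test_lookup_handles_connection_error(mock_session_factory):
    session = Mock()
    session.post.side_effect = requests.exceptions.ConnectionError
    mock_session_factory.return_value = session
    urlhaus = MiniUrlhaus()
    assert urlhaus.lookup("https://example.com") is None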
diff --git a/tests/test_vertical_portscans.py b/tests/test_vertical_portscans.py
index 654b617d9..76f6fa2a9 100644
--- a/tests/test_vertical_portscans.py
+++ b/tests/test_vertical_portscans.py
@@ -10,12 +10,12 @@ def get_random_uid():
return base64.b64encode(binascii.b2a_hex(os.urandom(9))).decode("utf-8")
-def not_enough_dports_to_reach_the_threshold(mock_db):
+def not_enough_dports_to_reach_the_threshold():
"""
returns a dict with conns to dport that are not enough
to reach the minimum dports to trigger the first scan
"""
- module = ModuleFactory().create_vertical_portscan_obj(mock_db)
+ module = ModuleFactory().create_vertical_portscan_obj()
# get a random list of ints(ports) that are below the threshold
# Generate a random number between 0 and threshold
@@ -35,12 +35,12 @@ def not_enough_dports_to_reach_the_threshold(mock_db):
return res
-def enough_dports_to_reach_the_threshold(mock_db):
+def enough_dports_to_reach_the_threshold():
"""
     returns conns to dports that are enough
     to reach the minimum dports to trigger the first scan
"""
- module = ModuleFactory().create_vertical_portscan_obj(mock_db)
+ module = ModuleFactory().create_vertical_portscan_obj()
     # get a random list of ints(ports) that are enough to reach the threshold
     # Generate a random number of dports at or above the threshold
@@ -60,14 +60,14 @@ def enough_dports_to_reach_the_threshold(mock_db):
return res
-def not_enough_dports_to_combine_1_evidence(mock_db):
+def not_enough_dports_to_combine_1_evidence():
"""
     returns dports that are not enough to combine into one evidence.
     any number of dports within the range threshold -> threshold + 15
     is fine here, i.e. won't be enough to combine
"""
- module = ModuleFactory().create_vertical_portscan_obj(mock_db)
+ module = ModuleFactory().create_vertical_portscan_obj()
     # get a random list of ints(ports) within threshold -> threshold + 15
     # Generate a random number of dports just above the threshold
@@ -94,17 +94,15 @@ def not_enough_dports_to_combine_1_evidence(mock_db):
(enough_dports_to_reach_the_threshold, True),
],
)
-def test_min_dports_threshold(
- get_test_conns, expected_return_val: bool, mock_db
-):
- vertical_ps = ModuleFactory().create_vertical_portscan_obj(mock_db)
+def test_min_dports_threshold(get_test_conns, expected_return_val: bool):
+ vertical_ps = ModuleFactory().create_vertical_portscan_obj()
profileid = "profile_1.1.1.1"
timewindow = "timewindow0"
dstip = "8.8.8.8"
- conns: dict = get_test_conns(mock_db)
- mock_db.get_data_from_profile_tw.return_value = conns
+ conns: dict = get_test_conns()
+ vertical_ps.db.get_data_from_profile_tw.return_value = conns
cache_key = vertical_ps.get_twid_identifier(profileid, timewindow, dstip)
amount_of_dports = len(conns[dstip]["dstports"])
@@ -145,7 +143,7 @@ def test_check_if_enough_dports_to_trigger_an_evidence(
timewindow = "timewindow0"
dstip = "8.8.8.8"
- vertical_ps = ModuleFactory().create_vertical_portscan_obj(mock_db)
+ vertical_ps = ModuleFactory().create_vertical_portscan_obj()
key: str = vertical_ps.get_twid_identifier(profileid, timewindow, dstip)
vertical_ps.cached_thresholds_per_tw[key] = ports_reported_last_evidence
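
# The helpers above build fake "conns to dstports" dictionaries that sit just
# below or just above the port-scan threshold. A compact, self-contained
# version of that idea; the threshold constant and the dict shape are
# assumptions for illustration, the real values come from the Slips module.
import random

PORT_SCAN_MINIMUM_DPORTS = 5  # assumed threshold for this sketch


def fake_conns(dstip: str, enough: bool) -> dict:
    """Build a dstports mapping with fewer/more distinct ports than the threshold."""
    if enough:
        amount = random.randint(
            PORT_SCAN_MINIMUM_DPORTS, PORT_SCAN_MINIMUM_DPORTS + 100
        )
    else:
        amount = random.randint(1, PORT_SCAN_MINIMUM_DPORTS - 1)
    dstports = {port: 1 for port in random.sample(range(1, 65536), amount)}
    return {dstip: {"dstports": dstports}}


def test_fake_conns_respects_threshold():
    below = fake_conns("8.8.8.8", enough=False)
    above = fake_conns("8.8.8.8", enough=True)
    assert len(below["8.8.8.8"]["dstports"]) < PORT_SCAN_MINIMUM_DPORTS
    assert len(above["8.8.8.8"]["dstports"]) >= PORT_SCAN_MINIMUM_DPORTS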
diff --git a/tests/test_virustotal.py b/tests/test_virustotal.py
index ff813f684..f925defbe 100644
--- a/tests/test_virustotal.py
+++ b/tests/test_virustotal.py
@@ -87,15 +87,15 @@ def get_allowed(quota):
@pytest.mark.dependency(name="sufficient_quota")
@pytest.mark.parametrize("ip", ["8.8.8.8"])
@valid_api_key
-def test_interpret_rsponse(ip, mock_db):
- virustotal = ModuleFactory().create_virustotal_obj(mock_db)
+def test_interpret_response(ip):
+ virustotal = ModuleFactory().create_virustotal_obj()
response = virustotal.api_query_(ip)
for ratio in virustotal.interpret_response(response):
- assert type(ratio) == float
+ assert isinstance(ratio, float)
@pytest.mark.dependency(depends=["sufficient_quota"])
@valid_api_key
-def test_get_domain_vt_data(mock_db):
- virustotal = ModuleFactory().create_virustotal_obj(mock_db)
+def test_get_domain_vt_data():
+ virustotal = ModuleFactory().create_virustotal_obj()
assert virustotal.get_domain_vt_data("google.com") is not False
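
# The assertion change above replaces "type(ratio) == float" with isinstance();
# a tiny illustration of why: isinstance() also accepts subclasses and is the
# form pycodestyle's E721 check recommends. Ratio is a made-up subclass used
# only for the example.
class Ratio(float):
    """A float subclass, e.g. a wrapped detection ratio."""


def test_isinstance_accepts_subclasses():
    score = Ratio(0.5)
    assert isinstance(score, float)  # passes for subclasses too
    assert type(score) is not float  # an exact-type check would reject it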
diff --git a/tests/test_whitelist.py b/tests/test_whitelist.py
index 60e369708..8041059d2 100644
--- a/tests/test_whitelist.py
+++ b/tests/test_whitelist.py
@@ -10,19 +10,22 @@
)
-def test_read_whitelist(mock_db):
+def test_read_whitelist():
"""
make sure the content of whitelists is read and stored properly
uses tests/test_whitelist.conf for testing
"""
- whitelist = ModuleFactory().create_whitelist_obj(mock_db)
- mock_db.get_whitelist.return_value = {}
+ whitelist = ModuleFactory().create_whitelist_obj()
+ whitelist.db.get_whitelist.return_value = {}
assert whitelist.parser.parse()
@pytest.mark.parametrize("org,asn", [("google", "AS6432")])
-def test_load_org_asn(org, asn, mock_db):
- whitelist = ModuleFactory().create_whitelist_obj(mock_db)
+def test_load_org_asn(
+ org,
+ asn,
+):
+ whitelist = ModuleFactory().create_whitelist_obj()
parsed_asn = whitelist.parser.load_org_asn(org)
assert parsed_asn is not False
assert asn in parsed_asn
@@ -32,11 +35,13 @@ def test_load_org_asn(org, asn, mock_db):
"slips_files.core.helpers.whitelist."
"whitelist_parser.WhitelistParser.load_org_ips"
)
-def test_load_org_ips(mock_load_org_ips, mock_db):
+def test_load_org_ips(mock_load_org_ips):
"""
Test load_org_IPs without modifying real files.
"""
- whitelist = ModuleFactory().create_whitelist_obj(mock_db)
+ whitelist = ModuleFactory().create_whitelist_obj()
mock_load_org_ips.return_value = {
"34": ["34.64.0.0/10"],
"216": ["216.58.192.0/19"],
@@ -60,15 +65,20 @@ def test_load_org_ips(mock_load_org_ips, mock_db):
("arp", True),
],
)
-def test_is_ignored_flow_type(flow_type, expected_result, mock_db):
- whitelist = ModuleFactory().create_whitelist_obj(mock_db)
+def test_is_ignored_flow_type(
+ flow_type,
+ expected_result,
+):
+ whitelist = ModuleFactory().create_whitelist_obj()
assert whitelist.match.is_ignored_flow_type(flow_type) == expected_result
-def test_get_src_domains_of_flow(mock_db):
- whitelist = ModuleFactory().create_whitelist_obj(mock_db)
- mock_db.get_ip_info.return_value = {"SNI": [{"server_name": "sni.com"}]}
- mock_db.get_dns_resolution.return_value = {
+def test_get_src_domains_of_flow():
+ whitelist = ModuleFactory().create_whitelist_obj()
+ whitelist.db.get_ip_info.return_value = {
+ "SNI": [{"server_name": "sni.com"}]
+ }
+ whitelist.db.get_dns_resolution.return_value = {
"domains": ["dns_resolution.com"]
}
flow = Mock()
@@ -87,8 +97,8 @@ def test_get_src_domains_of_flow(mock_db):
("dns", ["query.com"]),
],
)
-def test_get_dst_domains_of_flow(mock_db, flow_type, expected_result):
- whitelist = ModuleFactory().create_whitelist_obj(mock_db)
+def test_get_dst_domains_of_flow(flow_type, expected_result):
+ whitelist = ModuleFactory().create_whitelist_obj()
flow = Mock()
flow.type_ = flow_type
flow.server_name = "server_name"
@@ -110,9 +120,14 @@ def test_get_dst_domains_of_flow(mock_db, flow_type, expected_result):
("8.8.8.8", "google", {}, False), # no org ip info
],
)
-def test_is_ip_in_org(ip, org, org_ips, expected_result, mock_db):
- whitelist = ModuleFactory().create_whitelist_obj(mock_db)
- mock_db.get_org_IPs.return_value = org_ips
+def test_is_ip_in_org(
+ ip,
+ org,
+ org_ips,
+ expected_result,
+):
+ whitelist = ModuleFactory().create_whitelist_obj()
+ whitelist.db.get_org_IPs.return_value = org_ips
result = whitelist.org_analyzer.is_ip_in_org(ip, org)
assert result == expected_result
@@ -130,9 +145,14 @@ def test_is_ip_in_org(ip, org, org_ips, expected_result, mock_db):
), # no org domain info
],
)
-def test_is_domain_in_org(domain, org, org_domains, expected_result, mock_db):
- whitelist = ModuleFactory().create_whitelist_obj(mock_db)
- mock_db.get_org_info.return_value = org_domains
+def test_is_domain_in_org(
+ domain,
+ org,
+ org_domains,
+ expected_result,
+):
+ whitelist = ModuleFactory().create_whitelist_obj()
+ whitelist.db.get_org_info.return_value = org_domains
result = whitelist.org_analyzer.is_domain_in_org(domain, org)
assert result == expected_result
@@ -151,9 +171,9 @@ def test_is_domain_in_org(domain, org, org_domains, expected_result, mock_db):
],
)
def test_is_whitelisted_evidence(
- is_whitelisted_victim, is_whitelisted_attacker, expected_result, mock_db
+ is_whitelisted_victim, is_whitelisted_attacker, expected_result
):
- whitelist = ModuleFactory().create_whitelist_obj(mock_db)
+ whitelist = ModuleFactory().create_whitelist_obj()
whitelist.is_whitelisted_attacker = Mock()
whitelist.is_whitelisted_attacker.return_value = is_whitelisted_attacker
@@ -190,11 +210,10 @@ def test_profile_has_whitelisted_mac(
direction,
expected_result,
whitelisted_macs,
- mock_db,
):
- whitelist = ModuleFactory().create_whitelist_obj(mock_db)
- mock_db.get_mac_addr_from_profile.return_value = mac_address
- mock_db.get_whitelist.return_value = whitelisted_macs
+ whitelist = ModuleFactory().create_whitelist_obj()
+ whitelist.db.get_mac_addr_from_profile.return_value = mac_address
+ whitelist.db.get_whitelist.return_value = whitelisted_macs
assert (
whitelist.mac_analyzer.profile_has_whitelisted_mac(
profile_ip, direction, "both"
@@ -213,10 +232,8 @@ def test_profile_has_whitelisted_mac(
(Direction.DST, "dst", True),
],
)
-def test_matching_direction(
- direction, whitelist_direction, expected_result, mock_db
-):
- whitelist = ModuleFactory().create_whitelist_obj(mock_db)
+def test_matching_direction(direction, whitelist_direction, expected_result):
+ whitelist = ModuleFactory().create_whitelist_obj()
result = whitelist.match.direction(direction, whitelist_direction)
assert result == expected_result
@@ -250,14 +267,17 @@ def test_matching_direction(
),
],
)
-def test_is_part_of_a_whitelisted_org(ioc_data, expected_result, mock_db):
- whitelist = ModuleFactory().create_whitelist_obj(mock_db)
- mock_db.get_whitelist.return_value = {
+def test_is_part_of_a_whitelisted_org(
+ ioc_data,
+ expected_result,
+):
+ whitelist = ModuleFactory().create_whitelist_obj()
+ whitelist.db.get_whitelist.return_value = {
"google": {"from": "both", "what_to_ignore": "both"}
}
- mock_db.get_org_info.return_value = json.dumps(["1.2.3.4/32"])
- mock_db.get_ip_info.return_value = {"asn": {"asnorg": "Google"}}
- mock_db.get_org_info.return_value = json.dumps(["example.com"])
+ whitelist.db.get_org_info.return_value = json.dumps(["1.2.3.4/32"])
+ whitelist.db.get_ip_info.return_value = {"asn": {"asnorg": "Google"}}
+ whitelist.db.get_org_info.return_value = json.dumps(["example.com"])
mock_ioc = MagicMock()
mock_ioc.value = ioc_data["value"]
@@ -307,10 +327,9 @@ def test_check_if_whitelisted_domains_of_flow(
src_domains,
whitelisted_domains,
expected_result,
- mock_db,
):
- whitelist = ModuleFactory().create_whitelist_obj(mock_db)
- mock_db.get_whitelist.return_value = whitelisted_domains
+ whitelist = ModuleFactory().create_whitelist_obj()
+ whitelist.db.get_whitelist.return_value = whitelisted_domains
whitelist.domain_analyzer.is_domain_in_tranco_list = Mock()
whitelist.domain_analyzer.is_domain_in_tranco_list.return_value = False
@@ -330,13 +349,13 @@ def test_check_if_whitelisted_domains_of_flow(
assert result == expected_result
-def test_is_whitelisted_domain_not_found(mock_db):
+def test_is_whitelisted_domain_not_found():
"""
Test when the domain is not found in the whitelisted domains.
"""
- mock_db.get_whitelist.return_value = {}
- mock_db.is_whitelisted_tranco_domain.return_value = False
- whitelist = ModuleFactory().create_whitelist_obj(mock_db)
+ whitelist = ModuleFactory().create_whitelist_obj()
+ whitelist.db.get_whitelist.return_value = {}
+ whitelist.db.is_whitelisted_tranco_domain.return_value = False
domain = "nonwhitelisteddomain.com"
ignore_type = "flows"
assert not whitelist.domain_analyzer.is_whitelisted(
@@ -345,8 +364,10 @@ def test_is_whitelisted_domain_not_found(mock_db):
@patch("slips_files.common.parsers.config_parser.ConfigParser.whitelist_path")
-def test_read_configuration(mock_config_parser, mock_db):
- whitelist = ModuleFactory().create_whitelist_obj(mock_db)
+def test_read_configuration(mock_config_parser):
+ whitelist = ModuleFactory().create_whitelist_obj()
mock_config_parser.return_value = "config_whitelist_path"
whitelist.parser.read_configuration()
assert whitelist.parser.whitelist_path == "config_whitelist_path"
@@ -363,11 +384,9 @@ def test_read_configuration(mock_config_parser, mock_db):
("invalid_ip", "both", False), # Invalid IP
],
)
-def test_ip_analyzer_is_whitelisted(
- ip, what_to_ignore, expected_result, mock_db
-):
- whitelist = ModuleFactory().create_whitelist_obj(mock_db)
- mock_db.get_whitelist.return_value = {
+def test_ip_analyzer_is_whitelisted(ip, what_to_ignore, expected_result):
+ whitelist = ModuleFactory().create_whitelist_obj()
+ whitelist.db.get_whitelist.return_value = {
"1.2.3.4": {"from": "both", "what_to_ignore": "both"}
}
assert (
@@ -387,9 +406,9 @@ def test_ip_analyzer_is_whitelisted(
],
)
def test_is_whitelisted_attacker_domain(
- is_whitelisted_domain, is_whitelisted_org, expected_result, mock_db
+ is_whitelisted_domain, is_whitelisted_org, expected_result
):
- whitelist = ModuleFactory().create_whitelist_obj(mock_db)
+ whitelist = ModuleFactory().create_whitelist_obj()
whitelist.domain_analyzer.is_whitelisted = Mock()
whitelist.domain_analyzer.is_whitelisted.return_value = (
@@ -401,7 +420,7 @@ def test_is_whitelisted_attacker_domain(
is_whitelisted_org
)
- mock_db.is_whitelisted_tranco_domain.return_value = False
+ whitelist.db.is_whitelisted_tranco_domain.return_value = False
evidence = Mock()
evidence.attacker = Attacker(
@@ -429,9 +448,8 @@ def test_is_whitelisted_victim(
is_whitelisted_mac,
is_whitelisted_org,
expected_result,
- mock_db,
):
- whitelist = ModuleFactory().create_whitelist_obj(mock_db)
+ whitelist = ModuleFactory().create_whitelist_obj()
whitelist.domain_analyzer.is_whitelisted = Mock()
whitelist.domain_analyzer.is_whitelisted.return_value = (
is_whitelisted_domain
@@ -448,7 +466,7 @@ def test_is_whitelisted_victim(
is_whitelisted_org
)
- mock_db.is_whitelisted_tranco_domain.return_value = False
+ whitelist.db.is_whitelisted_tranco_domain.return_value = False
evidence = Mock()
evidence.attacker = Victim(
@@ -466,16 +484,19 @@ def test_is_whitelisted_victim(
("microsoft", ["microsoft.com", "microsoft.net"]),
],
)
-def test_load_org_domains(org, expected_result, mock_db):
- whitelist = ModuleFactory().create_whitelist_obj(mock_db)
- mock_db.set_org_info = MagicMock()
+def test_load_org_domains(
+ org,
+ expected_result,
+):
+ whitelist = ModuleFactory().create_whitelist_obj()
+ whitelist.db.set_org_info = MagicMock()
actual_result = whitelist.parser.load_org_domains(org)
for domain in expected_result:
assert domain in actual_result
assert len(actual_result) >= len(expected_result)
- mock_db.set_org_info.assert_called_with(
+ whitelist.db.set_org_info.assert_called_with(
org, json.dumps(actual_result), "domains"
)
@@ -488,12 +509,16 @@ def test_load_org_domains(org, expected_result, mock_db):
("malicious.com", Direction.SRC, False),
],
)
-def test_is_domain_whitelisted(domain, direction, expected_result, mock_db):
- whitelist = ModuleFactory().create_whitelist_obj(mock_db)
- mock_db.get_whitelist.return_value = {
+def test_is_domain_whitelisted(
+ domain,
+ direction,
+ expected_result,
+):
+ whitelist = ModuleFactory().create_whitelist_obj()
+ whitelist.db.get_whitelist.return_value = {
"example.com": {"from": "both", "what_to_ignore": "both"}
}
- mock_db.is_whitelisted_tranco_domain.return_value = False
+ whitelist.db.is_whitelisted_tranco_domain.return_value = False
for type_ in ("alerts", "flows"):
result = whitelist.domain_analyzer.is_whitelisted(
domain, direction, type_
@@ -543,11 +568,11 @@ def test_is_domain_whitelisted(domain, direction, expected_result, mock_db):
],
)
def test_is_ip_asn_in_org_asn(
- ip, org, org_asn_info, ip_asn_info, expected_result, mock_db
+ ip, org, org_asn_info, ip_asn_info, expected_result
):
- whitelist = ModuleFactory().create_whitelist_obj(mock_db)
- mock_db.get_org_info.return_value = org_asn_info
- mock_db.get_ip_info.return_value = ip_asn_info
+ whitelist = ModuleFactory().create_whitelist_obj()
+ whitelist.db.get_org_info.return_value = org_asn_info
+ whitelist.db.get_ip_info.return_value = ip_asn_info
assert (
whitelist.org_analyzer.is_ip_asn_in_org_asn(ip, org) == expected_result
)
@@ -616,10 +641,10 @@ def test_is_ip_asn_in_org_asn(
# ),
# ],
# )
-# def test_is_whitelisted_flow(mock_db, flow_data, whitelist_data, expected_result):
+# def test_is_whitelisted_flow(flow_data, whitelist_data, expected_result):
# """
# Test the is_whitelisted_flow method with various combinations of flow data and whitelist data.
# """
-# mock_db.get_all_whitelist.return_value = whitelist_data
-# whitelist = ModuleFactory().create_whitelist_obj(mock_db)
+# whitelist = ModuleFactory().create_whitelist_obj()
+# whitelist.db.get_all_whitelist.return_value = whitelist_data
# assert whitelist.is_whitelisted_flow(flow_data) == expected_result
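
# The recurring change throughout this patch is dropping the shared mock_db
# fixture and configuring the mock through the object's own .db attribute.
# A minimal, hypothetical equivalent of that factory pattern (MiniWhitelist
# and create_whitelist_obj() are stand-ins, not the real ModuleFactory):
from unittest.mock import Mock


class MiniWhitelist:
    def __init__(self, db):
        self.db = db

    def is_whitelisted_domain(self, domain):
        return domain in self.db.get_whitelist()


def create_whitelist_obj():
    """Stand-in for ModuleFactory().create_whitelist_obj(): injects a Mock db."""
    return MiniWhitelist(db=Mock())


def test_is_whitelisted_domain_via_obj_db():
    whitelist = create_whitelist_obj()
    # the db mock is reached through the object itself, no extra fixture needed
    whitelist.db.get_whitelist.return_value = {
        "example.com": {"from": "both", "what_to_ignore": "both"}
    }
    assert whitelist.is_whitelisted_domain("example.com")
    assert not whitelist.is_whitelisted_domain("malicious.com")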
diff --git a/zeek-scripts/__load__.zeek b/zeek-scripts/__load__.zeek
index 2241a8448..c142733a2 100644
--- a/zeek-scripts/__load__.zeek
+++ b/zeek-scripts/__load__.zeek
@@ -21,9 +21,6 @@ redef digest_salt = "Please change this value.";
# This script logs which scripts were loaded during each run.
@load misc/loaded-scripts
-# Apply the default tuning scripts for common tuning settings.
-@load tuning/defaults
-
# Estimate and log capture loss.
@load misc/capture-loss