mirror of
https://github.com/yyhuni/xingrin.git
synced 2026-01-31 19:53:11 +08:00
Compare commits
390 Commits
v1.0.30
...
001-websoc
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
b859fc9062 | ||
|
|
49b5fbef28 | ||
|
|
11112a68f6 | ||
|
|
9049b096ba | ||
|
|
ca6c0eb082 | ||
|
|
64bcd9a6f5 | ||
|
|
443e2172e4 | ||
|
|
c6dcfb0a5b | ||
|
|
25ae325c69 | ||
|
|
cab83d89cf | ||
|
|
0f8fff2dc4 | ||
|
|
6e48b97dc2 | ||
|
|
ed757d6e14 | ||
|
|
2aa1afbabf | ||
|
|
35ac64db57 | ||
|
|
b4bfab92e3 | ||
|
|
72210c42d0 | ||
|
|
91aaf7997f | ||
|
|
32e3179d58 | ||
|
|
487f7c84b5 | ||
|
|
b2cc83f569 | ||
|
|
f854cf09be | ||
|
|
7e1c2c187a | ||
|
|
4abb259ca0 | ||
|
|
bbef6af000 | ||
|
|
ba0864ed16 | ||
|
|
f54827829a | ||
|
|
170021130c | ||
|
|
b540f69152 | ||
|
|
d7f1e04855 | ||
|
|
68ad18e6da | ||
|
|
a7542d4a34 | ||
|
|
6f02d9f3c5 | ||
|
|
794846ca7a | ||
|
|
5eea7b2621 | ||
|
|
069527a7f1 | ||
|
|
e542633ad3 | ||
|
|
e8a9606d3b | ||
|
|
dc2e1e027d | ||
|
|
b1847faa3a | ||
|
|
e699842492 | ||
|
|
08a4807bef | ||
|
|
191ff9837b | ||
|
|
679dff9037 | ||
|
|
ce4330b628 | ||
|
|
4ce6b148f8 | ||
|
|
a89f775ee9 | ||
|
|
e3003f33f9 | ||
|
|
3760684b64 | ||
|
|
bfd7e11d09 | ||
|
|
f758feb0d0 | ||
|
|
8798eed337 | ||
|
|
bd1e25cfd5 | ||
|
|
d775055572 | ||
|
|
00dfad60b8 | ||
|
|
a5c48fe4d4 | ||
|
|
85c880731c | ||
|
|
c6b6507412 | ||
|
|
af457dc44c | ||
|
|
9e01a6aa5e | ||
|
|
ed80772e6f | ||
|
|
a22af21dcb | ||
|
|
8de950a7a5 | ||
|
|
9db84221e9 | ||
|
|
0728f3c01d | ||
|
|
4aa7b3d68a | ||
|
|
3946a53337 | ||
|
|
c94fe1ec4b | ||
|
|
6dea525527 | ||
|
|
5b0416972a | ||
|
|
5345a34cbd | ||
|
|
3ca56abc3e | ||
|
|
9703add22d | ||
|
|
f5a489e2d6 | ||
|
|
d75a3f6882 | ||
|
|
59e48e5b15 | ||
|
|
2d2ec93626 | ||
|
|
ced9f811f4 | ||
|
|
aa99b26f50 | ||
|
|
8342f196db | ||
|
|
1bd2a6ed88 | ||
|
|
033ff89aee | ||
|
|
4284a0cd9a | ||
|
|
943a4cb960 | ||
|
|
eb2d853b76 | ||
|
|
1184c18b74 | ||
|
|
8a6f1b6f24 | ||
|
|
255d505aba | ||
|
|
d06a9bab1f | ||
|
|
6d5c776bf7 | ||
|
|
bf058dd67b | ||
|
|
0532d7c8b8 | ||
|
|
2ee9b5ffa2 | ||
|
|
648a1888d4 | ||
|
|
2508268a45 | ||
|
|
c60383940c | ||
|
|
47298c294a | ||
|
|
eba394e14e | ||
|
|
592a1958c4 | ||
|
|
38e2856c08 | ||
|
|
f5ad8e68e9 | ||
|
|
d5f91a236c | ||
|
|
24ae8b5aeb | ||
|
|
86f43f94a0 | ||
|
|
53ba03d1e5 | ||
|
|
89c44ebd05 | ||
|
|
e0e3419edb | ||
|
|
52ee4684a7 | ||
|
|
ce8cebf11d | ||
|
|
ec006d8f54 | ||
|
|
48976a570f | ||
|
|
5da7229873 | ||
|
|
8bb737a9fa | ||
|
|
2d018d33f3 | ||
|
|
0c07cc8497 | ||
|
|
225b039985 | ||
|
|
d1624627bc | ||
|
|
7bb15e4ae4 | ||
|
|
8e8cc29669 | ||
|
|
d6d5338acb | ||
|
|
c521bdb511 | ||
|
|
abf2d95f6f | ||
|
|
ab58cf0d85 | ||
|
|
fb0111adf2 | ||
|
|
161ee9a2b1 | ||
|
|
0cf75585d5 | ||
|
|
1d8d5f51d9 | ||
|
|
3f8de07c8c | ||
|
|
cd5c2b9f11 | ||
|
|
54786c22dd | ||
|
|
d468f975ab | ||
|
|
a85a12b8ad | ||
|
|
a8b0d97b7b | ||
|
|
b8504921c2 | ||
|
|
ecfc1822fb | ||
|
|
81633642e6 | ||
|
|
d1ec9b7f27 | ||
|
|
2a3d9b4446 | ||
|
|
9b63203b5a | ||
|
|
6ff86e14ec | ||
|
|
4c1282e9bb | ||
|
|
ba3a9b709d | ||
|
|
283b28b46a | ||
|
|
1269e5a314 | ||
|
|
802e967906 | ||
|
|
e446326416 | ||
|
|
e0abb3ce7b | ||
|
|
d418baaf79 | ||
|
|
f8da408580 | ||
|
|
7cd4354d8f | ||
|
|
6bf35a760f | ||
|
|
be9ecadffb | ||
|
|
adb53c9f85 | ||
|
|
7b7bbed634 | ||
|
|
8dd3f0536e | ||
|
|
8a8062a12d | ||
|
|
55908a2da5 | ||
|
|
22a7d4f091 | ||
|
|
f287f18134 | ||
|
|
de27230b7a | ||
|
|
15a6295189 | ||
|
|
674acdac66 | ||
|
|
c59152bedf | ||
|
|
b4037202dc | ||
|
|
4b4f9862bf | ||
|
|
1c42e4978f | ||
|
|
57bab63997 | ||
|
|
b1f0f18ac0 | ||
|
|
ccee5471b8 | ||
|
|
0ccd362535 | ||
|
|
7f2af7f7e2 | ||
|
|
4bd0f9e8c1 | ||
|
|
68cc996e3b | ||
|
|
f1e79d638e | ||
|
|
d484133e4c | ||
|
|
fc977ae029 | ||
|
|
f328474404 | ||
|
|
68e726a066 | ||
|
|
77a6f45909 | ||
|
|
49d1f1f1bb | ||
|
|
db8ecb1644 | ||
|
|
18cc016268 | ||
|
|
23bc463283 | ||
|
|
7b903b91b2 | ||
|
|
b3136d51b9 | ||
|
|
08372588a4 | ||
|
|
236c828041 | ||
|
|
fb13bb74d8 | ||
|
|
f076c682b6 | ||
|
|
9eda2caceb | ||
|
|
b1c9e202dd | ||
|
|
918669bc29 | ||
|
|
fd70b0544d | ||
|
|
0f2df7a5f3 | ||
|
|
857ab737b5 | ||
|
|
ee2d99edda | ||
|
|
db6ce16aca | ||
|
|
ab800eca06 | ||
|
|
e8e5572339 | ||
|
|
d48d4bbcad | ||
|
|
d1cca4c083 | ||
|
|
df0810c863 | ||
|
|
d33e54c440 | ||
|
|
35a306fe8b | ||
|
|
724df82931 | ||
|
|
8dfffdf802 | ||
|
|
b8cb85ce0b | ||
|
|
da96d437a4 | ||
|
|
feaf8062e5 | ||
|
|
4bab76f233 | ||
|
|
09416b4615 | ||
|
|
bc1c5f6b0e | ||
|
|
2f2742e6fe | ||
|
|
be3c346a74 | ||
|
|
0c7a6fff12 | ||
|
|
3b4f0e3147 | ||
|
|
51212a2a0c | ||
|
|
58533bbaf6 | ||
|
|
6ccca1602d | ||
|
|
6389b0f672 | ||
|
|
d7599b8599 | ||
|
|
8eff298293 | ||
|
|
3634101c5b | ||
|
|
163973a7df | ||
|
|
80ffecba3e | ||
|
|
3c21ac940c | ||
|
|
5c9f484d70 | ||
|
|
7567f6c25b | ||
|
|
0599a0b298 | ||
|
|
f7557fe90c | ||
|
|
13571b9772 | ||
|
|
8ee76eef69 | ||
|
|
2a31e29aa2 | ||
|
|
81abc59961 | ||
|
|
ffbfec6dd5 | ||
|
|
a0091636a8 | ||
|
|
69490ab396 | ||
|
|
7306964abf | ||
|
|
cb6b0259e3 | ||
|
|
e1b4618e58 | ||
|
|
556dcf5f62 | ||
|
|
0628eef025 | ||
|
|
38ed8bc642 | ||
|
|
2f4d6a2168 | ||
|
|
c25cb9e06b | ||
|
|
b14ab71c7f | ||
|
|
8b5060e2d3 | ||
|
|
3c9335febf | ||
|
|
1b95e4f2c3 | ||
|
|
d20a600afc | ||
|
|
c29b11fd37 | ||
|
|
6caf707072 | ||
|
|
2627b1fc40 | ||
|
|
ec6712b9b4 | ||
|
|
9d5e4d5408 | ||
|
|
c5d5b24c8f | ||
|
|
671cb56b62 | ||
|
|
51025f69a8 | ||
|
|
b2403b29c4 | ||
|
|
18ef01a47b | ||
|
|
0bf8108fb3 | ||
|
|
837ad19131 | ||
|
|
d7de9a7129 | ||
|
|
22b4e51b42 | ||
|
|
d03628ee45 | ||
|
|
0baabe0753 | ||
|
|
e1191d7abf | ||
|
|
82a2e9a0e7 | ||
|
|
1ccd1bc338 | ||
|
|
b4d42f5372 | ||
|
|
2c66450756 | ||
|
|
119d82dc89 | ||
|
|
fba7f7c508 | ||
|
|
99d384ce29 | ||
|
|
07f36718ab | ||
|
|
7e3f69c208 | ||
|
|
5f90473c3c | ||
|
|
e2a815b96a | ||
|
|
f86a1a9d47 | ||
|
|
d5945679aa | ||
|
|
51e2c51748 | ||
|
|
e2cbf98dda | ||
|
|
cd72bdf7c3 | ||
|
|
35abcf7e39 | ||
|
|
09f2d343a4 | ||
|
|
54d1f86bde | ||
|
|
a3997c9676 | ||
|
|
c90a55f85e | ||
|
|
2eab88b452 | ||
|
|
1baf0eb5e1 | ||
|
|
b61e73f7be | ||
|
|
e896734dfc | ||
|
|
cd83f52f35 | ||
|
|
3e29554c36 | ||
|
|
18e02b536e | ||
|
|
4c1c6f70ab | ||
|
|
a72e7675f5 | ||
|
|
93c2163764 | ||
|
|
de72c91561 | ||
|
|
3e6d060b75 | ||
|
|
766f045904 | ||
|
|
8acfe1cc33 | ||
|
|
7aec3eabb2 | ||
|
|
b1f11c36a4 | ||
|
|
d97fb5245a | ||
|
|
ddf9a1f5a4 | ||
|
|
47f9f96a4b | ||
|
|
6f43e73162 | ||
|
|
9b7d496f3e | ||
|
|
6390849d52 | ||
|
|
7a6d2054f6 | ||
|
|
73ebaab232 | ||
|
|
11899b29c2 | ||
|
|
877d2a56d1 | ||
|
|
dc1e94f038 | ||
|
|
9c3833d13d | ||
|
|
92f3b722ef | ||
|
|
9ef503c666 | ||
|
|
c3a43e94fa | ||
|
|
d6d94355fb | ||
|
|
bc638eabf4 | ||
|
|
5acaada7ab | ||
|
|
aaad3f29cf | ||
|
|
f13eb2d9b2 | ||
|
|
f1b3b60382 | ||
|
|
e249056289 | ||
|
|
dba195b83a | ||
|
|
9b494e6c67 | ||
|
|
2841157747 | ||
|
|
f6c1fef1a6 | ||
|
|
6ec0adf9dd | ||
|
|
22c6661567 | ||
|
|
d9ed004e35 | ||
|
|
a0d9d1f29d | ||
|
|
8aa9ed2a97 | ||
|
|
8baf29d1c3 | ||
|
|
248e48353a | ||
|
|
0d210be50b | ||
|
|
f7c0d0b215 | ||
|
|
d83428f27b | ||
|
|
45a09b8173 | ||
|
|
11dfdee6fd | ||
|
|
e53a884d13 | ||
|
|
3b318c89e3 | ||
|
|
e564bc116a | ||
|
|
410c543066 | ||
|
|
66da140801 | ||
|
|
e60aac3622 | ||
|
|
14aaa71cb1 | ||
|
|
0309dba510 | ||
|
|
967ff8a69f | ||
|
|
9ac23d50b6 | ||
|
|
265525c61e | ||
|
|
1b9d05ce62 | ||
|
|
737980b30f | ||
|
|
494ee81478 | ||
|
|
452686b282 | ||
|
|
c95c68f4e9 | ||
|
|
b02f38606d | ||
|
|
b543f3d2b7 | ||
|
|
a18fb46906 | ||
|
|
bb74f61ea2 | ||
|
|
654f3664f8 | ||
|
|
30defe08d2 | ||
|
|
41266bd931 | ||
|
|
9eebd0a47c | ||
|
|
e7f4d25e58 | ||
|
|
56cc810783 | ||
|
|
efe20bbf69 | ||
|
|
d88cf19a68 | ||
|
|
8e74f842f0 | ||
|
|
5e9773a183 | ||
|
|
a952ef5b6b | ||
|
|
815c409a9e | ||
|
|
7ca85b8d7d | ||
|
|
73291e6c4c | ||
|
|
dcafe03ea2 | ||
|
|
0390e05397 | ||
|
|
088b69b61a | ||
|
|
de34567b53 | ||
|
|
bf40532ce4 | ||
|
|
252759c822 | ||
|
|
2d43204639 | ||
|
|
7715d0cf01 | ||
|
|
6d0d87d8ef | ||
|
|
25074f9191 | ||
|
|
b06f33db5b | ||
|
|
a116755087 | ||
|
|
cddc4c244d | ||
|
|
9e7089a8c2 |
45
.github/workflows/check-generated-files.yml
vendored
Normal file
45
.github/workflows/check-generated-files.yml
vendored
Normal file
@@ -0,0 +1,45 @@
|
||||
name: Check Generated Files
|
||||
|
||||
on:
|
||||
workflow_call: # 只在被其他 workflow 调用时运行
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
check:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: 1.21
|
||||
|
||||
- name: Generate files for all workflows
|
||||
working-directory: worker
|
||||
run: make generate
|
||||
|
||||
- name: Check for differences
|
||||
run: |
|
||||
if ! git diff --exit-code; then
|
||||
echo "❌ Generated files are out of date!"
|
||||
echo "Please run: cd worker && make generate"
|
||||
echo ""
|
||||
echo "Changed files:"
|
||||
git status --porcelain
|
||||
echo ""
|
||||
echo "Diff:"
|
||||
git diff
|
||||
exit 1
|
||||
fi
|
||||
echo "✅ Generated files are up to date"
|
||||
|
||||
- name: Run metadata consistency tests
|
||||
working-directory: worker
|
||||
run: make test-metadata
|
||||
|
||||
- name: Run all tests
|
||||
working-directory: worker
|
||||
run: make test
|
||||
138
.github/workflows/docker-build.yml
vendored
138
.github/workflows/docker-build.yml
vendored
@@ -1,138 +0,0 @@
|
||||
name: Build and Push Docker Images
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- 'v*' # 只在推送 v 开头的 tag 时触发(如 v1.0.0)
|
||||
workflow_dispatch: # 手动触发
|
||||
|
||||
# 并发控制:同一分支只保留最新的构建,取消之前正在运行的
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
REGISTRY: docker.io
|
||||
IMAGE_PREFIX: yyhuni
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- image: xingrin-server
|
||||
dockerfile: docker/server/Dockerfile
|
||||
context: .
|
||||
platforms: linux/amd64,linux/arm64
|
||||
- image: xingrin-frontend
|
||||
dockerfile: docker/frontend/Dockerfile
|
||||
context: .
|
||||
platforms: linux/amd64 # ARM64 构建时 Next.js 在 QEMU 下会崩溃
|
||||
- image: xingrin-worker
|
||||
dockerfile: docker/worker/Dockerfile
|
||||
context: .
|
||||
platforms: linux/amd64,linux/arm64
|
||||
- image: xingrin-nginx
|
||||
dockerfile: docker/nginx/Dockerfile
|
||||
context: .
|
||||
platforms: linux/amd64,linux/arm64
|
||||
- image: xingrin-agent
|
||||
dockerfile: docker/agent/Dockerfile
|
||||
context: .
|
||||
platforms: linux/amd64,linux/arm64
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Free disk space (for large builds like worker)
|
||||
run: |
|
||||
echo "=== Before cleanup ==="
|
||||
df -h
|
||||
sudo rm -rf /usr/share/dotnet
|
||||
sudo rm -rf /usr/local/lib/android
|
||||
sudo rm -rf /opt/ghc
|
||||
sudo rm -rf /opt/hostedtoolcache/CodeQL
|
||||
sudo docker image prune -af
|
||||
echo "=== After cleanup ==="
|
||||
df -h
|
||||
|
||||
- name: Generate SSL certificates for nginx build
|
||||
if: matrix.image == 'xingrin-nginx'
|
||||
run: |
|
||||
mkdir -p docker/nginx/ssl
|
||||
openssl req -x509 -nodes -days 365 -newkey rsa:2048 \
|
||||
-keyout docker/nginx/ssl/privkey.pem \
|
||||
-out docker/nginx/ssl/fullchain.pem \
|
||||
-subj "/CN=localhost"
|
||||
echo "SSL certificates generated for CI build"
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Get version from git tag
|
||||
id: version
|
||||
run: |
|
||||
if [[ $GITHUB_REF == refs/tags/* ]]; then
|
||||
echo "VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
|
||||
echo "IS_RELEASE=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "VERSION=dev-$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT
|
||||
echo "IS_RELEASE=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: ${{ matrix.context }}
|
||||
file: ${{ matrix.dockerfile }}
|
||||
platforms: ${{ matrix.platforms }}
|
||||
push: true
|
||||
tags: |
|
||||
${{ env.IMAGE_PREFIX }}/${{ matrix.image }}:${{ steps.version.outputs.VERSION }}
|
||||
${{ steps.version.outputs.IS_RELEASE == 'true' && format('{0}/{1}:latest', env.IMAGE_PREFIX, matrix.image) || '' }}
|
||||
build-args: |
|
||||
IMAGE_TAG=${{ steps.version.outputs.VERSION }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
provenance: false
|
||||
sbom: false
|
||||
|
||||
# 所有镜像构建成功后,更新 VERSION 文件
|
||||
update-version:
|
||||
runs-on: ubuntu-latest
|
||||
needs: build
|
||||
if: startsWith(github.ref, 'refs/tags/v')
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: main
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Update VERSION file
|
||||
run: |
|
||||
VERSION="${GITHUB_REF#refs/tags/}"
|
||||
echo "$VERSION" > VERSION
|
||||
echo "Updated VERSION to $VERSION"
|
||||
|
||||
- name: Commit and push
|
||||
run: |
|
||||
git config user.name "github-actions[bot]"
|
||||
git config user.email "github-actions[bot]@users.noreply.github.com"
|
||||
git add VERSION
|
||||
git diff --staged --quiet || git commit -m "chore: bump version to ${GITHUB_REF#refs/tags/}"
|
||||
git push
|
||||
156
.gitignore
vendored
156
.gitignore
vendored
@@ -1,133 +1,51 @@
|
||||
# ============================
|
||||
# 操作系统相关文件
|
||||
# ============================
|
||||
.DS_Store
|
||||
.DS_Store?
|
||||
._*
|
||||
.Spotlight-V100
|
||||
.Trashes
|
||||
ehthumbs.db
|
||||
Thumbs.db
|
||||
# Go
|
||||
*.exe
|
||||
*.exe~
|
||||
*.dll
|
||||
*.so
|
||||
*.dylib
|
||||
*.test
|
||||
*.out
|
||||
vendor/
|
||||
go.work
|
||||
|
||||
# ============================
|
||||
# 前端 (Next.js/Node.js) 相关
|
||||
# ============================
|
||||
# 依赖目录
|
||||
front-back/node_modules/
|
||||
front-back/.pnpm-store/
|
||||
# Build artifacts
|
||||
dist/
|
||||
build/
|
||||
bin/
|
||||
|
||||
# Next.js 构建产物
|
||||
front-back/.next/
|
||||
front-back/out/
|
||||
front-back/dist/
|
||||
|
||||
# 环境变量文件
|
||||
front-back/.env
|
||||
front-back/.env.local
|
||||
front-back/.env.development.local
|
||||
front-back/.env.test.local
|
||||
front-back/.env.production.local
|
||||
|
||||
# 运行时和缓存
|
||||
front-back/.turbo/
|
||||
front-back/.swc/
|
||||
front-back/.eslintcache
|
||||
front-back/.tsbuildinfo
|
||||
|
||||
# ============================
|
||||
# 后端 (Python/Django) 相关
|
||||
# ============================
|
||||
# Python 虚拟环境
|
||||
.venv/
|
||||
venv/
|
||||
env/
|
||||
ENV/
|
||||
|
||||
# Python 编译文件
|
||||
*.pyc
|
||||
*.pyo
|
||||
*.pyd
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# Django 相关
|
||||
backend/db.sqlite3
|
||||
backend/db.sqlite3-journal
|
||||
backend/media/
|
||||
backend/staticfiles/
|
||||
backend/.env
|
||||
backend/.env.local
|
||||
|
||||
# Python 测试和覆盖率
|
||||
.pytest_cache/
|
||||
.coverage
|
||||
htmlcov/
|
||||
*.cover
|
||||
|
||||
# ============================
|
||||
# 后端 (Go) 相关
|
||||
# ============================
|
||||
# 编译产物
|
||||
backend/bin/
|
||||
backend/dist/
|
||||
backend/*.exe
|
||||
backend/*.exe~
|
||||
backend/*.dll
|
||||
backend/*.so
|
||||
backend/*.dylib
|
||||
|
||||
# 测试相关
|
||||
backend/*.test
|
||||
backend/*.out
|
||||
backend/*.prof
|
||||
|
||||
# Go workspace 文件
|
||||
backend/go.work
|
||||
backend/go.work.sum
|
||||
|
||||
# Go 依赖管理
|
||||
backend/vendor/
|
||||
|
||||
# ============================
|
||||
# IDE 和编辑器相关
|
||||
# ============================
|
||||
# IDE
|
||||
.vscode/
|
||||
.idea/
|
||||
.cursor/
|
||||
.claude/
|
||||
.playwright-mcp/
|
||||
*.swp
|
||||
*.swo
|
||||
*~
|
||||
.DS_Store
|
||||
|
||||
# ============================
|
||||
# Docker 相关
|
||||
# ============================
|
||||
docker/.env
|
||||
docker/.env.local
|
||||
|
||||
# SSL 证书和私钥(不应提交)
|
||||
docker/nginx/ssl/*.pem
|
||||
docker/nginx/ssl/*.key
|
||||
docker/nginx/ssl/*.crt
|
||||
|
||||
# ============================
|
||||
# 日志文件和扫描结果
|
||||
# ============================
|
||||
# Environment
|
||||
.env
|
||||
.env.local
|
||||
.env.*.local
|
||||
*.log
|
||||
logs/
|
||||
results/
|
||||
.venv/
|
||||
|
||||
# 开发脚本运行时文件(进程 ID 和启动日志)
|
||||
backend/scripts/dev/.pids/
|
||||
# Testing
|
||||
coverage.txt
|
||||
*.coverprofile
|
||||
.hypothesis/
|
||||
|
||||
# ============================
|
||||
# 临时文件
|
||||
# ============================
|
||||
# Temporary files
|
||||
*.tmp
|
||||
tmp/
|
||||
temp/
|
||||
.cache/
|
||||
|
||||
HGETALL
|
||||
KEYS
|
||||
.kiro/
|
||||
.claude/
|
||||
.specify/
|
||||
|
||||
# AI Assistant directories
|
||||
codex/
|
||||
openspec/
|
||||
specs/
|
||||
AGENTS.md
|
||||
WARP.md
|
||||
|
||||
4
.vscode/settings.json
vendored
Normal file
4
.vscode/settings.json
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
{
|
||||
"typescript.autoClosingTags": false,
|
||||
"kiroAgent.configureMCP": "Enabled"
|
||||
}
|
||||
276
README.md
276
README.md
@@ -1,276 +0,0 @@
|
||||
<h1 align="center">XingRin - 星环</h1>
|
||||
|
||||
<p align="center">
|
||||
<b>🛡️ 攻击面管理平台 (ASM) | 自动化资产发现与漏洞扫描系统</b>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="https://github.com/yyhuni/xingrin/stargazers"><img src="https://img.shields.io/github/stars/yyhuni/xingrin?style=flat-square&logo=github" alt="GitHub stars"></a>
|
||||
<a href="https://github.com/yyhuni/xingrin/network/members"><img src="https://img.shields.io/github/forks/yyhuni/xingrin?style=flat-square&logo=github" alt="GitHub forks"></a>
|
||||
<a href="https://github.com/yyhuni/xingrin/issues"><img src="https://img.shields.io/github/issues/yyhuni/xingrin?style=flat-square&logo=github" alt="GitHub issues"></a>
|
||||
<a href="https://github.com/yyhuni/xingrin/blob/main/LICENSE"><img src="https://img.shields.io/badge/license-PolyForm%20NC-blue?style=flat-square" alt="License"></a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="#-功能特性">功能特性</a> •
|
||||
<a href="#-快速开始">快速开始</a> •
|
||||
<a href="#-文档">文档</a> •
|
||||
<a href="#-技术栈">技术栈</a> •
|
||||
<a href="#-反馈与贡献">反馈与贡献</a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<sub>🔍 关键词: ASM | 攻击面管理 | 漏洞扫描 | 资产发现 | Bug Bounty | 渗透测试 | Nuclei | 子域名枚举 | EASM</sub>
|
||||
</p>
|
||||
|
||||
---
|
||||
|
||||
<p align="center">
|
||||
<b>🌗 明暗模式切换</b>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<img src="docs/screenshots/light.png" alt="Light Mode" width="49%">
|
||||
<img src="docs/screenshots/dark.png" alt="Dark Mode" width="49%">
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<b>🎨 多种 UI 主题</b>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<img src="docs/screenshots/bubblegum.png" alt="Bubblegum" width="32%">
|
||||
<img src="docs/screenshots/cosmic-night.png" alt="Cosmic Night" width="32%">
|
||||
<img src="docs/screenshots/quantum-rose.png" alt="Quantum Rose" width="32%">
|
||||
</p>
|
||||
|
||||
## 📚 文档
|
||||
|
||||
- [📖 技术文档](./docs/README.md) - 技术文档导航(🚧 持续完善中)
|
||||
- [🚀 快速开始](./docs/quick-start.md) - 一键安装和部署指南
|
||||
- [🔄 版本管理](./docs/version-management.md) - Git Tag 驱动的自动化版本管理系统
|
||||
- [📦 Nuclei 模板架构](./docs/nuclei-template-architecture.md) - 模板仓库的存储与同步
|
||||
- [📖 字典文件架构](./docs/wordlist-architecture.md) - 字典文件的存储与同步
|
||||
- [🔍 扫描流程架构](./docs/scan-flow-architecture.md) - 完整扫描流程与工具编排
|
||||
|
||||
|
||||
---
|
||||
|
||||
## ✨ 功能特性
|
||||
|
||||
### 🎯 目标与资产管理
|
||||
- **组织管理** - 多层级目标组织,灵活分组
|
||||
- **目标管理** - 支持域名、IP目标类型
|
||||
- **资产发现** - 子域名、网站、端点、目录自动发现
|
||||
- **资产快照** - 扫描结果快照对比,追踪资产变化
|
||||
|
||||
### 🔍 漏洞扫描
|
||||
- **多引擎支持** - 集成 Nuclei 等主流扫描引擎
|
||||
- **自定义流程** - YAML 配置扫描流程,灵活编排
|
||||
- **定时扫描** - Cron 表达式配置,自动化周期扫描
|
||||
|
||||
#### 扫描流程架构
|
||||
|
||||
完整的扫描流程包括:子域名发现、端口扫描、站点发现、URL 收集、目录扫描、漏洞扫描等阶段
|
||||
|
||||
```mermaid
|
||||
flowchart LR
|
||||
START["开始扫描"]
|
||||
|
||||
subgraph STAGE1["阶段 1: 资产发现"]
|
||||
direction TB
|
||||
SUB["子域名发现<br/>subfinder, amass, puredns"]
|
||||
PORT["端口扫描<br/>naabu"]
|
||||
SITE["站点识别<br/>httpx"]
|
||||
SUB --> PORT --> SITE
|
||||
end
|
||||
|
||||
subgraph STAGE2["阶段 2: 深度分析"]
|
||||
direction TB
|
||||
URL["URL 收集<br/>waymore, katana"]
|
||||
DIR["目录扫描<br/>ffuf"]
|
||||
end
|
||||
|
||||
subgraph STAGE3["阶段 3: 漏洞检测"]
|
||||
VULN["漏洞扫描<br/>nuclei, dalfox"]
|
||||
end
|
||||
|
||||
FINISH["扫描完成"]
|
||||
|
||||
START --> STAGE1
|
||||
SITE --> STAGE2
|
||||
STAGE2 --> STAGE3
|
||||
STAGE3 --> FINISH
|
||||
|
||||
style START fill:#34495e,stroke:#2c3e50,stroke-width:2px,color:#fff
|
||||
style FINISH fill:#27ae60,stroke:#229954,stroke-width:2px,color:#fff
|
||||
style STAGE1 fill:#3498db,stroke:#2980b9,stroke-width:2px,color:#fff
|
||||
style STAGE2 fill:#9b59b6,stroke:#8e44ad,stroke-width:2px,color:#fff
|
||||
style STAGE3 fill:#e67e22,stroke:#d35400,stroke-width:2px,color:#fff
|
||||
style SUB fill:#5dade2,stroke:#3498db,stroke-width:1px,color:#fff
|
||||
style PORT fill:#5dade2,stroke:#3498db,stroke-width:1px,color:#fff
|
||||
style SITE fill:#5dade2,stroke:#3498db,stroke-width:1px,color:#fff
|
||||
style URL fill:#bb8fce,stroke:#9b59b6,stroke-width:1px,color:#fff
|
||||
style DIR fill:#bb8fce,stroke:#9b59b6,stroke-width:1px,color:#fff
|
||||
style VULN fill:#f0b27a,stroke:#e67e22,stroke-width:1px,color:#fff
|
||||
```
|
||||
|
||||
详细说明请查看 [扫描流程架构文档](./docs/scan-flow-architecture.md)
|
||||
|
||||
### 🖥️ 分布式架构
|
||||
- **多节点扫描** - 支持部署多个 Worker 节点,横向扩展扫描能力
|
||||
- **本地节点** - 零配置,安装即自动注册本地 Docker Worker
|
||||
- **远程节点** - SSH 一键部署远程 VPS 作为扫描节点
|
||||
- **负载感知调度** - 实时感知节点负载,自动分发任务到最优节点
|
||||
- **节点监控** - 实时心跳检测,CPU/内存/磁盘状态监控
|
||||
- **断线重连** - 节点离线自动检测,恢复后自动重新接入
|
||||
|
||||
```mermaid
|
||||
flowchart TB
|
||||
subgraph MASTER["主服务器 (Master Server)"]
|
||||
direction TB
|
||||
|
||||
REDIS["Redis 负载缓存"]
|
||||
|
||||
subgraph SCHEDULER["任务调度器 (Task Distributor)"]
|
||||
direction TB
|
||||
SUBMIT["接收扫描任务"]
|
||||
SELECT["负载感知选择"]
|
||||
DISPATCH["智能分发"]
|
||||
|
||||
SUBMIT --> SELECT
|
||||
SELECT --> DISPATCH
|
||||
end
|
||||
|
||||
REDIS -.负载数据.-> SELECT
|
||||
end
|
||||
|
||||
subgraph WORKERS["Worker 节点集群"]
|
||||
direction TB
|
||||
|
||||
W1["Worker 1 (本地)<br/>CPU: 45% | MEM: 60%"]
|
||||
W2["Worker 2 (远程)<br/>CPU: 30% | MEM: 40%"]
|
||||
W3["Worker N (远程)<br/>CPU: 90% | MEM: 85%"]
|
||||
end
|
||||
|
||||
DISPATCH -->|任务分发| W1
|
||||
DISPATCH -->|任务分发| W2
|
||||
DISPATCH -->|高负载跳过| W3
|
||||
|
||||
W1 -.心跳上报.-> REDIS
|
||||
W2 -.心跳上报.-> REDIS
|
||||
W3 -.心跳上报.-> REDIS
|
||||
```
|
||||
|
||||
### 📊 可视化界面
|
||||
- **数据统计** - 资产/漏洞统计仪表盘
|
||||
- **实时通知** - WebSocket 消息推送
|
||||
- **暗色主题** - 支持明暗主题切换
|
||||
|
||||
---
|
||||
|
||||
## 🛠️ 技术栈
|
||||
|
||||
- **前端**: Next.js + React + TailwindCSS
|
||||
- **后端**: Django + Django REST Framework
|
||||
- **数据库**: PostgreSQL + Redis
|
||||
- **部署**: Docker + Nginx
|
||||
|
||||
## 📦 快速开始
|
||||
|
||||
### 环境要求
|
||||
|
||||
- **操作系统**: Ubuntu 20.04+ / Debian 11+ (推荐)
|
||||
- **硬件**: 2核 4G 内存起步,20GB+ 磁盘空间
|
||||
|
||||
### 一键安装
|
||||
|
||||
```bash
|
||||
# 克隆项目
|
||||
git clone https://github.com/yyhuni/xingrin.git
|
||||
cd xingrin
|
||||
|
||||
# 安装并启动(生产模式)
|
||||
sudo ./install.sh
|
||||
```
|
||||
|
||||
### 访问服务
|
||||
|
||||
- **Web 界面**: `https://localhost`
|
||||
|
||||
### 常用命令
|
||||
|
||||
```bash
|
||||
# 启动服务
|
||||
sudo ./start.sh
|
||||
|
||||
# 停止服务
|
||||
sudo ./stop.sh
|
||||
|
||||
# 重启服务
|
||||
sudo ./restart.sh
|
||||
|
||||
# 卸载
|
||||
sudo ./uninstall.sh
|
||||
|
||||
# 更新
|
||||
sudo ./update.sh
|
||||
```
|
||||
|
||||
## 🤝 反馈与贡献
|
||||
|
||||
- 🐛 **如果发现 Bug** 可以点击右边链接进行提交 [Issue](https://github.com/yyhuni/xingrin/issues)
|
||||
- 💡 **有新想法,比如UI设计,功能设计等** 欢迎点击右边链接进行提交建议 [Issue](https://github.com/yyhuni/xingrin/issues)
|
||||
- 🔧 **想参与开发?** 关注我公众号与我个人联系
|
||||
|
||||
## 📧 联系
|
||||
- 目前版本就我个人使用,可能会有很多边界问题
|
||||
- 如有问题,建议,其他,优先提交[Issue](https://github.com/yyhuni/xingrin/issues),也可以直接给我的公众号发消息,我都会回复的
|
||||
|
||||
- 微信公众号: **洋洋的小黑屋**
|
||||
|
||||
<img src="docs/wechat-qrcode.png" alt="微信公众号" width="200">
|
||||
|
||||
|
||||
## ⚠️ 免责声明
|
||||
|
||||
**重要:请在使用前仔细阅读**
|
||||
|
||||
1. 本工具仅供**授权的安全测试**和**安全研究**使用
|
||||
2. 使用者必须确保已获得目标系统的**合法授权**
|
||||
3. **严禁**将本工具用于未经授权的渗透测试或攻击行为
|
||||
4. 未经授权扫描他人系统属于**违法行为**,可能面临法律责任
|
||||
5. 开发者**不对任何滥用行为负责**
|
||||
|
||||
使用本工具即表示您同意:
|
||||
- 仅在合法授权范围内使用
|
||||
- 遵守所在地区的法律法规
|
||||
- 承担因滥用产生的一切后果
|
||||
|
||||
## 🌟 Star History
|
||||
|
||||
如果这个项目对你有帮助,请给一个 ⭐ Star 支持一下!
|
||||
|
||||
[](https://star-history.com/#yyhuni/xingrin&Date)
|
||||
|
||||
## 📄 许可证
|
||||
|
||||
本项目采用 [GNU General Public License v3.0](LICENSE) 许可证。
|
||||
|
||||
### 允许的用途
|
||||
|
||||
- ✅ 个人学习和研究
|
||||
- ✅ 商业和非商业使用
|
||||
- ✅ 修改和分发
|
||||
- ✅ 专利使用
|
||||
- ✅ 私人使用
|
||||
|
||||
### 义务和限制
|
||||
|
||||
- 📋 **开源义务**:分发时必须提供源代码
|
||||
- 📋 **相同许可**:衍生作品必须使用相同许可证
|
||||
- 📋 **版权声明**:必须保留原始版权和许可证声明
|
||||
- ❌ **责任免除**:不提供任何担保
|
||||
- ❌ 未经授权的渗透测试
|
||||
- ❌ 任何违法行为
|
||||
|
||||
32
agent/go.mod
Normal file
32
agent/go.mod
Normal file
@@ -0,0 +1,32 @@
|
||||
module github.com/yyhuni/orbit/agent
|
||||
|
||||
go 1.24.5
|
||||
|
||||
require (
|
||||
github.com/docker/docker v28.5.2+incompatible
|
||||
github.com/gorilla/websocket v1.5.3
|
||||
github.com/shirou/gopsutil/v3 v3.24.5
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/Microsoft/go-winio v0.6.2 // indirect
|
||||
github.com/cespare/xxhash/v2 v2.3.0 // indirect
|
||||
github.com/containerd/errdefs v1.0.0 // indirect
|
||||
github.com/containerd/errdefs/pkg v0.3.0 // indirect
|
||||
github.com/distribution/reference v0.6.0 // indirect
|
||||
github.com/docker/go-connections v0.6.0 // indirect
|
||||
github.com/docker/go-units v0.5.0 // indirect
|
||||
github.com/felixge/httpsnoop v1.0.4 // indirect
|
||||
github.com/go-logr/logr v1.4.3 // indirect
|
||||
github.com/go-logr/stdr v1.2.2 // indirect
|
||||
github.com/moby/docker-image-spec v1.3.1 // indirect
|
||||
github.com/opencontainers/go-digest v1.0.0 // indirect
|
||||
github.com/opencontainers/image-spec v1.1.1 // indirect
|
||||
github.com/pkg/errors v0.9.1 // indirect
|
||||
go.opentelemetry.io/auto/sdk v1.2.1 // indirect
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.64.0 // indirect
|
||||
go.opentelemetry.io/otel v1.39.0 // indirect
|
||||
go.opentelemetry.io/otel/metric v1.39.0 // indirect
|
||||
go.opentelemetry.io/otel/trace v1.39.0 // indirect
|
||||
golang.org/x/sys v0.39.0 // indirect
|
||||
)
|
||||
78
agent/go.sum
Normal file
78
agent/go.sum
Normal file
@@ -0,0 +1,78 @@
|
||||
github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY=
|
||||
github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU=
|
||||
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
|
||||
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
|
||||
github.com/containerd/errdefs v1.0.0 h1:tg5yIfIlQIrxYtu9ajqY42W3lpS19XqdxRQeEwYG8PI=
|
||||
github.com/containerd/errdefs v1.0.0/go.mod h1:+YBYIdtsnF4Iw6nWZhJcqGSg/dwvV7tyJ/kCkyJ2k+M=
|
||||
github.com/containerd/errdefs/pkg v0.3.0 h1:9IKJ06FvyNlexW690DXuQNx2KA2cUJXx151Xdx3ZPPE=
|
||||
github.com/containerd/errdefs/pkg v0.3.0/go.mod h1:NJw6s9HwNuRhnjJhM7pylWwMyAkmCQvQ4GpJHEqRLVk=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk=
|
||||
github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E=
|
||||
github.com/docker/docker v24.0.7+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
|
||||
github.com/docker/docker v28.5.2+incompatible h1:DBX0Y0zAjZbSrm1uzOkdr1onVghKaftjlSWt4AFexzM=
|
||||
github.com/docker/docker v28.5.2+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
|
||||
github.com/docker/go-connections v0.6.0 h1:LlMG9azAe1TqfR7sO+NJttz1gy6KO7VJBh+pMmjSD94=
|
||||
github.com/docker/go-connections v0.6.0/go.mod h1:AahvXYshr6JgfUJGdDCs2b5EZG/vmaMAntpSFH5BFKE=
|
||||
github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4=
|
||||
github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
|
||||
github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
|
||||
github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
|
||||
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
|
||||
github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI=
|
||||
github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
|
||||
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
|
||||
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
|
||||
github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0=
|
||||
github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||
github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=
|
||||
github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
|
||||
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I=
|
||||
github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0=
|
||||
github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo=
|
||||
github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
|
||||
github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
|
||||
github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJwooC2xJA040=
|
||||
github.com/opencontainers/image-spec v1.1.1/go.mod h1:qpqAh3Dmcf36wStyyWU+kCeDgrGnAve2nCC8+7h8Q0M=
|
||||
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
||||
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE=
|
||||
github.com/shirou/gopsutil/v3 v3.23.12/go.mod h1:1FrWgea594Jp7qmjHUUPlJDTPgcsb9mGnXDxavtikzM=
|
||||
github.com/shirou/gopsutil/v3 v3.24.5 h1:i0t8kL+kQTvpAYToeuiVk3TgDeKOFioZO3Ztz/iZ9pI=
|
||||
github.com/shirou/gopsutil/v3 v3.24.5/go.mod h1:bsoOS1aStSs9ErQ1WWfxllSeS1K5D+U30r2NfcubMVk=
|
||||
github.com/shoenig/go-m1cpu v0.1.6/go.mod h1:1JJMcUBvfNwpq05QDQVAnx3gUHr9IYF7GNg9SUEw2VQ=
|
||||
github.com/shoenig/test v0.6.4/go.mod h1:byHiCGXqrVaflBLAMq/srcZIHynQPQgeyvkvXnjqq0k=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
|
||||
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
|
||||
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
|
||||
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
|
||||
github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI=
|
||||
github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY=
|
||||
github.com/yusufpapurcu/wmi v1.2.3/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0=
|
||||
go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ64=
|
||||
go.opentelemetry.io/auto/sdk v1.2.1/go.mod h1:KRTj+aOaElaLi+wW1kO/DZRXwkF4C5xPbEe3ZiIhN7Y=
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.64.0 h1:ssfIgGNANqpVFCndZvcuyKbl0g+UAVcbBcqGkG28H0Y=
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.64.0/go.mod h1:GQ/474YrbE4Jx8gZ4q5I4hrhUzM6UPzyrqJYV2AqPoQ=
|
||||
go.opentelemetry.io/otel v1.39.0 h1:8yPrr/S0ND9QEfTfdP9V+SiwT4E0G7Y5MO7p85nis48=
|
||||
go.opentelemetry.io/otel v1.39.0/go.mod h1:kLlFTywNWrFyEdH0oj2xK0bFYZtHRYUdv1NklR/tgc8=
|
||||
go.opentelemetry.io/otel/metric v1.39.0 h1:d1UzonvEZriVfpNKEVmHXbdf909uGTOQjA0HF0Ls5Q0=
|
||||
go.opentelemetry.io/otel/metric v1.39.0/go.mod h1:jrZSWL33sD7bBxg1xjrqyDjnuzTUB0x1nBERXd7Ftcs=
|
||||
go.opentelemetry.io/otel/trace v1.39.0 h1:2d2vfpEDmCJ5zVYz7ijaJdOF59xLomrvj7bjt6/qCJI=
|
||||
go.opentelemetry.io/otel/trace v1.39.0/go.mod h1:88w4/PnZSazkGzz/w84VHpQafiU4EtqqlVdxWy+rNOA=
|
||||
golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20201204225414-ed752295db88/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.39.0 h1:CvCKL8MeisomCi6qNZ+wbb0DN9E5AATixKsvNtMoMFk=
|
||||
golang.org/x/sys v0.39.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
@@ -1,249 +0,0 @@
|
||||
"""
|
||||
Django ORM 实现的 Directory Repository
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import List, Tuple, Dict, Iterator
|
||||
from django.db import transaction, IntegrityError, OperationalError, DatabaseError
|
||||
from django.utils import timezone
|
||||
|
||||
from apps.asset.models.asset_models import Directory
|
||||
from apps.asset.dtos import DirectoryDTO
|
||||
from apps.common.decorators import auto_ensure_db_connection
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
|
||||
@auto_ensure_db_connection
class DjangoDirectoryRepository:
    """Django ORM implementation of the Directory repository."""

    def bulk_create_ignore_conflicts(self, items: List[DirectoryDTO]) -> int:
        """Bulk-insert Directory rows, skipping unique-constraint conflicts.

        Args:
            items: Directory DTO list.

        Returns:
            int: number of submitted records. Because ignore_conflicts is
                used, the ORM cannot report how many rows were truly inserted.

        Raises:
            IntegrityError: integrity violation.
            OperationalError: database operation failure.
            DatabaseError: other database error.
        """
        if not items:
            return 0

        try:
            # Map each DTO onto an ORM model instance.
            rows = [
                Directory(
                    website_id=dto.website_id,
                    target_id=dto.target_id,
                    url=dto.url,
                    status=dto.status,
                    content_length=dto.content_length,
                    words=dto.words,
                    lines=dto.lines,
                    content_type=dto.content_type,
                    duration=dto.duration,
                )
                for dto in items
            ]

            with transaction.atomic():
                # Rows whose (website, url) pair already exists are skipped.
                Directory.objects.bulk_create(rows, ignore_conflicts=True)

            logger.debug(f"成功处理 {len(items)} 条 Directory 记录")
            return len(items)

        except IntegrityError as e:
            logger.error(
                f"批量插入 Directory 失败 - 数据完整性错误: {e}, "
                f"记录数: {len(items)}"
            )
            raise

        except OperationalError as e:
            logger.error(
                f"批量插入 Directory 失败 - 数据库操作错误: {e}, "
                f"记录数: {len(items)}"
            )
            raise

        except DatabaseError as e:
            logger.error(
                f"批量插入 Directory 失败 - 数据库错误: {e}, "
                f"记录数: {len(items)}"
            )
            raise

        except Exception as e:
            logger.error(
                f"批量插入 Directory 失败 - 未知错误: {e}, "
                f"记录数: {len(items)}, "
                f"错误类型: {type(e).__name__}",
                exc_info=True
            )
            raise

    def get_by_website(self, website_id: int) -> List[DirectoryDTO]:
        """Return every directory recorded for the given website as DTOs.

        Args:
            website_id: website ID.

        Returns:
            List[DirectoryDTO]: directory DTOs for the website.
        """
        try:
            return [
                DirectoryDTO(
                    website_id=row.website_id,
                    target_id=row.target_id,
                    url=row.url,
                    status=row.status,
                    content_length=row.content_length,
                    words=row.words,
                    lines=row.lines,
                    content_type=row.content_type,
                    duration=row.duration,
                )
                for row in Directory.objects.filter(website_id=website_id)
            ]

        except Exception as e:
            logger.error(f"获取目录列表失败 - Website ID: {website_id}, 错误: {e}")
            raise

    def count_by_website(self, website_id: int) -> int:
        """Count the directories stored for the given website."""
        try:
            count = Directory.objects.filter(website_id=website_id).count()
            logger.debug(f"Website {website_id} 的目录总数: {count}")
            return count

        except Exception as e:
            logger.error(f"统计目录数量失败 - Website ID: {website_id}, 错误: {e}")
            raise

    def get_all(self):
        """Return a QuerySet over all directories."""
        return Directory.objects.all()

    def get_by_target(self, target_id: int):
        """Directories for a target, newest first, with website preloaded."""
        return (
            Directory.objects
            .filter(target_id=target_id)
            .select_related('website')
            .order_by('-discovered_at')
        )

    def get_urls_for_export(self, target_id: int, batch_size: int = 1000) -> Iterator[str]:
        """Stream a target's directory URLs (url column only, sorted)."""
        try:
            url_iter = (
                Directory.objects
                .filter(target_id=target_id)
                .values_list('url', flat=True)
                .order_by('url')
                .iterator(chunk_size=batch_size)
            )
            yield from url_iter
        except Exception as e:
            logger.error("流式导出目录 URL 失败 - Target ID: %s, 错误: %s", target_id, e)
            raise

    def soft_delete_by_ids(self, directory_ids: List[int]) -> int:
        """Soft-delete directories by id: stamp deleted_at, keep the rows.

        Args:
            directory_ids: Directory ID list.

        Returns:
            int: number of rows updated.
        """
        try:
            updated = (
                Directory.objects
                .filter(id__in=directory_ids)
                .update(deleted_at=timezone.now())
            )
            logger.debug(
                "批量软删除Directory成功 - Count: %s, 更新记录: %s",
                len(directory_ids),
                updated,
            )
            return updated
        except Exception as e:
            logger.error(
                "批量软删除Directory失败 - IDs: %s, 错误: %s",
                directory_ids,
                e,
            )
            raise

    def hard_delete_by_ids(self, directory_ids: List[int]) -> Tuple[int, Dict[str, int]]:
        """Hard-delete directories in batches via database-level CASCADE.

        Args:
            directory_ids: Directory ID list.

        Returns:
            (total rows deleted, summary dict). The database cascades
            related rows, so only an aggregate count is available.
        """
        try:
            batch_size = 1000
            total_deleted = 0

            logger.debug(f"开始批量删除 {len(directory_ids)} 个Directory(数据库 CASCADE)...")

            # Delete in bounded batches to keep each DELETE statement small.
            for start in range(0, len(directory_ids), batch_size):
                batch_ids = directory_ids[start:start + batch_size]
                count, _ = Directory.all_objects.filter(id__in=batch_ids).delete()
                total_deleted += count
                logger.debug(f"批次删除完成: {len(batch_ids)} 个Directory,删除 {count} 条记录")

            deleted_details = {
                'directories': len(directory_ids),
                'total': total_deleted,
                'note': 'Database CASCADE - detailed stats unavailable'
            }

            logger.debug(
                "批量硬删除成功(CASCADE)- Directory数: %s, 总删除记录: %s",
                len(directory_ids),
                total_deleted,
            )

            return total_deleted, deleted_details

        except Exception as e:
            logger.error(
                "批量硬删除失败(CASCADE)- Directory数: %s, 错误: %s",
                len(directory_ids),
                str(e),
                exc_info=True,
            )
            raise
|
||||
@@ -1,192 +0,0 @@
|
||||
"""Endpoint Repository - Django ORM 实现"""
|
||||
|
||||
import logging
|
||||
from typing import List, Optional, Tuple, Dict, Any
|
||||
|
||||
from apps.asset.models import Endpoint
|
||||
from apps.asset.dtos.asset import EndpointDTO
|
||||
from apps.common.decorators import auto_ensure_db_connection
|
||||
from django.db import transaction
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@auto_ensure_db_connection
class DjangoEndpointRepository:
    """Endpoint repository - data access for the endpoint table."""

    def bulk_create_ignore_conflicts(self, items: List[EndpointDTO]) -> int:
        """Bulk-create endpoints, skipping unique-constraint conflicts.

        Args:
            items: endpoint DTO list.

        Returns:
            int: number of objects handed to bulk_create (conflicting rows
                are skipped by the database and not reported separately).
        """
        if not items:
            return 0

        try:
            # Endpoint currently links to target only (no website FK);
            # map each DTO field onto an Endpoint instance.
            rows = [
                Endpoint(
                    target_id=dto.target_id,
                    url=dto.url,
                    host=dto.host or '',
                    title=dto.title or '',
                    status_code=dto.status_code,
                    content_length=dto.content_length,
                    webserver=dto.webserver or '',
                    body_preview=dto.body_preview or '',
                    content_type=dto.content_type or '',
                    tech=dto.tech if dto.tech else [],
                    vhost=dto.vhost,
                    location=dto.location or '',
                    matched_gf_patterns=dto.matched_gf_patterns if dto.matched_gf_patterns else [],
                )
                for dto in items
            ]

            with transaction.atomic():
                created = Endpoint.objects.bulk_create(
                    rows,
                    ignore_conflicts=True,
                    batch_size=1000,
                )
            return len(created)

        except Exception as e:
            logger.error(f"批量创建端点失败: {e}")
            raise

    @staticmethod
    def _to_dto(endpoint) -> EndpointDTO:
        """Map an Endpoint model instance onto its DTO."""
        return EndpointDTO(
            website_id=endpoint.website_id,
            target_id=endpoint.target_id,
            url=endpoint.url,
            title=endpoint.title,
            status_code=endpoint.status_code,
            content_length=endpoint.content_length,
            webserver=endpoint.webserver,
            body_preview=endpoint.body_preview,
            content_type=endpoint.content_type,
            tech=endpoint.tech,
            vhost=endpoint.vhost,
            location=endpoint.location,
            matched_gf_patterns=endpoint.matched_gf_patterns,
        )

    def get_by_website(self, website_id: int) -> List[EndpointDTO]:
        """All endpoints for a website, newest first, as DTOs.

        Args:
            website_id: website ID.

        Returns:
            List[EndpointDTO]: endpoint DTOs.
        """
        qs = Endpoint.objects.filter(
            website_id=website_id
        ).order_by('-discovered_at')
        return [self._to_dto(ep) for ep in qs]

    def get_queryset_by_target(self, target_id: int):
        """QuerySet of a target's endpoints, newest first."""
        return Endpoint.objects.filter(target_id=target_id).order_by('-discovered_at')

    def get_all(self):
        """All endpoints (global query), newest first."""
        return Endpoint.objects.all().order_by('-discovered_at')

    def get_by_target(self, target_id: int) -> List[EndpointDTO]:
        """All endpoints for a target, newest first, as DTOs.

        Args:
            target_id: target ID.

        Returns:
            List[EndpointDTO]: endpoint DTOs.
        """
        qs = Endpoint.objects.filter(
            target_id=target_id
        ).order_by('-discovered_at')
        return [self._to_dto(ep) for ep in qs]

    def count_by_website(self, website_id: int) -> int:
        """Number of endpoints linked to the website."""
        return Endpoint.objects.filter(website_id=website_id).count()

    def count_by_target(self, target_id: int) -> int:
        """Number of endpoints linked to the target."""
        return Endpoint.objects.filter(target_id=target_id).count()

    def soft_delete_by_ids(self, ids: List[int]) -> int:
        """Soft-delete endpoints: stamp deleted_at, keep the rows.

        Args:
            ids: endpoint ID list.

        Returns:
            int: number of rows updated.
        """
        from django.utils import timezone
        return Endpoint.objects.filter(id__in=ids).update(deleted_at=timezone.now())

    def hard_delete_by_ids(self, ids: List[int]) -> Tuple[int, Dict[str, int]]:
        """Hard-delete endpoints permanently.

        Args:
            ids: endpoint ID list.

        Returns:
            Tuple[int, Dict[str, int]]: (total rows deleted, per-model detail
            dict as returned by the ORM delete()).
        """
        deleted_count, details = Endpoint.all_objects.filter(id__in=ids).delete()
        return deleted_count, details
|
||||
@@ -1,167 +0,0 @@
|
||||
"""HostPortMapping Repository - Django ORM 实现"""
|
||||
|
||||
import logging
|
||||
from typing import List, Iterator
|
||||
|
||||
from apps.asset.models.asset_models import HostPortMapping
|
||||
from apps.asset.dtos.asset import HostPortMappingDTO
|
||||
from apps.common.decorators import auto_ensure_db_connection
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@auto_ensure_db_connection
class DjangoHostPortMappingRepository:
    """Django ORM implementation of the HostPortMapping repository."""

    def bulk_create_ignore_conflicts(self, items: List[HostPortMappingDTO]) -> int:
        """Bulk-create host/port mappings, ignoring duplicates.

        Deduplication rides on the (target, host, ip, port) unique
        constraint; existing rows are skipped and never updated.

        Args:
            items: host/port mapping DTO list.

        Returns:
            int: number of objects handed to bulk_create (with
                ignore_conflicts this may be 0 or overstate real inserts).
        """
        try:
            logger.debug("准备批量创建主机端口关联 - 数量: %d", len(items))

            if not items:
                logger.debug("主机端口关联为空,跳过创建")
                return 0

            rows = [
                HostPortMapping(
                    target_id=dto.target_id,
                    host=dto.host,
                    ip=dto.ip,
                    port=dto.port,
                )
                for dto in items
            ]

            # Conflicting rows are silently skipped by the database.
            created = HostPortMapping.objects.bulk_create(
                rows,
                ignore_conflicts=True
            )

            created_count = len(created) if created else 0
            logger.debug("主机端口关联创建完成 - 数量: %d", created_count)

            return created_count

        except Exception as e:
            logger.error(
                "批量创建主机端口关联失败 - 数量: %d, 错误: %s",
                len(items),
                str(e),
                exc_info=True,
            )
            raise

    def get_for_export(self, target_id: int, batch_size: int = 1000):
        """Stream (host, port) dicts for a target, ordered by host then port."""
        rows = (
            HostPortMapping.objects
            .filter(target_id=target_id)
            .order_by("host", "port")
            .values("host", "port")
            .iterator(chunk_size=batch_size)
        )
        yield from rows

    def get_ips_for_export(self, target_id: int, batch_size: int = 1000) -> Iterator[str]:
        """Stream the distinct IP addresses recorded for a target."""
        rows = (
            HostPortMapping.objects
            .filter(target_id=target_id)
            .values_list("ip", flat=True)
            .distinct()
            .order_by("ip")
            .iterator(chunk_size=batch_size)
        )
        yield from rows

    def get_ip_aggregation_by_target(self, target_id: int, search: str = None):
        """Aggregate a target's mappings per IP: hosts, ports, first seen.

        Args:
            target_id: target ID.
            search: optional case-insensitive substring filter on the IP.

        Returns:
            list[dict]: one entry per IP with 'ip', 'hosts', 'ports',
            'discovered_at' (earliest sighting), newest first.
        """
        from django.db.models import Min

        qs = HostPortMapping.objects.filter(target_id=target_id)
        if search:
            qs = qs.filter(ip__icontains=search)

        grouped = (
            qs
            .values('ip')
            .annotate(discovered_at=Min('discovered_at'))
            .order_by('-discovered_at')
        )

        results = []
        for row in grouped:
            ip = row['ip']
            # Second query per IP collects its distinct host/port pairs.
            pairs = (
                HostPortMapping.objects
                .filter(target_id=target_id, ip=ip)
                .values('host', 'port')
                .distinct()
            )
            results.append({
                'ip': ip,
                'hosts': sorted({p['host'] for p in pairs}),
                'ports': sorted({p['port'] for p in pairs}),
                'discovered_at': row['discovered_at'],
            })

        return results

    def get_all_ip_aggregation(self, search: str = None):
        """Aggregate mappings per IP across every target (global query).

        Args:
            search: optional case-insensitive substring filter on the IP.

        Returns:
            list[dict]: one entry per IP with 'ip', 'hosts', 'ports',
            'discovered_at' (earliest sighting), newest first.
        """
        from django.db.models import Min

        qs = HostPortMapping.objects.all()
        if search:
            qs = qs.filter(ip__icontains=search)

        grouped = (
            qs
            .values('ip')
            .annotate(discovered_at=Min('discovered_at'))
            .order_by('-discovered_at')
        )

        results = []
        for row in grouped:
            ip = row['ip']
            pairs = (
                HostPortMapping.objects
                .filter(ip=ip)
                .values('host', 'port')
                .distinct()
            )
            results.append({
                'ip': ip,
                'hosts': sorted({p['host'] for p in pairs}),
                'ports': sorted({p['port'] for p in pairs}),
                'discovered_at': row['discovered_at'],
            })

        return results
|
||||
@@ -1,256 +0,0 @@
|
||||
import logging
|
||||
from typing import List, Iterator
|
||||
|
||||
from django.db import transaction, IntegrityError, OperationalError, DatabaseError
|
||||
from django.utils import timezone
|
||||
from typing import Tuple, Dict
|
||||
|
||||
from apps.asset.models.asset_models import Subdomain
|
||||
from apps.asset.dtos import SubdomainDTO
|
||||
from apps.common.decorators import auto_ensure_db_connection
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@auto_ensure_db_connection
class DjangoSubdomainRepository:
    """Subdomain repository implemented on top of the Django ORM."""

    def bulk_create_ignore_conflicts(self, items: List[SubdomainDTO]) -> None:
        """Bulk-insert subdomains, skipping rows that already exist.

        Args:
            items: subdomain DTO list.

        Raises:
            IntegrityError: constraint violation (e.g. unique conflict).
            OperationalError: database operation failure (e.g. lost connection).
            DatabaseError: any other database error.
        """
        if not items:
            return

        try:
            rows = [
                Subdomain(
                    name=dto.name,
                    target_id=dto.target_id,
                )
                for dto in items
            ]

            with transaction.atomic():
                # ignore_conflicts strategy:
                #   - new subdomains are INSERTed in full
                #   - existing ones are skipped (no probe data to update)
                # Note: ignore_conflicts cannot report the true insert count.
                Subdomain.objects.bulk_create(  # type: ignore[attr-defined]
                    rows,
                    ignore_conflicts=True,
                )

            logger.debug(f"成功处理 {len(items)} 条子域名记录")

        except IntegrityError as e:
            logger.error(
                f"批量插入子域名失败 - 数据完整性错误: {e}, "
                f"记录数: {len(items)}, "
                f"示例域名: {items[0].name if items else 'N/A'}"
            )
            raise

        except OperationalError as e:
            logger.error(
                f"批量插入子域名失败 - 数据库操作错误: {e}, "
                f"记录数: {len(items)}"
            )
            raise

        except DatabaseError as e:
            logger.error(
                f"批量插入子域名失败 - 数据库错误: {e}, "
                f"记录数: {len(items)}"
            )
            raise

        except Exception as e:
            logger.error(
                f"批量插入子域名失败 - 未知错误: {e}, "
                f"记录数: {len(items)}, "
                f"错误类型: {type(e).__name__}",
                exc_info=True
            )
            raise

    def get_or_create(self, name: str, target_id: int) -> Tuple[Subdomain, bool]:
        """Fetch a subdomain by (name, target) or create it.

        Args:
            name: subdomain name.
            target_id: target ID.

        Returns:
            (Subdomain instance, True if newly created).
        """
        return Subdomain.objects.get_or_create(
            name=name,
            target_id=target_id,
        )

    def get_domains_for_export(self, target_id: int, batch_size: int = 1000) -> Iterator[str]:
        """Stream a target's subdomain names (for scan-tool input files).

        Uses iterator() so the full result set is never held in memory.

        Args:
            target_id: target ID.
            batch_size: rows fetched from the database per round-trip.

        Yields:
            str: subdomain name.
        """
        rows = Subdomain.objects.filter(
            target_id=target_id
        ).only('name').iterator(chunk_size=batch_size)

        for row in rows:
            yield row.name

    def get_by_target(self, target_id: int):
        """QuerySet of a target's subdomains, newest first."""
        return Subdomain.objects.filter(target_id=target_id).order_by('-discovered_at')

    def count_by_target(self, target_id: int) -> int:
        """Number of subdomains recorded for the target."""
        return Subdomain.objects.filter(target_id=target_id).count()

    def get_by_names_and_target_id(self, names: set, target_id: int) -> dict:
        """Batch-fetch subdomains by name for one target.

        Args:
            names: set of subdomain names.
            target_id: target ID.

        Returns:
            dict: {name: Subdomain} for every matching row.
        """
        matches = Subdomain.objects.filter(
            name__in=names,
            target_id=target_id
        ).only('id', 'name')

        return {row.name: row for row in matches}

    def get_all(self):
        """Return a QuerySet over every subdomain."""
        return Subdomain.objects.all()

    def soft_delete_by_ids(self, subdomain_ids: List[int]) -> int:
        """Soft-delete subdomains: stamp deleted_at, keep rows and relations.

        Args:
            subdomain_ids: subdomain ID list.

        Returns:
            int: number of rows updated. Recoverable - nothing is removed.
        """
        try:
            updated = (
                Subdomain.objects
                .filter(id__in=subdomain_ids)
                .update(deleted_at=timezone.now())
            )
            logger.debug(
                "批量软删除子域名成功 - Count: %s, 更新记录: %s",
                len(subdomain_ids),
                updated,
            )
            return updated
        except Exception as e:
            logger.error(
                "批量软删除子域名失败 - IDs: %s, 错误: %s",
                subdomain_ids,
                e,
            )
            raise

    def hard_delete_by_ids(self, subdomain_ids: List[int]) -> Tuple[int, Dict[str, int]]:
        """Hard-delete subdomains in batches via database-level CASCADE.

        Permanently removes rows; the database cascades foreign keys, so
        Django signals (pre_delete/post_delete) do not fire and only an
        aggregate count is available.

        Args:
            subdomain_ids: subdomain ID list.

        Returns:
            (total rows deleted, summary dict).
        """
        try:
            batch_size = 1000  # cap the number of ids per DELETE
            total_deleted = 0

            logger.debug(f"开始批量删除 {len(subdomain_ids)} 个子域名(数据库 CASCADE)...")

            # Process ids in bounded batches to avoid one huge statement.
            for start in range(0, len(subdomain_ids), batch_size):
                batch_ids = subdomain_ids[start:start + batch_size]

                # Deleting the subdomain cascades all related rows in the DB.
                count, _ = Subdomain.all_objects.filter(id__in=batch_ids).delete()
                total_deleted += count

                logger.debug(f"批次删除完成: {len(batch_ids)} 个子域名,删除 {count} 条记录")

            deleted_details = {
                'subdomains': len(subdomain_ids),
                'total': total_deleted,
                'note': 'Database CASCADE - detailed stats unavailable'
            }

            logger.debug(
                "批量硬删除成功(CASCADE)- 子域名数: %s, 总删除记录: %s",
                len(subdomain_ids),
                total_deleted,
            )

            return total_deleted, deleted_details

        except Exception as e:
            logger.error(
                "批量硬删除失败(CASCADE)- 子域名数: %s, 错误: %s",
                len(subdomain_ids),
                str(e),
                exc_info=True,
            )
            raise
|
||||
|
||||
|
||||
@@ -1,260 +0,0 @@
|
||||
"""
|
||||
Django ORM 实现的 WebSite Repository
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import List, Generator, Tuple, Dict, Optional
|
||||
from django.db import transaction, IntegrityError, OperationalError, DatabaseError
|
||||
from django.utils import timezone
|
||||
|
||||
from apps.asset.models.asset_models import WebSite
|
||||
from apps.asset.dtos import WebSiteDTO
|
||||
from apps.common.decorators import auto_ensure_db_connection
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
|
||||
@auto_ensure_db_connection
class DjangoWebSiteRepository:
    """Django ORM implementation of the WebSite repository."""

    def bulk_create_ignore_conflicts(self, items: List[WebSiteDTO]) -> None:
        """Bulk-insert WebSite rows, skipping unique-constraint conflicts.

        Args:
            items: WebSite DTO list.

        Raises:
            IntegrityError: integrity violation.
            OperationalError: database operation failure.
            DatabaseError: other database error.
        """
        if not items:
            return

        try:
            # Map each DTO onto an ORM model instance.
            rows = [
                WebSite(
                    target_id=dto.target_id,
                    url=dto.url,
                    host=dto.host,
                    location=dto.location,
                    title=dto.title,
                    webserver=dto.webserver,
                    body_preview=dto.body_preview,
                    content_type=dto.content_type,
                    tech=dto.tech,
                    status_code=dto.status_code,
                    content_length=dto.content_length,
                    vhost=dto.vhost,
                )
                for dto in items
            ]

            with transaction.atomic():
                # Rows whose (url, target) pair already exists are skipped.
                WebSite.objects.bulk_create(rows, ignore_conflicts=True)

            logger.debug(f"成功处理 {len(items)} 条 WebSite 记录")

        except IntegrityError as e:
            logger.error(
                f"批量插入 WebSite 失败 - 数据完整性错误: {e}, "
                f"记录数: {len(items)}"
            )
            raise

        except OperationalError as e:
            logger.error(
                f"批量插入 WebSite 失败 - 数据库操作错误: {e}, "
                f"记录数: {len(items)}"
            )
            raise

        except DatabaseError as e:
            logger.error(
                f"批量插入 WebSite 失败 - 数据库错误: {e}, "
                f"记录数: {len(items)}"
            )
            raise

        except Exception as e:
            logger.error(
                f"批量插入 WebSite 失败 - 未知错误: {e}, "
                f"记录数: {len(items)}, "
                f"错误类型: {type(e).__name__}",
                exc_info=True
            )
            raise

    def get_urls_for_export(self, target_id: int, batch_size: int = 1000) -> Generator[str, None, None]:
        """Stream a target's site URLs (url column only).

        Args:
            target_id: target ID.
            batch_size: rows fetched per database round-trip.

        Yields:
            str: site URL.
        """
        try:
            # Select only the url column to minimise data transfer.
            url_iter = WebSite.objects.filter(
                target_id=target_id
            ).values_list('url', flat=True).iterator(chunk_size=batch_size)

            yield from url_iter
        except Exception as e:
            logger.error(f"流式导出站点 URL 失败 - Target ID: {target_id}, 错误: {e}")
            raise

    def get_by_target(self, target_id: int):
        """QuerySet of a target's sites, newest first."""
        return WebSite.objects.filter(target_id=target_id).order_by('-discovered_at')

    def count_by_target(self, target_id: int) -> int:
        """Count sites under the target.

        Args:
            target_id: target ID.

        Returns:
            int: total site count.
        """
        try:
            count = WebSite.objects.filter(target_id=target_id).count()
            logger.debug(f"Target {target_id} 的站点总数: {count}")
            return count

        except Exception as e:
            logger.error(f"统计站点数量失败 - Target ID: {target_id}, 错误: {e}")
            raise

    def count_by_scan(self, scan_id: int) -> int:
        """Count sites produced by a scan."""
        try:
            count = WebSite.objects.filter(scan_id=scan_id).count()
            logger.debug(f"Scan {scan_id} 的站点总数: {count}")
            return count
        except Exception as e:
            logger.error(f"统计站点数量失败 - Scan ID: {scan_id}, 错误: {e}")
            raise

    def get_by_url(self, url: str, target_id: int) -> Optional[int]:
        """Look up a site ID by URL within a target.

        Args:
            url: site URL.
            target_id: target ID.

        Returns:
            Optional[int]: site ID, or None when absent. Uses first(), so
            if several rows match, the first one wins silently.
        """
        try:
            website = WebSite.objects.filter(url=url, target_id=target_id).first()
            return website.id if website else None

        except Exception as e:
            logger.error(f"查询站点失败 - URL: {url}, Target ID: {target_id}, 错误: {e}")
            raise

    def get_all(self):
        """Return a QuerySet over every site."""
        return WebSite.objects.all()

    def soft_delete_by_ids(self, website_ids: List[int]) -> int:
        """Soft-delete sites: stamp deleted_at, keep the rows.

        Args:
            website_ids: WebSite ID list.

        Returns:
            int: number of rows updated.
        """
        try:
            updated = (
                WebSite.objects
                .filter(id__in=website_ids)
                .update(deleted_at=timezone.now())
            )
            logger.debug(
                "批量软删除WebSite成功 - Count: %s, 更新记录: %s",
                len(website_ids),
                updated,
            )
            return updated
        except Exception as e:
            logger.error(
                "批量软删除WebSite失败 - IDs: %s, 错误: %s",
                website_ids,
                e,
            )
            raise

    def hard_delete_by_ids(self, website_ids: List[int]) -> Tuple[int, Dict[str, int]]:
        """Hard-delete sites in batches via database-level CASCADE.

        Args:
            website_ids: WebSite ID list.

        Returns:
            (total rows deleted, summary dict). The database cascades
            related rows, so only an aggregate count is available.
        """
        try:
            batch_size = 1000
            total_deleted = 0

            logger.debug(f"开始批量删除 {len(website_ids)} 个WebSite(数据库 CASCADE)...")

            for start in range(0, len(website_ids), batch_size):
                batch_ids = website_ids[start:start + batch_size]
                count, _ = WebSite.all_objects.filter(id__in=batch_ids).delete()
                total_deleted += count
                logger.debug(f"批次删除完成: {len(batch_ids)} 个WebSite,删除 {count} 条记录")

            deleted_details = {
                'websites': len(website_ids),
                'total': total_deleted,
                'note': 'Database CASCADE - detailed stats unavailable'
            }

            logger.debug(
                "批量硬删除成功(CASCADE)- WebSite数: %s, 总删除记录: %s",
                len(website_ids),
                total_deleted,
            )

            return total_deleted, deleted_details

        except Exception as e:
            logger.error(
                "批量硬删除失败(CASCADE)- WebSite数: %s, 错误: %s",
                len(website_ids),
                str(e),
                exc_info=True,
            )
            raise
|
||||
@@ -1,55 +0,0 @@
|
||||
import logging
|
||||
from typing import Tuple, Iterator
|
||||
|
||||
from apps.asset.models.asset_models import Directory
|
||||
from apps.asset.repositories import DjangoDirectoryRepository
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DirectoryService:
    """Business-logic layer for directory assets."""

    def __init__(self, repository=None):
        """
        Initialize the directory service.

        Args:
            repository: directory repository instance (dependency injection);
                defaults to DjangoDirectoryRepository.
        """
        self.repo = repository or DjangoDirectoryRepository()

    # ==================== 创建操作 ====================

    def bulk_create_ignore_conflicts(self, directory_dtos: list) -> int:
        """
        Bulk-create directory records, ignoring conflicts (used by scan tasks).

        Args:
            directory_dtos: DirectoryDTO list.

        Returns:
            int: count reported by the repository. (The previous annotation
                said -> None, but the repository's result was always
                propagated; the annotation now matches the behavior.)
        """
        return self.repo.bulk_create_ignore_conflicts(directory_dtos)

    # ==================== 查询操作 ====================

    def get_all(self):
        """
        Return a QuerySet over all directories.

        Returns:
            QuerySet: directory queryset.
        """
        logger.debug("获取所有目录")
        return self.repo.get_all()

    def get_directories_by_target(self, target_id: int):
        """Directories belonging to the given target (delegates to the repo)."""
        logger.debug("获取目标下所有目录 - Target ID: %d", target_id)
        return self.repo.get_by_target(target_id)

    def iter_directory_urls_by_target(self, target_id: int, chunk_size: int = 1000) -> Iterator[str]:
        """Stream all directory URLs for a target; suited to large exports."""
        logger.debug("流式导出目标下目录 URL - Target ID: %d", target_id)
        return self.repo.get_urls_for_export(target_id=target_id, batch_size=chunk_size)
|
||||
|
||||
|
||||
__all__ = ['DirectoryService']
|
||||
@@ -1,178 +0,0 @@
|
||||
"""
|
||||
Endpoint 服务层
|
||||
|
||||
处理 URL/端点相关的业务逻辑
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import List, Optional, Dict, Any, Iterator
|
||||
|
||||
from apps.asset.dtos.asset import EndpointDTO
|
||||
from apps.asset.repositories.asset import DjangoEndpointRepository
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class EndpointService:
|
||||
"""
|
||||
Endpoint 服务类
|
||||
|
||||
提供 Endpoint(URL/端点)相关的业务逻辑
|
||||
"""
|
||||
|
||||
def __init__(self):
    """Wire the service to its endpoint repository."""
    self.repo = DjangoEndpointRepository()
|
||||
|
||||
def bulk_create_endpoints(
    self,
    endpoints: List[EndpointDTO],
    ignore_conflicts: bool = True
) -> int:
    """Bulk-create endpoint records.

    Args:
        endpoints: endpoint DTO list.
        ignore_conflicts: whether duplicates are skipped. A non-ignoring
            variant would need repository support; until then both settings
            delegate to the same conflict-ignoring repository call.

    Returns:
        int: number of records created.
    """
    if not endpoints:
        return 0

    try:
        return self.repo.bulk_create_ignore_conflicts(endpoints)
    except Exception as e:
        logger.error(f"批量创建端点失败: {e}")
        raise
|
||||
|
||||
def get_endpoints_by_website(
|
||||
self,
|
||||
website_id: int,
|
||||
limit: Optional[int] = None
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
获取网站下的端点列表
|
||||
|
||||
Args:
|
||||
website_id: 网站 ID
|
||||
limit: 返回数量限制
|
||||
|
||||
Returns:
|
||||
List[Dict]: 端点列表
|
||||
"""
|
||||
endpoints_dto = self.repo.get_by_website(website_id)
|
||||
|
||||
if limit:
|
||||
endpoints_dto = endpoints_dto[:limit]
|
||||
|
||||
endpoints = []
|
||||
for dto in endpoints_dto:
|
||||
endpoints.append({
|
||||
'url': dto.url,
|
||||
'title': dto.title,
|
||||
'status_code': dto.status_code,
|
||||
'content_length': dto.content_length,
|
||||
'webserver': dto.webserver
|
||||
})
|
||||
|
||||
return endpoints
|
||||
|
||||
def get_endpoints_by_target(
|
||||
self,
|
||||
target_id: int,
|
||||
limit: Optional[int] = None
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
获取目标下的端点列表
|
||||
|
||||
Args:
|
||||
target_id: 目标 ID
|
||||
limit: 返回数量限制
|
||||
|
||||
Returns:
|
||||
List[Dict]: 端点列表
|
||||
"""
|
||||
endpoints_dto = self.repo.get_by_target(target_id)
|
||||
|
||||
if limit:
|
||||
endpoints_dto = endpoints_dto[:limit]
|
||||
|
||||
endpoints = []
|
||||
for dto in endpoints_dto:
|
||||
endpoints.append({
|
||||
'url': dto.url,
|
||||
'title': dto.title,
|
||||
'status_code': dto.status_code,
|
||||
'content_length': dto.content_length,
|
||||
'webserver': dto.webserver
|
||||
})
|
||||
|
||||
return endpoints
|
||||
|
||||
def count_endpoints_by_target(self, target_id: int) -> int:
|
||||
"""
|
||||
统计目标下的端点数量
|
||||
|
||||
Args:
|
||||
target_id: 目标 ID
|
||||
|
||||
Returns:
|
||||
int: 端点数量
|
||||
"""
|
||||
return self.repo.count_by_target(target_id)
|
||||
|
||||
def get_queryset_by_target(self, target_id: int):
|
||||
return self.repo.get_queryset_by_target(target_id)
|
||||
|
||||
def get_all(self):
|
||||
"""获取所有端点(全局查询)"""
|
||||
return self.repo.get_all()
|
||||
|
||||
def iter_endpoint_urls_by_target(self, target_id: int, chunk_size: int = 1000) -> Iterator[str]:
|
||||
"""流式获取目标下的所有端点 URL,用于导出。"""
|
||||
queryset = self.repo.get_queryset_by_target(target_id)
|
||||
for url in queryset.values_list('url', flat=True).iterator(chunk_size=chunk_size):
|
||||
yield url
|
||||
|
||||
def count_endpoints_by_website(self, website_id: int) -> int:
|
||||
"""
|
||||
统计网站下的端点数量
|
||||
|
||||
Args:
|
||||
website_id: 网站 ID
|
||||
|
||||
Returns:
|
||||
int: 端点数量
|
||||
"""
|
||||
return self.repo.count_by_website(website_id)
|
||||
|
||||
def soft_delete_endpoints(self, endpoint_ids: List[int]) -> int:
|
||||
"""
|
||||
软删除端点
|
||||
|
||||
Args:
|
||||
endpoint_ids: 端点 ID 列表
|
||||
|
||||
Returns:
|
||||
int: 更新的数量
|
||||
"""
|
||||
return self.repo.soft_delete_by_ids(endpoint_ids)
|
||||
|
||||
def hard_delete_endpoints(self, endpoint_ids: List[int]) -> tuple:
|
||||
"""
|
||||
硬删除端点
|
||||
|
||||
Args:
|
||||
endpoint_ids: 端点 ID 列表
|
||||
|
||||
Returns:
|
||||
tuple: (删除总数, 详细信息)
|
||||
"""
|
||||
return self.repo.hard_delete_by_ids(endpoint_ids)
|
||||
@@ -1,61 +0,0 @@
|
||||
"""HostPortMapping Service - 业务逻辑层"""
|
||||
|
||||
import logging
|
||||
from typing import List, Iterator
|
||||
|
||||
from apps.asset.repositories.asset import DjangoHostPortMappingRepository
|
||||
from apps.asset.dtos.asset import HostPortMappingDTO
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class HostPortMappingService:
    """Business-logic layer for host/port mapping assets."""

    def __init__(self):
        # All persistence goes through the repository.
        self.repo = DjangoHostPortMappingRepository()

    def bulk_create_ignore_conflicts(self, items: List[HostPortMappingDTO]) -> int:
        """Bulk-insert host/port mappings, skipping duplicates.

        Args:
            items: host/port mapping DTOs.

        Returns:
            int: number of rows actually created.

        Note:
            Deduplication relies on the database unique constraint plus
            ``ignore_conflicts``.
        """
        try:
            logger.debug("Service: 准备批量创建主机端口映射 - 数量: %d", len(items))
            created = self.repo.bulk_create_ignore_conflicts(items)
            logger.info("Service: 主机端口映射创建成功 - 数量: %d", created)
            return created
        except Exception as exc:
            logger.error(
                "Service: 批量创建主机端口映射失败 - 数量: %d, 错误: %s",
                len(items),
                str(exc),
                exc_info=True
            )
            raise

    def iter_host_port_by_target(self, target_id: int, batch_size: int = 1000):
        """Stream host/port rows for a target, for export."""
        return self.repo.get_for_export(target_id=target_id, batch_size=batch_size)

    def get_ip_aggregation_by_target(self, target_id: int, search: str = None):
        """Return per-IP aggregated data scoped to one target."""
        return self.repo.get_ip_aggregation_by_target(target_id, search=search)

    def get_all_ip_aggregation(self, search: str = None):
        """Return per-IP aggregated data across all targets (global query)."""
        return self.repo.get_all_ip_aggregation(search=search)

    def iter_ips_by_target(self, target_id: int, batch_size: int = 1000) -> Iterator[str]:
        """Stream the unique IP addresses under a target."""
        return self.repo.get_ips_for_export(target_id=target_id, batch_size=batch_size)
|
||||
@@ -1,123 +0,0 @@
|
||||
import logging
|
||||
from typing import Tuple, List, Dict
|
||||
|
||||
from apps.asset.repositories import DjangoSubdomainRepository
|
||||
from apps.asset.dtos import SubdomainDTO
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SubdomainService:
    """Business-logic layer for subdomain assets."""

    def __init__(self, repository=None):
        """Create the service.

        Args:
            repository: optional repository instance (dependency injection);
                defaults to ``DjangoSubdomainRepository``.
        """
        self.repo = repository or DjangoSubdomainRepository()

    # ==================== queries ====================

    def get_all(self):
        """Return every subdomain.

        Returns:
            QuerySet: all subdomain records.
        """
        logger.debug("获取所有子域名")
        return self.repo.get_all()

    # ==================== creation ====================

    def get_or_create(self, name: str, target_id: int) -> Tuple[any, bool]:
        """Fetch a subdomain, creating it if absent.

        Args:
            name: subdomain name.
            target_id: owning target id.

        Returns:
            (subdomain object, created flag)
        """
        logger.debug("获取或创建子域名 - Name: %s, Target ID: %d", name, target_id)
        return self.repo.get_or_create(name, target_id)

    def bulk_create_ignore_conflicts(self, items: List[SubdomainDTO]) -> None:
        """Bulk-insert subdomains; duplicate rows are silently skipped.

        Args:
            items: subdomain DTOs.
        """
        logger.debug("批量创建子域名 - 数量: %d", len(items))
        return self.repo.bulk_create_ignore_conflicts(items)

    def get_by_names_and_target_id(self, names: set, target_id: int) -> dict:
        """Bulk-fetch subdomains by name within one target.

        Args:
            names: set of subdomain names.
            target_id: owning target id.

        Returns:
            dict: mapping of name -> subdomain object.
        """
        logger.debug("批量查询子域名 - 数量: %d, Target ID: %d", len(names), target_id)
        return self.repo.get_by_names_and_target_id(names, target_id)

    def get_subdomain_names_by_target(self, target_id: int) -> List[str]:
        """Return every subdomain name under a target as a list.

        Args:
            target_id: owning target id.

        Returns:
            List[str]: subdomain names.
        """
        logger.debug("获取目标下所有子域名 - Target ID: %d", target_id)
        # The repository streams with iterator() internally; materialize here.
        return list(self.repo.get_domains_for_export(target_id=target_id))

    def get_subdomains_by_target(self, target_id: int):
        """Return the subdomain queryset scoped to one target."""
        return self.repo.get_by_target(target_id)

    def count_subdomains_by_target(self, target_id: int) -> int:
        """Count the subdomains under a target.

        Args:
            target_id: owning target id.

        Returns:
            int: subdomain count.
        """
        logger.debug("统计目标下子域名数量 - Target ID: %d", target_id)
        return self.repo.count_by_target(target_id)

    def iter_subdomain_names_by_target(self, target_id: int, chunk_size: int = 1000):
        """Stream subdomain names under a target (memory-friendly export).

        Args:
            target_id: owning target id.
            chunk_size: batch size for the streaming query.

        Yields:
            str: subdomain names.
        """
        logger.debug("流式获取目标下所有子域名 - Target ID: %d, 批次大小: %d", target_id, chunk_size)
        # Repository handles the streaming iterator() internally.
        return self.repo.get_domains_for_export(target_id=target_id, batch_size=chunk_size)
|
||||
|
||||
|
||||
__all__ = ['SubdomainService']
|
||||
@@ -1,91 +0,0 @@
|
||||
import logging
|
||||
from typing import Tuple, List
|
||||
|
||||
from apps.asset.repositories import DjangoWebSiteRepository
|
||||
from apps.asset.dtos import WebSiteDTO
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class WebSiteService:
    """Business-logic layer for website assets."""

    def __init__(self, repository=None):
        """Create the service.

        Args:
            repository: optional repository instance (dependency injection);
                defaults to ``DjangoWebSiteRepository``.
        """
        self.repo = repository or DjangoWebSiteRepository()

    # ==================== creation ====================

    def bulk_create_ignore_conflicts(self, website_dtos: List[WebSiteDTO]) -> None:
        """Bulk-insert websites for a scan run; duplicates are skipped.

        Args:
            website_dtos: website DTOs to persist.
        """
        logger.debug("批量创建网站 - 数量: %d", len(website_dtos))
        return self.repo.bulk_create_ignore_conflicts(website_dtos)

    # ==================== queries ====================

    def get_by_url(self, url: str, target_id: int) -> int:
        """Look up a website id by URL within one target.

        Args:
            url: website URL.
            target_id: owning target id.

        Returns:
            int: website id, or None when no match exists.
        """
        return self.repo.get_by_url(url=url, target_id=target_id)

    def get_all(self):
        """Return every website.

        Returns:
            QuerySet: all website records.
        """
        logger.debug("获取所有网站")
        return self.repo.get_all()

    def get_websites_by_target(self, target_id: int):
        """Return the website queryset scoped to one target."""
        return self.repo.get_by_target(target_id)

    def count_websites_by_scan(self, scan_id: int) -> int:
        """Count the websites discovered by one scan.

        Args:
            scan_id: scan id.

        Returns:
            int: website count.
        """
        logger.debug("统计扫描下网站数量 - Scan ID: %d", scan_id)
        return self.repo.count_by_scan(scan_id)

    def iter_website_urls_by_target(self, target_id: int, chunk_size: int = 1000):
        """Stream all website URLs under a target (delegated to the repository)."""
        logger.debug(
            "流式获取目标下所有站点 URL - Target ID: %d, 批次大小: %d",
            target_id,
            chunk_size,
        )
        # Database access stays behind the repository so the service
        # has no direct ORM dependency.
        return self.repo.get_urls_for_export(target_id=target_id, batch_size=chunk_size)
|
||||
|
||||
|
||||
__all__ = ['WebSiteService']
|
||||
@@ -1,562 +0,0 @@
|
||||
import logging
|
||||
from rest_framework import viewsets, status, filters
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.exceptions import NotFound, ValidationError as DRFValidationError
|
||||
from django.core.exceptions import ValidationError, ObjectDoesNotExist
|
||||
from django.db import DatabaseError, IntegrityError, OperationalError
|
||||
from django.http import StreamingHttpResponse
|
||||
|
||||
from .serializers import (
|
||||
SubdomainListSerializer, WebSiteSerializer, DirectorySerializer,
|
||||
VulnerabilitySerializer, EndpointListSerializer, IPAddressAggregatedSerializer,
|
||||
SubdomainSnapshotSerializer, WebsiteSnapshotSerializer, DirectorySnapshotSerializer,
|
||||
EndpointSnapshotSerializer, VulnerabilitySnapshotSerializer
|
||||
)
|
||||
from .services import (
|
||||
SubdomainService, WebSiteService, DirectoryService,
|
||||
VulnerabilityService, AssetStatisticsService, EndpointService, HostPortMappingService
|
||||
)
|
||||
from .services.snapshot import (
|
||||
SubdomainSnapshotsService, WebsiteSnapshotsService, DirectorySnapshotsService,
|
||||
EndpointSnapshotsService, HostPortMappingSnapshotsService, VulnerabilitySnapshotsService
|
||||
)
|
||||
from apps.common.pagination import BasePagination
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AssetStatisticsViewSet(viewsets.ViewSet):
    """
    资产统计 API

    Serves pre-aggregated dashboard statistics (read from a cache table).
    """

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.service = AssetStatisticsService()

    def list(self, request):
        """
        Return the asset statistics snapshot.

        GET /assets/statistics/

        Response fields: totalTargets, totalSubdomains, totalIps,
        totalEndpoints, totalWebsites, totalVulns, totalAssets,
        runningScans, updatedAt, change* deltas and vulnBySeverity.
        """
        try:
            stats = self.service.get_statistics()
            return Response({
                'totalTargets': stats['total_targets'],
                'totalSubdomains': stats['total_subdomains'],
                'totalIps': stats['total_ips'],
                'totalEndpoints': stats['total_endpoints'],
                'totalWebsites': stats['total_websites'],
                'totalVulns': stats['total_vulns'],
                'totalAssets': stats['total_assets'],
                'runningScans': stats['running_scans'],
                'updatedAt': stats['updated_at'],
                # deltas vs the previous snapshot
                'changeTargets': stats['change_targets'],
                'changeSubdomains': stats['change_subdomains'],
                'changeIps': stats['change_ips'],
                'changeEndpoints': stats['change_endpoints'],
                'changeWebsites': stats['change_websites'],
                'changeVulns': stats['change_vulns'],
                'changeAssets': stats['change_assets'],
                # vulnerability severity distribution
                'vulnBySeverity': stats['vuln_by_severity'],
            })
        except (DatabaseError, OperationalError):
            # The binding `as e` was unused; logger.exception already
            # records the active exception and traceback.
            logger.exception("获取资产统计数据失败")
            return Response(
                {'error': '获取统计数据失败'},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR
            )

    @action(detail=False, methods=['get'], url_path='history')
    def history(self, request: Request):
        """
        Return statistics history for the trend chart.

        GET /assets/statistics/history/?days=7

        Query Parameters:
            days: how many recent days to return; defaults to 7,
                clamped into [1, 90]. Non-numeric input falls back to 7.

        Returns:
            list of per-day statistics rows.
        """
        try:
            days_param = request.query_params.get('days', '7')
            try:
                days = int(days_param)
            except (ValueError, TypeError):
                days = 7
            days = min(max(days, 1), 90)  # clamp to 1-90 days

            history = self.service.get_statistics_history(days=days)
            return Response(history)
        except (DatabaseError, OperationalError):
            logger.exception("获取统计历史数据失败")
            return Response(
                {'error': '获取历史数据失败'},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR
            )
|
||||
|
||||
|
||||
# 注意:IPAddress 模型已被重构为 HostPortMapping
|
||||
# IPAddressViewSet 已删除,需要根据新架构重新实现
|
||||
|
||||
|
||||
class SubdomainViewSet(viewsets.ModelViewSet):
    """Subdomain management ViewSet.

    Reachable two ways:
    1. nested:     GET /api/targets/{target_pk}/subdomains/
    2. standalone: GET /api/subdomains/ (global query)
    """

    serializer_class = SubdomainListSerializer
    pagination_class = BasePagination
    filter_backends = [filters.SearchFilter, filters.OrderingFilter]
    search_fields = ['name']
    ordering = ['-discovered_at']

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.service = SubdomainService()

    def get_queryset(self):
        """Scope to the target when nested, otherwise query globally."""
        target_pk = self.kwargs.get('target_pk')
        if not target_pk:
            return self.service.get_all()
        return self.service.get_subdomains_by_target(target_pk)

    @action(detail=False, methods=['get'], url_path='export')
    def export(self, request):
        """Export subdomain names as plain text, one per line."""
        target_pk = self.kwargs.get('target_pk')
        if not target_pk:
            raise DRFValidationError('必须在目标下导出')

        def generate():
            for name in self.service.iter_subdomain_names_by_target(target_pk):
                yield f"{name}\n"

        response = StreamingHttpResponse(
            generate(),
            content_type='text/plain; charset=utf-8',
        )
        response['Content-Disposition'] = f'attachment; filename="target-{target_pk}-subdomains.txt"'
        return response
|
||||
|
||||
|
||||
class WebSiteViewSet(viewsets.ModelViewSet):
    """Website management ViewSet.

    Reachable two ways:
    1. nested:     GET /api/targets/{target_pk}/websites/
    2. standalone: GET /api/websites/ (global query)
    """

    serializer_class = WebSiteSerializer
    pagination_class = BasePagination
    filter_backends = [filters.SearchFilter, filters.OrderingFilter]
    search_fields = ['host']
    ordering = ['-discovered_at']

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.service = WebSiteService()

    def get_queryset(self):
        """Scope to the target when nested, otherwise query globally."""
        target_pk = self.kwargs.get('target_pk')
        if not target_pk:
            return self.service.get_all()
        return self.service.get_websites_by_target(target_pk)

    @action(detail=False, methods=['get'], url_path='export')
    def export(self, request):
        """Export website URLs as plain text, one per line."""
        target_pk = self.kwargs.get('target_pk')
        if not target_pk:
            raise DRFValidationError('必须在目标下导出')

        def generate():
            for url in self.service.iter_website_urls_by_target(target_pk):
                yield f"{url}\n"

        response = StreamingHttpResponse(
            generate(),
            content_type='text/plain; charset=utf-8',
        )
        response['Content-Disposition'] = f'attachment; filename="target-{target_pk}-websites.txt"'
        return response
|
||||
|
||||
|
||||
class DirectoryViewSet(viewsets.ModelViewSet):
    """Directory management ViewSet.

    Reachable two ways:
    1. nested:     GET /api/targets/{target_pk}/directories/
    2. standalone: GET /api/directories/ (global query)
    """

    serializer_class = DirectorySerializer
    pagination_class = BasePagination
    filter_backends = [filters.SearchFilter, filters.OrderingFilter]
    search_fields = ['url']
    ordering = ['-discovered_at']

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.service = DirectoryService()

    def get_queryset(self):
        """Scope to the target when nested, otherwise query globally."""
        target_pk = self.kwargs.get('target_pk')
        if not target_pk:
            return self.service.get_all()
        return self.service.get_directories_by_target(target_pk)

    @action(detail=False, methods=['get'], url_path='export')
    def export(self, request):
        """Export directory URLs as plain text, one per line."""
        target_pk = self.kwargs.get('target_pk')
        if not target_pk:
            raise DRFValidationError('必须在目标下导出')

        def generate():
            for url in self.service.iter_directory_urls_by_target(target_pk):
                yield f"{url}\n"

        response = StreamingHttpResponse(
            generate(),
            content_type='text/plain; charset=utf-8',
        )
        response['Content-Disposition'] = f'attachment; filename="target-{target_pk}-directories.txt"'
        return response
|
||||
|
||||
|
||||
class EndpointViewSet(viewsets.ModelViewSet):
    """Endpoint management ViewSet.

    Reachable two ways:
    1. nested:     GET /api/targets/{target_pk}/endpoints/
    2. standalone: GET /api/endpoints/ (global query)
    """

    serializer_class = EndpointListSerializer
    pagination_class = BasePagination
    filter_backends = [filters.SearchFilter, filters.OrderingFilter]
    search_fields = ['host']
    ordering = ['-discovered_at']

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.service = EndpointService()

    def get_queryset(self):
        """Scope to the target when nested, otherwise query globally."""
        target_pk = self.kwargs.get('target_pk')
        if not target_pk:
            return self.service.get_all()
        return self.service.get_queryset_by_target(target_pk)

    @action(detail=False, methods=['get'], url_path='export')
    def export(self, request):
        """Export endpoint URLs as plain text, one per line."""
        target_pk = self.kwargs.get('target_pk')
        if not target_pk:
            raise DRFValidationError('必须在目标下导出')

        def generate():
            for url in self.service.iter_endpoint_urls_by_target(target_pk):
                yield f"{url}\n"

        response = StreamingHttpResponse(
            generate(),
            content_type='text/plain; charset=utf-8',
        )
        response['Content-Disposition'] = f'attachment; filename="target-{target_pk}-endpoints.txt"'
        return response
|
||||
|
||||
|
||||
class HostPortMappingViewSet(viewsets.ModelViewSet):
    """Host/port mapping ViewSet (aggregated per IP address).

    Reachable two ways:
    1. nested:     GET /api/targets/{target_pk}/ip-addresses/
    2. standalone: GET /api/ip-addresses/ (global query)

    Rows are aggregated by IP; each IP carries its related hosts and ports.

    Note: because the result is aggregated data (a list of dicts),
    DRF SearchFilter is not supported here.
    """

    serializer_class = IPAddressAggregatedSerializer
    pagination_class = BasePagination

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.service = HostPortMappingService()

    def get_queryset(self):
        """Return IP-aggregated rows, scoped to a target when nested."""
        target_pk = self.kwargs.get('target_pk')
        search = self.request.query_params.get('search', None)
        if not target_pk:
            return self.service.get_all_ip_aggregation(search=search)
        return self.service.get_ip_aggregation_by_target(target_pk, search=search)

    @action(detail=False, methods=['get'], url_path='export')
    def export(self, request):
        """Export IP addresses as plain text, one per line."""
        target_pk = self.kwargs.get('target_pk')
        if not target_pk:
            raise DRFValidationError('必须在目标下导出')

        def generate():
            for ip in self.service.iter_ips_by_target(target_pk):
                yield f"{ip}\n"

        response = StreamingHttpResponse(
            generate(),
            content_type='text/plain; charset=utf-8',
        )
        response['Content-Disposition'] = f'attachment; filename="target-{target_pk}-ip-addresses.txt"'
        return response
|
||||
|
||||
|
||||
class VulnerabilityViewSet(viewsets.ModelViewSet):
    """Vulnerability asset ViewSet (documented as read-only).

    Reachable two ways:
    1. nested:     GET /api/targets/{target_pk}/vulnerabilities/
    2. standalone: GET /api/vulnerabilities/ (global query)

    NOTE(review): the docstring says read-only but the base class is
    ModelViewSet, which also exposes write routes — confirm intent.
    """

    serializer_class = VulnerabilitySerializer
    pagination_class = BasePagination
    filter_backends = [filters.SearchFilter, filters.OrderingFilter]
    search_fields = ['vuln_type']
    ordering = ['-discovered_at']

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.service = VulnerabilityService()

    def get_queryset(self):
        """Scope to the target when nested, otherwise query globally."""
        target_pk = self.kwargs.get('target_pk')
        if not target_pk:
            return self.service.get_all()
        return self.service.get_queryset_by_target(target_pk)
|
||||
|
||||
|
||||
# ==================== 快照 ViewSet(Scan 嵌套路由) ====================
|
||||
|
||||
class SubdomainSnapshotViewSet(viewsets.ModelViewSet):
    """Subdomain snapshot ViewSet — nested route: GET /api/scans/{scan_pk}/subdomains/"""

    serializer_class = SubdomainSnapshotSerializer
    pagination_class = BasePagination
    filter_backends = [filters.SearchFilter, filters.OrderingFilter]
    search_fields = ['name']
    ordering_fields = ['name', 'discovered_at']
    ordering = ['-discovered_at']

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.service = SubdomainSnapshotsService()

    def get_queryset(self):
        """Scope to the scan when nested, otherwise query globally."""
        scan_pk = self.kwargs.get('scan_pk')
        if not scan_pk:
            return self.service.get_all()
        return self.service.get_by_scan(scan_pk)

    @action(detail=False, methods=['get'], url_path='export')
    def export(self, request):
        """Export snapshot subdomain names as plain text, one per line."""
        scan_pk = self.kwargs.get('scan_pk')
        if not scan_pk:
            raise DRFValidationError('必须在扫描下导出')

        def generate():
            for name in self.service.iter_subdomain_names_by_scan(scan_pk):
                yield f"{name}\n"

        response = StreamingHttpResponse(generate(), content_type='text/plain; charset=utf-8')
        response['Content-Disposition'] = f'attachment; filename="scan-{scan_pk}-subdomains.txt"'
        return response
|
||||
|
||||
|
||||
class WebsiteSnapshotViewSet(viewsets.ModelViewSet):
    """Website snapshot ViewSet — nested route: GET /api/scans/{scan_pk}/websites/"""

    serializer_class = WebsiteSnapshotSerializer
    pagination_class = BasePagination
    filter_backends = [filters.SearchFilter, filters.OrderingFilter]
    search_fields = ['host']
    ordering = ['-discovered_at']

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.service = WebsiteSnapshotsService()

    def get_queryset(self):
        """Scope to the scan when nested, otherwise query globally."""
        scan_pk = self.kwargs.get('scan_pk')
        if not scan_pk:
            return self.service.get_all()
        return self.service.get_by_scan(scan_pk)

    @action(detail=False, methods=['get'], url_path='export')
    def export(self, request):
        """Export snapshot website URLs as plain text, one per line."""
        scan_pk = self.kwargs.get('scan_pk')
        if not scan_pk:
            raise DRFValidationError('必须在扫描下导出')

        def generate():
            for url in self.service.iter_website_urls_by_scan(scan_pk):
                yield f"{url}\n"

        response = StreamingHttpResponse(generate(), content_type='text/plain; charset=utf-8')
        response['Content-Disposition'] = f'attachment; filename="scan-{scan_pk}-websites.txt"'
        return response
|
||||
|
||||
|
||||
class DirectorySnapshotViewSet(viewsets.ModelViewSet):
    """Directory snapshot ViewSet — nested route: GET /api/scans/{scan_pk}/directories/"""

    serializer_class = DirectorySnapshotSerializer
    pagination_class = BasePagination
    filter_backends = [filters.SearchFilter, filters.OrderingFilter]
    search_fields = ['url']
    ordering = ['-discovered_at']

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.service = DirectorySnapshotsService()

    def get_queryset(self):
        """Scope to the scan when nested, otherwise query globally."""
        scan_pk = self.kwargs.get('scan_pk')
        if not scan_pk:
            return self.service.get_all()
        return self.service.get_by_scan(scan_pk)

    @action(detail=False, methods=['get'], url_path='export')
    def export(self, request):
        """Export snapshot directory URLs as plain text, one per line."""
        scan_pk = self.kwargs.get('scan_pk')
        if not scan_pk:
            raise DRFValidationError('必须在扫描下导出')

        def generate():
            for url in self.service.iter_directory_urls_by_scan(scan_pk):
                yield f"{url}\n"

        response = StreamingHttpResponse(generate(), content_type='text/plain; charset=utf-8')
        response['Content-Disposition'] = f'attachment; filename="scan-{scan_pk}-directories.txt"'
        return response
|
||||
|
||||
|
||||
class EndpointSnapshotViewSet(viewsets.ModelViewSet):
    """Endpoint snapshot ViewSet — nested route: GET /api/scans/{scan_pk}/endpoints/"""

    serializer_class = EndpointSnapshotSerializer
    pagination_class = BasePagination
    filter_backends = [filters.SearchFilter, filters.OrderingFilter]
    search_fields = ['host']
    ordering = ['-discovered_at']

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.service = EndpointSnapshotsService()

    def get_queryset(self):
        """Scope to the scan when nested, otherwise query globally."""
        scan_pk = self.kwargs.get('scan_pk')
        if not scan_pk:
            return self.service.get_all()
        return self.service.get_by_scan(scan_pk)

    @action(detail=False, methods=['get'], url_path='export')
    def export(self, request):
        """Export snapshot endpoint URLs as plain text, one per line."""
        scan_pk = self.kwargs.get('scan_pk')
        if not scan_pk:
            raise DRFValidationError('必须在扫描下导出')

        def generate():
            for url in self.service.iter_endpoint_urls_by_scan(scan_pk):
                yield f"{url}\n"

        response = StreamingHttpResponse(generate(), content_type='text/plain; charset=utf-8')
        response['Content-Disposition'] = f'attachment; filename="scan-{scan_pk}-endpoints.txt"'
        return response
|
||||
|
||||
|
||||
class HostPortMappingSnapshotViewSet(viewsets.ModelViewSet):
    """Host/port mapping snapshot ViewSet — nested route: GET /api/scans/{scan_pk}/ip-addresses/

    Note: because the result is aggregated data (a list of dicts),
    DRF SearchFilter is not supported here.
    """

    serializer_class = IPAddressAggregatedSerializer
    pagination_class = BasePagination

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.service = HostPortMappingSnapshotsService()

    def get_queryset(self):
        """Return IP-aggregated snapshot rows, scoped to a scan when nested."""
        scan_pk = self.kwargs.get('scan_pk')
        search = self.request.query_params.get('search', None)
        if not scan_pk:
            return self.service.get_all_ip_aggregation(search=search)
        return self.service.get_ip_aggregation_by_scan(scan_pk, search=search)

    @action(detail=False, methods=['get'], url_path='export')
    def export(self, request):
        """Export snapshot IP addresses as plain text, one per line."""
        scan_pk = self.kwargs.get('scan_pk')
        if not scan_pk:
            raise DRFValidationError('必须在扫描下导出')

        def generate():
            for ip in self.service.iter_ips_by_scan(scan_pk):
                yield f"{ip}\n"

        response = StreamingHttpResponse(generate(), content_type='text/plain; charset=utf-8')
        response['Content-Disposition'] = f'attachment; filename="scan-{scan_pk}-ip-addresses.txt"'
        return response
|
||||
|
||||
|
||||
class VulnerabilitySnapshotViewSet(viewsets.ModelViewSet):
    """Vulnerability snapshot ViewSet — nested route: GET /api/scans/{scan_pk}/vulnerabilities/"""

    serializer_class = VulnerabilitySnapshotSerializer
    pagination_class = BasePagination
    filter_backends = [filters.SearchFilter, filters.OrderingFilter]
    search_fields = ['vuln_type']
    ordering = ['-discovered_at']

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.service = VulnerabilitySnapshotsService()

    def get_queryset(self):
        """Scope to the scan when nested, otherwise query globally."""
        scan_pk = self.kwargs.get('scan_pk')
        if not scan_pk:
            return self.service.get_all()
        return self.service.get_by_scan(scan_pk)
|
||||
@@ -1,42 +0,0 @@
|
||||
"""
|
||||
Prefect Flow Django 环境初始化模块
|
||||
|
||||
在所有 Prefect Flow 文件开头导入此模块即可自动配置 Django 环境
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
|
||||
def setup_django_for_prefect():
    """Configure the Django environment for Prefect flows.

    Steps:
    1. add the project root (the backend directory) to sys.path;
    2. set DJANGO_SETTINGS_MODULE (kept if already set);
    3. call django.setup().

    Usage:
        from apps.common.prefect_django_setup import setup_django_for_prefect
        setup_django_for_prefect()
    """
    # Resolve the backend root: two levels above this file.
    here = os.path.dirname(os.path.abspath(__file__))
    backend_dir = os.path.abspath(os.path.join(here, '../..'))

    # Make the project importable.
    if backend_dir not in sys.path:
        sys.path.insert(0, backend_dir)

    # Respect a pre-existing setting; otherwise use the default module.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings')

    # Deferred import so this module can be loaded before Django is configured.
    import django
    django.setup()


# Run on import so flow files only need to import this module.
setup_django_for_prefect()
|
||||
@@ -1,10 +0,0 @@
|
||||
"""
|
||||
通用服务模块
|
||||
|
||||
提供系统级别的公共服务,包括:
|
||||
- SystemLogService: 系统日志读取服务
|
||||
"""
|
||||
|
||||
from .system_log_service import SystemLogService
|
||||
|
||||
__all__ = ['SystemLogService']
|
||||
@@ -1,69 +0,0 @@
|
||||
"""
|
||||
系统日志服务模块
|
||||
|
||||
提供系统日志的读取功能,支持:
|
||||
- 从日志目录读取日志文件
|
||||
- 限制返回行数,防止内存溢出
|
||||
"""
|
||||
|
||||
import logging
|
||||
import subprocess
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SystemLogService:
|
||||
"""
|
||||
系统日志服务类
|
||||
|
||||
负责读取系统日志文件,支持从容器内路径或宿主机挂载路径读取日志。
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
# 日志文件路径(容器内路径,通过 volume 挂载到宿主机 /opt/xingrin/logs)
|
||||
self.log_file = "/app/backend/logs/xingrin.log"
|
||||
self.default_lines = 200 # 默认返回行数
|
||||
self.max_lines = 10000 # 最大返回行数限制
|
||||
self.timeout_seconds = 3 # tail 命令超时时间
|
||||
|
||||
def get_logs_content(self, lines: int | None = None) -> str:
|
||||
"""
|
||||
获取系统日志内容
|
||||
|
||||
Args:
|
||||
lines: 返回的日志行数,默认 200 行,最大 10000 行
|
||||
|
||||
Returns:
|
||||
str: 日志内容,每行以换行符分隔,保持原始顺序
|
||||
"""
|
||||
# 参数校验和默认值处理
|
||||
if lines is None:
|
||||
lines = self.default_lines
|
||||
|
||||
lines = int(lines)
|
||||
if lines < 1:
|
||||
lines = 1
|
||||
if lines > self.max_lines:
|
||||
lines = self.max_lines
|
||||
|
||||
# 使用 tail 命令读取日志文件末尾内容
|
||||
cmd = ["tail", "-n", str(lines), self.log_file]
|
||||
|
||||
result = subprocess.run(
|
||||
cmd,
|
||||
capture_output=True,
|
||||
text=True,
|
||||
timeout=self.timeout_seconds,
|
||||
check=False,
|
||||
)
|
||||
|
||||
if result.returncode != 0:
|
||||
logger.warning(
|
||||
"tail command failed: returncode=%s stderr=%s",
|
||||
result.returncode,
|
||||
(result.stderr or "").strip(),
|
||||
)
|
||||
|
||||
# 直接返回原始内容,保持文件中的顺序
|
||||
return result.stdout or ""
|
||||
@@ -1,21 +0,0 @@
|
||||
"""
|
||||
通用模块 URL 配置
|
||||
|
||||
路由说明:
|
||||
- /api/auth/* 认证相关接口(登录、登出、用户信息)
|
||||
- /api/system/* 系统管理接口(日志查看等)
|
||||
"""
|
||||
|
||||
from django.urls import path
|
||||
from .views import LoginView, LogoutView, MeView, ChangePasswordView, SystemLogsView
|
||||
|
||||
urlpatterns = [
|
||||
# 认证相关
|
||||
path('auth/login/', LoginView.as_view(), name='auth-login'),
|
||||
path('auth/logout/', LogoutView.as_view(), name='auth-logout'),
|
||||
path('auth/me/', MeView.as_view(), name='auth-me'),
|
||||
path('auth/change-password/', ChangePasswordView.as_view(), name='auth-change-password'),
|
||||
|
||||
# 系统管理
|
||||
path('system/logs/', SystemLogsView.as_view(), name='system-logs'),
|
||||
]
|
||||
@@ -1,142 +0,0 @@
|
||||
"""域名、IP、端口和目标验证工具函数"""
|
||||
import ipaddress
|
||||
import logging
|
||||
import validators
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def validate_domain(domain: str) -> None:
|
||||
"""
|
||||
验证域名格式(使用 validators 库)
|
||||
|
||||
Args:
|
||||
domain: 域名字符串(应该已经规范化)
|
||||
|
||||
Raises:
|
||||
ValueError: 域名格式无效
|
||||
"""
|
||||
if not domain:
|
||||
raise ValueError("域名不能为空")
|
||||
|
||||
# 使用 validators 库验证域名格式
|
||||
# 支持国际化域名(IDN)和各种边界情况
|
||||
if not validators.domain(domain):
|
||||
raise ValueError(f"域名格式无效: {domain}")
|
||||
|
||||
|
||||
def validate_ip(ip: str) -> None:
|
||||
"""
|
||||
验证 IP 地址格式(支持 IPv4 和 IPv6)
|
||||
|
||||
Args:
|
||||
ip: IP 地址字符串(应该已经规范化)
|
||||
|
||||
Raises:
|
||||
ValueError: IP 地址格式无效
|
||||
"""
|
||||
if not ip:
|
||||
raise ValueError("IP 地址不能为空")
|
||||
|
||||
try:
|
||||
ipaddress.ip_address(ip)
|
||||
except ValueError:
|
||||
raise ValueError(f"IP 地址格式无效: {ip}")
|
||||
|
||||
|
||||
def validate_cidr(cidr: str) -> None:
|
||||
"""
|
||||
验证 CIDR 格式(支持 IPv4 和 IPv6)
|
||||
|
||||
Args:
|
||||
cidr: CIDR 字符串(应该已经规范化)
|
||||
|
||||
Raises:
|
||||
ValueError: CIDR 格式无效
|
||||
"""
|
||||
if not cidr:
|
||||
raise ValueError("CIDR 不能为空")
|
||||
|
||||
try:
|
||||
ipaddress.ip_network(cidr, strict=False)
|
||||
except ValueError:
|
||||
raise ValueError(f"CIDR 格式无效: {cidr}")
|
||||
|
||||
|
||||
def detect_target_type(name: str) -> str:
|
||||
"""
|
||||
检测目标类型(不做规范化,只验证)
|
||||
|
||||
Args:
|
||||
name: 目标名称(应该已经规范化)
|
||||
|
||||
Returns:
|
||||
str: 目标类型 ('domain', 'ip', 'cidr') - 使用 Target.TargetType 枚举值
|
||||
|
||||
Raises:
|
||||
ValueError: 如果无法识别目标类型
|
||||
"""
|
||||
# 在函数内部导入模型,避免 AppRegistryNotReady 错误
|
||||
from apps.targets.models import Target
|
||||
|
||||
if not name:
|
||||
raise ValueError("目标名称不能为空")
|
||||
|
||||
# 检查是否是 CIDR 格式(包含 /)
|
||||
if '/' in name:
|
||||
validate_cidr(name)
|
||||
return Target.TargetType.CIDR
|
||||
|
||||
# 检查是否是 IP 地址
|
||||
try:
|
||||
validate_ip(name)
|
||||
return Target.TargetType.IP
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
# 检查是否是合法域名
|
||||
try:
|
||||
validate_domain(name)
|
||||
return Target.TargetType.DOMAIN
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
# 无法识别的格式
|
||||
raise ValueError(f"无法识别的目标格式: {name},必须是域名、IP地址或CIDR范围")
|
||||
|
||||
|
||||
def validate_port(port: any) -> tuple[bool, int | None]:
|
||||
"""
|
||||
验证并转换端口号
|
||||
|
||||
Args:
|
||||
port: 待验证的端口号(可能是字符串、整数或其他类型)
|
||||
|
||||
Returns:
|
||||
tuple: (is_valid, port_number)
|
||||
- is_valid: 端口是否有效
|
||||
- port_number: 有效时为整数端口号,无效时为 None
|
||||
|
||||
验证规则:
|
||||
1. 必须能转换为整数
|
||||
2. 必须在 1-65535 范围内
|
||||
|
||||
示例:
|
||||
>>> is_valid, port_num = validate_port(8080)
|
||||
>>> is_valid, port_num
|
||||
(True, 8080)
|
||||
|
||||
>>> is_valid, port_num = validate_port("invalid")
|
||||
>>> is_valid, port_num
|
||||
(False, None)
|
||||
"""
|
||||
try:
|
||||
port_num = int(port)
|
||||
if 1 <= port_num <= 65535:
|
||||
return True, port_num
|
||||
else:
|
||||
logger.warning("端口号超出有效范围 (1-65535): %d", port_num)
|
||||
return False, None
|
||||
except (ValueError, TypeError):
|
||||
logger.warning("端口号格式错误,无法转换为整数: %s", port)
|
||||
return False, None
|
||||
@@ -1,12 +0,0 @@
|
||||
"""
|
||||
通用模块视图导出
|
||||
|
||||
包含:
|
||||
- 认证相关视图:登录、登出、用户信息、修改密码
|
||||
- 系统日志视图:实时日志查看
|
||||
"""
|
||||
|
||||
from .auth_views import LoginView, LogoutView, MeView, ChangePasswordView
|
||||
from .system_log_views import SystemLogsView
|
||||
|
||||
__all__ = ['LoginView', 'LogoutView', 'MeView', 'ChangePasswordView', 'SystemLogsView']
|
||||
@@ -1,69 +0,0 @@
|
||||
"""
|
||||
系统日志视图模块
|
||||
|
||||
提供系统日志的 REST API 接口,供前端实时查看系统运行日志。
|
||||
"""
|
||||
|
||||
import logging
|
||||
|
||||
from django.utils.decorators import method_decorator
|
||||
from django.views.decorators.csrf import csrf_exempt
|
||||
from rest_framework import status
|
||||
from rest_framework.permissions import AllowAny
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from apps.common.services.system_log_service import SystemLogService
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@method_decorator(csrf_exempt, name="dispatch")
|
||||
class SystemLogsView(APIView):
|
||||
"""
|
||||
系统日志 API 视图
|
||||
|
||||
GET /api/system/logs/
|
||||
获取系统日志内容
|
||||
|
||||
Query Parameters:
|
||||
lines (int, optional): 返回的日志行数,默认 200,最大 10000
|
||||
|
||||
Response:
|
||||
{
|
||||
"content": "日志内容字符串..."
|
||||
}
|
||||
|
||||
Note:
|
||||
- 当前为开发阶段,暂时允许匿名访问
|
||||
- 生产环境应添加管理员权限验证
|
||||
"""
|
||||
|
||||
# TODO: 生产环境应改为 IsAdminUser 权限
|
||||
authentication_classes = []
|
||||
permission_classes = [AllowAny]
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
super().__init__(**kwargs)
|
||||
self.service = SystemLogService()
|
||||
|
||||
def get(self, request):
|
||||
"""
|
||||
获取系统日志
|
||||
|
||||
支持通过 lines 参数控制返回行数,用于前端分页或实时刷新场景。
|
||||
"""
|
||||
try:
|
||||
# 解析 lines 参数
|
||||
lines_raw = request.query_params.get("lines")
|
||||
lines = int(lines_raw) if lines_raw is not None else None
|
||||
|
||||
# 调用服务获取日志内容
|
||||
content = self.service.get_logs_content(lines=lines)
|
||||
return Response({"content": content})
|
||||
except ValueError:
|
||||
return Response({"error": "lines 参数必须是整数"}, status=status.HTTP_400_BAD_REQUEST)
|
||||
except Exception:
|
||||
logger.exception("获取系统日志失败")
|
||||
return Response({"error": "获取系统日志失败"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
|
||||
@@ -1,22 +0,0 @@
|
||||
from django.urls import path, include
|
||||
from rest_framework.routers import DefaultRouter
|
||||
|
||||
from .views import (
|
||||
ScanEngineViewSet,
|
||||
WorkerNodeViewSet,
|
||||
WordlistViewSet,
|
||||
NucleiTemplateRepoViewSet,
|
||||
)
|
||||
|
||||
|
||||
# 创建路由器
|
||||
router = DefaultRouter()
|
||||
router.register(r"engines", ScanEngineViewSet, basename="engine")
|
||||
router.register(r"workers", WorkerNodeViewSet, basename="worker")
|
||||
router.register(r"wordlists", WordlistViewSet, basename="wordlist")
|
||||
router.register(r"nuclei/repos", NucleiTemplateRepoViewSet, basename="nuclei-repos")
|
||||
|
||||
urlpatterns = [
|
||||
path("", include(router.urls)),
|
||||
]
|
||||
|
||||
@@ -1,482 +0,0 @@
|
||||
"""
|
||||
目录扫描 Flow
|
||||
|
||||
负责编排目录扫描的完整流程
|
||||
|
||||
架构:
|
||||
- Flow 负责编排多个原子 Task
|
||||
- 支持串行执行扫描工具(流式处理)
|
||||
- 每个 Task 可独立重试
|
||||
- 配置由 YAML 解析
|
||||
"""
|
||||
|
||||
# Django 环境初始化(导入即生效)
|
||||
from apps.common.prefect_django_setup import setup_django_for_prefect
|
||||
|
||||
from prefect import flow
|
||||
|
||||
import logging
|
||||
import os
|
||||
import subprocess
|
||||
from pathlib import Path
|
||||
|
||||
from apps.scan.tasks.directory_scan import (
|
||||
export_sites_task,
|
||||
run_and_stream_save_directories_task
|
||||
)
|
||||
from apps.scan.handlers.scan_flow_handlers import (
|
||||
on_scan_flow_running,
|
||||
on_scan_flow_completed,
|
||||
on_scan_flow_failed,
|
||||
)
|
||||
from apps.scan.utils import config_parser, build_scan_command, ensure_wordlist_local
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def calculate_directory_scan_timeout(
|
||||
tool_config: dict,
|
||||
base_per_word: float = 1.0,
|
||||
min_timeout: int = 60,
|
||||
max_timeout: int = 7200
|
||||
) -> int:
|
||||
"""
|
||||
根据字典行数计算目录扫描超时时间
|
||||
|
||||
计算公式:超时时间 = 字典行数 × 每个单词基础时间
|
||||
超时范围:60秒 ~ 2小时(7200秒)
|
||||
|
||||
Args:
|
||||
tool_config: 工具配置字典,包含 wordlist 路径
|
||||
base_per_word: 每个单词的基础时间(秒),默认 1.0秒
|
||||
min_timeout: 最小超时时间(秒),默认 60秒
|
||||
max_timeout: 最大超时时间(秒),默认 7200秒(2小时)
|
||||
|
||||
Returns:
|
||||
int: 计算出的超时时间(秒),范围:60 ~ 7200
|
||||
|
||||
Example:
|
||||
# 1000行字典 × 1.0秒 = 1000秒 → 限制为7200秒中的 1000秒
|
||||
# 10000行字典 × 1.0秒 = 10000秒 → 限制为7200秒(最大值)
|
||||
timeout = calculate_directory_scan_timeout(
|
||||
tool_config={'wordlist': '/path/to/wordlist.txt'}
|
||||
)
|
||||
"""
|
||||
try:
|
||||
# 从 tool_config 中获取 wordlist 路径
|
||||
wordlist_path = tool_config.get('wordlist')
|
||||
if not wordlist_path:
|
||||
logger.warning("工具配置中未指定 wordlist,使用默认超时: %d秒", min_timeout)
|
||||
return min_timeout
|
||||
|
||||
# 展开用户目录(~)
|
||||
wordlist_path = os.path.expanduser(wordlist_path)
|
||||
|
||||
# 检查文件是否存在
|
||||
if not os.path.exists(wordlist_path):
|
||||
logger.warning("字典文件不存在: %s,使用默认超时: %d秒", wordlist_path, min_timeout)
|
||||
return min_timeout
|
||||
|
||||
# 使用 wc -l 快速统计字典行数
|
||||
result = subprocess.run(
|
||||
['wc', '-l', wordlist_path],
|
||||
capture_output=True,
|
||||
text=True,
|
||||
check=True
|
||||
)
|
||||
# wc -l 输出格式:行数 + 空格 + 文件名
|
||||
line_count = int(result.stdout.strip().split()[0])
|
||||
|
||||
# 计算超时时间
|
||||
timeout = int(line_count * base_per_word)
|
||||
|
||||
# 设置合理的下限(不再设置上限)
|
||||
timeout = max(min_timeout, timeout)
|
||||
|
||||
logger.info(
|
||||
"目录扫描超时计算 - 字典: %s, 行数: %d, 基础时间: %.3f秒/词, 计算超时: %d秒",
|
||||
wordlist_path, line_count, base_per_word, timeout
|
||||
)
|
||||
|
||||
return timeout
|
||||
|
||||
except subprocess.CalledProcessError as e:
|
||||
logger.error("统计字典行数失败: %s", e)
|
||||
# 失败时返回默认超时
|
||||
return min_timeout
|
||||
except (ValueError, IndexError) as e:
|
||||
logger.error("解析字典行数失败: %s", e)
|
||||
return min_timeout
|
||||
except Exception as e:
|
||||
logger.error("计算超时时间异常: %s", e)
|
||||
return min_timeout
|
||||
|
||||
|
||||
def _setup_directory_scan_directory(scan_workspace_dir: str) -> Path:
|
||||
"""
|
||||
创建并验证目录扫描工作目录
|
||||
|
||||
Args:
|
||||
scan_workspace_dir: 扫描工作空间目录
|
||||
|
||||
Returns:
|
||||
Path: 目录扫描目录路径
|
||||
|
||||
Raises:
|
||||
RuntimeError: 目录创建或验证失败
|
||||
"""
|
||||
directory_scan_dir = Path(scan_workspace_dir) / 'directory_scan'
|
||||
directory_scan_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
if not directory_scan_dir.is_dir():
|
||||
raise RuntimeError(f"目录扫描目录创建失败: {directory_scan_dir}")
|
||||
if not os.access(directory_scan_dir, os.W_OK):
|
||||
raise RuntimeError(f"目录扫描目录不可写: {directory_scan_dir}")
|
||||
|
||||
return directory_scan_dir
|
||||
|
||||
|
||||
def _export_site_urls(target_id: int, directory_scan_dir: Path) -> tuple[str, int]:
|
||||
"""
|
||||
导出目标下的所有站点 URL 到文件
|
||||
|
||||
Args:
|
||||
target_id: 目标 ID
|
||||
directory_scan_dir: 目录扫描目录
|
||||
|
||||
Returns:
|
||||
tuple: (sites_file, site_count)
|
||||
|
||||
Raises:
|
||||
ValueError: 站点数量为 0
|
||||
"""
|
||||
logger.info("Step 1: 导出目标的所有站点 URL")
|
||||
|
||||
sites_file = str(directory_scan_dir / 'sites.txt')
|
||||
export_result = export_sites_task(
|
||||
target_id=target_id,
|
||||
output_file=sites_file,
|
||||
batch_size=1000 # 每次读取 1000 条,优化内存占用
|
||||
)
|
||||
|
||||
site_count = export_result['total_count']
|
||||
|
||||
logger.info(
|
||||
"✓ 站点 URL 导出完成 - 文件: %s, 数量: %d",
|
||||
export_result['output_file'],
|
||||
site_count
|
||||
)
|
||||
|
||||
if site_count == 0:
|
||||
logger.warning("目标下没有站点,无法执行目录扫描")
|
||||
# 不抛出异常,由上层决定如何处理
|
||||
# raise ValueError("目标下没有站点,无法执行目录扫描")
|
||||
|
||||
return export_result['output_file'], site_count
|
||||
|
||||
|
||||
def _run_scans_sequentially(
|
||||
enabled_tools: dict,
|
||||
sites_file: str,
|
||||
directory_scan_dir: Path,
|
||||
scan_id: int,
|
||||
target_id: int,
|
||||
site_count: int,
|
||||
target_name: str
|
||||
) -> tuple[int, int, list]:
|
||||
"""
|
||||
串行执行目录扫描任务(支持多工具)
|
||||
|
||||
Args:
|
||||
enabled_tools: 启用的工具配置字典
|
||||
sites_file: 站点文件路径
|
||||
directory_scan_dir: 目录扫描目录
|
||||
scan_id: 扫描任务 ID
|
||||
target_id: 目标 ID
|
||||
site_count: 站点数量
|
||||
target_name: 目标名称(用于错误日志)
|
||||
|
||||
Returns:
|
||||
tuple: (total_directories, processed_sites, failed_sites)
|
||||
"""
|
||||
# 读取站点列表
|
||||
sites = []
|
||||
with open(sites_file, 'r', encoding='utf-8') as f:
|
||||
for line in f:
|
||||
site_url = line.strip()
|
||||
if site_url:
|
||||
sites.append(site_url)
|
||||
|
||||
logger.info("准备扫描 %d 个站点,使用工具: %s", len(sites), ', '.join(enabled_tools.keys()))
|
||||
|
||||
total_directories = 0
|
||||
processed_sites_set = set() # 使用 set 避免重复计数
|
||||
failed_sites = []
|
||||
|
||||
# 遍历每个工具
|
||||
for tool_name, tool_config in enabled_tools.items():
|
||||
logger.info("="*60)
|
||||
logger.info("使用工具: %s", tool_name)
|
||||
logger.info("="*60)
|
||||
|
||||
# 如果配置了 wordlist_name,则先确保本地存在对应的字典文件(含 hash 校验)
|
||||
wordlist_name = tool_config.get('wordlist_name')
|
||||
if wordlist_name:
|
||||
try:
|
||||
local_wordlist_path = ensure_wordlist_local(wordlist_name)
|
||||
tool_config['wordlist'] = local_wordlist_path
|
||||
except Exception as exc:
|
||||
logger.error("为工具 %s 准备字典失败: %s", tool_name, exc)
|
||||
# 当前工具无法执行,将所有站点视为失败,继续下一个工具
|
||||
failed_sites.extend(sites)
|
||||
continue
|
||||
|
||||
# 逐个站点执行扫描
|
||||
for idx, site_url in enumerate(sites, 1):
|
||||
logger.info(
|
||||
"[%d/%d] 开始扫描站点: %s (工具: %s)",
|
||||
idx, len(sites), site_url, tool_name
|
||||
)
|
||||
|
||||
# 使用统一的命令构建器
|
||||
try:
|
||||
command = build_scan_command(
|
||||
tool_name=tool_name,
|
||||
scan_type='directory_scan',
|
||||
command_params={
|
||||
'url': site_url
|
||||
},
|
||||
tool_config=tool_config
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"✗ [%d/%d] 构建 %s 命令失败: %s - 站点: %s",
|
||||
idx, len(sites), tool_name, e, site_url
|
||||
)
|
||||
failed_sites.append(site_url)
|
||||
continue
|
||||
|
||||
# 单个站点超时:从配置中获取(支持 'auto' 动态计算)
|
||||
# ffuf 逐个站点扫描,timeout 就是单个站点的超时时间
|
||||
site_timeout = tool_config.get('timeout', 300)
|
||||
if site_timeout == 'auto':
|
||||
# 动态计算超时时间(基于字典行数)
|
||||
site_timeout = calculate_directory_scan_timeout(tool_config)
|
||||
logger.info(f"✓ 工具 {tool_name} 动态计算 timeout: {site_timeout}秒")
|
||||
|
||||
# 生成日志文件路径
|
||||
from datetime import datetime
|
||||
timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
|
||||
log_file = directory_scan_dir / f"{tool_name}_{timestamp}_{idx}.log"
|
||||
|
||||
try:
|
||||
# 直接调用 task(串行执行)
|
||||
result = run_and_stream_save_directories_task(
|
||||
cmd=command,
|
||||
tool_name=tool_name, # 新增:工具名称
|
||||
scan_id=scan_id,
|
||||
target_id=target_id,
|
||||
site_url=site_url,
|
||||
cwd=str(directory_scan_dir),
|
||||
shell=True,
|
||||
batch_size=1000,
|
||||
timeout=site_timeout,
|
||||
log_file=str(log_file) # 新增:日志文件路径
|
||||
)
|
||||
|
||||
total_directories += result.get('created_directories', 0)
|
||||
processed_sites_set.add(site_url) # 使用 set 记录成功的站点
|
||||
|
||||
logger.info(
|
||||
"✓ [%d/%d] 站点扫描完成: %s - 发现 %d 个目录",
|
||||
idx, len(sites), site_url,
|
||||
result.get('created_directories', 0)
|
||||
)
|
||||
|
||||
except subprocess.TimeoutExpired as exc:
|
||||
# 超时异常单独处理
|
||||
failed_sites.append(site_url)
|
||||
logger.warning(
|
||||
"⚠️ [%d/%d] 站点扫描超时: %s - 超时配置: %d秒\n"
|
||||
"注意:超时前已解析的目录数据已保存到数据库,但扫描未完全完成。",
|
||||
idx, len(sites), site_url, site_timeout
|
||||
)
|
||||
except Exception as exc:
|
||||
# 其他异常
|
||||
failed_sites.append(site_url)
|
||||
logger.error(
|
||||
"✗ [%d/%d] 站点扫描失败: %s - 错误: %s",
|
||||
idx, len(sites), site_url, exc
|
||||
)
|
||||
|
||||
# 每 10 个站点输出进度
|
||||
if idx % 10 == 0:
|
||||
logger.info(
|
||||
"进度: %d/%d (%.1f%%) - 已发现 %d 个目录",
|
||||
idx, len(sites), idx/len(sites)*100, total_directories
|
||||
)
|
||||
|
||||
# 计算成功和失败的站点数
|
||||
processed_count = len(processed_sites_set)
|
||||
|
||||
if failed_sites:
|
||||
logger.warning(
|
||||
"部分站点扫描失败: %d/%d",
|
||||
len(failed_sites), len(sites)
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"✓ 串行目录扫描执行完成 - 成功: %d/%d, 失败: %d, 总目录数: %d",
|
||||
processed_count, len(sites), len(failed_sites), total_directories
|
||||
)
|
||||
|
||||
return total_directories, processed_count, failed_sites
|
||||
|
||||
|
||||
@flow(
|
||||
name="directory_scan",
|
||||
log_prints=True,
|
||||
on_running=[on_scan_flow_running],
|
||||
on_completion=[on_scan_flow_completed],
|
||||
on_failure=[on_scan_flow_failed],
|
||||
)
|
||||
def directory_scan_flow(
|
||||
scan_id: int,
|
||||
target_name: str,
|
||||
target_id: int,
|
||||
scan_workspace_dir: str,
|
||||
enabled_tools: dict
|
||||
) -> dict:
|
||||
"""
|
||||
目录扫描 Flow
|
||||
|
||||
主要功能:
|
||||
1. 从 target 获取所有站点的 URL
|
||||
2. 对每个站点 URL 执行目录扫描(支持 ffuf 等工具)
|
||||
3. 流式保存扫描结果到数据库 Directory 表
|
||||
|
||||
工作流程:
|
||||
Step 0: 创建工作目录
|
||||
Step 1: 导出站点 URL 列表到文件(供扫描工具使用)
|
||||
Step 2: 验证工具配置
|
||||
Step 3: 串行执行扫描工具并实时保存结果
|
||||
|
||||
ffuf 输出字段:
|
||||
- url: 发现的目录/文件 URL
|
||||
- length: 响应内容长度
|
||||
- status: HTTP 状态码
|
||||
- words: 响应内容单词数
|
||||
- lines: 响应内容行数
|
||||
- content_type: 内容类型
|
||||
- duration: 请求耗时
|
||||
|
||||
Args:
|
||||
scan_id: 扫描任务 ID
|
||||
target_name: 目标名称
|
||||
target_id: 目标 ID
|
||||
scan_workspace_dir: 扫描工作空间目录
|
||||
enabled_tools: 启用的工具配置字典
|
||||
|
||||
Returns:
|
||||
dict: {
|
||||
'success': bool,
|
||||
'scan_id': int,
|
||||
'target': str,
|
||||
'scan_workspace_dir': str,
|
||||
'sites_file': str,
|
||||
'site_count': int,
|
||||
'total_directories': int, # 发现的总目录数
|
||||
'processed_sites': int, # 成功处理的站点数
|
||||
'failed_sites_count': int, # 失败的站点数
|
||||
'executed_tasks': list
|
||||
}
|
||||
|
||||
Raises:
|
||||
ValueError: 参数错误
|
||||
RuntimeError: 执行失败
|
||||
"""
|
||||
try:
|
||||
logger.info(
|
||||
"="*60 + "\n" +
|
||||
"开始目录扫描\n" +
|
||||
f" Scan ID: {scan_id}\n" +
|
||||
f" Target: {target_name}\n" +
|
||||
f" Workspace: {scan_workspace_dir}\n" +
|
||||
"="*60
|
||||
)
|
||||
|
||||
# 参数验证
|
||||
if scan_id is None:
|
||||
raise ValueError("scan_id 不能为空")
|
||||
if not target_name:
|
||||
raise ValueError("target_name 不能为空")
|
||||
if target_id is None:
|
||||
raise ValueError("target_id 不能为空")
|
||||
if not scan_workspace_dir:
|
||||
raise ValueError("scan_workspace_dir 不能为空")
|
||||
if not enabled_tools:
|
||||
raise ValueError("enabled_tools 不能为空")
|
||||
|
||||
# Step 0: 创建工作目录
|
||||
directory_scan_dir = _setup_directory_scan_directory(scan_workspace_dir)
|
||||
|
||||
# Step 1: 导出站点 URL
|
||||
sites_file, site_count = _export_site_urls(target_id, directory_scan_dir)
|
||||
|
||||
if site_count == 0:
|
||||
logger.warning("目标下没有站点,跳过目录扫描")
|
||||
return {
|
||||
'success': True,
|
||||
'scan_id': scan_id,
|
||||
'target': target_name,
|
||||
'scan_workspace_dir': scan_workspace_dir,
|
||||
'sites_file': sites_file,
|
||||
'site_count': 0,
|
||||
'total_directories': 0,
|
||||
'processed_sites': 0,
|
||||
'failed_sites_count': 0,
|
||||
'executed_tasks': ['export_sites']
|
||||
}
|
||||
|
||||
# Step 2: 工具配置信息
|
||||
logger.info("Step 2: 工具配置信息")
|
||||
logger.info(
|
||||
"✓ 启用工具: %s",
|
||||
', '.join(enabled_tools.keys())
|
||||
)
|
||||
|
||||
# Step 3: 串行执行扫描工具并实时保存结果
|
||||
logger.info("Step 3: 串行执行扫描工具并实时保存结果")
|
||||
total_directories, processed_sites, failed_sites = _run_scans_sequentially(
|
||||
enabled_tools=enabled_tools,
|
||||
sites_file=sites_file,
|
||||
directory_scan_dir=directory_scan_dir,
|
||||
scan_id=scan_id,
|
||||
target_id=target_id,
|
||||
site_count=site_count,
|
||||
target_name=target_name
|
||||
)
|
||||
|
||||
# 检查是否所有站点都失败
|
||||
if processed_sites == 0 and site_count > 0:
|
||||
logger.warning("所有站点扫描均失败 - 总站点数: %d, 失败数: %d", site_count, len(failed_sites))
|
||||
# 不抛出异常,让扫描继续
|
||||
|
||||
logger.info("="*60 + "\n✓ 目录扫描完成\n" + "="*60)
|
||||
|
||||
return {
|
||||
'success': True,
|
||||
'scan_id': scan_id,
|
||||
'target': target_name,
|
||||
'scan_workspace_dir': scan_workspace_dir,
|
||||
'sites_file': sites_file,
|
||||
'site_count': site_count,
|
||||
'total_directories': total_directories,
|
||||
'processed_sites': processed_sites,
|
||||
'failed_sites_count': len(failed_sites),
|
||||
'executed_tasks': ['export_sites', 'run_and_stream_save_directories']
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.exception("目录扫描失败: %s", e)
|
||||
raise
|
||||
@@ -1,279 +0,0 @@
|
||||
"""
|
||||
扫描初始化 Flow
|
||||
|
||||
负责编排扫描任务的初始化流程
|
||||
|
||||
职责:
|
||||
- 使用 FlowOrchestrator 解析 YAML 配置
|
||||
- 在 Prefect Flow 中执行子 Flow(Subflow)
|
||||
- 按照 YAML 顺序编排工作流
|
||||
- 不包含具体业务逻辑(由 Tasks 和 FlowOrchestrator 实现)
|
||||
|
||||
架构:
|
||||
- Flow: Prefect 编排层(本文件)
|
||||
- FlowOrchestrator: 配置解析和执行计划(apps/scan/services/)
|
||||
- Tasks: 执行层(apps/scan/tasks/)
|
||||
- Handlers: 状态管理(apps/scan/handlers/)
|
||||
"""
|
||||
|
||||
# Django 环境初始化(导入即生效)
|
||||
# 注意:动态扫描容器应使用 run_initiate_scan.py 启动,以便在导入前设置环境变量
|
||||
from apps.common.prefect_django_setup import setup_django_for_prefect
|
||||
|
||||
from prefect import flow, task
|
||||
from pathlib import Path
|
||||
import logging
|
||||
|
||||
from apps.scan.handlers import (
|
||||
on_initiate_scan_flow_running,
|
||||
on_initiate_scan_flow_completed,
|
||||
on_initiate_scan_flow_failed,
|
||||
)
|
||||
from prefect.futures import wait
|
||||
from apps.scan.tasks.workspace_tasks import create_scan_workspace_task
|
||||
from apps.scan.orchestrators import FlowOrchestrator
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@task(name="run_subflow")
|
||||
def _run_subflow_task(scan_type: str, flow_func, flow_kwargs: dict):
|
||||
"""包装子 Flow 的 Task,用于在并行阶段并发执行子 Flow。"""
|
||||
logger.info("开始执行子 Flow: %s", scan_type)
|
||||
return flow_func(**flow_kwargs)
|
||||
|
||||
|
||||
@flow(
|
||||
name='initiate_scan',
|
||||
description='扫描任务初始化流程',
|
||||
log_prints=True,
|
||||
on_running=[on_initiate_scan_flow_running],
|
||||
on_completion=[on_initiate_scan_flow_completed],
|
||||
on_failure=[on_initiate_scan_flow_failed],
|
||||
)
|
||||
def initiate_scan_flow(
|
||||
scan_id: int,
|
||||
target_name: str,
|
||||
target_id: int,
|
||||
scan_workspace_dir: str,
|
||||
engine_name: str,
|
||||
scheduled_scan_name: str | None = None,
|
||||
) -> dict:
|
||||
"""
|
||||
初始化扫描任务(动态工作流编排)
|
||||
|
||||
根据 YAML 配置动态编排工作流:
|
||||
- 从数据库获取 engine_config (YAML)
|
||||
- 检测启用的扫描类型
|
||||
- 按照定义的阶段执行:
|
||||
Stage 1: Discovery (顺序执行)
|
||||
- subdomain_discovery
|
||||
- port_scan
|
||||
- site_scan
|
||||
Stage 2: Analysis (并行执行)
|
||||
- url_fetch
|
||||
- directory_scan
|
||||
|
||||
Args:
|
||||
scan_id: 扫描任务 ID
|
||||
target_name: 目标名称
|
||||
target_id: 目标 ID
|
||||
scan_workspace_dir: Scan 工作空间目录路径
|
||||
engine_name: 引擎名称(用于显示)
|
||||
scheduled_scan_name: 定时扫描任务名称(可选,用于通知显示)
|
||||
|
||||
Returns:
|
||||
dict: 执行结果摘要
|
||||
|
||||
Raises:
|
||||
ValueError: 参数验证失败或配置无效
|
||||
RuntimeError: 执行失败
|
||||
"""
|
||||
try:
|
||||
# ==================== 参数验证 ====================
|
||||
if not scan_id:
|
||||
raise ValueError("scan_id is required")
|
||||
if not scan_workspace_dir:
|
||||
raise ValueError("scan_workspace_dir is required")
|
||||
if not engine_name:
|
||||
raise ValueError("engine_name is required")
|
||||
|
||||
|
||||
logger.info(
|
||||
"="*60 + "\n" +
|
||||
"开始初始化扫描任务\n" +
|
||||
f" Scan ID: {scan_id}\n" +
|
||||
f" Target: {target_name}\n" +
|
||||
f" Engine: {engine_name}\n" +
|
||||
f" Workspace: {scan_workspace_dir}\n" +
|
||||
"="*60
|
||||
)
|
||||
|
||||
# ==================== Task 1: 创建 Scan 工作空间 ====================
|
||||
scan_workspace_path = create_scan_workspace_task(scan_workspace_dir)
|
||||
|
||||
# ==================== Task 2: 获取引擎配置 ====================
|
||||
from apps.scan.models import Scan
|
||||
scan = Scan.objects.select_related('engine').get(id=scan_id)
|
||||
engine_config = scan.engine.configuration
|
||||
|
||||
# ==================== Task 3: 解析配置,生成执行计划 ====================
|
||||
orchestrator = FlowOrchestrator(engine_config)
|
||||
|
||||
# FlowOrchestrator 已经解析了所有工具配置
|
||||
enabled_tools_by_type = orchestrator.enabled_tools_by_type
|
||||
|
||||
logger.info(
|
||||
f"执行计划生成成功:\n"
|
||||
f" 扫描类型: {' → '.join(orchestrator.scan_types)}\n"
|
||||
f" 总共 {len(orchestrator.scan_types)} 个 Flow"
|
||||
)
|
||||
|
||||
# ==================== 初始化阶段进度 ====================
|
||||
# 在解析完配置后立即初始化,此时已有完整的 scan_types 列表
|
||||
from apps.scan.services import ScanService
|
||||
scan_service = ScanService()
|
||||
scan_service.init_stage_progress(scan_id, orchestrator.scan_types)
|
||||
logger.info(f"✓ 初始化阶段进度 - Stages: {orchestrator.scan_types}")
|
||||
|
||||
# ==================== 更新 Target 最后扫描时间 ====================
|
||||
# 在开始扫描时更新,表示"最后一次扫描开始时间"
|
||||
from apps.targets.services import TargetService
|
||||
target_service = TargetService()
|
||||
target_service.update_last_scanned_at(target_id)
|
||||
logger.info(f"✓ 更新 Target 最后扫描时间 - Target ID: {target_id}")
|
||||
|
||||
# ==================== Task 3: 执行 Flow(动态阶段执行)====================
|
||||
# 注意:各阶段状态更新由 scan_flow_handlers.py 自动处理(running/completed/failed)
|
||||
executed_flows = []
|
||||
results = {}
|
||||
|
||||
# 通用执行参数
|
||||
flow_kwargs = {
|
||||
'scan_id': scan_id,
|
||||
'target_name': target_name,
|
||||
'target_id': target_id,
|
||||
'scan_workspace_dir': str(scan_workspace_path)
|
||||
}
|
||||
|
||||
def record_flow_result(scan_type, result=None, error=None):
|
||||
"""
|
||||
统一的结果记录函数
|
||||
|
||||
Args:
|
||||
scan_type: 扫描类型名称
|
||||
result: 执行结果(成功时)
|
||||
error: 异常对象(失败时)
|
||||
"""
|
||||
if error:
|
||||
# 失败处理:记录错误但不抛出异常,让扫描继续执行后续阶段
|
||||
error_msg = f"{scan_type} 执行失败: {str(error)}"
|
||||
logger.warning(error_msg)
|
||||
executed_flows.append(f"{scan_type} (失败)")
|
||||
results[scan_type] = {'success': False, 'error': str(error)}
|
||||
# 不再抛出异常,让扫描继续
|
||||
else:
|
||||
# 成功处理
|
||||
executed_flows.append(scan_type)
|
||||
results[scan_type] = result
|
||||
logger.info(f"✓ {scan_type} 执行成功")
|
||||
|
||||
def get_valid_flows(flow_names):
|
||||
"""
|
||||
获取有效的 Flow 函数列表,并为每个 Flow 准备专属参数
|
||||
|
||||
Args:
|
||||
flow_names: 扫描类型名称列表
|
||||
|
||||
Returns:
|
||||
list: [(scan_type, flow_func, flow_specific_kwargs), ...] 有效的函数列表
|
||||
"""
|
||||
valid_flows = []
|
||||
for scan_type in flow_names:
|
||||
flow_func = orchestrator.get_flow_function(scan_type)
|
||||
if flow_func:
|
||||
# 为每个 Flow 准备专属的参数(包含对应的 enabled_tools)
|
||||
flow_specific_kwargs = dict(flow_kwargs)
|
||||
flow_specific_kwargs['enabled_tools'] = enabled_tools_by_type.get(scan_type, {})
|
||||
valid_flows.append((scan_type, flow_func, flow_specific_kwargs))
|
||||
else:
|
||||
logger.warning(f"跳过未实现的 Flow: {scan_type}")
|
||||
return valid_flows
|
||||
|
||||
# ---------------------------------------------------------
|
||||
# 动态阶段执行(基于 FlowOrchestrator 定义)
|
||||
# ---------------------------------------------------------
|
||||
for mode, enabled_flows in orchestrator.get_execution_stages():
|
||||
if mode == 'sequential':
|
||||
# 顺序执行
|
||||
logger.info(f"\n{'='*60}\n顺序执行阶段: {', '.join(enabled_flows)}\n{'='*60}")
|
||||
for scan_type, flow_func, flow_specific_kwargs in get_valid_flows(enabled_flows):
|
||||
logger.info(f"\n{'='*60}\n执行 Flow: {scan_type}\n{'='*60}")
|
||||
try:
|
||||
result = flow_func(**flow_specific_kwargs)
|
||||
record_flow_result(scan_type, result=result)
|
||||
except Exception as e:
|
||||
record_flow_result(scan_type, error=e)
|
||||
|
||||
elif mode == 'parallel':
|
||||
# 并行执行阶段:通过 Task 包装子 Flow,并使用 Prefect TaskRunner 并发运行
|
||||
logger.info(f"\n{'='*60}\n并行执行阶段: {', '.join(enabled_flows)}\n{'='*60}")
|
||||
futures = []
|
||||
|
||||
# 提交所有并行子 Flow 任务
|
||||
for scan_type, flow_func, flow_specific_kwargs in get_valid_flows(enabled_flows):
|
||||
logger.info(f"\n{'='*60}\n提交并行子 Flow 任务: {scan_type}\n{'='*60}")
|
||||
future = _run_subflow_task.submit(
|
||||
scan_type=scan_type,
|
||||
flow_func=flow_func,
|
||||
flow_kwargs=flow_specific_kwargs,
|
||||
)
|
||||
futures.append((scan_type, future))
|
||||
|
||||
# 等待所有并行子 Flow 完成
|
||||
if futures:
|
||||
wait([f for _, f in futures])
|
||||
|
||||
# 检查结果(复用统一的结果处理逻辑)
|
||||
for scan_type, future in futures:
|
||||
try:
|
||||
result = future.result()
|
||||
record_flow_result(scan_type, result=result)
|
||||
except Exception as e:
|
||||
record_flow_result(scan_type, error=e)
|
||||
|
||||
# ==================== 完成 ====================
|
||||
logger.info(
|
||||
"="*60 + "\n" +
|
||||
"✓ 扫描任务初始化完成\n" +
|
||||
f" 执行的 Flow: {', '.join(executed_flows)}\n" +
|
||||
"="*60
|
||||
)
|
||||
|
||||
# ==================== 返回结果 ====================
|
||||
return {
|
||||
'success': True,
|
||||
'scan_id': scan_id,
|
||||
'target': target_name,
|
||||
'scan_workspace_dir': str(scan_workspace_path),
|
||||
'executed_flows': executed_flows,
|
||||
'results': results
|
||||
}
|
||||
|
||||
except ValueError as e:
|
||||
# 参数错误
|
||||
logger.error("参数错误: %s", e)
|
||||
raise
|
||||
except RuntimeError as e:
|
||||
# 执行失败
|
||||
logger.error("运行时错误: %s", e)
|
||||
raise
|
||||
except OSError as e:
|
||||
# 文件系统错误(工作空间创建失败)
|
||||
logger.error("文件系统错误: %s", e)
|
||||
raise
|
||||
except Exception as e:
|
||||
# 其他未预期错误
|
||||
logger.exception("初始化扫描任务失败: %s", e)
|
||||
# 注意:失败状态更新由 Prefect State Handlers 自动处理
|
||||
raise
|
||||
@@ -1,524 +0,0 @@
|
||||
"""
|
||||
端口扫描 Flow
|
||||
|
||||
负责编排端口扫描的完整流程
|
||||
|
||||
架构:
|
||||
- Flow 负责编排多个原子 Task
|
||||
- 支持串行执行扫描工具(流式处理)
|
||||
- 每个 Task 可独立重试
|
||||
- 配置由 YAML 解析
|
||||
"""
|
||||
|
||||
# Django 环境初始化(导入即生效)
|
||||
from apps.common.prefect_django_setup import setup_django_for_prefect
|
||||
|
||||
import logging
|
||||
import os
|
||||
import subprocess
|
||||
from pathlib import Path
|
||||
from typing import Callable
|
||||
from prefect import flow
|
||||
from apps.scan.tasks.port_scan import (
|
||||
export_scan_targets_task,
|
||||
run_and_stream_save_ports_task
|
||||
)
|
||||
from apps.scan.handlers.scan_flow_handlers import (
|
||||
on_scan_flow_running,
|
||||
on_scan_flow_completed,
|
||||
on_scan_flow_failed,
|
||||
)
|
||||
from apps.scan.utils import config_parser, build_scan_command
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def calculate_port_scan_timeout(
    tool_config: dict,
    file_path: str,
    base_per_pair: float = 0.5
) -> int:
    """
    Calculate the port-scan timeout from target count and port count.

    Formula: timeout = target_count * port_count * base_per_pair.
    The result is clamped to a minimum of 60 seconds; no upper bound is
    applied (the previous docstring's claim of a 2-day cap did not match
    the code, which deliberately dropped the upper limit).

    Args:
        tool_config: Tool configuration dict containing port settings
            (``ports``, ``top-ports``, ...) used to derive the port count.
        file_path: Path to the target file (one domain/IP per line).
        base_per_pair: Base seconds per "target x port" pair (default 0.5).

    Returns:
        int: Computed timeout in seconds (>= 60), or 600 on any failure.

    Example:
        # 100 targets x 100 ports x 0.5s = 5000s
        timeout = calculate_port_scan_timeout(
            tool_config={'top-ports': 100},
            file_path='/path/to/domains.txt'
        )
    """
    try:
        # 1. Count targets in pure Python instead of shelling out to
        #    ``wc -l``: avoids a subprocess, and unlike ``wc`` it also
        #    counts a final line that lacks a trailing newline.
        with open(file_path, 'r', encoding='utf-8', errors='ignore') as fh:
            target_count = sum(1 for _ in fh)

        # 2. Derive the port count from the tool configuration
        port_count = _parse_port_count(tool_config)

        # 3. Total work = targets x ports, scaled by the per-pair base time
        total_work = target_count * port_count
        timeout = int(total_work * base_per_pair)

        # 4. Enforce a sane lower bound only (no upper cap by design)
        min_timeout = 60
        timeout = max(min_timeout, timeout)

        logger.info(
            f"计算端口扫描 timeout - "
            f"目标数: {target_count}, "
            f"端口数: {port_count}, "
            f"总工作量: {total_work}, "
            f"超时: {timeout}秒"
        )
        return timeout

    except Exception as e:
        # Best effort: never let timeout calculation break the scan itself.
        logger.warning(f"计算 timeout 失败: {e},使用默认值 600秒")
        return 600
|
||||
|
||||
|
||||
def _parse_port_count(tool_config: dict) -> int:
|
||||
"""
|
||||
从工具配置中解析端口数量
|
||||
|
||||
优先级:
|
||||
1. top-ports: N → 返回 N
|
||||
2. ports: "80,443,8080" → 返回逗号分隔的数量
|
||||
3. ports: "1-1000" → 返回范围的大小
|
||||
4. ports: "1-65535" → 返回 65535
|
||||
5. 默认 → 返回 100(naabu 默认扫描 top 100)
|
||||
|
||||
Args:
|
||||
tool_config: 工具配置字典
|
||||
|
||||
Returns:
|
||||
int: 端口数量
|
||||
"""
|
||||
# 1. 检查 top-ports 配置
|
||||
if 'top-ports' in tool_config:
|
||||
top_ports = tool_config['top-ports']
|
||||
if isinstance(top_ports, int) and top_ports > 0:
|
||||
return top_ports
|
||||
logger.warning(f"top-ports 配置无效: {top_ports},使用默认值")
|
||||
|
||||
# 2. 检查 ports 配置
|
||||
if 'ports' in tool_config:
|
||||
ports_str = str(tool_config['ports']).strip()
|
||||
|
||||
# 2.1 逗号分隔的端口列表:80,443,8080
|
||||
if ',' in ports_str:
|
||||
port_list = [p.strip() for p in ports_str.split(',') if p.strip()]
|
||||
return len(port_list)
|
||||
|
||||
# 2.2 端口范围:1-1000
|
||||
if '-' in ports_str:
|
||||
try:
|
||||
start, end = ports_str.split('-', 1)
|
||||
start_port = int(start.strip())
|
||||
end_port = int(end.strip())
|
||||
|
||||
if 1 <= start_port <= end_port <= 65535:
|
||||
return end_port - start_port + 1
|
||||
logger.warning(f"端口范围无效: {ports_str},使用默认值")
|
||||
except ValueError:
|
||||
logger.warning(f"端口范围解析失败: {ports_str},使用默认值")
|
||||
|
||||
# 2.3 单个端口
|
||||
try:
|
||||
port = int(ports_str)
|
||||
if 1 <= port <= 65535:
|
||||
return 1
|
||||
except ValueError:
|
||||
logger.warning(f"端口配置解析失败: {ports_str},使用默认值")
|
||||
|
||||
# 3. 默认值:naabu 默认扫描 top 100 端口
|
||||
return 100
|
||||
|
||||
|
||||
def _setup_port_scan_directory(scan_workspace_dir: str) -> Path:
|
||||
"""
|
||||
创建并验证端口扫描工作目录
|
||||
|
||||
Args:
|
||||
scan_workspace_dir: 扫描工作空间目录
|
||||
|
||||
Returns:
|
||||
Path: 端口扫描目录路径
|
||||
|
||||
Raises:
|
||||
RuntimeError: 目录创建或验证失败
|
||||
"""
|
||||
port_scan_dir = Path(scan_workspace_dir) / 'port_scan'
|
||||
port_scan_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
if not port_scan_dir.is_dir():
|
||||
raise RuntimeError(f"端口扫描目录创建失败: {port_scan_dir}")
|
||||
if not os.access(port_scan_dir, os.W_OK):
|
||||
raise RuntimeError(f"端口扫描目录不可写: {port_scan_dir}")
|
||||
|
||||
return port_scan_dir
|
||||
|
||||
|
||||
def _export_scan_targets(target_id: int, port_scan_dir: Path) -> tuple[str, int, str]:
    """
    Export the scan targets for a Target into a text file.

    The exported content depends on the Target type:
    - DOMAIN: subdomains from the Subdomain table
    - IP:     the target name itself
    - CIDR:   every IP expanded from the CIDR range

    Args:
        target_id: Target ID.
        port_scan_dir: Port-scan working directory.

    Returns:
        tuple: (targets_file, target_count, target_type)
    """
    logger.info("Step 1: 导出扫描目标列表")

    destination = str(port_scan_dir / 'targets.txt')
    export_result = export_scan_targets_task(
        target_id=target_id,
        output_file=destination,
        batch_size=1000  # read 1000 rows at a time to bound memory use
    )

    count = export_result['total_count']
    kind = export_result.get('target_type', 'unknown')

    logger.info(
        "✓ 扫描目标导出完成 - 类型: %s, 文件: %s, 数量: %d",
        kind,
        export_result['output_file'],
        count
    )

    if count == 0:
        # No exception here: the caller decides how to handle an empty set
        logger.warning("目标下没有可扫描的地址,无法执行端口扫描")

    return export_result['output_file'], count, kind
|
||||
|
||||
|
||||
def _run_scans_sequentially(
    enabled_tools: dict,
    domains_file: str,
    port_scan_dir: Path,
    scan_id: int,
    target_id: int,
    target_name: str
) -> tuple[dict, int, list, list]:
    """
    Run the enabled port-scan tools one after another (streaming mode).

    Args:
        enabled_tools: Enabled tool configurations, keyed by tool name.
        domains_file: Path to the target list file.
        port_scan_dir: Port-scan working directory.
        scan_id: Scan ID.
        target_id: Target ID.
        target_name: Target name (used in error logs).

    Returns:
        tuple: (tool_stats, processed_records, successful_tool_names,
        failed_tools). Port scanning streams results directly into the
        database, so no result files are produced.

    Note:
        If every tool fails an empty result is returned instead of raising,
        so the overall scan can continue.
    """
    from datetime import datetime  # hoisted: was previously imported on every loop iteration

    tool_stats = {}
    processed_records = 0
    failed_tools = []  # failed tools together with their failure reason

    # Run each enabled port-scan tool sequentially
    for tool_name, tool_config in enabled_tools.items():
        # 1. Build the full command (variable substitution)
        try:
            command = build_scan_command(
                tool_name=tool_name,
                scan_type='port_scan',
                command_params={
                    'domains_file': domains_file  # maps to {domains_file}
                },
                tool_config=tool_config  # tool config from YAML
            )
        except Exception as e:
            reason = f"命令构建失败: {str(e)}"
            logger.error(f"构建 {tool_name} 命令失败: {e}")
            failed_tools.append({'tool': tool_name, 'reason': reason})
            continue

        # 2. Resolve the timeout. 'auto' (also used as the fallback when
        #    the key is missing — previously a KeyError) triggers dynamic
        #    calculation, matching the site-scan variant's behavior.
        config_timeout = tool_config.get('timeout', 'auto')
        if config_timeout == 'auto':
            config_timeout = calculate_port_scan_timeout(
                tool_config=tool_config,
                file_path=str(domains_file)
            )
            logger.info(f"✓ 工具 {tool_name} 动态计算 timeout: {config_timeout}秒")

        # 2.1 Per-run log file
        timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
        log_file = port_scan_dir / f"{tool_name}_{timestamp}.log"

        # 3. Execute the scan task
        logger.info("开始执行 %s 扫描(超时: %d秒)...", tool_name, config_timeout)

        try:
            # Direct (serial) task call. Port scanners stream to stdout,
            # so no output_file is used.
            result = run_and_stream_save_ports_task(
                cmd=command,
                tool_name=tool_name,
                scan_id=scan_id,
                target_id=target_id,
                cwd=str(port_scan_dir),
                shell=True,
                batch_size=1000,
                timeout=config_timeout,
                log_file=str(log_file)
            )

            tool_stats[tool_name] = {
                'command': command,
                'result': result,
                'timeout': config_timeout
            }
            processed_records += result.get('processed_records', 0)
            logger.info(
                "✓ 工具 %s 流式处理完成 - 记录数: %d",
                tool_name, result.get('processed_records', 0)
            )

        except subprocess.TimeoutExpired:
            # Timeout: rows parsed before the deadline were already saved.
            reason = f"执行超时(配置: {config_timeout}秒)"
            failed_tools.append({'tool': tool_name, 'reason': reason})
            logger.warning(
                "⚠️ 工具 %s 执行超时 - 超时配置: %d秒\n"
                "注意:超时前已解析的端口数据已保存到数据库,但扫描未完全完成。",
                tool_name, config_timeout
            )
        except Exception as exc:
            # Any other failure: record and move on to the next tool
            failed_tools.append({'tool': tool_name, 'reason': str(exc)})
            logger.error("工具 %s 执行失败: %s", tool_name, exc, exc_info=True)

    if failed_tools:
        logger.warning(
            "以下扫描工具执行失败: %s",
            ', '.join([f['tool'] for f in failed_tools])
        )

    if not tool_stats:
        error_details = "; ".join([f"{f['tool']}: {f['reason']}" for f in failed_tools])
        logger.warning("所有端口扫描工具均失败 - 目标: %s, 失败工具: %s", target_name, error_details)
        # Return an empty result instead of raising so the scan continues
        return {}, 0, [], failed_tools

    # Successful tools = enabled tools minus the failed ones
    failed_names = {f['tool'] for f in failed_tools}
    successful_tool_names = [name for name in enabled_tools if name not in failed_names]

    logger.info(
        "✓ 串行端口扫描执行完成 - 成功: %d/%d (成功: %s, 失败: %s)",
        len(tool_stats), len(enabled_tools),
        ', '.join(successful_tool_names) if successful_tool_names else '无',
        ', '.join([f['tool'] for f in failed_tools]) if failed_tools else '无'
    )

    return tool_stats, processed_records, successful_tool_names, failed_tools
|
||||
|
||||
|
||||
@flow(
    name="port_scan",
    log_prints=True,
    on_running=[on_scan_flow_running],
    on_completion=[on_scan_flow_completed],
    on_failure=[on_scan_flow_failed],
)
def port_scan_flow(
    scan_id: int,
    target_name: str,
    target_id: int,
    scan_workspace_dir: str,
    enabled_tools: dict
) -> dict:
    """
    Port-scan flow.

    Responsibilities:
    1. Scan the target's domains/IPs for open ports.
    2. Persist host + ip + port triples into the HostPortMapping table.

    Produced assets:
    - HostPortMapping: host/ip/port triples. One host may map to several
      IPs (CDN, load balancing), hence the triple mapping table.

    Pipeline:
        Step 0: create the working directory
        Step 1: export the target list to a file for the scanners
        Step 2: log the enabled tool configuration
        Step 3: run the tools sequentially, streaming parsed output into
                the database (-> HostPortMapping)

    Args:
        scan_id: Scan ID.
        target_name: Domain name.
        target_id: Target ID.
        scan_workspace_dir: Scan workspace directory.
        enabled_tools: Enabled tool configurations.

    Returns:
        dict with keys: success, scan_id, target, scan_workspace_dir,
        targets_file, target_count, target_type, processed_records,
        executed_tasks and a tool_stats summary (total / successful /
        failed / successful_tools / failed_tools / details).

    Raises:
        ValueError: Invalid parameters/configuration.
        RuntimeError: Execution failure.
    """
    try:
        # ---- parameter validation ----
        if scan_id is None:
            raise ValueError("scan_id 不能为空")
        if not target_name:
            raise ValueError("target_name 不能为空")
        if target_id is None:
            raise ValueError("target_id 不能为空")
        if not scan_workspace_dir:
            raise ValueError("scan_workspace_dir 不能为空")
        if not enabled_tools:
            raise ValueError("enabled_tools 不能为空")

        logger.info(
            "="*60 + "\n" +
            "开始端口扫描\n" +
            f" Scan ID: {scan_id}\n" +
            f" Target: {target_name}\n" +
            f" Workspace: {scan_workspace_dir}\n" +
            "="*60
        )

        # Step 0: working directory
        port_scan_dir = _setup_port_scan_directory(scan_workspace_dir)

        # Step 1: export the scan targets (content depends on Target type)
        targets_file, target_count, target_type = _export_scan_targets(target_id, port_scan_dir)

        if target_count == 0:
            # Nothing to scan: report success with empty statistics
            logger.warning("目标下没有可扫描的地址,跳过端口扫描")
            return {
                'success': True,
                'scan_id': scan_id,
                'target': target_name,
                'scan_workspace_dir': scan_workspace_dir,
                'targets_file': targets_file,
                'target_count': 0,
                'target_type': target_type,
                'processed_records': 0,
                'executed_tasks': ['export_scan_targets'],
                'tool_stats': {
                    'total': 0,
                    'successful': 0,
                    'failed': 0,
                    'successful_tools': [],
                    'failed_tools': [],
                    'details': {}
                }
            }

        # Step 2: tool configuration overview
        logger.info("Step 2: 工具配置信息")
        logger.info("✓ 启用工具: %s", ', '.join(enabled_tools.keys()))

        # Step 3: run the scanners sequentially
        logger.info("Step 3: 串行执行扫描工具")
        tool_stats, processed_records, successful_tool_names, failed_tools = _run_scans_sequentially(
            enabled_tools=enabled_tools,
            domains_file=targets_file,  # historical parameter name kept for compatibility
            port_scan_dir=port_scan_dir,
            scan_id=scan_id,
            target_id=target_id,
            target_name=target_name
        )

        logger.info("="*60 + "\n✓ 端口扫描完成\n" + "="*60)

        # Executed task list reflects the tools that actually ran
        executed_tasks = ['export_scan_targets', 'parse_config']
        executed_tasks += [f'run_and_stream_save_ports ({tool})' for tool in tool_stats]

        return {
            'success': True,
            'scan_id': scan_id,
            'target': target_name,
            'scan_workspace_dir': scan_workspace_dir,
            'targets_file': targets_file,
            'target_count': target_count,
            'target_type': target_type,
            'processed_records': processed_records,
            'executed_tasks': executed_tasks,
            'tool_stats': {
                'total': len(tool_stats) + len(failed_tools),
                'successful': len(successful_tool_names),
                'failed': len(failed_tools),
                'successful_tools': successful_tool_names,
                'failed_tools': failed_tools,  # [{'tool': 'naabu_active', 'reason': '超时'}]
                'details': tool_stats  # detailed results (backward compatible)
            }
        }

    except ValueError as e:
        logger.error("配置错误: %s", e)
        raise
    except RuntimeError as e:
        logger.error("运行时错误: %s", e)
        raise
    except Exception as e:
        logger.exception("端口扫描失败: %s", e)
        raise
|
||||
@@ -1,495 +0,0 @@
|
||||
|
||||
"""
|
||||
站点扫描 Flow
|
||||
|
||||
负责编排站点扫描的完整流程
|
||||
|
||||
架构:
|
||||
- Flow 负责编排多个原子 Task
|
||||
- 支持串行执行扫描工具(流式处理)
|
||||
- 每个 Task 可独立重试
|
||||
- 配置由 YAML 解析
|
||||
"""
|
||||
|
||||
# Django 环境初始化(导入即生效)
|
||||
from apps.common.prefect_django_setup import setup_django_for_prefect
|
||||
|
||||
import logging
|
||||
import os
|
||||
import subprocess
|
||||
from pathlib import Path
|
||||
from typing import Callable
|
||||
from prefect import flow
|
||||
from apps.scan.tasks.site_scan import export_site_urls_task, run_and_stream_save_websites_task
|
||||
from apps.scan.handlers.scan_flow_handlers import (
|
||||
on_scan_flow_running,
|
||||
on_scan_flow_completed,
|
||||
on_scan_flow_failed,
|
||||
)
|
||||
from apps.scan.utils import config_parser, build_scan_command
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def calculate_timeout_by_line_count(
    tool_config: dict,
    file_path: str,
    base_per_time: int = 1
) -> int:
    """
    Compute a timeout from the number of lines in a file.

    timeout = line_count * base_per_time; falls back to 600 seconds if
    the file cannot be read.

    Args:
        tool_config: Tool configuration dict (unused here; kept so all
            timeout calculators share the same interface).
        file_path: File whose lines are counted.
        base_per_time: Seconds per line (default 1).

    Returns:
        int: Computed timeout in seconds.

    Example:
        timeout = calculate_timeout_by_line_count(
            tool_config={},
            file_path='/path/to/urls.txt',
            base_per_time=2
        )
    """
    try:
        # Count lines in pure Python instead of spawning ``wc -l``:
        # avoids a subprocess, and a final line without a trailing
        # newline is counted as well (``wc -l`` would miss it).
        with open(file_path, 'r', encoding='utf-8', errors='ignore') as fh:
            line_count = sum(1 for _ in fh)

        # timeout = line count x per-line base time
        timeout = line_count * base_per_time

        logger.info(
            f"timeout 自动计算: 文件={file_path}, "
            f"行数={line_count}, 每行时间={base_per_time}秒, timeout={timeout}秒"
        )

        return timeout

    except Exception as e:
        # Fall back to a safe default if the file cannot be read
        logger.warning(f"计算行数失败: {e},使用默认 timeout: 600秒")
        return 600
|
||||
|
||||
|
||||
def _setup_site_scan_directory(scan_workspace_dir: str) -> Path:
|
||||
"""
|
||||
创建并验证站点扫描工作目录
|
||||
|
||||
Args:
|
||||
scan_workspace_dir: 扫描工作空间目录
|
||||
|
||||
Returns:
|
||||
Path: 站点扫描目录路径
|
||||
|
||||
Raises:
|
||||
RuntimeError: 目录创建或验证失败
|
||||
"""
|
||||
site_scan_dir = Path(scan_workspace_dir) / 'site_scan'
|
||||
site_scan_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
if not site_scan_dir.is_dir():
|
||||
raise RuntimeError(f"站点扫描目录创建失败: {site_scan_dir}")
|
||||
if not os.access(site_scan_dir, os.W_OK):
|
||||
raise RuntimeError(f"站点扫描目录不可写: {site_scan_dir}")
|
||||
|
||||
return site_scan_dir
|
||||
|
||||
|
||||
def _export_site_urls(target_id: int, site_scan_dir: Path) -> tuple[str, int, int]:
    """
    Export the target's site URLs into a text file.

    Args:
        target_id: Target ID.
        site_scan_dir: Site-scan working directory.

    Returns:
        tuple: (urls_file, total_urls, association_count)
    """
    logger.info("Step 1: 导出站点URL列表")

    destination = str(site_scan_dir / 'site_urls.txt')
    export_result = export_site_urls_task(
        target_id=target_id,
        output_file=destination,
        batch_size=1000  # process 1000 subdomains per batch
    )

    total_urls = export_result['total_urls']
    association_count = export_result['association_count']  # host-port association count

    logger.info(
        "✓ 站点URL导出完成 - 文件: %s, URL数量: %d, 关联数: %d",
        export_result['output_file'],
        total_urls,
        association_count
    )

    if total_urls == 0:
        # No exception here: the caller decides how to handle an empty set
        logger.warning("目标下没有可用的站点URL,无法执行站点扫描")

    return export_result['output_file'], total_urls, association_count
|
||||
|
||||
|
||||
def _run_scans_sequentially(
    enabled_tools: dict,
    urls_file: str,
    total_urls: int,
    site_scan_dir: Path,
    scan_id: int,
    target_id: int,
    target_name: str
) -> tuple[dict, int, list, list]:
    """
    Run the enabled site-scan tools one after another (streaming mode).

    Args:
        enabled_tools: Enabled tool configurations, keyed by tool name.
        urls_file: Path to the URL list file.
        total_urls: Number of URLs in the file.
        site_scan_dir: Site-scan working directory.
        scan_id: Scan ID.
        target_id: Target ID.
        target_name: Target name (used in error logs).

    Returns:
        tuple: (tool_stats, processed_records, successful_tool_names, failed_tools)

    Note:
        If every tool fails an empty result is returned instead of raising,
        so the overall scan can continue.
    """
    from datetime import datetime  # hoisted: was previously imported on every loop iteration

    tool_stats = {}
    processed_records = 0
    failed_tools = []  # failed tools together with their failure reason

    for tool_name, tool_config in enabled_tools.items():
        # 1. Build the full command (variable substitution)
        try:
            command = build_scan_command(
                tool_name=tool_name,
                scan_type='site_scan',
                command_params={
                    'url_file': urls_file
                },
                tool_config=tool_config
            )
        except Exception as e:
            reason = f"命令构建失败: {str(e)}"
            logger.error(f"构建 {tool_name} 命令失败: {e}")
            failed_tools.append({'tool': tool_name, 'reason': reason})
            continue

        # 2. Resolve the timeout: 'auto' -> dynamic calculation; otherwise
        #    use the larger of the configured and dynamic values.
        config_timeout = tool_config.get('timeout', 300)
        if config_timeout == 'auto':
            timeout = calculate_timeout_by_line_count(tool_config, urls_file, base_per_time=1)
            logger.info(f"✓ 工具 {tool_name} 动态计算 timeout: {timeout}秒")
        else:
            dynamic_timeout = calculate_timeout_by_line_count(tool_config, urls_file, base_per_time=1)
            timeout = max(dynamic_timeout, config_timeout)

        # 2.1 Per-run log file (mirrors the port-scan flow)
        timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
        log_file = site_scan_dir / f"{tool_name}_{timestamp}.log"

        logger.info(
            "开始执行 %s 站点扫描 - URL数: %d, 最终超时: %ds",
            tool_name, total_urls, timeout
        )

        # 3. Execute: stream the scan output and save results in real time
        try:
            result = run_and_stream_save_websites_task(
                cmd=command,
                tool_name=tool_name,
                scan_id=scan_id,
                target_id=target_id,
                cwd=str(site_scan_dir),
                shell=True,
                batch_size=1000,
                timeout=timeout,
                log_file=str(log_file)
            )

            tool_stats[tool_name] = {
                'command': command,
                'result': result,
                'timeout': timeout
            }
            processed_records += result.get('processed_records', 0)

            logger.info(
                "✓ 工具 %s 流式处理完成 - 处理记录: %d, 创建站点: %d, 跳过: %d",
                tool_name,
                result.get('processed_records', 0),
                result.get('created_websites', 0),
                result.get('skipped_no_subdomain', 0) + result.get('skipped_failed', 0)
            )

        except subprocess.TimeoutExpired:
            # Timeout: rows parsed before the deadline were already saved.
            reason = f"执行超时(配置: {timeout}秒)"
            failed_tools.append({'tool': tool_name, 'reason': reason})
            logger.warning(
                "⚠️ 工具 %s 执行超时 - 超时配置: %d秒\n"
                "注意:超时前已解析的站点数据已保存到数据库,但扫描未完全完成。",
                tool_name, timeout
            )
        except Exception as exc:
            # Any other failure: record and move on to the next tool
            failed_tools.append({'tool': tool_name, 'reason': str(exc)})
            logger.error("工具 %s 执行失败: %s", tool_name, exc, exc_info=True)

    if failed_tools:
        logger.warning(
            "以下扫描工具执行失败: %s",
            ', '.join([f['tool'] for f in failed_tools])
        )

    if not tool_stats:
        error_details = "; ".join([f"{f['tool']}: {f['reason']}" for f in failed_tools])
        logger.warning("所有站点扫描工具均失败 - 目标: %s, 失败工具: %s", target_name, error_details)
        # Return an empty result instead of raising so the scan continues
        return {}, 0, [], failed_tools

    # Successful tools = enabled tools minus the failed ones
    failed_names = {f['tool'] for f in failed_tools}
    successful_tool_names = [name for name in enabled_tools if name not in failed_names]

    logger.info(
        "✓ 串行站点扫描执行完成 - 成功: %d/%d (成功: %s, 失败: %s)",
        len(tool_stats), len(enabled_tools),
        ', '.join(successful_tool_names) if successful_tool_names else '无',
        ', '.join([f['tool'] for f in failed_tools]) if failed_tools else '无'
    )

    return tool_stats, processed_records, successful_tool_names, failed_tools
|
||||
|
||||
|
||||
def calculate_timeout(url_count: int, base: int = 600, per_url: int = 1) -> int:
    """
    Dynamically compute a scan timeout from the number of URLs.

    Rules:
    - base time: 600 seconds (10 minutes) by default
    - plus ``per_url`` seconds for every URL
    - no upper bound is applied; callers cap the value if they need to
      (the previous docstring referenced a ``max_timeout`` that never
      existed in the signature)

    Args:
        url_count: Number of URLs; must be a positive integer.
        base: Base timeout in seconds (default 600).
        per_url: Extra seconds per URL (default 1).

    Returns:
        int: The computed timeout in seconds.

    Raises:
        ValueError: If ``url_count`` is zero or negative.
    """
    if url_count < 0:
        raise ValueError(f"URL数量不能为负数: {url_count}")
    if url_count == 0:
        raise ValueError("URL数量不能为0")

    # int() keeps the result integral even for fractional per_url values
    return base + int(url_count * per_url)
|
||||
|
||||
|
||||
@flow(
    name="site_scan",
    log_prints=True,
    on_running=[on_scan_flow_running],
    on_completion=[on_scan_flow_completed],
    on_failure=[on_scan_flow_failed],
)
def site_scan_flow(
    scan_id: int,
    target_name: str,
    target_id: int,
    scan_workspace_dir: str,
    enabled_tools: dict
) -> dict:
    """
    Site-scan flow.

    Responsibilities:
    1. Join every subdomain of the target with its known ports into URLs
       and write them to a file.
    2. Probe the URLs in bulk (httpx) and stream the results into the
       database in real time.

    Pipeline:
        Step 0: create the working directory
        Step 1: export the site URL list
        Step 2: log the enabled tool configuration
        Step 3: run the tools sequentially, saving results as they stream

    Args:
        scan_id: Scan ID.
        target_name: Target name.
        target_id: Target ID.
        scan_workspace_dir: Scan workspace directory.
        enabled_tools: Enabled tool configurations.

    Returns:
        dict with keys: success, scan_id, target, scan_workspace_dir,
        urls_file, total_urls, association_count, processed_records,
        created_websites, skipped_no_subdomain, skipped_failed,
        executed_tasks and a tool_stats summary (total / successful /
        failed / successful_tools / failed_tools / details).

    Raises:
        ValueError: Invalid parameters/configuration.
        RuntimeError: Execution failure.
    """
    try:
        logger.info(
            "="*60 + "\n" +
            "开始站点扫描\n" +
            f" Scan ID: {scan_id}\n" +
            f" Target: {target_name}\n" +
            f" Workspace: {scan_workspace_dir}\n" +
            "="*60
        )

        # ---- parameter validation ----
        if scan_id is None:
            raise ValueError("scan_id 不能为空")
        if not target_name:
            raise ValueError("target_name 不能为空")
        if target_id is None:
            raise ValueError("target_id 不能为空")
        if not scan_workspace_dir:
            raise ValueError("scan_workspace_dir 不能为空")

        # Step 0: working directory
        site_scan_dir = _setup_site_scan_directory(scan_workspace_dir)

        # Step 1: export the site URLs
        urls_file, total_urls, association_count = _export_site_urls(
            target_id, site_scan_dir
        )

        if total_urls == 0:
            # Nothing to scan: report success with empty statistics
            logger.warning("目标下没有可用的站点URL,跳过站点扫描")
            return {
                'success': True,
                'scan_id': scan_id,
                'target': target_name,
                'scan_workspace_dir': scan_workspace_dir,
                'urls_file': urls_file,
                'total_urls': 0,
                'association_count': association_count,
                'processed_records': 0,
                'created_websites': 0,
                'skipped_no_subdomain': 0,
                'skipped_failed': 0,
                'executed_tasks': ['export_site_urls'],
                'tool_stats': {
                    'total': 0,
                    'successful': 0,
                    'failed': 0,
                    'successful_tools': [],
                    'failed_tools': [],
                    'details': {}
                }
            }

        # Step 2: tool configuration overview
        logger.info("Step 2: 工具配置信息")
        logger.info("✓ 启用工具: %s", ', '.join(enabled_tools.keys()))

        # Step 3: run the scanners sequentially
        logger.info("Step 3: 串行执行扫描工具并实时保存结果")
        tool_stats, processed_records, successful_tool_names, failed_tools = _run_scans_sequentially(
            enabled_tools=enabled_tools,
            urls_file=urls_file,
            total_urls=total_urls,
            site_scan_dir=site_scan_dir,
            scan_id=scan_id,
            target_id=target_id,
            target_name=target_name
        )

        logger.info("="*60 + "\n✓ 站点扫描完成\n" + "="*60)

        # Executed task list reflects the tools that actually ran
        executed_tasks = ['export_site_urls', 'parse_config']
        executed_tasks += [f'run_and_stream_save_websites ({tool})' for tool in tool_stats]

        # Aggregate per-tool counters across all successful tools
        total_created = sum(stats['result'].get('created_websites', 0) for stats in tool_stats.values())
        total_skipped_no_subdomain = sum(stats['result'].get('skipped_no_subdomain', 0) for stats in tool_stats.values())
        total_skipped_failed = sum(stats['result'].get('skipped_failed', 0) for stats in tool_stats.values())

        return {
            'success': True,
            'scan_id': scan_id,
            'target': target_name,
            'scan_workspace_dir': scan_workspace_dir,
            'urls_file': urls_file,
            'total_urls': total_urls,
            'association_count': association_count,
            'processed_records': processed_records,
            'created_websites': total_created,
            'skipped_no_subdomain': total_skipped_no_subdomain,
            'skipped_failed': total_skipped_failed,
            'executed_tasks': executed_tasks,
            'tool_stats': {
                'total': len(enabled_tools),
                'successful': len(successful_tool_names),
                'failed': len(failed_tools),
                'successful_tools': successful_tool_names,
                'failed_tools': failed_tools,
                'details': tool_stats
            }
        }

    except ValueError as e:
        logger.error("配置错误: %s", e)
        raise
    except RuntimeError as e:
        logger.error("运行时错误: %s", e)
        raise
    except Exception as e:
        logger.exception("站点扫描失败: %s", e)
        raise
|
||||
@@ -1,750 +0,0 @@
|
||||
"""
|
||||
子域名发现扫描 Flow
|
||||
|
||||
负责编排子域名发现扫描的完整流程
|
||||
|
||||
架构:
|
||||
- Flow 负责编排多个原子 Task
|
||||
- 支持并行执行扫描工具
|
||||
- 每个 Task 可独立重试
|
||||
- 配置由 YAML 解析
|
||||
|
||||
增强流程(4 阶段):
|
||||
Stage 1: 被动收集(并行) - 必选
|
||||
Stage 2: 字典爆破(可选) - 子域名字典爆破
|
||||
Stage 3: 变异生成 + 验证(可选) - dnsgen + 通用存活验证
|
||||
Stage 4: DNS 存活验证(可选) - 通用存活验证
|
||||
|
||||
各阶段可灵活开关,最终结果根据实际执行的阶段动态决定
|
||||
"""
|
||||
|
||||
# Django 环境初始化(导入即生效)
|
||||
from apps.common.prefect_django_setup import setup_django_for_prefect
|
||||
|
||||
from prefect import flow
|
||||
from pathlib import Path
|
||||
import logging
|
||||
import os
|
||||
from apps.scan.handlers.scan_flow_handlers import (
|
||||
on_scan_flow_running,
|
||||
on_scan_flow_completed,
|
||||
on_scan_flow_failed,
|
||||
)
|
||||
from apps.scan.utils import build_scan_command, ensure_wordlist_local
|
||||
from apps.engine.services.wordlist_service import WordlistService
|
||||
from apps.common.normalizer import normalize_domain
|
||||
from apps.common.validators import validate_domain
|
||||
from datetime import datetime
|
||||
import uuid
|
||||
import subprocess
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _setup_subdomain_directory(scan_workspace_dir: str) -> Path:
|
||||
"""
|
||||
创建并验证子域名扫描工作目录
|
||||
|
||||
Args:
|
||||
scan_workspace_dir: 扫描工作空间目录
|
||||
|
||||
Returns:
|
||||
Path: 子域名扫描目录路径
|
||||
|
||||
Raises:
|
||||
RuntimeError: 目录创建或验证失败
|
||||
"""
|
||||
result_dir = Path(scan_workspace_dir) / 'subdomain_discovery'
|
||||
result_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
if not result_dir.is_dir():
|
||||
raise RuntimeError(f"子域名扫描目录创建失败: {result_dir}")
|
||||
if not os.access(result_dir, os.W_OK):
|
||||
raise RuntimeError(f"子域名扫描目录不可写: {result_dir}")
|
||||
|
||||
return result_dir
|
||||
|
||||
|
||||
def _validate_and_normalize_target(target_name: str) -> str:
    """
    Validate and normalize a target domain.

    Args:
        target_name: Raw target domain.

    Returns:
        str: Normalized domain.

    Raises:
        ValueError: If the domain is invalid.

    Example:
        >>> _validate_and_normalize_target('EXAMPLE.COM')
        'example.com'
        >>> _validate_and_normalize_target('http://example.com')
        'example.com'
    """
    try:
        normalized = normalize_domain(target_name)
        validate_domain(normalized)
        logger.debug("域名验证通过: %s -> %s", target_name, normalized)
        return normalized
    except ValueError as e:
        # Wrap with the original input for context; keep the cause chain
        error_msg = f"无效的目标域名: {target_name} - {e}"
        logger.error(error_msg)
        raise ValueError(error_msg) from e
|
||||
|
||||
|
||||
def _run_scans_parallel(
    enabled_tools: dict,
    domain_name: str,
    result_dir: Path
) -> tuple[list, list, list]:
    """Run all enabled subdomain-scanning tools in parallel.

    Args:
        enabled_tools: Enabled tool configs, e.g. {'tool_name': {'timeout': 600, ...}}
        domain_name: Target domain.
        result_dir: Directory where each tool writes its output file.

    Returns:
        tuple: (result_files, failed_tools, successful_tool_names). When every
        tool fails, empty results are returned instead of raising so the
        overall scan can continue.
    """
    # Imported lazily to avoid circular imports at module load time.
    from apps.scan.tasks.subdomain_discovery import run_subdomain_discovery_task

    # One timestamp shared by every tool so their output files sort together.
    timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')

    # TODO: integrate with the proxy-pool management system
    # from apps.proxy.services import proxy_pool
    # proxy_stats = proxy_pool.get_stats()
    # logger.info(f"代理池状态: {proxy_stats['healthy']}/{proxy_stats['total']} 可用")

    failures = []  # human-readable reasons for tools that could not run
    futures = {}

    # 1. Build each tool's command and submit the parallel tasks.
    for tool_name, tool_config in enabled_tools.items():
        # 1.1 Unique absolute output path for this tool run.
        short_uuid = uuid.uuid4().hex[:4]
        output_file = str(result_dir / f"{tool_name}_{timestamp}_{short_uuid}.txt")

        # 1.2 Build the full command (placeholder substitution).
        try:
            command = build_scan_command(
                tool_name=tool_name,
                scan_type='subdomain_discovery',
                command_params={
                    'domain': domain_name,      # substituted for {domain}
                    'output_file': output_file  # substituted for {output_file}
                },
                tool_config=tool_config
            )
        except Exception as e:
            failure_msg = f"{tool_name}: 命令构建失败 - {e}"
            failures.append(failure_msg)
            logger.error(f"构建 {tool_name} 命令失败: {e}")
            continue

        # 1.3 Resolve the timeout. 'auto' maps to a sensible default; a missing
        #     key now also falls back to 600s instead of raising KeyError
        #     (mirrors _run_single_tool's defensive .get()).
        timeout = tool_config.get('timeout', 600)
        if timeout == 'auto':
            # Subdomain-discovery tools usually run long; default to 600s.
            timeout = 600
            logger.info(f"✓ 工具 {tool_name} 使用默认 timeout: {timeout}秒")

        # 1.4 Submit the task.
        logger.debug(
            f"提交任务 - 工具: {tool_name}, 超时: {timeout}s, 输出: {output_file}"
        )

        future = run_subdomain_discovery_task.submit(
            tool=tool_name,
            command=command,
            timeout=timeout,
            output_file=output_file
        )
        futures[tool_name] = future

    # 2. Bail out early when no tool could be submitted at all.
    if not futures:
        logger.warning(
            "所有扫描工具均无法启动 - 目标: %s, 失败详情: %s",
            domain_name, "; ".join(failures)
        )
        # Return empty results instead of raising so the scan continues.
        return [], [{'tool': 'all', 'reason': '所有工具均无法启动'}], []

    # 3. Wait for the parallel tasks and collect their results.
    result_files = []
    failed_tools = []

    for tool_name, future in futures.items():
        try:
            result = future.result()  # file path (str) on success, "" on failure
            if result:
                result_files.append(result)
                logger.info("✓ 扫描工具 %s 执行成功: %s", tool_name, result)
            else:
                failure_msg = f"{tool_name}: 未生成结果文件"
                failures.append(failure_msg)
                failed_tools.append({'tool': tool_name, 'reason': '未生成结果文件'})
                logger.warning("⚠️ 扫描工具 %s 未生成结果文件", tool_name)
        except Exception as e:
            failure_msg = f"{tool_name}: {str(e)}"
            failures.append(failure_msg)
            failed_tools.append({'tool': tool_name, 'reason': str(e)})
            logger.warning("⚠️ 扫描工具 %s 执行失败: %s", tool_name, str(e))

    # 4. Every submitted tool failed after execution.
    if not result_files:
        logger.warning(
            "所有扫描工具均失败 - 目标: %s, 失败详情: %s",
            domain_name, "; ".join(failures)
        )
        # Return empty results instead of raising so the scan continues.
        return [], failed_tools, []

    # 5. Derive the successful tool list (set lookup instead of a nested
    #    list-comprehension membership scan).
    failed_names = {f['tool'] for f in failed_tools}
    successful_tool_names = [name for name in futures if name not in failed_names]

    logger.info(
        "✓ 扫描工具并行执行完成 - 成功: %d/%d (成功: %s, 失败: %s)",
        len(result_files), len(futures),
        ', '.join(successful_tool_names) if successful_tool_names else '无',
        ', '.join([f['tool'] for f in failed_tools]) if failed_tools else '无'
    )

    return result_files, failed_tools, successful_tool_names
|
||||
|
||||
|
||||
def _run_single_tool(
    tool_name: str,
    tool_config: dict,
    command_params: dict,
    result_dir: Path,
    scan_type: str = 'subdomain_discovery'
) -> str:
    """Run a single scan tool synchronously.

    Args:
        tool_name: Tool name.
        tool_config: Tool configuration; 'timeout' may be an int or 'auto'.
        command_params: Command placeholder values. Any caller-supplied
            'output_file' entry is superseded by a freshly generated unique
            path; the caller's dict is no longer mutated.
        result_dir: Directory receiving the tool's output file.
        scan_type: Scan type used to look up the command template.

    Returns:
        str: Output file path on success, empty string on failure.
    """
    from apps.scan.tasks.subdomain_discovery import run_subdomain_discovery_task

    timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
    short_uuid = uuid.uuid4().hex[:4]
    output_file = str(result_dir / f"{tool_name}_{timestamp}_{short_uuid}.txt")

    # Build a copy instead of mutating the caller's dict (the original code
    # wrote into command_params in place, silently clobbering the value the
    # caller passed). The generated output_file always wins, as before.
    params = {**command_params, 'output_file': output_file}

    try:
        command = build_scan_command(
            tool_name=tool_name,
            scan_type=scan_type,
            command_params=params,
            tool_config=tool_config
        )
    except Exception as e:
        logger.error(f"构建 {tool_name} 命令失败: {e}")
        return ""

    # 'auto' and missing values both fall back to one hour.
    timeout = tool_config.get('timeout', 3600)
    if timeout == 'auto':
        timeout = 3600

    logger.info(f"执行 {tool_name}: timeout={timeout}s")

    try:
        result = run_subdomain_discovery_task(
            tool=tool_name,
            command=command,
            timeout=timeout,
            output_file=output_file
        )
        return result if result else ""
    except Exception as e:
        logger.warning(f"{tool_name} 执行失败: {e}")
        return ""
|
||||
|
||||
|
||||
def _count_lines(file_path: str) -> int:
|
||||
"""
|
||||
统计文件非空行数
|
||||
|
||||
Args:
|
||||
file_path: 文件路径
|
||||
|
||||
Returns:
|
||||
int: 非空行数量
|
||||
"""
|
||||
try:
|
||||
with open(file_path, 'r', encoding='utf-8', errors='ignore') as f:
|
||||
return sum(1 for line in f if line.strip())
|
||||
except Exception as e:
|
||||
logger.warning(f"统计文件行数失败: {file_path} - {e}")
|
||||
return 0
|
||||
|
||||
|
||||
def _merge_files(file_list: list, output_file: str) -> str:
    """Merge several result files into one, de-duplicating and sorting lines.

    Args:
        file_list: Input file paths; falsy or missing entries are skipped.
        output_file: Destination path receiving the sorted unique lines.

    Returns:
        str: The destination path.
    """
    unique_domains = set()
    for path in file_list:
        if not path or not Path(path).exists():
            continue
        with open(path, 'r', encoding='utf-8', errors='ignore') as src:
            unique_domains.update(
                stripped for stripped in (ln.strip() for ln in src) if stripped
            )

    with open(output_file, 'w', encoding='utf-8') as dst:
        dst.writelines(f"{domain}\n" for domain in sorted(unique_domains))

    logger.info(f"合并完成: {len(unique_domains)} 个域名 -> {output_file}")
    return output_file
|
||||
|
||||
|
||||
@flow(
    name="subdomain_discovery",
    log_prints=True,
    on_running=[on_scan_flow_running],
    on_completion=[on_scan_flow_completed],
    on_failure=[on_scan_flow_failed],
)
def subdomain_discovery_flow(
    scan_id: int,
    target_name: str,
    target_id: int,
    scan_workspace_dir: str,
    enabled_tools: dict
) -> dict:
    """Subdomain discovery scan flow.

    Pipeline (4 stages):
        Stage 1: passive collection (parallel) - required
        Stage 2: dictionary bruteforce (optional)
        Stage 3: permutation generation + validation (optional) - dnsgen + generic liveness check
        Stage 4: DNS liveness validation (optional)
        Final:   persist results to the database

    Args:
        scan_id: Scan task ID
        target_name: Target name (a domain)
        target_id: Target ID
        scan_workspace_dir: Scan workspace directory (created by the service layer)
        enabled_tools: Scan configuration dict:
            {
                'passive_tools': {...},
                'bruteforce': {...},
                'permutation': {...},
                'resolve': {...}
            }

    Returns:
        dict: Scan result summary (totals, executed tasks, per-tool stats)

    Raises:
        ValueError: Configuration error
        RuntimeError: Execution failure
    """
    try:
        # ==================== Parameter validation ====================
        if scan_id is None:
            raise ValueError("scan_id 不能为空")
        if target_id is None:
            raise ValueError("target_id 不能为空")
        if not scan_workspace_dir:
            raise ValueError("scan_workspace_dir 不能为空")
        if enabled_tools is None:
            raise ValueError("enabled_tools 不能为空")

        scan_config = enabled_tools

        # Skip the scan entirely when no target domain was supplied.
        if not target_name:
            logger.warning("未提供目标域名,跳过子域名发现扫描")
            return _empty_result(scan_id, '', scan_workspace_dir)

        # Import the task functions (lazy, avoids circular imports).
        from apps.scan.tasks.subdomain_discovery import (
            run_subdomain_discovery_task,
            merge_and_validate_task,
            save_domains_task
        )

        # Step 0: prepare the working directory.
        result_dir = _setup_subdomain_directory(scan_workspace_dir)

        # Validate and normalise the target domain; invalid input skips the scan.
        try:
            domain_name = _validate_and_normalize_target(target_name)
        except ValueError as e:
            logger.warning("目标域名无效,跳过子域名发现扫描: %s", e)
            return _empty_result(scan_id, target_name, scan_workspace_dir)

        # Log a banner once validation has passed.
        logger.info(
            "="*60 + "\n" +
            "开始子域名发现扫描\n" +
            f" Scan ID: {scan_id}\n" +
            f" Domain: {domain_name}\n" +
            f" Workspace: {scan_workspace_dir}\n" +
            "="*60
        )

        # Parse the per-stage configuration sections.
        passive_tools = scan_config.get('passive_tools', {})
        bruteforce_config = scan_config.get('bruteforce', {})
        permutation_config = scan_config.get('permutation', {})
        resolve_config = scan_config.get('resolve', {})

        # Keep only the passive tools that are enabled (default: enabled).
        enabled_passive_tools = {
            k: v for k, v in passive_tools.items()
            if v.get('enabled', True)
        }

        executed_tasks = []
        all_result_files = []
        failed_tools = []
        successful_tool_names = []

        # ==================== Stage 1: passive collection (parallel) ====================
        logger.info("=" * 40)
        logger.info("Stage 1: 被动收集(并行)")
        logger.info("=" * 40)

        if enabled_passive_tools:
            logger.info("启用工具: %s", ', '.join(enabled_passive_tools.keys()))
            result_files, stage1_failed, stage1_success = _run_scans_parallel(
                enabled_tools=enabled_passive_tools,
                domain_name=domain_name,
                result_dir=result_dir
            )
            all_result_files.extend(result_files)
            failed_tools.extend(stage1_failed)
            successful_tool_names.extend(stage1_success)
            executed_tasks.extend([f'passive ({tool})' for tool in stage1_success])
        else:
            logger.warning("未启用任何被动收集工具")

        # Merge the Stage 1 results into a single candidate file.
        timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
        current_result = str(result_dir / f"subs_passive_{timestamp}.txt")
        if all_result_files:
            current_result = _merge_files(all_result_files, current_result)
            executed_tasks.append('merge_passive')
        else:
            # Create an empty placeholder file so later stages have an input.
            Path(current_result).touch()
            logger.warning("Stage 1 无结果,创建空文件")

        # ==================== Stage 2: dictionary bruteforce (optional) ====================
        bruteforce_enabled = bruteforce_config.get('enabled', False)
        if bruteforce_enabled:
            logger.info("=" * 40)
            logger.info("Stage 2: 字典爆破")
            logger.info("=" * 40)

            bruteforce_tool_config = bruteforce_config.get('subdomain_bruteforce', {})
            wordlist_name = bruteforce_tool_config.get('wordlist_name', 'dns_wordlist.txt')

            try:
                # Ensure the wordlist exists locally (includes hash verification).
                local_wordlist_path = ensure_wordlist_local(wordlist_name)

                # Fetch the wordlist record so 'auto' timeouts can be sized.
                wordlist_service = WordlistService()
                wordlist = wordlist_service.get_wordlist_by_name(wordlist_name)

                timeout_value = bruteforce_tool_config.get('timeout', 3600)
                if timeout_value == 'auto' and wordlist:
                    # Prefer the recorded line count; fall back to counting the file.
                    line_count = getattr(wordlist, 'line_count', None)
                    if line_count is None:
                        try:
                            with open(local_wordlist_path, 'rb') as f:
                                line_count = sum(1 for _ in f)
                        except OSError:
                            line_count = 0

                    try:
                        line_count_int = int(line_count)
                    except (TypeError, ValueError):
                        line_count_int = 0

                    # 3 seconds per wordlist line, defaulting to one hour.
                    timeout_value = line_count_int * 3 if line_count_int > 0 else 3600
                    bruteforce_tool_config = {
                        **bruteforce_tool_config,
                        'timeout': timeout_value,
                    }
                    logger.info(
                        "subdomain_bruteforce 使用自动 timeout: %s 秒 (字典行数=%s, 3秒/行)",
                        timeout_value,
                        line_count_int,
                    )

                brute_output = str(result_dir / f"subs_brute_{timestamp}.txt")
                brute_result = _run_single_tool(
                    tool_name='subdomain_bruteforce',
                    tool_config=bruteforce_tool_config,
                    command_params={
                        'domain': domain_name,
                        'wordlist': local_wordlist_path,
                        'output_file': brute_output
                    },
                    result_dir=result_dir
                )

                if brute_result:
                    # Merge Stage 1 + Stage 2 candidates.
                    current_result = _merge_files(
                        [current_result, brute_result],
                        str(result_dir / f"subs_merged_{timestamp}.txt")
                    )
                    successful_tool_names.append('subdomain_bruteforce')
                    executed_tasks.append('bruteforce')
                else:
                    failed_tools.append({'tool': 'subdomain_bruteforce', 'reason': '执行失败'})
            except Exception as exc:
                # Wordlist preparation failed; skip bruteforce but continue the scan.
                logger.warning("字典准备失败,跳过字典爆破: %s", exc)
                failed_tools.append({'tool': 'subdomain_bruteforce', 'reason': str(exc)})

        # ==================== Stage 3: permutation + validation (optional) ====================
        permutation_enabled = permutation_config.get('enabled', False)
        if permutation_enabled:
            logger.info("=" * 40)
            logger.info("Stage 3: 变异生成 + 存活验证(流式管道)")
            logger.info("=" * 40)

            permutation_tool_config = permutation_config.get('subdomain_permutation_resolve', {})

            # === Step 3.1: wildcard-DNS sampling check ===
            # Generate a sample 100x the input size and check whether the
            # number of resolved results exceeds 50x the input.
            before_count = _count_lines(current_result)

            # Tunables for the sampling check.
            SAMPLE_MULTIPLIER = 100  # sample size = input count x 100
            EXPANSION_THRESHOLD = 50  # expansion cap = input count x 50
            SAMPLE_TIMEOUT = 7200  # sampling timeout: 2 hours

            sample_size = before_count * SAMPLE_MULTIPLIER
            max_allowed = before_count * EXPANSION_THRESHOLD

            sample_output = str(result_dir / f"subs_permuted_sample_{timestamp}.txt")
            sample_cmd = (
                f"cat {current_result} | dnsgen - | head -n {sample_size} | "
                f"puredns resolve -r /app/backend/resources/resolvers.txt "
                f"--write {sample_output} --wildcard-tests 50 --wildcard-batch 1000000 --quiet"
            )

            logger.info(
                f"泛解析采样检测: 原文件 {before_count} 个, "
                f"采样 {sample_size} 个, 阈值 {max_allowed} 个"
            )

            try:
                subprocess.run(
                    sample_cmd,
                    shell=True,
                    timeout=SAMPLE_TIMEOUT,
                    check=False,
                    capture_output=True
                )
                sample_result_count = _count_lines(sample_output) if Path(sample_output).exists() else 0

                logger.info(
                    f"采样结果: {sample_result_count} 个域名存活 "
                    f"(原文件: {before_count}, 阈值: {max_allowed})"
                )

                if sample_result_count > max_allowed:
                    # Sample exceeded the cap: wildcard DNS detected, skip the
                    # full permutation run.
                    ratio = sample_result_count / before_count if before_count > 0 else sample_result_count
                    logger.warning(
                        f"跳过变异: 采样检测到泛解析 "
                        f"({sample_result_count} > {max_allowed}, 膨胀率 {ratio:.1f}x)"
                    )
                    failed_tools.append({
                        'tool': 'subdomain_permutation_resolve',
                        'reason': f"采样检测到泛解析 (膨胀率 {ratio:.1f}x)"
                    })
                else:
                    # === Step 3.2: sampling passed, run the full permutation ===
                    logger.info("采样检测通过,执行完整变异...")

                    permuted_output = str(result_dir / f"subs_permuted_{timestamp}.txt")

                    permuted_result = _run_single_tool(
                        tool_name='subdomain_permutation_resolve',
                        tool_config=permutation_tool_config,
                        command_params={
                            'input_file': current_result,
                            'output_file': permuted_output,
                        },
                        result_dir=result_dir
                    )

                    if permuted_result:
                        # Merge prior candidates with the verified permutations.
                        current_result = _merge_files(
                            [current_result, permuted_result],
                            str(result_dir / f"subs_with_permuted_{timestamp}.txt")
                        )
                        successful_tool_names.append('subdomain_permutation_resolve')
                        executed_tasks.append('permutation')
                    else:
                        failed_tools.append({'tool': 'subdomain_permutation_resolve', 'reason': '执行失败'})

            except subprocess.TimeoutExpired:
                logger.warning(f"采样检测超时 ({SAMPLE_TIMEOUT}秒),跳过变异")
                failed_tools.append({'tool': 'subdomain_permutation_resolve', 'reason': '采样检测超时'})
            except Exception as e:
                logger.warning(f"采样检测失败: {e},跳过变异")
                failed_tools.append({'tool': 'subdomain_permutation_resolve', 'reason': f'采样检测失败: {e}'})

        # ==================== Stage 4: DNS liveness validation (optional) ====================
        # Runs whenever resolve.enabled is true, independent of Stage 3, and
        # applies one unified DNS check over all current candidate subdomains.
        resolve_enabled = resolve_config.get('enabled', False)
        if resolve_enabled:
            logger.info("=" * 40)
            logger.info("Stage 4: DNS 存活验证")
            logger.info("=" * 40)

            resolve_tool_config = resolve_config.get('subdomain_resolve', {})

            # Size the timeout from the current candidate count (supports timeout: auto).
            timeout_value = resolve_tool_config.get('timeout', 3600)
            if timeout_value == 'auto':
                line_count = 0
                try:
                    with open(current_result, 'rb') as f:
                        line_count = sum(1 for _ in f)
                except OSError:
                    line_count = 0

                try:
                    line_count_int = int(line_count)
                except (TypeError, ValueError):
                    line_count_int = 0

                # 3 seconds per candidate, defaulting to one hour.
                timeout_value = line_count_int * 3 if line_count_int > 0 else 3600
                resolve_tool_config = {
                    **resolve_tool_config,
                    'timeout': timeout_value,
                }
                logger.info(
                    "subdomain_resolve 使用自动 timeout: %s 秒 (候选子域数=%s, 3秒/域名)",
                    timeout_value,
                    line_count_int,
                )

            alive_output = str(result_dir / f"subs_alive_{timestamp}.txt")

            alive_result = _run_single_tool(
                tool_name='subdomain_resolve',
                tool_config=resolve_tool_config,
                command_params={
                    'input_file': current_result,
                    'output_file': alive_output,
                },
                result_dir=result_dir
            )

            if alive_result:
                current_result = alive_result
                successful_tool_names.append('subdomain_resolve')
                executed_tasks.append('resolve')
            else:
                failed_tools.append({'tool': 'subdomain_resolve', 'reason': '执行失败'})

        # ==================== Final: persist to the database ====================
        logger.info("=" * 40)
        logger.info("Final: 保存到数据库")
        logger.info("=" * 40)

        # Final validation pass, then save the domains.
        final_file = merge_and_validate_task(
            result_files=[current_result],
            result_dir=str(result_dir)
        )

        save_result = save_domains_task(
            domains_file=final_file,
            scan_id=scan_id,
            target_id=target_id
        )
        processed_domains = save_result.get('processed_records', 0)
        executed_tasks.append('save_domains')

        logger.info("="*60 + "\n✓ 子域名发现扫描完成\n" + "="*60)

        return {
            'success': True,
            'scan_id': scan_id,
            'target': domain_name,
            'scan_workspace_dir': scan_workspace_dir,
            'total': processed_domains,
            'executed_tasks': executed_tasks,
            'tool_stats': {
                'total': len(enabled_passive_tools) + (1 if bruteforce_enabled else 0) +
                (1 if permutation_enabled else 0) + (1 if resolve_enabled else 0),
                'successful': len(successful_tool_names),
                'failed': len(failed_tools),
                'successful_tools': successful_tool_names,
                'failed_tools': failed_tools
            }
        }

    except ValueError as e:
        logger.error("配置错误: %s", e)
        raise
    except RuntimeError as e:
        logger.error("运行时错误: %s", e)
        raise
    except Exception as e:
        logger.exception("子域名发现扫描失败: %s", e)
        raise
|
||||
|
||||
|
||||
def _empty_result(scan_id: int, target: str, scan_workspace_dir: str) -> dict:
|
||||
"""返回空结果"""
|
||||
return {
|
||||
'success': True,
|
||||
'scan_id': scan_id,
|
||||
'target': target,
|
||||
'scan_workspace_dir': scan_workspace_dir,
|
||||
'total': 0,
|
||||
'executed_tasks': [],
|
||||
'tool_stats': {
|
||||
'total': 0,
|
||||
'successful': 0,
|
||||
'failed': 0,
|
||||
'successful_tools': [],
|
||||
'failed_tools': []
|
||||
}
|
||||
}
|
||||
@@ -1,169 +0,0 @@
|
||||
"""
|
||||
基于 domain_name(域名)的 URL 获取 Flow
|
||||
|
||||
主要用于像 waymore 这种按域名输入(input_type = 'domain_name')的工具:
|
||||
- 直接对目标域名(target_name/domain_name)执行 URL 被动收集
|
||||
- 不再依赖 domains_file(子域名列表文件)
|
||||
"""
|
||||
|
||||
# Django 环境初始化
|
||||
from apps.common.prefect_django_setup import setup_django_for_prefect
|
||||
|
||||
import logging
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import Dict
|
||||
|
||||
from prefect import flow
|
||||
|
||||
from apps.common.validators import validate_domain
|
||||
from apps.scan.tasks.url_fetch import run_url_fetcher_task
|
||||
from apps.scan.utils import build_scan_command
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@flow(name="domain_name_url_fetch_flow", log_prints=True)
def domain_name_url_fetch_flow(
    scan_id: int,
    target_id: int,
    target_name: str,
    output_dir: str,
    domain_name_tools: Dict[str, dict],
) -> dict:
    """
    URL-fetch sub-flow driven by target_name/domain_name (currently mainly waymore).

    Steps:
        1. Validate that target_name is a domain
        2. Use the supplied domain_name_tools tool map
        3. Build a command per tool and submit them in parallel
        4. Aggregate the list of result files

    Args:
        scan_id: Scan ID (accepted for interface symmetry; not used directly here)
        target_id: Target ID (accepted for interface symmetry; not used directly here)
        target_name: Target domain fed to each tool
        output_dir: Directory where per-tool output files are written
        domain_name_tools: {tool_name: tool_config} for domain-input URL tools

    Returns:
        dict: {'success', 'result_files', 'failed_tools', 'successful_tools'};
        on any unexpected failure 'success' is False and the flow itself is
        reported as the failed tool.
    """
    try:
        output_path = Path(output_dir)
        output_path.mkdir(parents=True, exist_ok=True)

        # Reuse the shared domain validator to ensure target_name is a valid domain.
        validate_domain(target_name)

        logger.info(
            "开始基于 domain_name 的 URL 获取 - Target: %s, Tools: %s",
            target_name,
            ", ".join(domain_name_tools.keys()) if domain_name_tools else "无",
        )

        futures: dict[str, object] = {}
        failed_tools: list[dict] = []

        # Submit one URL-fetch task per domain-input tool.
        for tool_name, tool_config in domain_name_tools.items():
            # Unique output path per tool run (timestamp + short uuid).
            timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
            short_uuid = uuid.uuid4().hex[:4]
            output_file = str(output_path / f"{tool_name}_{timestamp}_{short_uuid}.txt")

            command_params = {
                "domain_name": target_name,
                "output_file": output_file,
            }

            try:
                command = build_scan_command(
                    tool_name=tool_name,
                    scan_type="url_fetch",
                    command_params=command_params,
                    tool_config=tool_config,
                )
            except Exception as e:
                logger.error("构建 %s 命令失败: %s", tool_name, e)
                failed_tools.append({"tool": tool_name, "reason": f"命令构建失败: {e}"})
                continue

            # Timeout resolution: in domain_name mode there is no line count to
            # size against, so 'auto' falls back to a fixed timeout.
            raw_timeout = tool_config.get("timeout", 3600)
            timeout = 3600
            if isinstance(raw_timeout, str) and raw_timeout == "auto":
                timeout = 3600
                logger.info(
                    "工具 %s 使用固定自动超时: %d 秒 (domain_name 模式)",
                    tool_name,
                    timeout,
                )
            else:
                try:
                    timeout = int(raw_timeout)
                except (TypeError, ValueError):
                    # Invalid timeout config: warn and use the 3600s default.
                    logger.warning(
                        "工具 %s 的 timeout 配置无效(%s),将使用默认 3600 秒",
                        tool_name,
                        raw_timeout,
                    )
                    timeout = 3600

            logger.info(
                "提交任务 - 工具: %s, domain_name: %s, 超时: %d秒",
                tool_name,
                target_name,
                timeout,
            )

            future = run_url_fetcher_task.submit(
                tool_name=tool_name,
                command=command,
                timeout=timeout,
                output_file=output_file,
            )
            futures[tool_name] = future

        result_files: list[str] = []
        successful_tools: list[str] = []

        # Collect each submitted task's outcome.
        for tool_name, future in futures.items():
            try:
                result = future.result()
                if result and result.get("success"):
                    result_files.append(result["output_file"])
                    successful_tools.append(tool_name)
                    logger.info(
                        "✓ 工具 %s 执行成功 - 发现 URL: %d",
                        tool_name,
                        result.get("url_count", 0),
                    )
                else:
                    failed_tools.append(
                        {
                            "tool": tool_name,
                            "reason": "未生成结果或无有效 URL",
                        }
                    )
                    logger.warning("⚠️ 工具 %s 未生成有效结果", tool_name)
            except Exception as e:
                failed_tools.append({"tool": tool_name, "reason": str(e)})
                logger.warning("⚠️ 工具 %s 执行失败: %s", tool_name, e)

        logger.info(
            "基于 domain_name 的 URL 获取完成 - 成功工具: %s, 失败工具: %s",
            successful_tools or "无",
            [f["tool"] for f in failed_tools] or "无",
        )

        return {
            "success": True,
            "result_files": result_files,
            "failed_tools": failed_tools,
            "successful_tools": successful_tools,
        }

    except Exception as e:
        # Any unexpected error is reported as a failure of the flow itself.
        logger.error("domain_name URL 获取失败: %s", e, exc_info=True)
        return {
            "success": False,
            "result_files": [],
            "failed_tools": [
                {"tool": "domain_name_url_fetch_flow", "reason": str(e)},
            ],
            "successful_tools": [],
        }
|
||||
@@ -1,139 +0,0 @@
|
||||
"""
|
||||
URL 被动收集 Flow
|
||||
|
||||
从历史归档、搜索引擎等被动来源收集 URL
|
||||
工具:waymore, gau, waybackurls 等
|
||||
输入:domains_file(子域名列表)
|
||||
"""
|
||||
|
||||
# Django 环境初始化
|
||||
from apps.common.prefect_django_setup import setup_django_for_prefect
|
||||
|
||||
import logging
|
||||
from pathlib import Path
|
||||
|
||||
from prefect import flow
|
||||
|
||||
from .utils import run_tools_parallel
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _export_domains_file(target_id: int, scan_id: int, output_dir: Path) -> tuple[str, int]:
    """Export the target's subdomain list to a file.

    Args:
        target_id: Target ID.
        scan_id: Scan ID.
        output_dir: Directory receiving ``domains.txt``.

    Returns:
        tuple: (file_path, count) — path of the exported file and the number
        of exported subdomains.
    """
    from apps.scan.tasks.url_fetch import export_target_assets_task

    domains_path = str(output_dir / "domains.txt")
    export_result = export_target_assets_task(
        output_file=domains_path,
        target_id=target_id,
        scan_id=scan_id,
        input_type="domains_file"
    )

    exported = export_result['asset_count']
    if exported == 0:
        logger.warning("子域名列表为空,被动收集可能无法正常工作")
    else:
        logger.info("✓ 子域名列表导出完成 - 数量: %d", exported)

    return domains_path, exported
|
||||
|
||||
|
||||
@flow(name="domains_url_fetch_flow", log_prints=True)
def domains_url_fetch_flow(
    scan_id: int,
    target_id: int,
    target_name: str,
    output_dir: str,
    enabled_tools: dict
) -> dict:
    """
    URL passive-collection sub-flow.

    Steps:
        1. Export the subdomain list (domains_file)
        2. Run the passive-collection tools in parallel
        3. Return the list of result files

    Args:
        scan_id: Scan ID
        target_id: Target ID
        target_name: Target name
        output_dir: Output directory
        enabled_tools: Enabled passive-collection tool configs

    Returns:
        dict: {
            'success': bool,
            'result_files': list,
            'failed_tools': list,
            'successful_tools': list,
            'domains_count': int
        }
    """
    try:
        workspace = Path(output_dir)

        logger.info(
            "开始 URL 被动收集 - Target: %s, Tools: %s",
            target_name, ', '.join(enabled_tools.keys())
        )

        # Step 1: export the subdomain list for the tools to consume.
        domains_file, domains_count = _export_domains_file(
            target_id=target_id,
            scan_id=scan_id,
            output_dir=workspace
        )

        # No subdomains means nothing to collect from; short-circuit.
        if domains_count == 0:
            logger.warning("没有可用的子域名,跳过被动收集")
            return {
                'success': True,
                'result_files': [],
                'failed_tools': [],
                'successful_tools': [],
                'domains_count': 0
            }

        # Step 2: run all passive-collection tools in parallel.
        result_files, failed_tools, successful_tools = run_tools_parallel(
            tools=enabled_tools,
            input_file=domains_file,
            input_type="domains_file",
            output_dir=workspace
        )

        logger.info(
            "✓ 被动收集完成 - 成功: %d/%d, 结果文件: %d",
            len(successful_tools), len(enabled_tools), len(result_files)
        )

        return {
            'success': True,
            'result_files': result_files,
            'failed_tools': failed_tools,
            'successful_tools': successful_tools,
            'domains_count': domains_count
        }

    except Exception as e:
        # Report the flow itself as the failed tool on unexpected errors.
        logger.error("URL 被动收集失败: %s", e, exc_info=True)
        return {
            'success': False,
            'result_files': [],
            'failed_tools': [{'tool': 'domains_url_fetch_flow', 'reason': str(e)}],
            'successful_tools': [],
            'domains_count': 0
        }
|
||||
@@ -1,242 +0,0 @@
|
||||
from apps.common.prefect_django_setup import setup_django_for_prefect
|
||||
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import Dict
|
||||
|
||||
from prefect import flow
|
||||
|
||||
from apps.scan.handlers.scan_flow_handlers import (
|
||||
on_scan_flow_running,
|
||||
on_scan_flow_completed,
|
||||
on_scan_flow_failed,
|
||||
)
|
||||
from apps.scan.utils import build_scan_command, ensure_nuclei_templates_local
|
||||
from apps.scan.tasks.vuln_scan import (
|
||||
export_endpoints_task,
|
||||
run_vuln_tool_task,
|
||||
run_and_stream_save_dalfox_vulns_task,
|
||||
run_and_stream_save_nuclei_vulns_task,
|
||||
)
|
||||
from .utils import calculate_timeout_by_line_count
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _setup_vuln_scan_directory(scan_workspace_dir: str) -> Path:
|
||||
vuln_scan_dir = Path(scan_workspace_dir) / "vuln_scan"
|
||||
vuln_scan_dir.mkdir(parents=True, exist_ok=True)
|
||||
return vuln_scan_dir
|
||||
|
||||
|
||||
@flow(
|
||||
name="endpoints_vuln_scan_flow",
|
||||
log_prints=True,
|
||||
)
|
||||
def endpoints_vuln_scan_flow(
|
||||
scan_id: int,
|
||||
target_name: str,
|
||||
target_id: int,
|
||||
scan_workspace_dir: str,
|
||||
enabled_tools: Dict[str, dict],
|
||||
) -> dict:
|
||||
"""基于 Endpoint 的漏洞扫描 Flow(串行执行 Dalfox 等工具)。"""
|
||||
try:
|
||||
if scan_id is None:
|
||||
raise ValueError("scan_id 不能为空")
|
||||
if not target_name:
|
||||
raise ValueError("target_name 不能为空")
|
||||
if target_id is None:
|
||||
raise ValueError("target_id 不能为空")
|
||||
if not scan_workspace_dir:
|
||||
raise ValueError("scan_workspace_dir 不能为空")
|
||||
if not enabled_tools:
|
||||
raise ValueError("enabled_tools 不能为空")
|
||||
|
||||
vuln_scan_dir = _setup_vuln_scan_directory(scan_workspace_dir)
|
||||
endpoints_file = vuln_scan_dir / "input_endpoints.txt"
|
||||
|
||||
# Step 1: 导出 Endpoint URL
|
||||
export_result = export_endpoints_task(
|
||||
target_id=target_id,
|
||||
output_file=str(endpoints_file),
|
||||
)
|
||||
total_endpoints = export_result.get("total_count", 0)
|
||||
|
||||
if total_endpoints == 0 or not endpoints_file.exists() or endpoints_file.stat().st_size == 0:
|
||||
logger.warning("目标下没有可用 Endpoint,跳过漏洞扫描")
|
||||
return {
|
||||
"success": True,
|
||||
"scan_id": scan_id,
|
||||
"target": target_name,
|
||||
"scan_workspace_dir": scan_workspace_dir,
|
||||
"endpoints_file": str(endpoints_file),
|
||||
"endpoint_count": 0,
|
||||
"executed_tools": [],
|
||||
"tool_results": {},
|
||||
}
|
||||
|
||||
logger.info("Endpoint 导出完成,共 %d 条,开始执行漏洞扫描", total_endpoints)
|
||||
|
||||
tool_results: Dict[str, dict] = {}
|
||||
|
||||
# Step 2: 并行执行每个漏洞扫描工具(目前主要是 Dalfox)
|
||||
# 1)先为每个工具 submit Prefect Task,让 Worker 并行调度
|
||||
# 2)再统一收集各自的结果,组装成 tool_results
|
||||
tool_futures: Dict[str, dict] = {}
|
||||
|
||||
for tool_name, tool_config in enabled_tools.items():
|
||||
# Nuclei 需要先确保本地模板存在(支持多个模板仓库)
|
||||
template_args = ""
|
||||
if tool_name == "nuclei":
|
||||
repo_names = tool_config.get("template_repo_names")
|
||||
if not repo_names or not isinstance(repo_names, (list, tuple)):
|
||||
logger.error("Nuclei 配置缺少 template_repo_names(数组),跳过")
|
||||
continue
|
||||
template_paths = []
|
||||
try:
|
||||
for repo_name in repo_names:
|
||||
path = ensure_nuclei_templates_local(repo_name)
|
||||
template_paths.append(path)
|
||||
logger.info("Nuclei 模板路径 [%s]: %s", repo_name, path)
|
||||
except Exception as e:
|
||||
logger.error("获取 Nuclei 模板失败: %s,跳过 nuclei 扫描", e)
|
||||
continue
|
||||
template_args = " ".join(f"-t {p}" for p in template_paths)
|
||||
|
||||
# 构建命令参数
|
||||
command_params = {"endpoints_file": str(endpoints_file)}
|
||||
if template_args:
|
||||
command_params["template_args"] = template_args
|
||||
|
||||
command = build_scan_command(
|
||||
tool_name=tool_name,
|
||||
scan_type="vuln_scan",
|
||||
command_params=command_params,
|
||||
tool_config=tool_config,
|
||||
)
|
||||
|
||||
raw_timeout = tool_config.get("timeout", 600)
|
||||
timeout = 600
|
||||
|
||||
if isinstance(raw_timeout, str) and raw_timeout == "auto":
|
||||
# timeout=auto 时,根据 endpoints_file 行数自动计算超时时间
|
||||
# Dalfox: 每行 100 秒,Nuclei: 每行 30 秒
|
||||
base_per_time = 30 if tool_name == "nuclei" else 100
|
||||
timeout = calculate_timeout_by_line_count(
|
||||
tool_config=tool_config,
|
||||
file_path=str(endpoints_file),
|
||||
base_per_time=base_per_time,
|
||||
)
|
||||
else:
|
||||
try:
|
||||
timeout = int(raw_timeout)
|
||||
except (TypeError, ValueError) as e:
|
||||
# 配置错误应当直接暴露,避免默默使用默认值导致排查困难
|
||||
raise ValueError(
|
||||
f"工具 {tool_name} 的 timeout 配置无效: {raw_timeout!r}"
|
||||
) from e
|
||||
|
||||
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
|
||||
log_file = vuln_scan_dir / f"{tool_name}_{timestamp}.log"
|
||||
|
||||
# Dalfox XSS 使用流式任务,一边解析一边保存漏洞结果
|
||||
if tool_name == "dalfox_xss":
|
||||
logger.info("开始执行漏洞扫描工具 %s(流式保存漏洞结果,已提交任务)", tool_name)
|
||||
future = run_and_stream_save_dalfox_vulns_task.submit(
|
||||
cmd=command,
|
||||
tool_name=tool_name,
|
||||
scan_id=scan_id,
|
||||
target_id=target_id,
|
||||
cwd=str(vuln_scan_dir),
|
||||
shell=True,
|
||||
batch_size=1,
|
||||
timeout=timeout,
|
||||
log_file=str(log_file),
|
||||
)
|
||||
|
||||
tool_futures[tool_name] = {
|
||||
"future": future,
|
||||
"command": command,
|
||||
"timeout": timeout,
|
||||
"log_file": str(log_file),
|
||||
"mode": "streaming",
|
||||
}
|
||||
elif tool_name == "nuclei":
|
||||
# Nuclei 使用流式任务
|
||||
logger.info("开始执行漏洞扫描工具 %s(流式保存漏洞结果,已提交任务)", tool_name)
|
||||
future = run_and_stream_save_nuclei_vulns_task.submit(
|
||||
cmd=command,
|
||||
tool_name=tool_name,
|
||||
scan_id=scan_id,
|
||||
target_id=target_id,
|
||||
cwd=str(vuln_scan_dir),
|
||||
shell=True,
|
||||
batch_size=10,
|
||||
timeout=timeout,
|
||||
log_file=str(log_file),
|
||||
)
|
||||
|
||||
tool_futures[tool_name] = {
|
||||
"future": future,
|
||||
"command": command,
|
||||
"timeout": timeout,
|
||||
"log_file": str(log_file),
|
||||
"mode": "streaming",
|
||||
}
|
||||
else:
|
||||
# 其他工具仍使用非流式执行逻辑
|
||||
logger.info("开始执行漏洞扫描工具 %s(已提交任务)", tool_name)
|
||||
future = run_vuln_tool_task.submit(
|
||||
tool_name=tool_name,
|
||||
command=command,
|
||||
timeout=timeout,
|
||||
log_file=str(log_file),
|
||||
)
|
||||
|
||||
tool_futures[tool_name] = {
|
||||
"future": future,
|
||||
"command": command,
|
||||
"timeout": timeout,
|
||||
"log_file": str(log_file),
|
||||
"mode": "normal",
|
||||
}
|
||||
|
||||
# 统一收集所有工具的执行结果
|
||||
for tool_name, meta in tool_futures.items():
|
||||
future = meta["future"]
|
||||
result = future.result()
|
||||
|
||||
if meta["mode"] == "streaming":
|
||||
tool_results[tool_name] = {
|
||||
"command": meta["command"],
|
||||
"timeout": meta["timeout"],
|
||||
"processed_records": result.get("processed_records"),
|
||||
"created_vulns": result.get("created_vulns"),
|
||||
"command_log_file": meta["log_file"],
|
||||
}
|
||||
else:
|
||||
tool_results[tool_name] = {
|
||||
"command": meta["command"],
|
||||
"timeout": meta["timeout"],
|
||||
"duration": result.get("duration"),
|
||||
"returncode": result.get("returncode"),
|
||||
"command_log_file": result.get("command_log_file"),
|
||||
}
|
||||
|
||||
return {
|
||||
"success": True,
|
||||
"scan_id": scan_id,
|
||||
"target": target_name,
|
||||
"scan_workspace_dir": scan_workspace_dir,
|
||||
"endpoints_file": str(endpoints_file),
|
||||
"endpoint_count": total_endpoints,
|
||||
"executed_tools": list(enabled_tools.keys()),
|
||||
"tool_results": tool_results,
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.exception("Endpoint 漏洞扫描失败: %s", e)
|
||||
raise
|
||||
@@ -1,107 +0,0 @@
|
||||
from apps.common.prefect_django_setup import setup_django_for_prefect
|
||||
|
||||
import logging
|
||||
from typing import Dict, Tuple
|
||||
|
||||
from prefect import flow
|
||||
|
||||
from apps.scan.handlers.scan_flow_handlers import (
|
||||
on_scan_flow_running,
|
||||
on_scan_flow_completed,
|
||||
on_scan_flow_failed,
|
||||
)
|
||||
from apps.scan.configs.command_templates import get_command_template
|
||||
from .endpoints_vuln_scan_flow import endpoints_vuln_scan_flow
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _classify_vuln_tools(enabled_tools: Dict[str, dict]) -> Tuple[Dict[str, dict], Dict[str, dict]]:
    """Split vulnerability-scan tools by the input type their command template declares.

    Currently supported:
    - ``endpoints_file``: the tool consumes an endpoint list file (e.g. Dalfox XSS)
    Reserved:
    - any other ``input_type`` is collected into the second mapping and is
      not processed for now.

    Args:
        enabled_tools: mapping of tool name -> tool configuration.

    Returns:
        A ``(endpoints_tools, other_tools)`` pair of tool-name -> config maps.
    """
    endpoints_tools: Dict[str, dict] = {}
    other_tools: Dict[str, dict] = {}

    for name, config in enabled_tools.items():
        template = get_command_template("vuln_scan", name) or {}
        # Tools that do not declare an input_type default to endpoints_file.
        input_type = template.get("input_type", "endpoints_file")
        bucket = endpoints_tools if input_type == "endpoints_file" else other_tools
        bucket[name] = config

    return endpoints_tools, other_tools
|
||||
|
||||
|
||||
@flow(
    name="vuln_scan",
    log_prints=True,
    on_running=[on_scan_flow_running],
    on_completion=[on_scan_flow_completed],
    on_failure=[on_scan_flow_failed],
)
def vuln_scan_flow(
    scan_id: int,
    target_name: str,
    target_id: int,
    scan_workspace_dir: str,
    enabled_tools: Dict[str, dict],
) -> dict:
    """Main vulnerability-scan flow: orchestrates vuln-scan sub-flows serially.

    Supported tools:
    - dalfox_xss: XSS scanning (streaming result persistence)
    - nuclei: generic vulnerability scanning (streaming, with template
      commit-hash synchronization)

    Args:
        scan_id: primary key of the Scan record.
        target_name: human-readable target name (used in logs/notifications).
        target_id: primary key of the Target record.
        scan_workspace_dir: workspace directory for this scan run.
        enabled_tools: mapping of tool name -> tool configuration.

    Returns:
        The result dict of the endpoints vuln-scan sub-flow.

    Raises:
        ValueError: if a required argument is missing/empty, or if no
            endpoints_file-based tool is enabled.
    """
    try:
        # Fail fast on missing or empty arguments.
        if scan_id is None:
            raise ValueError("scan_id 不能为空")
        if not target_name:
            raise ValueError("target_name 不能为空")
        if target_id is None:
            raise ValueError("target_id 不能为空")
        if not scan_workspace_dir:
            raise ValueError("scan_workspace_dir 不能为空")
        if not enabled_tools:
            raise ValueError("enabled_tools 不能为空")

        # Step 1: classify tools by declared input type.
        supported, unsupported = _classify_vuln_tools(enabled_tools)

        logger.info(
            "漏洞扫描工具分类 - endpoints_file: %s, 其他: %s",
            list(supported.keys()) or "无",
            list(unsupported.keys()) or "无",
        )

        if unsupported:
            logger.warning(
                "存在暂不支持输入类型的漏洞扫描工具,将被忽略: %s",
                list(unsupported.keys()),
            )

        if not supported:
            raise ValueError("漏洞扫描需要至少启用一个以 endpoints_file 为输入的工具(如 dalfox_xss、nuclei)。")

        # Step 2: run the endpoint vuln-scan sub-flow (serial).
        # There is only one sub-flow today, so its result is returned as-is.
        return endpoints_vuln_scan_flow(
            scan_id=scan_id,
            target_name=target_name,
            target_id=target_id,
            scan_workspace_dir=scan_workspace_dir,
            enabled_tools=supported,
        )

    except Exception as e:
        logger.exception("漏洞扫描主 Flow 失败: %s", e)
        raise
|
||||
@@ -1,182 +0,0 @@
|
||||
"""
|
||||
扫描流程处理器
|
||||
|
||||
负责处理扫描流程(端口扫描、子域名发现等)的状态变化和通知
|
||||
|
||||
职责:
|
||||
- 更新各阶段的进度状态(running/completed/failed)
|
||||
- 发送扫描阶段的通知
|
||||
- 记录 Flow 性能指标
|
||||
"""
|
||||
|
||||
import logging
|
||||
from prefect import Flow
|
||||
from prefect.client.schemas import FlowRun, State
|
||||
|
||||
from apps.scan.utils.performance import FlowPerformanceTracker
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Per-flow-run performance trackers, keyed by str(flow_run.id).
# Filled in on_scan_flow_running and popped by the completed/failed callbacks.
_flow_trackers: dict[str, FlowPerformanceTracker] = {}
|
||||
|
||||
|
||||
def _get_stage_from_flow_name(flow_name: str) -> str | None:
|
||||
"""
|
||||
从 Flow name 获取对应的 stage
|
||||
|
||||
Flow name 直接作为 stage(与 engine_config 的 key 一致)
|
||||
排除主 Flow(initiate_scan)
|
||||
"""
|
||||
# 排除主 Flow,它不是阶段 Flow
|
||||
if flow_name == 'initiate_scan':
|
||||
return None
|
||||
return flow_name
|
||||
|
||||
|
||||
def on_scan_flow_running(flow: Flow, flow_run: FlowRun, state: State) -> None:
    """Callback fired when a scan flow enters the Running state.

    Responsibilities:
    - start a performance tracker for this flow run
    - mark the corresponding stage as running

    Args:
        flow: Prefect Flow object.
        flow_run: the flow run instance.
        state: the flow's current state.
    """
    logger.info("🚀 扫描流程开始运行 - Flow: %s, Run ID: %s", flow.name, flow_run.id)

    # Pull identifying parameters off the flow run.
    params = flow_run.parameters or {}
    scan_id = params.get('scan_id')
    target_name = params.get('target_name', 'unknown')
    target_id = params.get('target_id')

    # Begin collecting performance metrics for this run.
    if scan_id:
        tracker = FlowPerformanceTracker(flow.name, scan_id)
        tracker.start(target_id=target_id, target_name=target_name)
        _flow_trackers[str(flow_run.id)] = tracker

    # Only stage flows with a known scan get a progress update.
    stage = _get_stage_from_flow_name(flow.name)
    if not (scan_id and stage):
        return

    try:
        from apps.scan.services import ScanService
        ScanService().start_stage(scan_id, stage)
        logger.info(f"✓ 阶段进度已更新为 running - Scan ID: {scan_id}, Stage: {stage}")
    except Exception as e:
        logger.error(f"更新阶段进度失败 - Scan ID: {scan_id}, Stage: {stage}: {e}")
|
||||
|
||||
|
||||
def on_scan_flow_completed(flow: Flow, flow_run: FlowRun, state: State) -> None:
    """
    Callback fired when a scan flow completes successfully.

    Responsibilities:
    - record performance metrics for the run
    - mark the corresponding stage as completed
    - refresh cached statistics so the frontend sees incremental progress

    Args:
        flow: Prefect Flow object.
        flow_run: the flow run instance.
        state: the flow's final state.
    """
    logger.info("✅ 扫描流程完成 - Flow: %s, Run ID: %s", flow.name, flow_run.id)

    # Extract flow parameters.
    flow_params = flow_run.parameters or {}
    scan_id = flow_params.get('scan_id')

    # Best-effort retrieval of the flow's return value.
    # FIX: the previous guard `state.result() if state.result else None` tested
    # the bound method object, which is always truthy — a dead condition.
    # Calling the method directly inside try/except is the intended behavior.
    result = None
    try:
        result = state.result()
    except Exception:
        # Result may be unavailable or unresolvable; treat it as absent.
        pass

    # Finalize performance tracking for this run.
    tracker = _flow_trackers.pop(str(flow_run.id), None)
    if tracker:
        tracker.finish(success=True)

    # Update stage progress to completed.
    stage = _get_stage_from_flow_name(flow.name)
    if scan_id and stage:
        try:
            from apps.scan.services import ScanService
            service = ScanService()
            # Extract the optional detail payload from the flow result (if any).
            detail = None
            if isinstance(result, dict):
                detail = result.get('detail')
            service.complete_stage(scan_id, stage, detail)
            logger.info(f"✓ 阶段进度已更新为 completed - Scan ID: {scan_id}, Stage: {stage}")
            # Refresh cached stats after each stage so the UI shows increments.
            try:
                service.update_cached_stats(scan_id)
                logger.info("✓ 阶段完成后已刷新缓存统计 - Scan ID: %s", scan_id)
            except Exception as e:
                logger.error("阶段完成后刷新缓存统计失败 - Scan ID: %s, 错误: %s", scan_id, e)
        except Exception as e:
            logger.error(f"更新阶段进度失败 - Scan ID: {scan_id}, Stage: {stage}: {e}")
|
||||
|
||||
|
||||
def on_scan_flow_failed(flow: Flow, flow_run: FlowRun, state: State) -> None:
    """
    Callback fired when a scan flow fails.

    Responsibilities:
    - mark the corresponding stage as failed
    - send a failure notification
    - record performance metrics (including the error message)

    Args:
        flow: Prefect Flow object.
        flow_run: the flow run instance.
        state: the flow's final state.
    """
    logger.info("❌ 扫描流程失败 - Flow: %s, Run ID: %s", flow.name, flow_run.id)

    # Extract flow parameters.
    flow_params = flow_run.parameters or {}
    scan_id = flow_params.get('scan_id')
    target_name = flow_params.get('target_name', 'unknown')

    # Extract the error message from the state.
    error_message = str(state.message) if state.message else "未知错误"

    # Finalize performance tracking with the failure details.
    tracker = _flow_trackers.pop(str(flow_run.id), None)
    if tracker:
        tracker.finish(success=False, error_message=error_message)

    # Update stage progress to failed.
    stage = _get_stage_from_flow_name(flow.name)
    if scan_id and stage:
        try:
            from apps.scan.services import ScanService
            service = ScanService()
            service.fail_stage(scan_id, stage, error_message)
            logger.info(f"✓ 阶段进度已更新为 failed - Scan ID: {scan_id}, Stage: {stage}")
        except Exception as e:
            logger.error(f"更新阶段进度失败 - Scan ID: {scan_id}, Stage: {stage}: {e}")

    # Send the failure notification.
    try:
        from apps.scan.notifications import create_notification, NotificationLevel
        message = f"任务:{flow.name}\n状态:执行失败\n错误:{error_message}"
        create_notification(
            title=target_name,
            message=message,
            level=NotificationLevel.HIGH
        )
        # FIX: this line reports that the notification was sent successfully,
        # so it belongs at info level, not error.
        logger.info(f"✓ 扫描失败通知已发送 - Target: {target_name}, Flow: {flow.name}, Error: {error_message}")
    except Exception as e:
        logger.error(f"发送扫描失败通知失败 - Flow: {flow.name}: {e}")
|
||||
@@ -1,567 +0,0 @@
|
||||
"""
|
||||
生成测试数据的管理命令
|
||||
|
||||
用法:
|
||||
python manage.py generate_test_data --target test.com --count 100000
|
||||
|
||||
性能测试:
|
||||
python manage.py generate_test_data --target test.com --count 10000 --batch-size 500 --benchmark
|
||||
"""
|
||||
|
||||
import random
|
||||
import string
|
||||
import time
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.db import transaction, connection
|
||||
from django.utils import timezone
|
||||
from apps.targets.models import Target
|
||||
from apps.scan.models import Scan
|
||||
from apps.asset.models.asset_models import Subdomain, IPAddress, Port, WebSite, Directory
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Management command that bulk-generates test assets for one target.

    Creates subdomains, IP addresses, ports, websites and directories under
    an existing Target via batched ``bulk_create`` calls, with an optional
    benchmark mode and a batch-size comparison mode.
    """

    help = '为指定目标生成大量测试数据'

    def add_arguments(self, parser):
        """Register CLI options: target domain, row count, batch size, and benchmark modes."""
        parser.add_argument(
            '--target',
            type=str,
            required=True,
            help='目标域名(如 test.com)'
        )
        parser.add_argument(
            '--count',
            type=int,
            default=100000,
            help='每个表生成的记录数(默认 100000)'
        )
        parser.add_argument(
            '--batch-size',
            type=int,
            default=1000,
            help='批量插入的批次大小(默认 1000)'
        )
        parser.add_argument(
            '--benchmark',
            action='store_true',
            help='启用性能基准测试模式(显示详细的性能指标)'
        )
        parser.add_argument(
            '--test-batch-sizes',
            action='store_true',
            help='测试不同批次大小的性能(100, 500, 1000, 2000, 5000)'
        )

    def handle(self, *args, **options):
        """Entry point: generate test data for the target named by --target."""
        target_name = options['target']
        count = options['count']
        batch_size = options['batch_size']
        benchmark = options['benchmark']
        test_batch_sizes = options['test_batch_sizes']

        # Batch-size comparison mode: run it and exit early.
        if test_batch_sizes:
            self._test_batch_sizes(target_name, count)
            return

        self.stdout.write(f'\n{"="*60}')
        self.stdout.write(f'  开始生成测试数据')
        self.stdout.write(f'{"="*60}\n')
        self.stdout.write(f'目标: {target_name}')
        self.stdout.write(f'每表记录数: {count:,}')
        self.stdout.write(f'批次大小: {batch_size:,}')
        if benchmark:
            self.stdout.write('模式: 性能基准测试 ⚡')
            self._print_db_info()
        self.stdout.write('')

        # Record the overall start time.
        total_start_time = time.time()

        # 1. Look up the target (it must already exist).
        try:
            target = Target.objects.get(name=target_name)
            self.stdout.write(self.style.SUCCESS(f'✓ 找到目标: {target.name} (ID: {target.id})'))
        except Target.DoesNotExist:
            self.stdout.write(self.style.ERROR(f'✗ 目标不存在: {target_name}'))
            return

        # 2. Create a fresh test Scan record.
        from apps.engine.models import ScanEngine
        engine = ScanEngine.objects.first()
        if not engine:
            self.stdout.write(self.style.ERROR('✗ 没有可用的扫描引擎'))
            return

        scan = Scan.objects.create(
            target=target,
            engine=engine,
            status='completed',
            results_dir=f'/tmp/test_{target_name}_{int(time.time())}'
        )
        self.stdout.write(self.style.SUCCESS(f'✓ 创建新测试扫描任务 (ID: {scan.id})'))

        # 3. Generate subdomains.
        self.stdout.write(f'\n[1/5] 生成 {count:,} 个子域名...')
        subdomains, stats1 = self._generate_subdomains(target, scan, count, batch_size, benchmark)

        # 4. Generate IP addresses.
        self.stdout.write(f'\n[2/5] 生成 {count:,} 个 IP 地址...')
        ips, stats2 = self._generate_ips(target, scan, subdomains, count, batch_size, benchmark)

        # 5. Generate ports.
        self.stdout.write(f'\n[3/5] 生成 {count:,} 个端口...')
        stats3 = self._generate_ports(scan, ips, subdomains, count, batch_size, benchmark)

        # 6. Generate websites.
        self.stdout.write(f'\n[4/5] 生成 {count:,} 个网站...')
        websites, stats4 = self._generate_websites(target, scan, subdomains, count, batch_size, benchmark)

        # 7. Generate directories.
        self.stdout.write(f'\n[5/5] 生成 {count:,} 个目录...')
        stats5 = self._generate_directories(target, scan, websites, count, batch_size, benchmark)

        # Compute the total elapsed time.
        total_time = time.time() - total_start_time

        self.stdout.write(f'\n{"="*60}')
        self.stdout.write(self.style.SUCCESS('  ✓ 测试数据生成完成!'))
        self.stdout.write(f'{"="*60}')
        self.stdout.write(f'总耗时: {total_time:.2f} 秒 ({total_time/60:.2f} 分钟)\n')

        if benchmark:
            self._print_performance_summary([stats1, stats2, stats3, stats4, stats5])

    def _generate_subdomains(self, target, scan, count, batch_size, benchmark=False):
        """Generate subdomains; returns (created objects, stats dict)."""
        subdomains = []
        created_subdomains = []
        start_time = time.time()
        batch_times = []

        for i in range(count):
            # Deterministic, unique subdomain name.
            subdomain_name = f'test-{i:07d}.{target.name}'

            subdomains.append(Subdomain(
                target=target,
                scan=scan,
                name=subdomain_name,
                cname=[],
                is_cdn=random.choice([True, False]),
                cdn_name=random.choice(['', 'cloudflare', 'akamai', 'fastly'])
            ))

            # Flush a full batch.
            if len(subdomains) >= batch_size:
                batch_start = time.time()
                with transaction.atomic():
                    created = Subdomain.objects.bulk_create(subdomains, ignore_conflicts=True)
                    created_subdomains.extend(created)
                batch_time = time.time() - batch_start
                batch_times.append(batch_time)

                if benchmark:
                    speed = len(subdomains) / batch_time
                    self.stdout.write(f'  插入 {len(subdomains):,} 个 | 耗时: {batch_time:.2f}s | 速度: {speed:.0f} 条/秒')
                else:
                    self.stdout.write(f'  插入 {len(subdomains):,} 个子域名... (进度: {i+1:,}/{count:,})')
                subdomains = []

        # Insert the remainder.
        if subdomains:
            with transaction.atomic():
                created = Subdomain.objects.bulk_create(subdomains, ignore_conflicts=True)
                created_subdomains.extend(created)
            self.stdout.write(f'  插入 {len(subdomains):,} 个子域名... (进度: {count:,}/{count:,})')

        total_time = time.time() - start_time
        avg_batch_time = sum(batch_times) / len(batch_times) if batch_times else 0
        total_speed = len(created_subdomains) / total_time if total_time > 0 else 0

        self.stdout.write(self.style.SUCCESS(
            f'  ✓ 完成!共创建 {len(created_subdomains):,} 个 | '
            f'总耗时: {total_time:.2f}s | '
            f'平均速度: {total_speed:.0f} 条/秒'
        ))

        return created_subdomains, {
            'name': '子域名',
            'count': len(created_subdomains),
            'time': total_time,
            'speed': total_speed,
            'avg_batch_time': avg_batch_time
        }

    def _generate_ips(self, target, scan, subdomains, count, batch_size, benchmark=False):
        """Generate IP addresses; returns (created objects, stats dict)."""
        # Re-query subdomain IDs from the database to make sure we have them.
        subdomain_list = list(Subdomain.objects.filter(scan=scan).values_list('id', flat=True))

        ips = []
        created_ips = []
        start_time = time.time()
        batch_times = []

        for i in range(count):
            # Generate a random IP.
            ip_addr = f'192.168.{random.randint(0, 255)}.{random.randint(1, 254)}'
            subdomain_id = random.choice(subdomain_list) if subdomain_list else None

            if subdomain_id:
                ips.append(IPAddress(
                    target=target,
                    scan=scan,
                    subdomain_id=subdomain_id,
                    ip=f'{ip_addr}-{i}',  # suffix guarantees uniqueness
                    protocol_version='IPv4',
                    is_private=True
                ))

            # Flush a full batch.
            if len(ips) >= batch_size:
                batch_start = time.time()
                with transaction.atomic():
                    created = IPAddress.objects.bulk_create(ips, ignore_conflicts=True)
                    created_ips.extend(created)
                batch_time = time.time() - batch_start
                batch_times.append(batch_time)

                if benchmark:
                    speed = len(ips) / batch_time
                    self.stdout.write(f'  插入 {len(ips):,} 个 | 耗时: {batch_time:.2f}s | 速度: {speed:.0f} 条/秒')
                else:
                    self.stdout.write(f'  插入 {len(ips):,} 个 IP 地址... (进度: {i+1:,}/{count:,})')
                ips = []

        # Insert the remainder.
        if ips:
            with transaction.atomic():
                created = IPAddress.objects.bulk_create(ips, ignore_conflicts=True)
                created_ips.extend(created)
            self.stdout.write(f'  插入 {len(ips):,} 个 IP 地址... (进度: {count:,}/{count:,})')

        total_time = time.time() - start_time
        avg_batch_time = sum(batch_times) / len(batch_times) if batch_times else 0
        total_speed = len(created_ips) / total_time if total_time > 0 else 0

        self.stdout.write(self.style.SUCCESS(
            f'  ✓ 完成!共创建 {len(created_ips):,} 个 | '
            f'总耗时: {total_time:.2f}s | '
            f'平均速度: {total_speed:.0f} 条/秒'
        ))

        return created_ips, {
            'name': 'IP地址',
            'count': len(created_ips),
            'time': total_time,
            'speed': total_speed,
            'avg_batch_time': avg_batch_time
        }

    def _generate_ports(self, scan, ips, subdomains, count, batch_size, benchmark=False):
        """Generate ports; returns a stats dict."""
        # Re-query IP and subdomain IDs.
        ip_list = list(IPAddress.objects.filter(scan=scan).values_list('id', flat=True))
        subdomain_list = list(Subdomain.objects.filter(scan=scan).values_list('id', flat=True))

        ports = []
        total_created = 0
        start_time = time.time()
        batch_times = []

        for i in range(count):
            ip_id = random.choice(ip_list) if ip_list else None
            subdomain_id = random.choice(subdomain_list) if subdomain_list else None

            if ip_id:
                ports.append(Port(
                    ip_address_id=ip_id,
                    subdomain_id=subdomain_id,
                    number=random.randint(1, 65535),
                    service_name=random.choice(['http', 'https', 'ssh', 'ftp', 'mysql']),
                    is_uncommon=random.choice([True, False])
                ))

            # Flush a full batch.
            if len(ports) >= batch_size:
                batch_start = time.time()
                with transaction.atomic():
                    Port.objects.bulk_create(ports, ignore_conflicts=True)
                    total_created += len(ports)
                batch_time = time.time() - batch_start
                batch_times.append(batch_time)

                if benchmark:
                    speed = len(ports) / batch_time
                    self.stdout.write(f'  插入 {len(ports):,} 个 | 耗时: {batch_time:.2f}s | 速度: {speed:.0f} 条/秒')
                else:
                    self.stdout.write(f'  插入 {len(ports):,} 个端口... (进度: {i+1:,}/{count:,})')
                ports = []

        # Insert the remainder.
        if ports:
            with transaction.atomic():
                Port.objects.bulk_create(ports, ignore_conflicts=True)
                total_created += len(ports)
            self.stdout.write(f'  插入 {len(ports):,} 个端口... (进度: {count:,}/{count:,})')

        total_time = time.time() - start_time
        avg_batch_time = sum(batch_times) / len(batch_times) if batch_times else 0
        total_speed = total_created / total_time if total_time > 0 else 0

        self.stdout.write(self.style.SUCCESS(
            f'  ✓ 完成!共创建 {total_created:,} 个 | '
            f'总耗时: {total_time:.2f}s | '
            f'平均速度: {total_speed:.0f} 条/秒'
        ))

        return {
            'name': '端口',
            'count': total_created,
            'time': total_time,
            'speed': total_speed,
            'avg_batch_time': avg_batch_time
        }

    def _generate_websites(self, target, scan, subdomains, count, batch_size, benchmark=False):
        """Generate websites; returns (created objects, stats dict)."""
        # Re-query subdomain id/name pairs.
        subdomain_data = list(Subdomain.objects.filter(scan=scan).values('id', 'name'))

        websites = []
        created_websites = []
        start_time = time.time()
        batch_times = []

        for i in range(count):
            subdomain = random.choice(subdomain_data) if subdomain_data else None

            if subdomain:
                protocol = random.choice(['http', 'https'])
                url = f'{protocol}://{subdomain["name"]}'

                websites.append(WebSite(
                    target=target,
                    scan=scan,
                    subdomain_id=subdomain['id'],
                    url=f'{url}?id={i}',  # query param guarantees uniqueness
                    title=f'Test Website {i}',
                    status_code=random.choice([200, 301, 302, 404, 500]),
                    content_length=random.randint(1000, 100000),
                    webserver=random.choice(['nginx', 'apache', 'IIS']),
                    content_type='text/html',
                    tech=['Python', 'Django'] if i % 2 == 0 else ['Node.js', 'React'],
                    vhost=random.choice([True, False, None])
                ))

            # Flush a full batch.
            if len(websites) >= batch_size:
                batch_start = time.time()
                with transaction.atomic():
                    created = WebSite.objects.bulk_create(websites, ignore_conflicts=True)
                    created_websites.extend(created)
                batch_time = time.time() - batch_start
                batch_times.append(batch_time)

                if benchmark:
                    speed = len(websites) / batch_time
                    self.stdout.write(f'  插入 {len(websites):,} 个 | 耗时: {batch_time:.2f}s | 速度: {speed:.0f} 条/秒')
                else:
                    self.stdout.write(f'  插入 {len(websites):,} 个网站... (进度: {i+1:,}/{count:,})')
                websites = []

        # Insert the remainder.
        if websites:
            with transaction.atomic():
                created = WebSite.objects.bulk_create(websites, ignore_conflicts=True)
                created_websites.extend(created)
            self.stdout.write(f'  插入 {len(websites):,} 个网站... (进度: {count:,}/{count:,})')

        total_time = time.time() - start_time
        avg_batch_time = sum(batch_times) / len(batch_times) if batch_times else 0
        total_speed = len(created_websites) / total_time if total_time > 0 else 0

        self.stdout.write(self.style.SUCCESS(
            f'  ✓ 完成!共创建 {len(created_websites):,} 个 | '
            f'总耗时: {total_time:.2f}s | '
            f'平均速度: {total_speed:.0f} 条/秒'
        ))

        return created_websites, {
            'name': '网站',
            'count': len(created_websites),
            'time': total_time,
            'speed': total_speed,
            'avg_batch_time': avg_batch_time
        }

    def _generate_directories(self, target, scan, websites, count, batch_size, benchmark=False):
        """Generate directories; returns a stats dict."""
        # Re-query website id/url pairs.
        website_data = list(WebSite.objects.filter(scan=scan).values('id', 'url'))

        directories = []
        total_created = 0
        start_time = time.time()
        batch_times = []

        for i in range(count):
            website = random.choice(website_data) if website_data else None

            if website:
                path = ''.join(random.choices(string.ascii_lowercase, k=10))

                directories.append(Directory(
                    target=target,
                    scan=scan,
                    website_id=website['id'],
                    url=f'{website["url"]}/dir/{path}/{i}',  # suffix guarantees uniqueness
                    status=random.choice([200, 301, 403, 404]),
                    length=random.randint(1000, 50000),
                    words=random.randint(100, 5000),
                    lines=random.randint(50, 1000),
                    content_type='text/html'
                ))

            # Flush a full batch.
            if len(directories) >= batch_size:
                batch_start = time.time()
                with transaction.atomic():
                    Directory.objects.bulk_create(directories, ignore_conflicts=True)
                    total_created += len(directories)
                batch_time = time.time() - batch_start
                batch_times.append(batch_time)

                if benchmark:
                    speed = len(directories) / batch_time
                    self.stdout.write(f'  插入 {len(directories):,} 个 | 耗时: {batch_time:.2f}s | 速度: {speed:.0f} 条/秒')
                else:
                    self.stdout.write(f'  插入 {len(directories):,} 个目录... (进度: {i+1:,}/{count:,})')
                directories = []

        # Insert the remainder.
        if directories:
            with transaction.atomic():
                Directory.objects.bulk_create(directories, ignore_conflicts=True)
                total_created += len(directories)
            self.stdout.write(f'  插入 {len(directories):,} 个目录... (进度: {count:,}/{count:,})')

        total_time = time.time() - start_time
        avg_batch_time = sum(batch_times) / len(batch_times) if batch_times else 0
        total_speed = total_created / total_time if total_time > 0 else 0

        self.stdout.write(self.style.SUCCESS(
            f'  ✓ 完成!共创建 {total_created:,} 个 | '
            f'总耗时: {total_time:.2f}s | '
            f'平均速度: {total_speed:.0f} 条/秒'
        ))

        return {
            'name': '目录',
            'count': total_created,
            'time': total_time,
            'speed': total_speed,
            'avg_batch_time': avg_batch_time
        }

    def _print_db_info(self):
        """Print database connection information."""
        db_settings = connection.settings_dict
        self.stdout.write(f'\n数据库信息:')
        self.stdout.write(f'  主机: {db_settings["HOST"]}')
        self.stdout.write(f'  端口: {db_settings["PORT"]}')
        self.stdout.write(f'  数据库: {db_settings["NAME"]}')
        self.stdout.write(f'  引擎: {db_settings["ENGINE"].split(".")[-1]}')

    def _print_performance_summary(self, stats_list):
        """Print the per-table performance summary report."""
        self.stdout.write(f'\n{"="*60}')
        self.stdout.write('  性能测试报告')
        self.stdout.write(f'{"="*60}\n')

        total_records = sum(s['count'] for s in stats_list)
        total_time = sum(s['time'] for s in stats_list)
        overall_speed = total_records / total_time if total_time > 0 else 0

        self.stdout.write(f'{"表名":<12} {"记录数":<12} {"耗时(秒)":<12} {"速度(条/秒)":<15} {"平均批次时间(秒)"}')
        self.stdout.write('-' * 65)

        for stats in stats_list:
            self.stdout.write(
                f'{stats["name"]:<12} '
                f'{stats["count"]:<12,} '
                f'{stats["time"]:<12.2f} '
                f'{stats["speed"]:<15.0f} '
                f'{stats.get("avg_batch_time", 0):<.3f}'
            )

        self.stdout.write('-' * 65)
        self.stdout.write(
            f'{"总计":<12} '
            f'{total_records:<12,} '
            f'{total_time:<12.2f} '
            f'{overall_speed:<15.0f}'
        )
        self.stdout.write('')

    def _test_batch_sizes(self, target_name, count):
        """Benchmark subdomain insertion with several batch sizes and report the fastest."""
        batch_sizes = [100, 500, 1000, 2000, 5000]
        test_count = min(count, 10000)  # cap the test data volume

        self.stdout.write(f'\n{"="*60}')
        self.stdout.write(f'  批次大小性能测试')
        self.stdout.write(f'{"="*60}\n')
        self.stdout.write(f'测试数据量: {test_count:,} 条')
        self.stdout.write(f'测试批次: {batch_sizes}\n')

        results = []

        for batch_size in batch_sizes:
            self.stdout.write(f'\n测试批次大小: {batch_size}')
            self.stdout.write('-' * 40)

            # Only subdomain insertion performance is measured here.
            try:
                target = Target.objects.get(name=target_name)
            except Target.DoesNotExist:
                self.stdout.write(self.style.ERROR(f'目标不存在: {target_name}'))
                return

            scan = Scan.objects.filter(target=target).first()
            if not scan:
                from apps.engine.models import ScanEngine
                engine = ScanEngine.objects.first()
                scan = Scan.objects.create(
                    target=target,
                    engine=engine,
                    status='completed',
                    results_dir=f'/tmp/test_{target_name}'
                )

            _, stats = self._generate_subdomains(target, scan, test_count, batch_size, benchmark=True)
            results.append((batch_size, stats))

            # Clean up the test rows.
            Subdomain.objects.filter(scan=scan, name__startswith=f'test-').delete()

        # Print the comparison table.
        self.stdout.write(f'\n{"="*60}')
        self.stdout.write('  批次大小对比结果')
        self.stdout.write(f'{"="*60}\n')
        self.stdout.write(f'{"批次大小":<12} {"总耗时(秒)":<15} {"速度(条/秒)":<15} {"平均批次时间(秒)"}')
        self.stdout.write('-' * 60)

        for batch_size, stats in results:
            self.stdout.write(
                f'{batch_size:<12} '
                f'{stats["time"]:<15.2f} '
                f'{stats["speed"]:<15.0f} '
                f'{stats["avg_batch_time"]:<.3f}'
            )

        # Pick the fastest batch size.
        fastest = min(results, key=lambda x: x[1]['time'])
        self.stdout.write(f'\n推荐批次大小: {fastest[0]} (最快: {fastest[1]["time"]:.2f}秒)')
        self.stdout.write('')
|
||||
@@ -1,180 +0,0 @@
|
||||
from django.db import models
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
|
||||
from ..common.definitions import ScanStatus
|
||||
|
||||
|
||||
|
||||
|
||||
class SoftDeleteManager(models.Manager):
    """Soft-delete manager: by default returns only non-deleted records."""

    def get_queryset(self):
        # A NULL deleted_at marks a live (non-deleted) row
        return super().get_queryset().filter(deleted_at__isnull=True)
|
||||
|
||||
|
||||
class Scan(models.Model):
    """Scan task model.

    Represents one scan run of a target with a chosen engine.  Uses soft
    deletion (``deleted_at``) and caches aggregate asset counts so list
    views avoid expensive real-time COUNT queries.
    """

    id = models.AutoField(primary_key=True)

    # Target being scanned; deleting the target cascades to its scans
    target = models.ForeignKey('targets.Target', on_delete=models.CASCADE, related_name='scans', help_text='扫描目标')

    # Engine used for this run
    engine = models.ForeignKey(
        'engine.ScanEngine',
        on_delete=models.CASCADE,
        related_name='scans',
        help_text='使用的扫描引擎'
    )

    created_at = models.DateTimeField(auto_now_add=True, help_text='任务创建时间')
    stopped_at = models.DateTimeField(null=True, blank=True, help_text='扫描结束时间')

    status = models.CharField(
        max_length=20,
        choices=ScanStatus.choices,
        default=ScanStatus.INITIATED,
        db_index=True,
        help_text='任务状态'
    )

    results_dir = models.CharField(max_length=100, blank=True, default='', help_text='结果存储目录')

    # Docker container IDs spawned for this scan
    container_ids = ArrayField(
        models.CharField(max_length=100),
        blank=True,
        default=list,
        help_text='容器 ID 列表(Docker Container ID)'
    )

    # Worker node executing the scan; SET_NULL keeps the scan if the worker goes away
    worker = models.ForeignKey(
        'engine.WorkerNode',
        on_delete=models.SET_NULL,
        related_name='scans',
        null=True,
        blank=True,
        help_text='执行扫描的 Worker 节点'
    )

    error_message = models.CharField(max_length=2000, blank=True, default='', help_text='错误信息')

    # ==================== Soft-delete field ====================
    deleted_at = models.DateTimeField(null=True, blank=True, db_index=True, help_text='删除时间(NULL表示未删除)')

    # ==================== Managers ====================
    objects = SoftDeleteManager()  # Default manager: only non-deleted records
    all_objects = models.Manager()  # Full manager: includes soft-deleted rows (used for hard deletes)

    # ==================== Progress-tracking fields ====================
    progress = models.IntegerField(default=0, help_text='扫描进度 0-100')
    current_stage = models.CharField(max_length=50, blank=True, default='', help_text='当前扫描阶段')
    stage_progress = models.JSONField(default=dict, help_text='各阶段进度详情')

    # ==================== Cached statistics fields ====================
    cached_subdomains_count = models.IntegerField(default=0, help_text='缓存的子域名数量')
    cached_websites_count = models.IntegerField(default=0, help_text='缓存的网站数量')
    cached_endpoints_count = models.IntegerField(default=0, help_text='缓存的端点数量')
    cached_ips_count = models.IntegerField(default=0, help_text='缓存的IP地址数量')
    cached_directories_count = models.IntegerField(default=0, help_text='缓存的目录数量')
    cached_vulns_total = models.IntegerField(default=0, help_text='缓存的漏洞总数')
    cached_vulns_critical = models.IntegerField(default=0, help_text='缓存的严重漏洞数量')
    cached_vulns_high = models.IntegerField(default=0, help_text='缓存的高危漏洞数量')
    cached_vulns_medium = models.IntegerField(default=0, help_text='缓存的中危漏洞数量')
    cached_vulns_low = models.IntegerField(default=0, help_text='缓存的低危漏洞数量')
    stats_updated_at = models.DateTimeField(null=True, blank=True, help_text='统计数据最后更新时间')

    class Meta:
        db_table = 'scan'
        verbose_name = '扫描任务'
        verbose_name_plural = '扫描任务'
        ordering = ['-created_at']
        indexes = [
            models.Index(fields=['-created_at']),  # Optimizes the default newest-first list ordering
            models.Index(fields=['target']),  # Optimizes looking up scans by target
            models.Index(fields=['deleted_at', '-created_at']),  # Soft-delete + time composite index
        ]

    def __str__(self):
        return f"Scan #{self.id} - {self.target.name}"
|
||||
|
||||
|
||||
class ScheduledScan(models.Model):
    """
    Scheduled scan task model.

    Scheduling mechanism:
    - APScheduler checks next_run_time every minute
    - Due tasks are dispatched to a Worker via the task_distributor
    - Flexible scheduling through cron expressions

    Scan mode (mutually exclusive):
    - Organization scan: set organization; all its targets are resolved
      dynamically at execution time
    - Target scan: set target to scan a single target
    - organization takes precedence over target
    """

    id = models.AutoField(primary_key=True)

    # Basic information
    name = models.CharField(max_length=200, help_text='任务名称')

    # Engine used for every run produced by this schedule
    engine = models.ForeignKey(
        'engine.ScanEngine',
        on_delete=models.CASCADE,
        related_name='scheduled_scans',
        help_text='使用的扫描引擎'
    )

    # Organization (organization-scan mode: targets resolved at run time)
    organization = models.ForeignKey(
        'targets.Organization',
        on_delete=models.CASCADE,
        related_name='scheduled_scans',
        null=True,
        blank=True,
        help_text='扫描组织(设置后执行时动态获取组织下所有目标)'
    )

    # Target (target-scan mode: single target)
    target = models.ForeignKey(
        'targets.Target',
        on_delete=models.CASCADE,
        related_name='scheduled_scans',
        null=True,
        blank=True,
        help_text='扫描单个目标(与 organization 二选一)'
    )

    # Schedule configuration - raw cron expression
    cron_expression = models.CharField(
        max_length=100,
        default='0 2 * * *',
        help_text='Cron 表达式,格式:分 时 日 月 周'
    )

    # Status
    is_enabled = models.BooleanField(default=True, db_index=True, help_text='是否启用')

    # Execution statistics
    run_count = models.IntegerField(default=0, help_text='已执行次数')
    last_run_time = models.DateTimeField(null=True, blank=True, help_text='上次执行时间')
    next_run_time = models.DateTimeField(null=True, blank=True, help_text='下次执行时间')

    # Timestamps
    created_at = models.DateTimeField(auto_now_add=True, help_text='创建时间')
    updated_at = models.DateTimeField(auto_now=True, help_text='更新时间')

    class Meta:
        db_table = 'scheduled_scan'
        verbose_name = '定时扫描任务'
        verbose_name_plural = '定时扫描任务'
        ordering = ['-created_at']
        indexes = [
            models.Index(fields=['-created_at']),
            models.Index(fields=['is_enabled', '-created_at']),
            models.Index(fields=['name']),  # Optimizes searching by name
        ]

    def __str__(self):
        return f"ScheduledScan #{self.id} - {self.name}"
|
||||
@@ -1,245 +0,0 @@
|
||||
from rest_framework import serializers
|
||||
from django.db.models import Count
|
||||
|
||||
from .models import Scan, ScheduledScan
|
||||
|
||||
|
||||
class ScanSerializer(serializers.ModelSerializer):
    """Serializer for scan tasks."""

    # Resolved via SerializerMethodField so a missing relation yields None
    target_name = serializers.SerializerMethodField()
    engine_name = serializers.SerializerMethodField()

    class Meta:
        model = Scan
        fields = [
            'id', 'target', 'target_name', 'engine', 'engine_name',
            'created_at', 'stopped_at', 'status', 'results_dir',
            'container_ids', 'error_message'
        ]
        read_only_fields = [
            'id', 'created_at', 'stopped_at', 'results_dir',
            'container_ids', 'error_message', 'status'
        ]

    def get_target_name(self, obj):
        """Return the related target's name, or None when absent."""
        return obj.target.name if obj.target else None

    def get_engine_name(self, obj):
        """Return the related engine's name, or None when absent."""
        return obj.engine.name if obj.engine else None
|
||||
|
||||
|
||||
class ScanHistorySerializer(serializers.ModelSerializer):
    """Serializer dedicated to the scan-history list view.

    Provides an optimised payload for the frontend scan-history page:
    - aggregated scan statistics (subdomains, endpoints, vulnerabilities)
    - progress percentage and current stage
    """

    # Field mappings pulled straight from related objects
    target_name = serializers.CharField(source='target.name', read_only=True)
    engine_name = serializers.CharField(source='engine.name', read_only=True)

    # Computed field
    summary = serializers.SerializerMethodField()

    # Progress-tracking fields (read directly from the model)
    progress = serializers.IntegerField(read_only=True)
    current_stage = serializers.CharField(read_only=True)
    stage_progress = serializers.JSONField(read_only=True)

    class Meta:
        model = Scan
        fields = [
            'id', 'target', 'target_name', 'engine', 'engine_name',
            'created_at', 'status', 'error_message', 'summary', 'progress',
            'current_stage', 'stage_progress'
        ]

    def get_summary(self, obj):
        """Build the aggregated summary for one scan.

        Design principles:
        - subdomain/website/endpoint/IP/directory counts come from cached
          fields (avoids real-time COUNT queries)
        - vulnerability statistics use cached fields on Scan, aggregated
          once when the scan finishes
        """
        # 1. Base statistics from the cached count fields
        summary = {
            'subdomains': obj.cached_subdomains_count or 0,
            'websites': obj.cached_websites_count or 0,
            'endpoints': obj.cached_endpoints_count or 0,
            'ips': obj.cached_ips_count or 0,
            'directories': obj.cached_directories_count or 0,
        }

        # 2. Cached vulnerability statistics grouped by severity
        summary['vulnerabilities'] = {
            'total': obj.cached_vulns_total or 0,
            'critical': obj.cached_vulns_critical or 0,
            'high': obj.cached_vulns_high or 0,
            'medium': obj.cached_vulns_medium or 0,
            'low': obj.cached_vulns_low or 0,
        }

        return summary
|
||||
|
||||
|
||||
class QuickScanSerializer(serializers.Serializer):
    """
    Quick-scan serializer.

    Features:
    - accepts a list of targets plus an engine configuration
    - targets are created/fetched automatically
    - the scan is launched immediately
    """

    # Upper bound on targets per request (protects the server from overload)
    MAX_BATCH_SIZE = 1000

    # Target list
    targets = serializers.ListField(
        child=serializers.DictField(),
        help_text='目标列表,每个目标包含 name 字段'
    )

    # Scan engine id
    engine_id = serializers.IntegerField(
        required=True,
        help_text='使用的扫描引擎 ID (必填)'
    )

    def validate_targets(self, value):
        """Validate the submitted target list: non-empty, size-capped, every entry named."""
        if not value:
            raise serializers.ValidationError("目标列表不能为空")

        # Enforce the batch-size cap to avoid overloading the server
        if len(value) > self.MAX_BATCH_SIZE:
            raise serializers.ValidationError(
                f"快速扫描最多支持 {self.MAX_BATCH_SIZE} 个目标,当前提交了 {len(value)} 个"
            )

        # Every target must carry a non-empty name
        for idx, target in enumerate(value):
            if 'name' not in target:
                raise serializers.ValidationError(f"第 {idx + 1} 个目标缺少 name 字段")
            if not target['name']:
                raise serializers.ValidationError(f"第 {idx + 1} 个目标的 name 不能为空")

        return value
|
||||
|
||||
|
||||
# ==================== 定时扫描序列化器 ====================
|
||||
|
||||
class ScheduledScanSerializer(serializers.ModelSerializer):
    """Scheduled-scan serializer (used for list and detail views)."""

    # Related fields flattened from engine / organization / target
    engine_name = serializers.CharField(source='engine.name', read_only=True)
    organization_id = serializers.IntegerField(source='organization.id', read_only=True, allow_null=True)
    organization_name = serializers.CharField(source='organization.name', read_only=True, allow_null=True)
    target_id = serializers.IntegerField(source='target.id', read_only=True, allow_null=True)
    target_name = serializers.CharField(source='target.name', read_only=True, allow_null=True)
    scan_mode = serializers.SerializerMethodField()

    class Meta:
        model = ScheduledScan
        fields = [
            'id', 'name',
            'engine', 'engine_name',
            'organization_id', 'organization_name',
            'target_id', 'target_name',
            'scan_mode',
            'cron_expression',
            'is_enabled',
            'run_count', 'last_run_time', 'next_run_time',
            'created_at', 'updated_at'
        ]
        read_only_fields = [
            'id', 'run_count',
            'last_run_time', 'next_run_time',
            'created_at', 'updated_at'
        ]

    def get_scan_mode(self, obj):
        """Return the scan mode: 'organization' when an organization is set, else 'target'."""
        return 'organization' if obj.organization_id else 'target'
|
||||
|
||||
|
||||
class CreateScheduledScanSerializer(serializers.Serializer):
    """Serializer for creating a scheduled scan task.

    Scan mode (mutually exclusive):
    - Organization scan: provide organization_id; the organization's
      targets are resolved dynamically at execution time
    - Target scan: provide target_id to scan a single target
    """

    name = serializers.CharField(max_length=200, help_text='任务名称')
    engine_id = serializers.IntegerField(help_text='扫描引擎 ID')

    # Organization-scan mode
    organization_id = serializers.IntegerField(
        required=False,
        allow_null=True,
        help_text='组织 ID(组织扫描模式:执行时动态获取组织下所有目标)'
    )

    # Target-scan mode
    target_id = serializers.IntegerField(
        required=False,
        allow_null=True,
        help_text='目标 ID(目标扫描模式:扫描单个目标)'
    )

    cron_expression = serializers.CharField(
        max_length=100,
        default='0 2 * * *',
        help_text='Cron 表达式,格式:分 时 日 月 周'
    )
    is_enabled = serializers.BooleanField(default=True, help_text='是否立即启用')

    def validate(self, data):
        """Enforce that exactly one of organization_id / target_id is provided."""
        organization_id = data.get('organization_id')
        target_id = data.get('target_id')

        if not organization_id and not target_id:
            raise serializers.ValidationError('必须提供 organization_id 或 target_id 其中之一')

        if organization_id and target_id:
            raise serializers.ValidationError('organization_id 和 target_id 只能提供其中之一')

        return data
|
||||
|
||||
|
||||
class UpdateScheduledScanSerializer(serializers.Serializer):
    """Serializer for updating a scheduled scan task (all fields optional)."""

    name = serializers.CharField(max_length=200, required=False, help_text='任务名称')
    engine_id = serializers.IntegerField(required=False, help_text='扫描引擎 ID')

    # Organization-scan mode
    organization_id = serializers.IntegerField(
        required=False,
        allow_null=True,
        help_text='组织 ID(设置后清空 target_id)'
    )

    # Target-scan mode
    target_id = serializers.IntegerField(
        required=False,
        allow_null=True,
        help_text='目标 ID(设置后清空 organization_id)'
    )

    cron_expression = serializers.CharField(max_length=100, required=False, help_text='Cron 表达式')
    is_enabled = serializers.BooleanField(required=False, help_text='是否启用')
|
||||
|
||||
|
||||
class ToggleScheduledScanSerializer(serializers.Serializer):
    """Serializer for toggling a scheduled scan's enabled state."""

    is_enabled = serializers.BooleanField(help_text='是否启用')
|
||||
@@ -1,238 +0,0 @@
|
||||
"""
|
||||
扫描任务服务
|
||||
|
||||
负责 Scan 模型的所有业务逻辑
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import uuid
|
||||
from typing import Dict, List, TYPE_CHECKING
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from django.conf import settings
|
||||
from django.db import transaction
|
||||
from django.db.utils import DatabaseError, IntegrityError, OperationalError
|
||||
from django.core.exceptions import ValidationError, ObjectDoesNotExist
|
||||
|
||||
from apps.scan.models import Scan
|
||||
from apps.scan.repositories import DjangoScanRepository
|
||||
from apps.targets.repositories import DjangoTargetRepository, DjangoOrganizationRepository
|
||||
from apps.engine.repositories import DjangoEngineRepository
|
||||
from apps.targets.models import Target
|
||||
from apps.engine.models import ScanEngine
|
||||
from apps.common.definitions import ScanStatus
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ScanService:
    """
    Scan task service (coordinator).

    Responsibilities:
    - Coordinate the specialised sub-services
    - Expose a single public interface
    - Stay backward compatible

    Note:
    - Concrete business logic has been split into sub-services
    - This class mostly delegates and coordinates
    """

    # Terminal states: once one of these is set it must not be overwritten
    FINAL_STATUSES = {
        ScanStatus.COMPLETED,
        ScanStatus.FAILED,
        ScanStatus.CANCELLED
    }

    def __init__(self):
        """
        Initialise the service and its sub-services.
        """
        # Sub-services imported at function scope rather than module level —
        # presumably to avoid circular imports; confirm before hoisting.
        from apps.scan.services.scan_creation_service import ScanCreationService
        from apps.scan.services.scan_state_service import ScanStateService
        from apps.scan.services.scan_control_service import ScanControlService
        from apps.scan.services.scan_stats_service import ScanStatsService

        self.creation_service = ScanCreationService()
        self.state_service = ScanStateService()
        self.control_service = ScanControlService()
        self.stats_service = ScanStatsService()

        # Keep a ScanRepository (used by get_scan / hard_delete_scans)
        self.scan_repo = DjangoScanRepository()

    def get_scan(self, scan_id: int, prefetch_relations: bool) -> Scan | None:
        """
        Fetch a scan task (with related objects).

        Preloads engine and target when requested, avoiding N+1 queries.

        Args:
            scan_id: scan task id
            prefetch_relations: whether to preload related engine/target

        Returns:
            Scan instance (with engine and target) or None
        """
        return self.scan_repo.get_by_id(scan_id, prefetch_relations)

    def get_all_scans(self, prefetch_relations: bool = True):
        """Return all (non-deleted) scans, optionally with relations preloaded."""
        return self.scan_repo.get_all(prefetch_relations=prefetch_relations)

    def prepare_initiate_scan(
        self,
        organization_id: int | None = None,
        target_id: int | None = None,
        engine_id: int | None = None
    ) -> tuple[List[Target], ScanEngine]:
        """
        Prepare for scan creation: resolve the target list and the scan engine.
        """
        return self.creation_service.prepare_initiate_scan(
            organization_id, target_id, engine_id
        )

    def create_scans(
        self,
        targets: List[Target],
        engine: ScanEngine,
        scheduled_scan_name: str | None = None
    ) -> List[Scan]:
        """Create scan tasks in bulk (delegates to ScanCreationService)."""
        return self.creation_service.create_scans(targets, engine, scheduled_scan_name)

    # ==================== State management (delegated to ScanStateService) ====================

    def update_status(
        self,
        scan_id: int,
        status: ScanStatus,
        error_message: str | None = None,
        stopped_at: datetime | None = None
    ) -> bool:
        """Update a scan's status (delegates to ScanStateService)."""
        return self.state_service.update_status(
            scan_id, status, error_message, stopped_at
        )

    def update_status_if_match(
        self,
        scan_id: int,
        current_status: ScanStatus,
        new_status: ScanStatus,
        stopped_at: datetime | None = None
    ) -> bool:
        """Conditionally update a scan's status (delegates to ScanStateService)."""
        return self.state_service.update_status_if_match(
            scan_id, current_status, new_status, stopped_at
        )

    def update_cached_stats(self, scan_id: int) -> dict | None:
        """Refresh cached statistics (delegates to ScanStateService); returns the stats dict."""
        return self.state_service.update_cached_stats(scan_id)

    # ==================== Progress tracking (delegated to ScanStateService) ====================

    def init_stage_progress(self, scan_id: int, stages: list[str]) -> bool:
        """Initialise stage progress (delegates to ScanStateService)."""
        return self.state_service.init_stage_progress(scan_id, stages)

    def start_stage(self, scan_id: int, stage: str) -> bool:
        """Mark a stage as started (delegates to ScanStateService)."""
        return self.state_service.start_stage(scan_id, stage)

    def complete_stage(self, scan_id: int, stage: str, detail: str | None = None) -> bool:
        """Mark a stage as completed (delegates to ScanStateService)."""
        return self.state_service.complete_stage(scan_id, stage, detail)

    def fail_stage(self, scan_id: int, stage: str, error: str | None = None) -> bool:
        """Mark a stage as failed (delegates to ScanStateService)."""
        return self.state_service.fail_stage(scan_id, stage, error)

    def cancel_running_stages(self, scan_id: int, final_status: str = "cancelled") -> bool:
        """Cancel every running stage (delegates to ScanStateService)."""
        return self.state_service.cancel_running_stages(scan_id, final_status)

    # TODO: not wired into the pipeline yet
    def add_command_to_scan(self, scan_id: int, stage_name: str, tool_name: str, command: str) -> bool:
        """
        Incrementally append a command record to a scan stage.

        Args:
            scan_id: scan task id
            stage_name: stage name (e.g. 'subdomain_discovery', 'port_scan')
            tool_name: tool name
            command: executed command line

        Returns:
            bool: True when the command was recorded
        """
        try:
            scan = self.get_scan(scan_id, prefetch_relations=False)
            if not scan:
                logger.error(f"扫描任务不存在: {scan_id}")
                return False

            stage_progress = scan.stage_progress or {}

            # Make sure the stage entry exists
            if stage_name not in stage_progress:
                stage_progress[stage_name] = {'status': 'running', 'commands': []}

            # Make sure the commands list exists
            if 'commands' not in stage_progress[stage_name]:
                stage_progress[stage_name]['commands'] = []

            # Append the new command record
            command_entry = f"{tool_name}: {command}"
            stage_progress[stage_name]['commands'].append(command_entry)

            scan.stage_progress = stage_progress
            scan.save(update_fields=['stage_progress'])

            command_count = len(stage_progress[stage_name]['commands'])
            logger.info(f"✓ 记录命令: {stage_name}.{tool_name} (总计: {command_count})")
            return True

        except Exception as e:
            # Best-effort bookkeeping: failures are logged, never raised
            logger.error(f"记录命令失败: {e}")
            return False

    # ==================== Deletion & control (delegated to ScanControlService) ====================

    def delete_scans_two_phase(self, scan_ids: List[int]) -> dict:
        """Two-phase deletion of scan tasks (delegates to ScanControlService)."""
        return self.control_service.delete_scans_two_phase(scan_ids)

    def stop_scan(self, scan_id: int) -> tuple[bool, int]:
        """Stop a scan task (delegates to ScanControlService)."""
        return self.control_service.stop_scan(scan_id)

    def hard_delete_scans(self, scan_ids: List[int]) -> tuple[int, Dict[str, int]]:
        """
        Hard-delete scan tasks (actually removes the data).

        Intended to run inside a Worker container; deletes soft-deleted
        scans together with their related data.

        Args:
            scan_ids: list of scan task ids

        Returns:
            (number deleted, details dict)
        """
        return self.scan_repo.hard_delete_by_ids(scan_ids)

    # ==================== Statistics (delegated to ScanStatsService) ====================

    def get_statistics(self) -> dict:
        """Return scan statistics (delegates to ScanStatsService)."""
        return self.stats_service.get_statistics()
|
||||
|
||||
|
||||
|
||||
# 导出接口
|
||||
__all__ = ['ScanService']
|
||||
@@ -1,94 +0,0 @@
|
||||
"""
|
||||
导出站点 URL 到 TXT 文件的 Task
|
||||
|
||||
使用流式处理,避免大量站点导致内存溢出
|
||||
"""
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from prefect import task
|
||||
|
||||
from apps.asset.repositories import DjangoWebSiteRepository
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@task(name="export_sites")
|
||||
def export_sites_task(
|
||||
target_id: int,
|
||||
output_file: str,
|
||||
batch_size: int = 1000
|
||||
) -> dict:
|
||||
"""
|
||||
导出目标下的所有站点 URL 到 TXT 文件
|
||||
|
||||
使用流式处理,支持大规模数据导出(10万+站点)
|
||||
|
||||
Args:
|
||||
target_id: 目标 ID
|
||||
output_file: 输出文件路径(绝对路径)
|
||||
batch_size: 每次读取的批次大小,默认 1000
|
||||
|
||||
Returns:
|
||||
dict: {
|
||||
'success': bool,
|
||||
'output_file': str,
|
||||
'total_count': int
|
||||
}
|
||||
|
||||
Raises:
|
||||
ValueError: 参数错误
|
||||
IOError: 文件写入失败
|
||||
"""
|
||||
try:
|
||||
# 初始化 Repository
|
||||
repository = DjangoWebSiteRepository()
|
||||
|
||||
logger.info("开始导出站点 URL - Target ID: %d, 输出文件: %s", target_id, output_file)
|
||||
|
||||
# 确保输出目录存在
|
||||
output_path = Path(output_file)
|
||||
output_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# 使用 Repository 流式查询站点 URL
|
||||
url_iterator = repository.get_urls_for_export(
|
||||
target_id=target_id,
|
||||
batch_size=batch_size
|
||||
)
|
||||
|
||||
# 流式写入文件
|
||||
total_count = 0
|
||||
with open(output_path, 'w', encoding='utf-8', buffering=8192) as f:
|
||||
for url in url_iterator:
|
||||
# 每次只处理一个 URL,边读边写
|
||||
f.write(f"{url}\n")
|
||||
total_count += 1
|
||||
|
||||
# 每写入 10000 条记录打印一次进度
|
||||
if total_count % 10000 == 0:
|
||||
logger.info("已导出 %d 个站点 URL...", total_count)
|
||||
|
||||
logger.info(
|
||||
"✓ 站点 URL 导出完成 - 总数: %d, 文件: %s (%.2f KB)",
|
||||
total_count,
|
||||
str(output_path), # 使用绝对路径
|
||||
output_path.stat().st_size / 1024
|
||||
)
|
||||
|
||||
return {
|
||||
'success': True,
|
||||
'output_file': str(output_path),
|
||||
'total_count': total_count
|
||||
}
|
||||
|
||||
except FileNotFoundError as e:
|
||||
logger.error("输出目录不存在: %s", e)
|
||||
raise
|
||||
except PermissionError as e:
|
||||
logger.error("文件写入权限不足: %s", e)
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.exception("导出站点 URL 失败: %s", e)
|
||||
raise
|
||||
|
||||
|
||||
|
||||
@@ -1,189 +0,0 @@
|
||||
"""
|
||||
导出扫描目标到 TXT 文件的 Task
|
||||
|
||||
根据 Target 类型决定导出内容:
|
||||
- DOMAIN: 从 Subdomain 表导出子域名
|
||||
- IP: 直接写入 target.name
|
||||
- CIDR: 展开 CIDR 范围内的所有 IP
|
||||
|
||||
使用流式处理,避免大量数据导致内存溢出
|
||||
"""
|
||||
import logging
|
||||
import ipaddress
|
||||
from pathlib import Path
|
||||
from prefect import task
|
||||
|
||||
from apps.asset.services.asset.subdomain_service import SubdomainService
|
||||
from apps.targets.services import TargetService
|
||||
from apps.targets.models import Target # 仅用于 TargetType 常量
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _export_domains(target_id: int, output_path: Path, batch_size: int) -> int:
    """
    Write every subdomain name of a DOMAIN-type target to ``output_path``.

    Names are streamed from the subdomain service in chunks of
    ``batch_size`` and written one per line.

    Args:
        target_id: target id
        output_path: destination file
        batch_size: chunk size for the streaming query

    Returns:
        int: number of names written
    """
    name_stream = SubdomainService().iter_subdomain_names_by_target(
        target_id=target_id,
        chunk_size=batch_size
    )

    written = 0
    with open(output_path, 'w', encoding='utf-8', buffering=8192) as handle:
        for subdomain in name_stream:
            handle.write(f"{subdomain}\n")
            written += 1

            # Progress log every 10,000 names
            if written % 10000 == 0:
                logger.info("已导出 %d 个域名...", written)

    return written
|
||||
|
||||
|
||||
def _export_ip(target_name: str, output_path: Path) -> int:
|
||||
"""
|
||||
导出 IP 类型目标
|
||||
|
||||
Args:
|
||||
target_name: IP 地址
|
||||
output_path: 输出文件路径
|
||||
|
||||
Returns:
|
||||
int: 导出的记录数(始终为 1)
|
||||
"""
|
||||
with open(output_path, 'w', encoding='utf-8') as f:
|
||||
f.write(f"{target_name}\n")
|
||||
return 1
|
||||
|
||||
|
||||
def _export_cidr(target_name: str, output_path: Path) -> int:
|
||||
"""
|
||||
导出 CIDR 类型目标,展开为每个 IP
|
||||
|
||||
Args:
|
||||
target_name: CIDR 范围(如 192.168.1.0/24)
|
||||
output_path: 输出文件路径
|
||||
|
||||
Returns:
|
||||
int: 导出的 IP 数量
|
||||
"""
|
||||
network = ipaddress.ip_network(target_name, strict=False)
|
||||
total_count = 0
|
||||
|
||||
with open(output_path, 'w', encoding='utf-8', buffering=8192) as f:
|
||||
for ip in network.hosts(): # 排除网络地址和广播地址
|
||||
f.write(f"{ip}\n")
|
||||
total_count += 1
|
||||
|
||||
if total_count % 10000 == 0:
|
||||
logger.info("已导出 %d 个 IP...", total_count)
|
||||
|
||||
# 如果是 /32 或 /128(单个 IP),hosts() 会为空,需要特殊处理
|
||||
if total_count == 0:
|
||||
with open(output_path, 'w', encoding='utf-8') as f:
|
||||
f.write(f"{network.network_address}\n")
|
||||
total_count = 1
|
||||
|
||||
return total_count
|
||||
|
||||
|
||||
@task(name="export_scan_targets")
|
||||
def export_scan_targets_task(
|
||||
target_id: int,
|
||||
output_file: str,
|
||||
batch_size: int = 1000
|
||||
) -> dict:
|
||||
"""
|
||||
导出扫描目标到 TXT 文件
|
||||
|
||||
根据 Target 类型自动决定导出内容:
|
||||
- DOMAIN: 从 Subdomain 表导出子域名(流式处理,支持 10万+ 域名)
|
||||
- IP: 直接写入 target.name(单个 IP)
|
||||
- CIDR: 展开 CIDR 范围内的所有可用 IP
|
||||
|
||||
Args:
|
||||
target_id: 目标 ID
|
||||
output_file: 输出文件路径(绝对路径)
|
||||
batch_size: 每次读取的批次大小,默认 1000(仅对 DOMAIN 类型有效)
|
||||
|
||||
Returns:
|
||||
dict: {
|
||||
'success': bool,
|
||||
'output_file': str,
|
||||
'total_count': int,
|
||||
'target_type': str
|
||||
}
|
||||
|
||||
Raises:
|
||||
ValueError: Target 不存在
|
||||
IOError: 文件写入失败
|
||||
"""
|
||||
try:
|
||||
# 1. 通过 Service 层获取 Target
|
||||
target_service = TargetService()
|
||||
target = target_service.get_target(target_id)
|
||||
if not target:
|
||||
raise ValueError(f"Target ID {target_id} 不存在")
|
||||
|
||||
target_type = target.type
|
||||
target_name = target.name
|
||||
|
||||
logger.info(
|
||||
"开始导出扫描目标 - Target ID: %d, Name: %s, Type: %s, 输出文件: %s",
|
||||
target_id, target_name, target_type, output_file
|
||||
)
|
||||
|
||||
# 2. 确保输出目录存在
|
||||
output_path = Path(output_file)
|
||||
output_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# 3. 根据类型导出
|
||||
if target_type == Target.TargetType.DOMAIN:
|
||||
total_count = _export_domains(target_id, output_path, batch_size)
|
||||
type_desc = "域名"
|
||||
elif target_type == Target.TargetType.IP:
|
||||
total_count = _export_ip(target_name, output_path)
|
||||
type_desc = "IP"
|
||||
elif target_type == Target.TargetType.CIDR:
|
||||
total_count = _export_cidr(target_name, output_path)
|
||||
type_desc = "CIDR IP"
|
||||
else:
|
||||
raise ValueError(f"不支持的目标类型: {target_type}")
|
||||
|
||||
logger.info(
|
||||
"✓ 扫描目标导出完成 - 类型: %s, 总数: %d, 文件: %s (%.2f KB)",
|
||||
type_desc,
|
||||
total_count,
|
||||
str(output_path),
|
||||
output_path.stat().st_size / 1024
|
||||
)
|
||||
|
||||
return {
|
||||
'success': True,
|
||||
'output_file': str(output_path),
|
||||
'total_count': total_count,
|
||||
'target_type': target_type
|
||||
}
|
||||
|
||||
except FileNotFoundError as e:
|
||||
logger.error("输出目录不存在: %s", e)
|
||||
raise
|
||||
except PermissionError as e:
|
||||
logger.error("文件写入权限不足: %s", e)
|
||||
raise
|
||||
except ValueError as e:
|
||||
logger.error("参数错误: %s", e)
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.exception("导出扫描目标失败: %s", e)
|
||||
raise
|
||||
@@ -1,119 +0,0 @@
|
||||
"""
|
||||
导出站点URL到文件的Task
|
||||
|
||||
直接使用 HostPortMapping 表查询 host+port 组合,拼接成URL格式写入文件
|
||||
"""
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from prefect import task
|
||||
|
||||
from apps.asset.services import HostPortMappingService
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@task(name="export_site_urls")
|
||||
def export_site_urls_task(
|
||||
target_id: int,
|
||||
output_file: str,
|
||||
batch_size: int = 1000
|
||||
) -> dict:
|
||||
"""
|
||||
导出目标下的所有站点URL到文件(基于 HostPortMapping 表)
|
||||
|
||||
功能:
|
||||
1. 从 HostPortMapping 表查询 target 下所有 host+port 组合
|
||||
2. 拼接成URL格式(标准端口80/443将省略端口号)
|
||||
3. 写入到指定文件中
|
||||
|
||||
Args:
|
||||
target_id: 目标ID
|
||||
output_file: 输出文件路径(绝对路径)
|
||||
batch_size: 每次处理的批次大小,默认1000(暂未使用,预留)
|
||||
|
||||
Returns:
|
||||
dict: {
|
||||
'success': bool,
|
||||
'output_file': str,
|
||||
'total_urls': int,
|
||||
'association_count': int # 主机端口关联数量
|
||||
}
|
||||
|
||||
Raises:
|
||||
ValueError: 参数错误
|
||||
IOError: 文件写入失败
|
||||
"""
|
||||
try:
|
||||
logger.info("开始统计站点URL - Target ID: %d, 输出文件: %s", target_id, output_file)
|
||||
|
||||
# 确保输出目录存在
|
||||
output_path = Path(output_file)
|
||||
output_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# 直接查询 HostPortMapping 表,按 host 排序
|
||||
service = HostPortMappingService()
|
||||
associations = service.iter_host_port_by_target(
|
||||
target_id=target_id,
|
||||
batch_size=batch_size,
|
||||
)
|
||||
|
||||
total_urls = 0
|
||||
association_count = 0
|
||||
|
||||
# 流式写入文件
|
||||
with open(output_path, 'w', encoding='utf-8', buffering=8192) as f:
|
||||
for assoc in associations:
|
||||
association_count += 1
|
||||
host = assoc['host']
|
||||
port = assoc['port']
|
||||
|
||||
# 根据端口号生成URL
|
||||
# 80 端口:只生成 HTTP URL(省略端口号)
|
||||
# 443 端口:只生成 HTTPS URL(省略端口号)
|
||||
# 其他端口:生成 HTTP 和 HTTPS 两个URL(带端口号)
|
||||
if port == 80:
|
||||
# HTTP 标准端口,省略端口号
|
||||
url = f"http://{host}"
|
||||
f.write(f"{url}\n")
|
||||
total_urls += 1
|
||||
elif port == 443:
|
||||
# HTTPS 标准端口,省略端口号
|
||||
url = f"https://{host}"
|
||||
f.write(f"{url}\n")
|
||||
total_urls += 1
|
||||
else:
|
||||
# 非标准端口,生成 HTTP 和 HTTPS 两个URL
|
||||
http_url = f"http://{host}:{port}"
|
||||
https_url = f"https://{host}:{port}"
|
||||
f.write(f"{http_url}\n")
|
||||
f.write(f"{https_url}\n")
|
||||
total_urls += 2
|
||||
|
||||
# 每处理1000条记录打印一次进度
|
||||
if association_count % 1000 == 0:
|
||||
logger.info("已处理 %d 条关联,生成 %d 个URL...", association_count, total_urls)
|
||||
|
||||
logger.info(
|
||||
"✓ 站点URL导出完成 - 关联数: %d, 总URL数: %d, 文件: %s (%.2f KB)",
|
||||
association_count,
|
||||
total_urls,
|
||||
str(output_path),
|
||||
output_path.stat().st_size / 1024
|
||||
)
|
||||
|
||||
return {
|
||||
'success': True,
|
||||
'output_file': str(output_path),
|
||||
'total_urls': total_urls,
|
||||
'association_count': association_count
|
||||
}
|
||||
|
||||
except FileNotFoundError as e:
|
||||
logger.error("输出目录不存在: %s", e)
|
||||
raise
|
||||
except PermissionError as e:
|
||||
logger.error("文件写入权限不足: %s", e)
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.exception("导出站点URL失败: %s", e)
|
||||
raise
|
||||
@@ -1,195 +0,0 @@
|
||||
"""
|
||||
合并并去重域名任务
|
||||
|
||||
合并 merge + parse + validate 三个步骤,优化性能:
|
||||
- 单命令实现(LC_ALL=C sort -u)
|
||||
- C语言级性能,单进程高效
|
||||
- 无临时文件,零额外开销
|
||||
- 支持千万级数据处理
|
||||
|
||||
性能优势:
|
||||
- LC_ALL=C 字节序比较(比locale快20-30%)
|
||||
- 单进程直接处理多文件(无管道开销)
|
||||
- 内存占用恒定(~50MB for 50万域名)
|
||||
- 50万域名处理时间:~0.5秒(相比 Python 提升 ~67%)
|
||||
|
||||
Note:
|
||||
- 工具(amass/subfinder)输出已标准化(小写,无空行)
|
||||
- sort -u 自动处理去重和排序
|
||||
- 无需额外过滤,性能最优
|
||||
"""
|
||||
|
||||
import logging
|
||||
import uuid
|
||||
import subprocess
|
||||
from pathlib import Path
|
||||
from datetime import datetime
|
||||
from prefect import task
|
||||
from typing import List
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# 注:使用纯系统命令实现,无需 Python 缓冲区配置
|
||||
# 工具(amass/subfinder)输出已是小写且标准化
|
||||
|
||||
@task(
    name='merge_and_deduplicate',
    retries=1,
    log_prints=True
)
def merge_and_validate_task(
    result_files: List[str],
    result_dir: str
) -> str:
    """Merge scan result files and deduplicate domains via a single system command.

    Runs ``LC_ALL=C sort -u file1 file2 ... -o output`` so merging, sorting
    and deduplication happen in one C-level process: byte-order comparison,
    no pipes, no temp files, constant memory (external sort). Tool output
    (amass/subfinder) is assumed to be normalized already (lowercase, no
    blank lines), so no extra filtering is applied.

    Args:
        result_files: Paths of the per-tool result files to merge.
        result_dir: Directory in which the merged output file is created.

    Returns:
        str: Path of the deduplicated domain file.

    Raises:
        RuntimeError: If no input file exists, the command fails or times
            out, file I/O fails, or the merged output is empty.
    """
    import shlex  # local import: only needed here to quote shell arguments safely

    logger.info("开始合并并去重 %d 个结果文件(系统命令优化)", len(result_files))

    result_path = Path(result_dir)

    # Keep only the input files that actually exist on disk.
    valid_files = []
    for file_path_str in result_files:
        file_path = Path(file_path_str)
        if file_path.exists():
            valid_files.append(str(file_path))
        else:
            logger.warning("结果文件不存在: %s", file_path)

    if not valid_files:
        raise RuntimeError("所有结果文件都不存在")

    # Unique output name: timestamp + short uuid avoids collisions between runs.
    timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
    short_uuid = uuid.uuid4().hex[:4]
    merged_file = result_path / f"merged_{timestamp}_{short_uuid}.txt"

    try:
        # LC_ALL=C: byte-order comparison (20-30% faster than locale-aware).
        # sort -u: merge + sort + dedupe over multiple files in one pass.
        # -o: safe output (more reliable than shell redirection).
        # BUGFIX: quote every path so filenames containing spaces or shell
        # metacharacters can neither break nor inject into the command line.
        quoted_inputs = ' '.join(shlex.quote(p) for p in valid_files)
        cmd = f"LC_ALL=C sort -u {quoted_inputs} -o {shlex.quote(str(merged_file))}"

        logger.debug("执行命令: %s", cmd)

        # Derive the timeout from the total number of input lines.
        total_lines = 0
        for file_path in valid_files:
            try:
                line_count_proc = subprocess.run(
                    ["wc", "-l", file_path],
                    check=True,
                    capture_output=True,
                    text=True,
                )
                total_lines += int(line_count_proc.stdout.strip().split()[0])
            except (subprocess.CalledProcessError, ValueError, IndexError):
                # Counting is best-effort; fall back to the default timeout.
                continue

        timeout = 3600
        if total_lines > 0:
            # Linear estimate: ~0.1 seconds per line, floor of 10 minutes.
            base_per_line = 0.1
            est = int(total_lines * base_per_line)
            timeout = max(600, est)

        logger.info(
            "Subdomain 合并去重 timeout 自动计算: 输入总行数=%d, timeout=%d秒",
            total_lines,
            timeout,
        )

        # BUGFIX: capture stderr so the CalledProcessError handler below can
        # actually report sort's error text (e.stderr was always None before).
        subprocess.run(
            cmd,
            shell=True,
            check=True,
            capture_output=True,
            text=True,
            timeout=timeout
        )

        logger.debug("✓ 合并去重完成")

        # ==================== Collect statistics ====================
        if not merged_file.exists():
            raise RuntimeError("合并文件未被创建")

        # Count result lines with wc (fast for large files), falling back to
        # a Python line count if wc fails or its output cannot be parsed.
        try:
            line_count_proc = subprocess.run(
                ["wc", "-l", str(merged_file)],
                check=True,
                capture_output=True,
                text=True
            )
            unique_count = int(line_count_proc.stdout.strip().split()[0])
        except (subprocess.CalledProcessError, ValueError, IndexError) as e:
            logger.warning(
                "wc -l 统计失败(文件: %s),降级为 Python 逐行统计 - 错误: %s",
                merged_file, e
            )
            unique_count = 0
            with open(merged_file, 'r', encoding='utf-8') as file_obj:
                for _ in file_obj:
                    unique_count += 1

        if unique_count == 0:
            raise RuntimeError("未找到任何有效域名")

        file_size = merged_file.stat().st_size

        logger.info(
            "✓ 合并去重完成 - 去重后: %d 个域名, 文件大小: %.2f KB",
            unique_count,
            file_size / 1024
        )

        return str(merged_file)

    except subprocess.TimeoutExpired as e:
        # BUGFIX: the old message hardcoded ">60分钟" although the timeout is
        # computed dynamically; report the actual limit instead.
        error_msg = f"合并去重超时(>{timeout}秒),请检查数据量或系统资源"
        logger.warning(error_msg)  # timeouts are expected under heavy load
        raise RuntimeError(error_msg) from e

    except subprocess.CalledProcessError as e:
        error_msg = f"系统命令执行失败: {e.stderr if e.stderr else str(e)}"
        logger.warning(error_msg)
        raise RuntimeError(error_msg) from e

    except IOError as e:
        error_msg = f"文件读写失败: {e}"
        logger.warning(error_msg)
        raise RuntimeError(error_msg) from e

    except Exception as e:
        error_msg = f"合并去重失败: {e}"
        logger.error(error_msg, exc_info=True)
        raise
|
||||
@@ -1,128 +0,0 @@
|
||||
"""
|
||||
导出目标资产任务
|
||||
|
||||
根据 input_type 导出不同类型的资产到文件:
|
||||
- domains_file: 导出子域名列表(用于 waymore 等域名级工具)
|
||||
- sites_file: 导出站点 URL 列表(用于 katana 等站点级工具)
|
||||
|
||||
使用流式写入,避免内存溢出
|
||||
"""
|
||||
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from prefect import task
|
||||
from typing import Optional
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _stream_lines_to_file(output_path, lines, batch_size: int) -> int:
    """Stream an iterable of strings to *output_path*, one per line.

    Flushes the buffer every *batch_size* lines so progress reaches disk
    early during long exports. Returns the number of lines written.
    """
    count = 0
    with open(output_path, 'w') as f:
        for line in lines:
            f.write(f"{line}\n")
            count += 1
            if count % batch_size == 0:
                f.flush()
    return count


@task(
    name='export_target_assets',
    retries=1,
    log_prints=True
)
def export_target_assets_task(
    output_file: str,
    target_id: int,
    scan_id: int,
    input_type: str,
    batch_size: int = 1000
) -> dict:
    """Export a target's assets to a file, selected by *input_type*.

    - ``domains_file``: subdomain names (for domain-level tools, e.g. waymore)
    - ``sites_file``: website URLs (for site-level tools, e.g. katana)

    Uses streaming writes so memory stays bounded.

    Args:
        output_file: Output file path.
        target_id: Target ID.
        scan_id: Scan ID (kept for interface compatibility; not used here).
        input_type: 'domains_file' or 'sites_file'.
        batch_size: Iteration batch size (memory optimization).

    Returns:
        dict: {'output_file': str, 'asset_count': int, 'asset_type': str}

    Raises:
        RuntimeError: On any failure, including an unsupported input_type
            (wrapped ValueError), with the original exception chained.
    """
    try:
        logger.info("开始导出目标资产 - 类型: %s", input_type)

        # BUGFIX: validate input_type before touching the filesystem so an
        # invalid request does not leave an empty directory behind.
        if input_type not in ('domains_file', 'sites_file'):
            raise ValueError(f"不支持的 input_type: {input_type},必须是 'domains_file' 或 'sites_file'")

        # Ensure the output directory exists.
        output_path = Path(output_file)
        output_path.parent.mkdir(parents=True, exist_ok=True)

        if input_type == 'domains_file':
            # Export the subdomain list via the service-layer iterator.
            logger.info("从目标 %d 导出域名列表", target_id)
            from apps.asset.services import SubdomainService

            subdomain_service = SubdomainService()
            asset_count = _stream_lines_to_file(
                output_path,
                subdomain_service.iter_subdomain_names_by_target(target_id, batch_size),
                batch_size,
            )

            logger.info("✓ 域名导出完成 - 文件: %s, 数量: %d", output_file, asset_count)
            if asset_count == 0:
                logger.warning("目标下没有域名")

            return {
                'output_file': output_file,
                'asset_count': asset_count,
                'asset_type': 'domains'
            }

        # input_type == 'sites_file': export website URLs for the target.
        logger.info("从目标 %d 导出站点 URL 列表", target_id)
        from apps.asset.services import WebSiteService

        website_service = WebSiteService()
        asset_count = _stream_lines_to_file(
            output_path,
            website_service.iter_website_urls_by_target(target_id, batch_size),
            batch_size,
        )

        logger.info("✓ 站点 URL 导出完成 - 文件: %s, 数量: %d", output_file, asset_count)
        if asset_count == 0:
            logger.warning("扫描下没有站点")

        return {
            'output_file': output_file,
            'asset_count': asset_count,
            'asset_type': 'sites'
        }

    except Exception as e:
        logger.error("导出资产失败: %s", e, exc_info=True)
        raise RuntimeError(f"导出资产失败: {e}") from e
|
||||
@@ -1,494 +0,0 @@
|
||||
"""
|
||||
基于 execute_stream 的流式 URL 验证任务
|
||||
|
||||
主要功能:
|
||||
1. 实时执行 httpx 命令验证 URL 存活
|
||||
2. 流式处理命令输出,解析存活的 URL
|
||||
3. 批量保存到数据库(Endpoint 表)
|
||||
4. 避免一次性加载所有 URL 到内存
|
||||
|
||||
数据流向:
|
||||
httpx 命令执行 → 流式输出 → 实时解析 → 批量保存 → Endpoint 表
|
||||
|
||||
优化策略:
|
||||
- 使用 execute_stream 实时处理输出
|
||||
- 流式处理避免内存溢出
|
||||
- 批量操作减少数据库交互
|
||||
- 只保存存活的 URL(status 2xx/3xx)
|
||||
"""
|
||||
|
||||
import logging
|
||||
import json
|
||||
import subprocess
|
||||
import time
|
||||
from pathlib import Path
|
||||
from prefect import task
|
||||
from typing import Generator, Optional
|
||||
from django.db import IntegrityError, OperationalError, DatabaseError
|
||||
from psycopg2 import InterfaceError
|
||||
from dataclasses import dataclass
|
||||
|
||||
from apps.asset.services.snapshot import EndpointSnapshotsService
|
||||
from apps.scan.utils import execute_stream
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
class ServiceSet:
    """Bundle of service instances used for dependency injection.

    Groups every service the URL-verification pipeline needs so they can
    be passed around (and swapped in tests) as a single object.
    """

    # Snapshot service used to persist endpoint snapshots.
    snapshot: EndpointSnapshotsService

    @classmethod
    def create_default(cls) -> "ServiceSet":
        """Build a ServiceSet wired with the default service instances."""
        snapshot_service = EndpointSnapshotsService()
        return cls(snapshot=snapshot_service)
|
||||
|
||||
|
||||
def _sanitize_string(value: str) -> str:
|
||||
"""
|
||||
清理字符串中的 NUL 字符和其他不可打印字符
|
||||
|
||||
PostgreSQL 不允许字符串字段包含 NUL (0x00) 字符
|
||||
"""
|
||||
if not value:
|
||||
return value
|
||||
# 移除 NUL 字符
|
||||
return value.replace('\x00', '')
|
||||
|
||||
|
||||
def _parse_and_validate_line(line: str) -> Optional[dict]:
    """Parse and validate a single line of httpx JSON output.

    Args:
        line: One raw output line.

    Returns:
        Optional[dict]: A record for a live URL (2xx/3xx status code), or
        None when the line is not JSON, not a dict, has no URL, or the
        URL is not alive.
    """
    try:
        # Strip NUL characters before JSON parsing (PostgreSQL safety).
        line = _sanitize_string(line)

        # Non-JSON lines (banners, warnings) are silently skipped.
        try:
            payload = json.loads(line)
        except json.JSONDecodeError:
            return None

        if not isinstance(payload, dict):
            logger.warning("解析后的数据不是字典类型,跳过: %s", str(payload)[:100])
            return None

        # Pull out the fields every record needs.
        url = payload.get('url', '').strip()
        status_code = payload.get('status_code')

        if not url:
            logger.debug("URL 为空,跳过")
            return None

        # Keep only live URLs: 2xx or 3xx responses.
        if not status_code or not (200 <= status_code < 400):
            logger.debug("URL 不存活(状态码: %s),跳过: %s", status_code, url)
            return None

        return {
            'url': _sanitize_string(url),
            'host': _sanitize_string(payload.get('host', '')),
            'status_code': status_code,
            'title': _sanitize_string(payload.get('title', '')),
            'content_length': payload.get('content_length', 0),
            'content_type': _sanitize_string(payload.get('content_type', '')),
            'webserver': _sanitize_string(payload.get('webserver', '')),
            'location': _sanitize_string(payload.get('location', '')),
            'tech': payload.get('tech', []),
            'body_preview': _sanitize_string(payload.get('body_preview', '')),
            'vhost': payload.get('vhost', False),
        }

    except Exception as e:
        logger.error("解析行数据异常: %s - 数据: %s", e, line[:100] if line else 'empty')
        return None
|
||||
|
||||
|
||||
def _parse_httpx_stream_output(
    cmd: str,
    tool_name: str,
    cwd: Optional[str] = None,
    shell: bool = False,
    timeout: Optional[int] = None,
    log_file: Optional[str] = None
) -> Generator[dict, None, None]:
    """Stream-parse httpx command output into live-URL records.

    Args:
        cmd: The httpx command line to execute.
        tool_name: Tool name ('httpx').
        cwd: Working directory for the command.
        shell: Whether to run the command through a shell.
        timeout: Command timeout in seconds.
        log_file: Optional log file path for the raw output.

    Yields:
        dict: One record per live URL found in the output.

    Raises:
        RuntimeError: When the underlying command exceeds *timeout*.
    """
    logger.info("开始流式解析 httpx 输出 - 命令: %s", cmd)

    total_lines = 0
    error_lines = 0
    valid_records = 0

    try:
        # execute_stream yields output lines as the process produces them.
        stream = execute_stream(
            cmd=cmd,
            tool_name=tool_name,
            cwd=cwd,
            shell=shell,
            timeout=timeout,
            log_file=log_file
        )
        for line in stream:
            total_lines += 1

            record = _parse_and_validate_line(line)
            if record is None:
                # Dead URL, non-JSON noise, or malformed record.
                error_lines += 1
                continue

            valid_records += 1
            yield record

            # Progress heartbeat every 500 live URLs.
            if valid_records % 500 == 0:
                logger.info("已解析 %d 条存活的 URL...", valid_records)

    except subprocess.TimeoutExpired as e:
        error_msg = f"流式解析命令输出超时 - 命令执行超过 {timeout} 秒"
        logger.warning(error_msg)  # timeouts are expected; warn, don't error
        raise RuntimeError(error_msg) from e
    except Exception as e:
        logger.error("流式解析命令输出失败: %s", e, exc_info=True)
        raise

    # NOTE: only reached when the stream is fully drained (not on early close).
    logger.info(
        "流式解析完成 - 总行数: %d, 存活 URL: %d, 无效/死链: %d",
        total_lines, valid_records, error_lines
    )
|
||||
|
||||
|
||||
def _save_batch_with_retry(
    batch: list,
    scan_id: int,
    target_id: int,
    batch_num: int,
    services: ServiceSet,
    max_retries: int = 3
) -> dict:
    """Persist one batch of URLs, retrying transient database errors.

    Args:
        batch: Records to save.
        scan_id: Scan task ID.
        target_id: Target ID.
        batch_num: Batch number (for logging).
        services: Service bundle.
        max_retries: Maximum number of attempts.

    Returns:
        dict: {'success': bool, 'saved_count': int}
    """
    attempt = 0
    while attempt < max_retries:
        try:
            saved = _save_batch(batch, scan_id, target_id, batch_num, services)
            return {'success': True, 'saved_count': saved}

        except IntegrityError as e:
            # Integrity violations (unique constraints etc.) usually mean
            # duplicate data: log and skip rather than fail the whole scan.
            logger.error("批次 %d 数据完整性错误,跳过: %s", batch_num, str(e)[:100])
            return {'success': False, 'saved_count': 0}

        except (OperationalError, DatabaseError, InterfaceError) as e:
            # Database-level failures (dropped connection, schema mismatch):
            # exponential backoff; after the last attempt, surface the error
            # so the flow is marked failed.
            if attempt >= max_retries - 1:
                logger.error(
                    "批次 %d 保存失败(已重试 %d 次),将终止任务: %s",
                    batch_num,
                    max_retries,
                    e,
                    exc_info=True,
                )
                raise
            wait_time = 2 ** attempt
            logger.warning(
                "批次 %d 保存失败(第 %d 次尝试),%d秒后重试: %s",
                batch_num, attempt + 1, wait_time, str(e)[:100]
            )
            time.sleep(wait_time)

        except Exception as e:
            # Unknown failures are not swallowed either; let the flow fail.
            logger.error("批次 %d 未知错误: %s", batch_num, e, exc_info=True)
            raise

        attempt += 1

    # Unreachable in practice; kept to satisfy the declared return contract.
    return {'success': False, 'saved_count': 0}
|
||||
|
||||
|
||||
def _save_batch(
    batch: list,
    scan_id: int,
    target_id: int,
    batch_num: int,
    services: ServiceSet
) -> int:
    """Persist one batch of records as endpoint snapshots.

    Args:
        batch: Records (list of dict) to persist.
        scan_id: Scan task ID.
        target_id: Target ID.
        batch_num: Batch number (for logging).
        services: Service bundle.

    Returns:
        int: Number of records created.
    """
    if not batch:
        logger.debug("批次 %d 为空,跳过处理", batch_num)
        return 0

    from apps.asset.dtos.snapshot import EndpointSnapshotDTO

    # Build snapshot DTOs; records that fail construction are skipped.
    snapshots = []
    for record in batch:
        try:
            snapshots.append(EndpointSnapshotDTO(
                scan_id=scan_id,
                url=record['url'],
                host=record.get('host', ''),
                title=record.get('title', ''),
                status_code=record.get('status_code'),
                content_length=record.get('content_length', 0),
                location=record.get('location', ''),
                webserver=record.get('webserver', ''),
                content_type=record.get('content_type', ''),
                tech=record.get('tech', []),
                body_preview=record.get('body_preview', ''),
                vhost=record.get('vhost', False),
                matched_gf_patterns=[],
                target_id=target_id,
            ))
        except Exception as e:
            logger.error("处理记录失败: %s,错误: %s", record.get('url', 'Unknown'), e)
            continue

    if not snapshots:
        return 0

    try:
        # Save snapshots and sync them into the asset tables in one call.
        services.snapshot.save_and_sync(snapshots)
    except Exception as e:
        logger.error("批次 %d 批量保存失败: %s", batch_num, e)
        raise

    count = len(snapshots)
    logger.info(
        "批次 %d: 保存了 %d 个存活的 URL(共 %d 个)",
        batch_num, count, len(batch)
    )
    return count
|
||||
|
||||
|
||||
def _process_records_in_batches(
    data_generator,
    scan_id: int,
    target_id: int,
    batch_size: int,
    services: ServiceSet
) -> dict:
    """Drain *data_generator*, saving records to the database in batches.

    Args:
        data_generator: Iterable of parsed URL records.
        scan_id: Scan ID.
        target_id: Target ID.
        batch_size: Records per database batch.
        services: Service bundle.

    Returns:
        dict: Processing statistics (records seen, URLs saved, failure
        count, batch count, failed batch numbers).
    """
    pending = []
    batch_num = 0
    total_records = 0
    total_saved = 0
    failed_batches = []

    def _flush():
        """Save the current pending batch and record its outcome."""
        nonlocal total_saved
        result = _save_batch_with_retry(
            pending, scan_id, target_id, batch_num, services
        )
        if result['success']:
            total_saved += result['saved_count']
        else:
            failed_batches.append(batch_num)

    for record in data_generator:
        pending.append(record)
        total_records += 1

        # A full batch: persist it and start a fresh one.
        if len(pending) >= batch_size:
            batch_num += 1
            _flush()
            pending = []

            # Progress line every 10 batches.
            if batch_num % 10 == 0:
                logger.info(
                    "进度: 已处理 %d 批次,%d 条记录,保存 %d 条",
                    batch_num, total_records, total_saved
                )

    # Persist the final partial batch, if any.
    if pending:
        batch_num += 1
        _flush()

    return {
        'processed_records': total_records,
        'saved_urls': total_saved,
        'failed_urls': total_records - total_saved,
        'batch_count': batch_num,
        'failed_batches': failed_batches
    }
|
||||
|
||||
|
||||
@task(name="run_and_stream_save_urls", retries=3, retry_delay_seconds=10)
def run_and_stream_save_urls_task(
    cmd: str,
    tool_name: str,
    scan_id: int,
    target_id: int,
    cwd: Optional[str] = None,
    shell: bool = False,
    batch_size: int = 500,
    timeout: Optional[int] = None,
    log_file: Optional[str] = None
) -> dict:
    """Run httpx verification and stream-save live URLs.

    Steps:
        1. Execute the httpx command to verify URL liveness.
        2. Parse its output incrementally as it arrives.
        3. Save live URLs to the Endpoint table in batches.

    Args:
        cmd: The httpx command line.
        tool_name: Tool name ('httpx').
        scan_id: Scan task ID.
        target_id: Target ID.
        cwd: Working directory.
        shell: Whether to run through a shell.
        batch_size: Database batch size (default 500).
        timeout: Command timeout in seconds.
        log_file: Optional raw-output log path.

    Returns:
        dict: {'processed_records': int, 'saved_urls': int,
        'failed_urls': int, 'batch_count': int, 'failed_batches': list}

    Raises:
        subprocess.TimeoutExpired: When the command times out.
        RuntimeError: On any other failure (original exception chained).
    """
    logger.info(
        "开始执行流式 URL 验证任务 - target_id=%s, 超时=%s秒, 命令: %s",
        target_id, timeout if timeout else '无限制', cmd
    )

    data_generator = None

    try:
        # Set up the output stream and the services that consume it.
        data_generator = _parse_httpx_stream_output(
            cmd, tool_name, cwd, shell, timeout, log_file
        )
        services = ServiceSet.create_default()

        # Consume the stream, saving records batch by batch.
        stats = _process_records_in_batches(
            data_generator, scan_id, target_id, batch_size, services
        )

        # Final summary.
        logger.info(
            "✓ URL 验证任务完成 - 处理: %d, 存活: %d, 失败: %d",
            stats['processed_records'],
            stats['saved_urls'],
            stats['failed_urls']
        )
        return stats

    except subprocess.TimeoutExpired:
        logger.warning(
            "⚠️ URL 验证任务超时 - target_id=%s, 超时=%s秒",
            target_id, timeout
        )
        raise

    except Exception as e:
        error_msg = f"流式执行 URL 验证任务失败: {e}"
        logger.error(error_msg, exc_info=True)
        raise RuntimeError(error_msg) from e

    finally:
        # Ensure the generator (and its underlying process) is shut down.
        if data_generator is not None:
            try:
                data_generator.close()
            except (GeneratorExit, StopIteration):
                pass
            except Exception as e:
                logger.warning("关闭数据生成器时出错: %s", e)
|
||||
@@ -1,77 +0,0 @@
|
||||
"""导出 Endpoint URL 到文件的 Task
|
||||
|
||||
基于 EndpointService.iter_endpoint_urls_by_target 按目标流式导出端点 URL,
|
||||
用于漏洞扫描(如 Dalfox XSS)的输入文件生成。
|
||||
"""
|
||||
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from typing import Dict
|
||||
|
||||
from prefect import task
|
||||
|
||||
from apps.asset.services import EndpointService
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@task(name="export_endpoints")
def export_endpoints_task(
    target_id: int,
    output_file: str,
    batch_size: int = 1000,
) -> Dict[str, object]:
    """Export every Endpoint URL of a target to a text file.

    Streams URLs from EndpointService.iter_endpoint_urls_by_target so
    memory stays bounded; used to build input files for vulnerability
    scanners (e.g. Dalfox XSS).

    Args:
        target_id: Target ID.
        output_file: Output file path (absolute).
        batch_size: Database iteration chunk size.

    Returns:
        dict: {"success": bool, "output_file": str, "total_count": int}

    Raises:
        FileNotFoundError: Output directory missing.
        PermissionError: Output path not writable.
        Exception: Any other export failure (re-raised after logging).
    """
    try:
        logger.info("开始导出 Endpoint URL - Target ID: %d, 输出文件: %s", target_id, output_file)

        output_path = Path(output_file)
        output_path.parent.mkdir(parents=True, exist_ok=True)

        urls = EndpointService().iter_endpoint_urls_by_target(target_id, chunk_size=batch_size)

        total_count = 0
        with open(output_path, "w", encoding="utf-8", buffering=8192) as f:
            for url in urls:
                f.write(f"{url}\n")
                total_count += 1
                # Progress line every 10k URLs.
                if total_count % 10000 == 0:
                    logger.info("已导出 %d 个 Endpoint URL...", total_count)

        logger.info(
            "✓ Endpoint URL 导出完成 - 总数: %d, 文件: %s (%.2f KB)",
            total_count,
            str(output_path),
            output_path.stat().st_size / 1024,
        )

        return {
            "success": True,
            "output_file": str(output_path),
            "total_count": total_count,
        }

    except FileNotFoundError as e:
        logger.error("输出目录不存在: %s", e)
        raise
    except PermissionError as e:
        logger.error("文件写入权限不足: %s", e)
        raise
    except Exception as e:
        logger.exception("导出 Endpoint URL 失败: %s", e)
        raise
|
||||
@@ -1,54 +0,0 @@
|
||||
"""
|
||||
工作空间相关的 Prefect Tasks
|
||||
|
||||
负责扫描工作空间的创建、验证和管理
|
||||
"""
|
||||
|
||||
from pathlib import Path
|
||||
from prefect import task
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@task(
    name="create_scan_workspace",
    description="创建并验证 Scan 工作空间目录",
    retries=2,
    retry_delay_seconds=5
)
def create_scan_workspace_task(scan_workspace_dir: str) -> Path:
    """Create the scan workspace directory and verify it is writable.

    Args:
        scan_workspace_dir: Path of the scan workspace directory.

    Returns:
        Path: Path object for the created workspace.

    Raises:
        OSError: If the directory cannot be created or is not writable.
    """
    workspace = Path(scan_workspace_dir)

    # Create the directory tree (idempotent thanks to exist_ok).
    try:
        workspace.mkdir(parents=True, exist_ok=True)
        logger.info("✓ Scan 工作空间已创建: %s", workspace)
    except OSError as e:
        logger.error("创建 Scan 工作空间失败: %s - %s", scan_workspace_dir, e)
        raise

    # Probe writability by touching and removing a marker file.
    probe = workspace / ".test_write"
    try:
        probe.touch()
        probe.unlink()
        logger.info("✓ Scan 工作空间验证通过(可写): %s", workspace)
    except OSError as e:
        error_msg = f"Scan 工作空间不可写: {workspace}"
        logger.error(error_msg)
        raise OSError(error_msg) from e

    return workspace
|
||||
@@ -1,33 +0,0 @@
|
||||
"""
|
||||
扫描模块工具包
|
||||
|
||||
提供扫描相关的工具函数。
|
||||
"""
|
||||
|
||||
from .directory_cleanup import remove_directory
from .command_builder import build_scan_command
from .command_executor import execute_and_wait, execute_stream
from .wordlist_helpers import ensure_wordlist_local
from .nuclei_helpers import ensure_nuclei_templates_local
from .performance import FlowPerformanceTracker, CommandPerformanceTracker
from . import config_parser

__all__ = [
    # Directory cleanup
    'remove_directory',
    # Command building
    'build_scan_command',  # scan-tool command builder (f-string based)
    # Command execution
    'execute_and_wait',  # blocking execution (file output)
    'execute_stream',  # streaming execution (real-time processing)
    # Wordlist files
    'ensure_wordlist_local',  # ensure a local wordlist (with hash check)
    # Nuclei templates
    'ensure_nuclei_templates_local',  # ensure local templates (with commit-hash check)
    # Performance monitoring
    'FlowPerformanceTracker',  # flow performance tracker (with system resource sampling)
    'CommandPerformanceTracker',  # command performance tracker
    # Config parsing
    'config_parser',
]
|
||||
|
||||
@@ -1,421 +0,0 @@
|
||||
from rest_framework import viewsets, status
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.exceptions import NotFound, APIException
|
||||
from rest_framework.filters import SearchFilter
|
||||
from django.core.exceptions import ObjectDoesNotExist, ValidationError
|
||||
from django.db.utils import DatabaseError, IntegrityError, OperationalError
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
from ..models import Scan, ScheduledScan
|
||||
from ..serializers import (
|
||||
ScanSerializer, ScanHistorySerializer, QuickScanSerializer,
|
||||
ScheduledScanSerializer, CreateScheduledScanSerializer,
|
||||
UpdateScheduledScanSerializer, ToggleScheduledScanSerializer
|
||||
)
|
||||
from ..services.scan_service import ScanService
|
||||
from ..services.scheduled_scan_service import ScheduledScanService
|
||||
from ..repositories import ScheduledScanDTO
|
||||
from apps.targets.services.target_service import TargetService
|
||||
from apps.targets.services.organization_service import OrganizationService
|
||||
from apps.engine.services.engine_service import EngineService
|
||||
from apps.common.definitions import ScanStatus
|
||||
from apps.common.pagination import BasePagination
|
||||
|
||||
|
||||
class ScanViewSet(viewsets.ModelViewSet):
|
||||
"""扫描任务视图集"""
|
||||
serializer_class = ScanSerializer
|
||||
pagination_class = BasePagination
|
||||
filter_backends = [SearchFilter]
|
||||
search_fields = ['target__name'] # 按目标名称搜索
|
||||
|
||||
def get_queryset(self):
|
||||
"""优化查询集,提升API性能
|
||||
|
||||
查询优化策略:
|
||||
- select_related: 预加载 target 和 engine(一对一/多对一关系,使用 JOIN)
|
||||
- 移除 prefetch_related: 避免加载大量资产数据到内存
|
||||
- order_by: 按创建时间降序排列(最新创建的任务排在最前面)
|
||||
|
||||
性能优化原理:
|
||||
- 列表页:使用缓存统计字段(cached_*_count),避免实时 COUNT 查询
|
||||
- 序列化器:严格验证缓存字段,确保数据一致性
|
||||
- 分页场景:每页只显示10条记录,查询高效
|
||||
- 避免大数据加载:不再预加载所有关联的资产数据
|
||||
"""
|
||||
# 只保留必要的 select_related,移除所有 prefetch_related
|
||||
scan_service = ScanService()
|
||||
queryset = scan_service.get_all_scans(prefetch_relations=True)
|
||||
|
||||
return queryset
|
||||
|
||||
def get_serializer_class(self):
|
||||
"""根据不同的 action 返回不同的序列化器
|
||||
|
||||
- list action: 使用 ScanHistorySerializer(包含 summary 和 progress)
|
||||
- retrieve action: 使用 ScanHistorySerializer(包含 summary 和 progress)
|
||||
- 其他 action: 使用标准的 ScanSerializer
|
||||
"""
|
||||
if self.action in ['list', 'retrieve']:
|
||||
return ScanHistorySerializer
|
||||
return ScanSerializer
|
||||
|
||||
def destroy(self, request, *args, **kwargs):
|
||||
"""
|
||||
删除单个扫描任务(两阶段删除)
|
||||
|
||||
1. 软删除:立即对用户不可见
|
||||
2. 硬删除:后台异步执行
|
||||
"""
|
||||
try:
|
||||
scan = self.get_object()
|
||||
scan_service = ScanService()
|
||||
result = scan_service.delete_scans_two_phase([scan.id])
|
||||
|
||||
return Response({
|
||||
'message': f'已删除扫描任务: Scan #{scan.id}',
|
||||
'scanId': scan.id,
|
||||
'deletedCount': result['soft_deleted_count'],
|
||||
'deletedScans': result['scan_names'],
|
||||
'detail': {
|
||||
'phase1': '软删除完成,用户已看不到数据',
|
||||
'phase2': '硬删除任务已分发,将在后台执行'
|
||||
}
|
||||
}, status=status.HTTP_200_OK)
|
||||
|
||||
except Scan.DoesNotExist:
|
||||
raise NotFound('扫描任务不存在')
|
||||
except ValueError as e:
|
||||
raise NotFound(str(e))
|
||||
except Exception as e:
|
||||
logger.exception("删除扫描任务时发生错误")
|
||||
raise APIException('服务器错误,请稍后重试')
|
||||
|
||||
@action(detail=False, methods=['post'])
|
||||
def quick(self, request):
|
||||
"""
|
||||
快速扫描接口
|
||||
|
||||
功能:
|
||||
1. 接收目标列表和引擎配置
|
||||
2. 自动批量创建/获取目标
|
||||
3. 立即发起批量扫描
|
||||
|
||||
请求参数:
|
||||
{
|
||||
"targets": [{"name": "example.com"}, {"name": "1.1.1.1"}],
|
||||
"engine_id": 1
|
||||
}
|
||||
"""
|
||||
serializer = QuickScanSerializer(data=request.data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
|
||||
targets_data = serializer.validated_data['targets']
|
||||
engine_id = serializer.validated_data.get('engine_id')
|
||||
|
||||
try:
|
||||
# 1. 批量创建/获取目标
|
||||
target_service = TargetService()
|
||||
batch_result = target_service.batch_create_targets(
|
||||
targets_data=targets_data,
|
||||
organization_id=None # 快速扫描不关联组织
|
||||
)
|
||||
|
||||
# 收集所有目标对象(包括新创建和已存在的)
|
||||
# batch_create_targets 返回的是统计信息,我们需要获取目标对象列表
|
||||
# 这里重新查询刚刚创建/获取的目标
|
||||
target_names = [t['name'] for t in targets_data]
|
||||
targets = target_service.get_targets_by_names(target_names)
|
||||
|
||||
if not targets:
|
||||
return Response(
|
||||
{'error': '没有有效的目标可供扫描'},
|
||||
status=status.HTTP_400_BAD_REQUEST
|
||||
)
|
||||
|
||||
# 2. 获取扫描引擎
|
||||
engine_service = EngineService()
|
||||
engine = engine_service.get_engine(engine_id)
|
||||
if not engine:
|
||||
raise ValidationError(f'扫描引擎 ID {engine_id} 不存在')
|
||||
|
||||
# 3. 批量发起扫描
|
||||
scan_service = ScanService()
|
||||
created_scans = scan_service.create_scans(
|
||||
targets=targets,
|
||||
engine=engine
|
||||
)
|
||||
|
||||
# 序列化返回结果
|
||||
scan_serializer = ScanSerializer(created_scans, many=True)
|
||||
|
||||
return Response({
|
||||
'message': f'快速扫描已启动:{len(created_scans)} 个任务',
|
||||
'target_stats': {
|
||||
'created': batch_result['created_count'],
|
||||
'failed': batch_result['failed_count']
|
||||
},
|
||||
'scans': scan_serializer.data
|
||||
}, status=status.HTTP_201_CREATED)
|
||||
|
||||
except ValidationError as e:
|
||||
return Response({'error': str(e)}, status=status.HTTP_400_BAD_REQUEST)
|
||||
except Exception as e:
|
||||
logger.exception("快速扫描启动失败")
|
||||
return Response(
|
||||
{'error': '服务器内部错误,请稍后重试'},
|
||||
status=status.HTTP_500_INTERNAL_SERVER_ERROR
|
||||
)
|
||||
|
||||
@action(detail=False, methods=['post'])
|
||||
def initiate(self, request):
|
||||
"""
|
||||
发起扫描任务
|
||||
|
||||
请求参数:
|
||||
- organization_id: 组织ID (int, 可选)
|
||||
- target_id: 目标ID (int, 可选)
|
||||
- engine_id: 扫描引擎ID (int, 必填)
|
||||
|
||||
注意: organization_id 和 target_id 二选一
|
||||
|
||||
返回:
|
||||
- 扫描任务详情(单个或多个)
|
||||
"""
|
||||
# 获取请求数据
|
||||
organization_id = request.data.get('organization_id')
|
||||
target_id = request.data.get('target_id')
|
||||
engine_id = request.data.get('engine_id')
|
||||
|
||||
try:
|
||||
# 步骤1:准备扫描所需的数据(验证参数、查询资源、返回目标列表和引擎)
|
||||
scan_service = ScanService()
|
||||
targets, engine = scan_service.prepare_initiate_scan(
|
||||
organization_id=organization_id,
|
||||
target_id=target_id,
|
||||
engine_id=engine_id
|
||||
)
|
||||
|
||||
# 步骤2:批量创建扫描记录并分发扫描任务
|
||||
created_scans = scan_service.create_scans(
|
||||
targets=targets,
|
||||
engine=engine
|
||||
)
|
||||
|
||||
# 序列化返回结果
|
||||
scan_serializer = ScanSerializer(created_scans, many=True)
|
||||
|
||||
return Response(
|
||||
{
|
||||
'message': f'已成功发起 {len(created_scans)} 个扫描任务',
|
||||
'count': len(created_scans),
|
||||
'scans': scan_serializer.data
|
||||
},
|
||||
status=status.HTTP_201_CREATED
|
||||
)
|
||||
|
||||
except ObjectDoesNotExist as e:
|
||||
# 资源不存在错误(由 service 层抛出)
|
||||
error_msg = str(e)
|
||||
return Response(
|
||||
{'error': error_msg},
|
||||
status=status.HTTP_404_NOT_FOUND
|
||||
)
|
||||
|
||||
except ValidationError as e:
|
||||
# 参数验证错误(由 service 层抛出)
|
||||
return Response(
|
||||
{'error': str(e)},
|
||||
status=status.HTTP_400_BAD_REQUEST
|
||||
)
|
||||
|
||||
except (DatabaseError, IntegrityError, OperationalError):
|
||||
# 数据库错误
|
||||
return Response(
|
||||
{'error': '数据库错误,请稍后重试'},
|
||||
status=status.HTTP_503_SERVICE_UNAVAILABLE
|
||||
)
|
||||
|
||||
# 所有快照相关的 action 和 export 已迁移到 asset/views.py 中的快照 ViewSet
|
||||
# GET /api/scans/{id}/subdomains/ -> SubdomainSnapshotViewSet
|
||||
# GET /api/scans/{id}/subdomains/export/ -> SubdomainSnapshotViewSet.export
|
||||
# GET /api/scans/{id}/websites/ -> WebsiteSnapshotViewSet
|
||||
# GET /api/scans/{id}/websites/export/ -> WebsiteSnapshotViewSet.export
|
||||
# GET /api/scans/{id}/directories/ -> DirectorySnapshotViewSet
|
||||
# GET /api/scans/{id}/directories/export/ -> DirectorySnapshotViewSet.export
|
||||
# GET /api/scans/{id}/endpoints/ -> EndpointSnapshotViewSet
|
||||
# GET /api/scans/{id}/endpoints/export/ -> EndpointSnapshotViewSet.export
|
||||
# GET /api/scans/{id}/ip-addresses/ -> HostPortMappingSnapshotViewSet
|
||||
# GET /api/scans/{id}/ip-addresses/export/ -> HostPortMappingSnapshotViewSet.export
|
||||
# GET /api/scans/{id}/vulnerabilities/ -> VulnerabilitySnapshotViewSet
|
||||
|
||||
@action(detail=False, methods=['post', 'delete'], url_path='bulk-delete')
|
||||
def bulk_delete(self, request):
|
||||
"""
|
||||
批量删除扫描记录
|
||||
|
||||
请求参数:
|
||||
- ids: 扫描ID列表 (list[int], 必填)
|
||||
|
||||
示例请求:
|
||||
POST /api/scans/bulk-delete/
|
||||
{
|
||||
"ids": [1, 2, 3]
|
||||
}
|
||||
|
||||
返回:
|
||||
- message: 成功消息
|
||||
- deletedCount: 实际删除的记录数
|
||||
|
||||
注意:
|
||||
- 使用级联删除,会同时删除关联的子域名、端点等数据
|
||||
- 只删除存在的记录,不存在的ID会被忽略
|
||||
"""
|
||||
ids = request.data.get('ids', [])
|
||||
|
||||
# 参数验证
|
||||
if not ids:
|
||||
return Response(
|
||||
{'error': '缺少必填参数: ids'},
|
||||
status=status.HTTP_400_BAD_REQUEST
|
||||
)
|
||||
|
||||
if not isinstance(ids, list):
|
||||
return Response(
|
||||
{'error': 'ids 必须是数组'},
|
||||
status=status.HTTP_400_BAD_REQUEST
|
||||
)
|
||||
|
||||
if not all(isinstance(i, int) for i in ids):
|
||||
return Response(
|
||||
{'error': 'ids 数组中的所有元素必须是整数'},
|
||||
status=status.HTTP_400_BAD_REQUEST
|
||||
)
|
||||
|
||||
try:
|
||||
# 使用 Service 层批量删除(两阶段删除)
|
||||
scan_service = ScanService()
|
||||
result = scan_service.delete_scans_two_phase(ids)
|
||||
|
||||
return Response({
|
||||
'message': f"已删除 {result['soft_deleted_count']} 个扫描任务",
|
||||
'deletedCount': result['soft_deleted_count'],
|
||||
'deletedScans': result['scan_names'],
|
||||
'detail': {
|
||||
'phase1': '软删除完成,用户已看不到数据',
|
||||
'phase2': '硬删除任务已分发,将在后台执行'
|
||||
}
|
||||
}, status=status.HTTP_200_OK)
|
||||
|
||||
except ValueError as e:
|
||||
# 未找到记录
|
||||
raise NotFound(str(e))
|
||||
|
||||
except Exception as e:
|
||||
logger.exception("批量删除扫描任务时发生错误")
|
||||
raise APIException('服务器错误,请稍后重试')
|
||||
|
||||
@action(detail=False, methods=['get'])
|
||||
def statistics(self, request):
|
||||
"""
|
||||
获取扫描统计数据
|
||||
|
||||
返回扫描任务的汇总统计信息,用于仪表板和扫描历史页面。
|
||||
使用缓存字段聚合查询,性能优异。
|
||||
|
||||
返回:
|
||||
- total: 总扫描次数
|
||||
- running: 运行中的扫描数量
|
||||
- completed: 已完成的扫描数量
|
||||
- failed: 失败的扫描数量
|
||||
- totalVulns: 总共发现的漏洞数量
|
||||
- totalSubdomains: 总共发现的子域名数量
|
||||
- totalEndpoints: 总共发现的端点数量
|
||||
- totalAssets: 总资产数
|
||||
"""
|
||||
try:
|
||||
# 使用 Service 层获取统计数据
|
||||
scan_service = ScanService()
|
||||
stats = scan_service.get_statistics()
|
||||
|
||||
return Response({
|
||||
'total': stats['total'],
|
||||
'running': stats['running'],
|
||||
'completed': stats['completed'],
|
||||
'failed': stats['failed'],
|
||||
'totalVulns': stats['total_vulns'],
|
||||
'totalSubdomains': stats['total_subdomains'],
|
||||
'totalEndpoints': stats['total_endpoints'],
|
||||
'totalWebsites': stats['total_websites'],
|
||||
'totalAssets': stats['total_assets'],
|
||||
})
|
||||
|
||||
except (DatabaseError, OperationalError):
|
||||
return Response(
|
||||
{'error': '数据库错误,请稍后重试'},
|
||||
status=status.HTTP_503_SERVICE_UNAVAILABLE
|
||||
)
|
||||
|
||||
@action(detail=True, methods=['post'])
|
||||
def stop(self, request, pk=None): # pylint: disable=unused-argument
|
||||
"""
|
||||
停止扫描任务
|
||||
|
||||
URL: POST /api/scans/{id}/stop/
|
||||
|
||||
功能:
|
||||
- 终止正在运行或初始化的扫描任务
|
||||
- 更新扫描状态为 CANCELLED
|
||||
|
||||
状态限制:
|
||||
- 只能停止 RUNNING 或 INITIATED 状态的扫描
|
||||
- 已完成、失败或取消的扫描无法停止
|
||||
|
||||
返回:
|
||||
- message: 成功消息
|
||||
- revokedTaskCount: 取消的 Flow Run 数量
|
||||
"""
|
||||
try:
|
||||
# 使用 Service 层处理停止逻辑
|
||||
scan_service = ScanService()
|
||||
success, revoked_count = scan_service.stop_scan(scan_id=pk)
|
||||
|
||||
if not success:
|
||||
# 检查是否是状态不允许的问题
|
||||
scan = scan_service.get_scan(scan_id=pk, prefetch_relations=False)
|
||||
if scan and scan.status not in [ScanStatus.RUNNING, ScanStatus.INITIATED]:
|
||||
return Response(
|
||||
{
|
||||
'error': f'无法停止扫描:当前状态为 {ScanStatus(scan.status).label}',
|
||||
'detail': '只能停止运行中或初始化状态的扫描'
|
||||
},
|
||||
status=status.HTTP_400_BAD_REQUEST
|
||||
)
|
||||
# 其他失败原因
|
||||
return Response(
|
||||
{'error': '停止扫描失败'},
|
||||
status=status.HTTP_500_INTERNAL_SERVER_ERROR
|
||||
)
|
||||
|
||||
return Response(
|
||||
{
|
||||
'message': f'扫描已停止,已撤销 {revoked_count} 个任务',
|
||||
'revokedTaskCount': revoked_count
|
||||
},
|
||||
status=status.HTTP_200_OK
|
||||
)
|
||||
|
||||
except ObjectDoesNotExist:
|
||||
return Response(
|
||||
{'error': f'扫描 ID {pk} 不存在'},
|
||||
status=status.HTTP_404_NOT_FOUND
|
||||
)
|
||||
|
||||
except (DatabaseError, IntegrityError, OperationalError):
|
||||
return Response(
|
||||
{'error': '数据库错误,请稍后重试'},
|
||||
status=status.HTTP_503_SERVICE_UNAVAILABLE
|
||||
)
|
||||
@@ -1,27 +0,0 @@
|
||||
[tool.pytest.ini_options]
|
||||
DJANGO_SETTINGS_MODULE = "config.settings"
|
||||
python_files = ["test_*.py", "*_test.py"]
|
||||
python_classes = ["Test*"]
|
||||
python_functions = ["test_*"]
|
||||
testpaths = ["apps"]
|
||||
addopts = "-v --reuse-db"
|
||||
|
||||
[tool.pylint]
|
||||
django-settings-module = "config.settings"
|
||||
load-plugins = "pylint_django"
|
||||
|
||||
[tool.pylint.messages_control]
|
||||
disable = [
|
||||
"missing-docstring",
|
||||
"invalid-name",
|
||||
"too-few-public-methods",
|
||||
"no-member",
|
||||
"import-error",
|
||||
"no-name-in-module",
|
||||
]
|
||||
|
||||
[tool.pylint.format]
|
||||
max-line-length = 120
|
||||
|
||||
[tool.pylint.basic]
|
||||
good-names = ["i", "j", "k", "ex", "Run", "_", "id", "pk", "ip", "url", "db", "qs"]
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 95 KiB |
@@ -4,27 +4,27 @@ import { VulnSeverityChart } from "@/components/dashboard/vuln-severity-chart"
|
||||
import { DashboardDataTable } from "@/components/dashboard/dashboard-data-table"
|
||||
|
||||
/**
|
||||
* 仪表板页面组件
|
||||
* 这是应用的主要仪表板页面,包含卡片、图表和数据表格
|
||||
* 布局结构已移至根布局组件中
|
||||
* Dashboard page component
|
||||
* This is the main dashboard page of the application, containing cards, charts and data tables
|
||||
* Layout structure has been moved to the root layout component
|
||||
*/
|
||||
export default function Page() {
|
||||
return (
|
||||
// 内容区域,包含卡片、图表和数据表格
|
||||
<div className="flex flex-col gap-4 py-4 md:gap-6 md:py-6">
|
||||
{/* 顶部统计卡片 */}
|
||||
// Content area containing cards, charts and data tables
|
||||
<div className="flex flex-col gap-4 py-4 md:gap-6 md:py-6 animate-dashboard-fade-in">
|
||||
{/* Top statistics cards */}
|
||||
<DashboardStatCards />
|
||||
|
||||
{/* 图表区域 - 趋势图 + 漏洞分布 */}
|
||||
{/* Chart area - Trend chart + Vulnerability distribution */}
|
||||
<div className="grid gap-4 px-4 lg:px-6 @xl/main:grid-cols-2">
|
||||
{/* 资产趋势折线图 */}
|
||||
{/* Asset trend line chart */}
|
||||
<AssetTrendChart />
|
||||
|
||||
{/* 漏洞严重程度分布 */}
|
||||
{/* Vulnerability severity distribution */}
|
||||
<VulnSeverityChart />
|
||||
</div>
|
||||
|
||||
{/* 漏洞 / 扫描历史 Tab */}
|
||||
{/* Vulnerabilities / Scan history tab */}
|
||||
<div className="px-4 lg:px-6">
|
||||
<DashboardDataTable />
|
||||
</div>
|
||||
139
frontend/app/[locale]/layout.tsx
Normal file
139
frontend/app/[locale]/layout.tsx
Normal file
@@ -0,0 +1,139 @@
|
||||
import type React from "react"
|
||||
import type { Metadata } from "next"
|
||||
import { NextIntlClientProvider } from 'next-intl'
|
||||
import { getMessages, setRequestLocale, getTranslations } from 'next-intl/server'
|
||||
import { notFound } from 'next/navigation'
|
||||
import { locales, localeHtmlLang, type Locale } from '@/i18n/config'
|
||||
|
||||
// Import global style files
|
||||
import "../globals.css"
|
||||
// Import Noto Sans SC local font
|
||||
import "@fontsource/noto-sans-sc/400.css"
|
||||
import "@fontsource/noto-sans-sc/500.css"
|
||||
import "@fontsource/noto-sans-sc/700.css"
|
||||
// Import color themes
|
||||
import "@/styles/themes/bubblegum.css"
|
||||
import "@/styles/themes/quantum-rose.css"
|
||||
import "@/styles/themes/clean-slate.css"
|
||||
import "@/styles/themes/cosmic-night.css"
|
||||
import "@/styles/themes/vercel.css"
|
||||
import "@/styles/themes/vercel-dark.css"
|
||||
import "@/styles/themes/violet-bloom.css"
|
||||
import "@/styles/themes/cyberpunk-1.css"
|
||||
import { Suspense } from "react"
|
||||
import Script from "next/script"
|
||||
import { QueryProvider } from "@/components/providers/query-provider"
|
||||
import { ThemeProvider } from "@/components/providers/theme-provider"
|
||||
import { UiI18nProvider } from "@/components/providers/ui-i18n-provider"
|
||||
|
||||
// Import common layout components
|
||||
import { RoutePrefetch } from "@/components/route-prefetch"
|
||||
import { RouteProgress } from "@/components/route-progress"
|
||||
import { AuthLayout } from "@/components/auth/auth-layout"
|
||||
|
||||
// Dynamically generate metadata
|
||||
export async function generateMetadata({ params }: { params: Promise<{ locale: string }> }): Promise<Metadata> {
|
||||
const { locale } = await params
|
||||
const t = await getTranslations({ locale, namespace: 'metadata' })
|
||||
|
||||
return {
|
||||
title: t('title'),
|
||||
description: t('description'),
|
||||
keywords: t('keywords').split(',').map(k => k.trim()),
|
||||
generator: "Orbit ASM Platform",
|
||||
authors: [{ name: "yyhuni" }],
|
||||
icons: {
|
||||
icon: [{ url: "/icon.svg", type: "image/svg+xml" }],
|
||||
},
|
||||
openGraph: {
|
||||
title: t('ogTitle'),
|
||||
description: t('ogDescription'),
|
||||
type: "website",
|
||||
locale: locale === 'zh' ? 'zh_CN' : 'en_US',
|
||||
},
|
||||
robots: {
|
||||
index: true,
|
||||
follow: true,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// Use Noto Sans SC + system font fallback, fully loaded locally
|
||||
const fontConfig = {
|
||||
className: "font-sans",
|
||||
style: {
|
||||
fontFamily: "'Noto Sans SC', system-ui, -apple-system, PingFang SC, Hiragino Sans GB, Microsoft YaHei, sans-serif"
|
||||
}
|
||||
}
|
||||
|
||||
// Generate static parameters, support all languages
|
||||
export function generateStaticParams() {
|
||||
return locales.map((locale) => ({ locale }))
|
||||
}
|
||||
|
||||
interface Props {
|
||||
children: React.ReactNode
|
||||
params: Promise<{ locale: string }>
|
||||
}
|
||||
|
||||
/**
|
||||
* Language layout component
|
||||
* Wraps all pages, provides internationalization context
|
||||
*/
|
||||
export default async function LocaleLayout({
|
||||
children,
|
||||
params,
|
||||
}: Props) {
|
||||
const { locale } = await params
|
||||
|
||||
// Validate locale validity
|
||||
if (!locales.includes(locale as Locale)) {
|
||||
notFound()
|
||||
}
|
||||
|
||||
// Enable static rendering
|
||||
setRequestLocale(locale)
|
||||
|
||||
// Load translation messages
|
||||
const messages = await getMessages()
|
||||
|
||||
return (
|
||||
<html lang={localeHtmlLang[locale as Locale]} suppressHydrationWarning>
|
||||
<body className={fontConfig.className} style={fontConfig.style}>
|
||||
{/* Load external scripts */}
|
||||
<Script
|
||||
src="https://tweakcn.com/live-preview.min.js"
|
||||
strategy="beforeInteractive"
|
||||
crossOrigin="anonymous"
|
||||
/>
|
||||
{/* Route loading progress bar */}
|
||||
<Suspense fallback={null}>
|
||||
<RouteProgress />
|
||||
</Suspense>
|
||||
{/* ThemeProvider provides theme switching functionality */}
|
||||
<ThemeProvider
|
||||
attribute="class"
|
||||
defaultTheme="dark"
|
||||
enableSystem
|
||||
disableTransitionOnChange
|
||||
>
|
||||
{/* NextIntlClientProvider provides internationalization context */}
|
||||
<NextIntlClientProvider messages={messages}>
|
||||
{/* QueryProvider provides React Query functionality */}
|
||||
<QueryProvider>
|
||||
{/* UiI18nProvider provides UI component translations */}
|
||||
<UiI18nProvider>
|
||||
{/* Route prefetch */}
|
||||
<RoutePrefetch />
|
||||
{/* AuthLayout handles authentication and sidebar display */}
|
||||
<AuthLayout>
|
||||
{children}
|
||||
</AuthLayout>
|
||||
</UiI18nProvider>
|
||||
</QueryProvider>
|
||||
</NextIntlClientProvider>
|
||||
</ThemeProvider>
|
||||
</body>
|
||||
</html>
|
||||
)
|
||||
}
|
||||
28
frontend/app/[locale]/login/layout.tsx
Normal file
28
frontend/app/[locale]/login/layout.tsx
Normal file
@@ -0,0 +1,28 @@
|
||||
import type { Metadata } from "next"
|
||||
import { getTranslations } from "next-intl/server"
|
||||
|
||||
type Props = {
|
||||
params: Promise<{ locale: string }>
|
||||
}
|
||||
|
||||
export async function generateMetadata({ params }: Props): Promise<Metadata> {
|
||||
const { locale } = await params
|
||||
const t = await getTranslations({ locale, namespace: "auth" })
|
||||
|
||||
return {
|
||||
title: t("pageTitle"),
|
||||
description: t("pageDescription"),
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Login page layout
|
||||
* Does not include sidebar and header
|
||||
*/
|
||||
export default function LoginLayout({
|
||||
children,
|
||||
}: {
|
||||
children: React.ReactNode
|
||||
}) {
|
||||
return children
|
||||
}
|
||||
228
frontend/app/[locale]/login/page.tsx
Normal file
228
frontend/app/[locale]/login/page.tsx
Normal file
@@ -0,0 +1,228 @@
|
||||
"use client"
|
||||
|
||||
import React from "react"
|
||||
import { useRouter } from "next/navigation"
|
||||
import { useTranslations } from "next-intl"
|
||||
import { useQueryClient } from "@tanstack/react-query"
|
||||
import dynamic from "next/dynamic"
|
||||
import { LoginBootScreen } from "@/components/auth/login-boot-screen"
|
||||
import { TerminalLogin } from "@/components/ui/terminal-login"
|
||||
import { useLogin, useAuth } from "@/hooks/use-auth"
|
||||
import { vulnerabilityKeys } from "@/hooks/use-vulnerabilities"
|
||||
import { useRoutePrefetch } from "@/hooks/use-route-prefetch"
|
||||
import { getAssetStatistics, getStatisticsHistory } from "@/services/dashboard.service"
|
||||
import { getScans } from "@/services/scan.service"
|
||||
import { VulnerabilityService } from "@/services/vulnerability.service"
|
||||
|
||||
// Dynamic import to avoid SSR issues with WebGL
|
||||
const PixelBlast = dynamic(() => import("@/components/PixelBlast"), { ssr: false })
|
||||
|
||||
const BOOT_SPLASH_MS = 600
|
||||
const BOOT_FADE_MS = 200
|
||||
|
||||
type BootOverlayPhase = "entering" | "visible" | "leaving" | "hidden"
|
||||
|
||||
export default function LoginPage() {
|
||||
// Preload all page components on login page
|
||||
useRoutePrefetch()
|
||||
const router = useRouter()
|
||||
const queryClient = useQueryClient()
|
||||
const { data: auth, isLoading: authLoading } = useAuth()
|
||||
const { mutateAsync: login, isPending } = useLogin()
|
||||
const t = useTranslations("auth.terminal")
|
||||
|
||||
const loginStartedRef = React.useRef(false)
|
||||
const [loginReady, setLoginReady] = React.useState(false)
|
||||
|
||||
const [pixelFirstFrame, setPixelFirstFrame] = React.useState(false)
|
||||
const handlePixelFirstFrame = React.useCallback(() => {
|
||||
setPixelFirstFrame(true)
|
||||
}, [])
|
||||
|
||||
// 提取预加载逻辑为可复用函数
|
||||
const prefetchDashboardData = React.useCallback(async () => {
|
||||
const scansParams = { page: 1, pageSize: 10 }
|
||||
const vulnsParams = { page: 1, pageSize: 10 }
|
||||
|
||||
return Promise.allSettled([
|
||||
queryClient.prefetchQuery({
|
||||
queryKey: ["asset", "statistics"],
|
||||
queryFn: getAssetStatistics,
|
||||
}),
|
||||
queryClient.prefetchQuery({
|
||||
queryKey: ["asset", "statistics", "history", 7],
|
||||
queryFn: () => getStatisticsHistory(7),
|
||||
}),
|
||||
queryClient.prefetchQuery({
|
||||
queryKey: ["scans", scansParams],
|
||||
queryFn: () => getScans(scansParams),
|
||||
}),
|
||||
queryClient.prefetchQuery({
|
||||
queryKey: vulnerabilityKeys.list(vulnsParams),
|
||||
queryFn: () => VulnerabilityService.getAllVulnerabilities(vulnsParams),
|
||||
}),
|
||||
])
|
||||
}, [queryClient])
|
||||
|
||||
// Always show a short splash on entering the login page.
|
||||
const [bootMinDone, setBootMinDone] = React.useState(false)
|
||||
const [bootPhase, setBootPhase] = React.useState<BootOverlayPhase>("entering")
|
||||
|
||||
React.useEffect(() => {
|
||||
setBootMinDone(false)
|
||||
setBootPhase("entering")
|
||||
|
||||
const bootTimer = setTimeout(() => setBootMinDone(true), BOOT_SPLASH_MS)
|
||||
const raf = requestAnimationFrame(() => setBootPhase("visible"))
|
||||
|
||||
return () => {
|
||||
clearTimeout(bootTimer)
|
||||
cancelAnimationFrame(raf)
|
||||
}
|
||||
}, [])
|
||||
|
||||
|
||||
// Start hiding the splash after the minimum time AND auth check completes.
|
||||
// Note: don't schedule the fade-out timer in the same effect where we set `bootPhase`,
|
||||
// otherwise the effect cleanup will cancel the timer when `bootPhase` changes.
|
||||
React.useEffect(() => {
|
||||
if (bootPhase !== "visible") return
|
||||
if (!bootMinDone) return
|
||||
if (authLoading) return
|
||||
if (!pixelFirstFrame) return
|
||||
|
||||
setBootPhase("leaving")
|
||||
}, [authLoading, bootMinDone, bootPhase, pixelFirstFrame])
|
||||
|
||||
React.useEffect(() => {
|
||||
if (bootPhase !== "leaving") return
|
||||
|
||||
const timer = setTimeout(() => setBootPhase("hidden"), BOOT_FADE_MS)
|
||||
return () => clearTimeout(timer)
|
||||
}, [bootPhase])
|
||||
|
||||
// Memoize translations object to avoid recreating on every render
|
||||
const translations = React.useMemo(() => ({
|
||||
title: t("title"),
|
||||
subtitle: t("subtitle"),
|
||||
usernamePrompt: t("usernamePrompt"),
|
||||
passwordPrompt: t("passwordPrompt"),
|
||||
authenticating: t("authenticating"),
|
||||
processing: t("processing"),
|
||||
accessGranted: t("accessGranted"),
|
||||
welcomeMessage: t("welcomeMessage"),
|
||||
authFailed: t("authFailed"),
|
||||
invalidCredentials: t("invalidCredentials"),
|
||||
shortcuts: t("shortcuts"),
|
||||
submit: t("submit"),
|
||||
cancel: t("cancel"),
|
||||
clear: t("clear"),
|
||||
startEnd: t("startEnd"),
|
||||
}), [t])
|
||||
|
||||
// If already logged in, warm up the dashboard, then redirect.
|
||||
React.useEffect(() => {
|
||||
if (authLoading) return
|
||||
if (!auth?.authenticated) return
|
||||
if (loginStartedRef.current) return
|
||||
|
||||
let cancelled = false
|
||||
|
||||
void (async () => {
|
||||
await prefetchDashboardData()
|
||||
|
||||
if (cancelled) return
|
||||
router.replace("/dashboard/")
|
||||
})()
|
||||
|
||||
return () => {
|
||||
cancelled = true
|
||||
}
|
||||
}, [auth?.authenticated, authLoading, prefetchDashboardData, router])
|
||||
|
||||
React.useEffect(() => {
|
||||
if (!loginReady) return
|
||||
router.replace("/dashboard/")
|
||||
}, [loginReady, router])
|
||||
|
||||
const handleLogin = async (username: string, password: string) => {
|
||||
loginStartedRef.current = true
|
||||
setLoginReady(false)
|
||||
|
||||
// 并行执行独立操作:登录验证 + 预加载 dashboard bundle
|
||||
const [loginRes] = await Promise.all([
|
||||
login({ username, password }),
|
||||
router.prefetch("/dashboard/"),
|
||||
])
|
||||
|
||||
// 预加载 dashboard 数据
|
||||
await prefetchDashboardData()
|
||||
|
||||
// Prime auth cache so AuthLayout doesn't flash a full-screen loading state.
|
||||
queryClient.setQueryData(["auth", "me"], {
|
||||
authenticated: true,
|
||||
user: loginRes.user,
|
||||
})
|
||||
|
||||
setLoginReady(true)
|
||||
}
|
||||
|
||||
const loginVisible = bootPhase === "leaving" || bootPhase === "hidden"
|
||||
|
||||
return (
|
||||
<div className="relative flex min-h-svh flex-col bg-black">
|
||||
<div className={`fixed inset-0 z-0 transition-opacity duration-300 ${loginVisible ? "opacity-100" : "opacity-0"}`}>
|
||||
<PixelBlast
|
||||
onFirstFrame={handlePixelFirstFrame}
|
||||
className=""
|
||||
style={{}}
|
||||
pixelSize={6.5}
|
||||
patternScale={4.5}
|
||||
color="#FF10F0"
|
||||
speed={0.35}
|
||||
enableRipples={false}
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* Fingerprint identifier - for FOFA/Shodan and other search engines to identify */}
|
||||
<meta name="generator" content="Orbit ASM Platform" />
|
||||
|
||||
{/* Main content area */}
|
||||
<div
|
||||
className={`relative z-10 flex-1 flex items-center justify-center p-6 transition-[opacity,transform] duration-300 ${
|
||||
loginVisible ? "opacity-100 translate-y-0" : "opacity-0 translate-y-2"
|
||||
}`}
|
||||
>
|
||||
<TerminalLogin
|
||||
onLogin={handleLogin}
|
||||
authDone={loginReady}
|
||||
isPending={isPending}
|
||||
translations={translations}
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* Version number - fixed at the bottom of the page */}
|
||||
<div
|
||||
className={`relative z-10 flex-shrink-0 text-center py-4 transition-opacity duration-300 ${
|
||||
loginVisible ? "opacity-100" : "opacity-0"
|
||||
}`}
|
||||
>
|
||||
<p className="text-xs text-muted-foreground">
|
||||
{process.env.NEXT_PUBLIC_VERSION || "dev"}
|
||||
</p>
|
||||
</div>
|
||||
|
||||
{/* Full-page splash overlay */}
|
||||
{bootPhase !== "hidden" && (
|
||||
<div
|
||||
className={`fixed inset-0 z-50 transition-opacity ease-out ${
|
||||
bootPhase === "visible" ? "opacity-100" : "opacity-0 pointer-events-none"
|
||||
}`}
|
||||
style={{ transitionDuration: `${BOOT_FADE_MS}ms` }}
|
||||
>
|
||||
<LoginBootScreen />
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -4,8 +4,8 @@ import React from "react"
|
||||
import { OrganizationDetailView } from "@/components/organization/organization-detail-view"
|
||||
|
||||
/**
|
||||
* 组织详情页面
|
||||
* 显示组织的统计信息和资产列表
|
||||
* Organization detail page
|
||||
* Displays organization statistics and asset list
|
||||
*/
|
||||
export default function OrganizationDetailPage({
|
||||
params,
|
||||
@@ -1,30 +1,35 @@
|
||||
// 导入组织管理组件
|
||||
"use client"
|
||||
|
||||
// Import organization management component
|
||||
import { OrganizationList } from "@/components/organization/organization-list"
|
||||
// 导入图标
|
||||
// Import icons
|
||||
import { Building2 } from "lucide-react"
|
||||
import { useTranslations } from "next-intl"
|
||||
|
||||
/**
|
||||
* 组织管理页面
|
||||
* 资产管理下的组织管理子页面,显示组织列表和相关操作
|
||||
* Organization management page
|
||||
* Sub-page under asset management that displays organization list and related operations
|
||||
*/
|
||||
export default function OrganizationPage() {
|
||||
const t = useTranslations("pages.organization")
|
||||
|
||||
return (
|
||||
// 内容区域,包含组织管理功能
|
||||
// Content area containing organization management features
|
||||
<div className="flex flex-col gap-4 py-4 md:gap-6 md:py-6">
|
||||
{/* 页面头部 */}
|
||||
{/* Page header */}
|
||||
<div className="flex items-center justify-between px-4 lg:px-6">
|
||||
<div>
|
||||
<h2 className="text-2xl font-bold tracking-tight flex items-center gap-2">
|
||||
<Building2 />
|
||||
组织
|
||||
{t("title")}
|
||||
</h2>
|
||||
<p className="text-muted-foreground">
|
||||
管理和查看系统中的所有组织信息
|
||||
{t("description")}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* 组织列表组件 */}
|
||||
{/* Organization list component */}
|
||||
<div className="px-4 lg:px-6">
|
||||
<OrganizationList />
|
||||
</div>
|
||||
7
frontend/app/[locale]/page.tsx
Normal file
7
frontend/app/[locale]/page.tsx
Normal file
@@ -0,0 +1,7 @@
|
||||
import { redirect } from 'next/navigation';
|
||||
import { defaultLocale } from '@/i18n/config';
|
||||
|
||||
export default function Home() {
|
||||
// Redirect directly to dashboard page (with language prefix)
|
||||
redirect(`/${defaultLocale}/dashboard/`);
|
||||
}
|
||||
@@ -4,7 +4,8 @@ import React, { useState, useMemo } from "react"
|
||||
import { Settings, Search, Pencil, Trash2, Check, X, Plus } from "lucide-react"
|
||||
import * as yaml from "js-yaml"
|
||||
import Editor from "@monaco-editor/react"
|
||||
import { useTheme } from "next-themes"
|
||||
import { useTranslations } from "next-intl"
|
||||
import { useColorTheme } from "@/hooks/use-color-theme"
|
||||
import { Button } from "@/components/ui/button"
|
||||
import { Input } from "@/components/ui/input"
|
||||
import { Badge } from "@/components/ui/badge"
|
||||
@@ -26,25 +27,29 @@ import { cn } from "@/lib/utils"
|
||||
import type { ScanEngine } from "@/types/engine.types"
|
||||
import { MasterDetailSkeleton } from "@/components/ui/master-detail-skeleton"
|
||||
|
||||
/** 功能配置项定义 - 与 YAML 配置结构对应 */
|
||||
/** Feature configuration item definition - corresponds to YAML configuration structure */
|
||||
const FEATURE_LIST = [
|
||||
{ key: "subdomain_discovery", label: "子域名发现" },
|
||||
{ key: "port_scan", label: "端口扫描" },
|
||||
{ key: "site_scan", label: "站点扫描" },
|
||||
{ key: "directory_scan", label: "目录扫描" },
|
||||
{ key: "url_fetch", label: "URL 抓取" },
|
||||
{ key: "vuln_scan", label: "漏洞扫描" },
|
||||
{ key: "subdomain_discovery" },
|
||||
{ key: "port_scan" },
|
||||
{ key: "site_scan" },
|
||||
{ key: "fingerprint_detect" },
|
||||
{ key: "directory_scan" },
|
||||
{ key: "screenshot" },
|
||||
{ key: "url_fetch" },
|
||||
{ key: "vuln_scan" },
|
||||
] as const
|
||||
|
||||
type FeatureKey = typeof FEATURE_LIST[number]["key"]
|
||||
|
||||
/** 解析引擎配置获取启用的功能 */
|
||||
/** Parse engine configuration to get enabled features */
|
||||
function parseEngineFeatures(engine: ScanEngine): Record<FeatureKey, boolean> {
|
||||
const defaultFeatures: Record<FeatureKey, boolean> = {
|
||||
subdomain_discovery: false,
|
||||
port_scan: false,
|
||||
site_scan: false,
|
||||
fingerprint_detect: false,
|
||||
directory_scan: false,
|
||||
screenshot: false,
|
||||
url_fetch: false,
|
||||
vuln_scan: false,
|
||||
}
|
||||
@@ -59,7 +64,9 @@ function parseEngineFeatures(engine: ScanEngine): Record<FeatureKey, boolean> {
|
||||
subdomain_discovery: !!config.subdomain_discovery,
|
||||
port_scan: !!config.port_scan,
|
||||
site_scan: !!config.site_scan,
|
||||
fingerprint_detect: !!config.fingerprint_detect,
|
||||
directory_scan: !!config.directory_scan,
|
||||
screenshot: !!config.screenshot,
|
||||
url_fetch: !!config.url_fetch,
|
||||
vuln_scan: !!config.vuln_scan,
|
||||
}
|
||||
@@ -68,14 +75,14 @@ function parseEngineFeatures(engine: ScanEngine): Record<FeatureKey, boolean> {
|
||||
}
|
||||
}
|
||||
|
||||
/** 计算启用的功能数量 */
|
||||
/** Calculate the number of enabled features */
|
||||
function countEnabledFeatures(engine: ScanEngine) {
|
||||
const features = parseEngineFeatures(engine)
|
||||
return Object.values(features).filter(Boolean).length
|
||||
}
|
||||
|
||||
/**
|
||||
* 扫描引擎页面
|
||||
* Scan engine page
|
||||
*/
|
||||
export default function ScanEnginePage() {
|
||||
const [selectedId, setSelectedId] = useState<number | null>(null)
|
||||
@@ -86,7 +93,13 @@ export default function ScanEnginePage() {
|
||||
const [deleteDialogOpen, setDeleteDialogOpen] = useState(false)
|
||||
const [engineToDelete, setEngineToDelete] = useState<ScanEngine | null>(null)
|
||||
|
||||
const { theme } = useTheme()
|
||||
const { currentTheme } = useColorTheme()
|
||||
|
||||
// Internationalization
|
||||
const tCommon = useTranslations("common")
|
||||
const tConfirm = useTranslations("common.confirm")
|
||||
const tNav = useTranslations("navigation")
|
||||
const tEngine = useTranslations("scan.engine")
|
||||
|
||||
// API Hooks
|
||||
const { data: engines = [], isLoading } = useEngines()
|
||||
@@ -94,20 +107,20 @@ export default function ScanEnginePage() {
|
||||
const updateEngineMutation = useUpdateEngine()
|
||||
const deleteEngineMutation = useDeleteEngine()
|
||||
|
||||
// 过滤引擎列表
|
||||
// Filter engine list
|
||||
const filteredEngines = useMemo(() => {
|
||||
if (!searchQuery.trim()) return engines
|
||||
const query = searchQuery.toLowerCase()
|
||||
return engines.filter((e) => e.name.toLowerCase().includes(query))
|
||||
}, [engines, searchQuery])
|
||||
|
||||
// 选中的引擎
|
||||
// Selected engine
|
||||
const selectedEngine = useMemo(() => {
|
||||
if (!selectedId) return null
|
||||
return engines.find((e) => e.id === selectedId) || null
|
||||
}, [selectedId, engines])
|
||||
|
||||
// 选中引擎的功能状态
|
||||
// Selected engine's feature status
|
||||
const selectedFeatures = useMemo(() => {
|
||||
if (!selectedEngine) return null
|
||||
return parseEngineFeatures(selectedEngine)
|
||||
@@ -150,21 +163,21 @@ export default function ScanEnginePage() {
|
||||
})
|
||||
}
|
||||
|
||||
// 加载状态
|
||||
// Loading state
|
||||
if (isLoading) {
|
||||
return <MasterDetailSkeleton title="扫描引擎" listItemCount={4} />
|
||||
return <MasterDetailSkeleton title={tNav("scanEngine")} listItemCount={4} />
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="flex flex-col h-full">
|
||||
{/* 顶部:标题 + 搜索 + 新建按钮 */}
|
||||
{/* Top: Title + Search + Create button */}
|
||||
<div className="flex items-center justify-between gap-4 px-4 py-4 lg:px-6">
|
||||
<h1 className="text-2xl font-bold shrink-0">扫描引擎</h1>
|
||||
<h1 className="text-2xl font-bold shrink-0">{tNav("scanEngine")}</h1>
|
||||
<div className="flex items-center gap-2 flex-1 max-w-md">
|
||||
<div className="relative flex-1">
|
||||
<Search className="absolute left-2.5 top-1/2 h-4 w-4 -translate-y-1/2 text-muted-foreground" />
|
||||
<Input
|
||||
placeholder="搜索引擎..."
|
||||
placeholder={tEngine("searchPlaceholder")}
|
||||
value={searchQuery}
|
||||
onChange={(e) => setSearchQuery(e.target.value)}
|
||||
className="pl-8"
|
||||
@@ -173,27 +186,27 @@ export default function ScanEnginePage() {
|
||||
</div>
|
||||
<Button onClick={() => setIsCreateDialogOpen(true)}>
|
||||
<Plus className="h-4 w-4 mr-1" />
|
||||
新建引擎
|
||||
{tEngine("createEngine")}
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
<Separator />
|
||||
|
||||
{/* 主体:左侧列表 + 右侧详情 */}
|
||||
{/* Main: Left list + Right details */}
|
||||
<div className="flex flex-1 min-h-0">
|
||||
{/* 左侧:引擎列表 */}
|
||||
{/* Left: Engine list */}
|
||||
<div className="w-72 lg:w-80 border-r flex flex-col">
|
||||
<div className="px-4 py-3 border-b">
|
||||
<h2 className="text-sm font-medium text-muted-foreground">
|
||||
引擎列表 ({filteredEngines.length})
|
||||
{tEngine("engineList")} ({filteredEngines.length})
|
||||
</h2>
|
||||
</div>
|
||||
<ScrollArea className="flex-1">
|
||||
{isLoading ? (
|
||||
<div className="p-4 text-sm text-muted-foreground">加载中...</div>
|
||||
<div className="p-4 text-sm text-muted-foreground">{tCommon("loading")}</div>
|
||||
) : filteredEngines.length === 0 ? (
|
||||
<div className="p-4 text-sm text-muted-foreground">
|
||||
{searchQuery ? "未找到匹配的引擎" : "暂无引擎,请先新建"}
|
||||
{searchQuery ? tEngine("noMatchingEngine") : tEngine("noEngines")}
|
||||
</div>
|
||||
) : (
|
||||
<div className="p-2">
|
||||
@@ -212,7 +225,7 @@ export default function ScanEnginePage() {
|
||||
{engine.name}
|
||||
</div>
|
||||
<div className="text-xs text-muted-foreground mt-0.5">
|
||||
{countEnabledFeatures(engine)} 个功能已启用
|
||||
{tEngine("featuresEnabled", { count: countEnabledFeatures(engine) })}
|
||||
</div>
|
||||
</button>
|
||||
))}
|
||||
@@ -221,11 +234,11 @@ export default function ScanEnginePage() {
|
||||
</ScrollArea>
|
||||
</div>
|
||||
|
||||
{/* 右侧:引擎详情 */}
|
||||
{/* Right: Engine details */}
|
||||
<div className="flex-1 flex flex-col min-w-0">
|
||||
{selectedEngine && selectedFeatures ? (
|
||||
<>
|
||||
{/* 详情头部 */}
|
||||
{/* Details header */}
|
||||
<div className="px-6 py-4 border-b">
|
||||
<div className="flex items-start gap-3">
|
||||
<div className="flex h-10 w-10 items-center justify-center rounded-lg bg-primary/10 shrink-0">
|
||||
@@ -236,20 +249,20 @@ export default function ScanEnginePage() {
|
||||
{selectedEngine.name}
|
||||
</h2>
|
||||
<p className="text-sm text-muted-foreground mt-0.5">
|
||||
更新于 {new Date(selectedEngine.updatedAt).toLocaleString("zh-CN")}
|
||||
{tEngine("updatedAt")} {new Date(selectedEngine.updatedAt).toLocaleString()}
|
||||
</p>
|
||||
</div>
|
||||
<Badge variant="outline">
|
||||
{countEnabledFeatures(selectedEngine)} 个功能
|
||||
{tEngine("featuresCount", { count: countEnabledFeatures(selectedEngine) })}
|
||||
</Badge>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* 详情内容 */}
|
||||
{/* Details content */}
|
||||
<div className="flex-1 flex flex-col min-h-0 p-6 gap-6">
|
||||
{/* 功能状态 */}
|
||||
{/* Feature status */}
|
||||
<div className="shrink-0">
|
||||
<h3 className="text-sm font-medium mb-3">已启用功能</h3>
|
||||
<h3 className="text-sm font-medium mb-3">{tEngine("enabledFeatures")}</h3>
|
||||
<div className="rounded-lg border">
|
||||
<div className="grid grid-cols-3 gap-px bg-muted">
|
||||
{FEATURE_LIST.map((feature) => {
|
||||
@@ -267,7 +280,7 @@ export default function ScanEnginePage() {
|
||||
) : (
|
||||
<X className="h-4 w-4 text-muted-foreground/50 shrink-0" />
|
||||
)}
|
||||
<span className="text-sm truncate">{feature.label}</span>
|
||||
<span className="text-sm truncate">{tEngine(`features.${feature.key}`)}</span>
|
||||
</div>
|
||||
)
|
||||
})}
|
||||
@@ -275,10 +288,10 @@ export default function ScanEnginePage() {
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* 配置预览 */}
|
||||
{/* Configuration preview */}
|
||||
{selectedEngine.configuration && (
|
||||
<div className="flex-1 flex flex-col min-h-0">
|
||||
<h3 className="text-sm font-medium mb-3 shrink-0">配置预览</h3>
|
||||
<h3 className="text-sm font-medium mb-3 shrink-0">{tEngine("configPreview")}</h3>
|
||||
<div className="flex-1 rounded-lg border overflow-hidden min-h-0">
|
||||
<Editor
|
||||
height="100%"
|
||||
@@ -295,14 +308,14 @@ export default function ScanEnginePage() {
|
||||
wordWrap: "on",
|
||||
padding: { top: 12, bottom: 12 },
|
||||
}}
|
||||
theme={theme === "dark" ? "vs-dark" : "light"}
|
||||
theme={currentTheme.isDark ? "vs-dark" : "light"}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* 操作按钮 */}
|
||||
{/* Action buttons */}
|
||||
<div className="px-6 py-4 border-t flex items-center gap-2">
|
||||
<Button
|
||||
variant="outline"
|
||||
@@ -310,7 +323,7 @@ export default function ScanEnginePage() {
|
||||
onClick={() => handleEdit(selectedEngine)}
|
||||
>
|
||||
<Pencil className="h-4 w-4 mr-1.5" />
|
||||
编辑配置
|
||||
{tEngine("editConfig")}
|
||||
</Button>
|
||||
<div className="flex-1" />
|
||||
<Button
|
||||
@@ -321,23 +334,23 @@ export default function ScanEnginePage() {
|
||||
disabled={deleteEngineMutation.isPending}
|
||||
>
|
||||
<Trash2 className="h-4 w-4 mr-1.5" />
|
||||
删除
|
||||
{tCommon("actions.delete")}
|
||||
</Button>
|
||||
</div>
|
||||
</>
|
||||
) : (
|
||||
// 未选中状态
|
||||
// Unselected state
|
||||
<div className="flex-1 flex items-center justify-center">
|
||||
<div className="text-center text-muted-foreground">
|
||||
<Settings className="h-12 w-12 mx-auto mb-3 opacity-50" />
|
||||
<p className="text-sm">选择左侧引擎查看详情</p>
|
||||
<p className="text-sm">{tEngine("selectEngineHint")}</p>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* 编辑引擎弹窗 */}
|
||||
{/* Edit engine dialog */}
|
||||
<EngineEditDialog
|
||||
engine={editingEngine}
|
||||
open={isEditDialogOpen}
|
||||
@@ -345,30 +358,30 @@ export default function ScanEnginePage() {
|
||||
onSave={handleSaveYaml}
|
||||
/>
|
||||
|
||||
{/* 新建引擎弹窗 */}
|
||||
{/* Create engine dialog */}
|
||||
<EngineCreateDialog
|
||||
open={isCreateDialogOpen}
|
||||
onOpenChange={setIsCreateDialogOpen}
|
||||
onSave={handleCreateEngine}
|
||||
/>
|
||||
|
||||
{/* 删除确认弹窗 */}
|
||||
{/* Delete confirmation dialog */}
|
||||
<AlertDialog open={deleteDialogOpen} onOpenChange={setDeleteDialogOpen}>
|
||||
<AlertDialogContent>
|
||||
<AlertDialogHeader>
|
||||
<AlertDialogTitle>确认删除</AlertDialogTitle>
|
||||
<AlertDialogTitle>{tConfirm("deleteTitle")}</AlertDialogTitle>
|
||||
<AlertDialogDescription>
|
||||
确定要删除引擎「{engineToDelete?.name}」吗?此操作无法撤销。
|
||||
{tConfirm("deleteEngineMessage", { name: engineToDelete?.name ?? "" })}
|
||||
</AlertDialogDescription>
|
||||
</AlertDialogHeader>
|
||||
<AlertDialogFooter>
|
||||
<AlertDialogCancel>取消</AlertDialogCancel>
|
||||
<AlertDialogCancel>{tCommon("actions.cancel")}</AlertDialogCancel>
|
||||
<AlertDialogAction
|
||||
onClick={confirmDelete}
|
||||
className="bg-destructive text-destructive-foreground hover:bg-destructive/90"
|
||||
disabled={deleteEngineMutation.isPending}
|
||||
>
|
||||
{deleteEngineMutation.isPending ? "删除中..." : "删除"}
|
||||
{deleteEngineMutation.isPending ? tConfirm("deleting") : tCommon("actions.delete")}
|
||||
</AlertDialogAction>
|
||||
</AlertDialogFooter>
|
||||
</AlertDialogContent>
|
||||
228
frontend/app/[locale]/scan/history/[id]/layout.tsx
Normal file
228
frontend/app/[locale]/scan/history/[id]/layout.tsx
Normal file
@@ -0,0 +1,228 @@
|
||||
"use client"
|
||||
|
||||
import React from "react"
|
||||
import { usePathname, useParams } from "next/navigation"
|
||||
import Link from "next/link"
|
||||
import { Target, LayoutDashboard, Package, FolderSearch, Image, ShieldAlert } from "lucide-react"
|
||||
import { Tabs, TabsList, TabsTrigger } from "@/components/ui/tabs"
|
||||
import { Badge } from "@/components/ui/badge"
|
||||
import { Skeleton } from "@/components/ui/skeleton"
|
||||
import { useScan } from "@/hooks/use-scans"
|
||||
import { useTranslations } from "next-intl"
|
||||
|
||||
export default function ScanHistoryLayout({
|
||||
children,
|
||||
}: {
|
||||
children: React.ReactNode
|
||||
}) {
|
||||
const { id } = useParams<{ id: string }>()
|
||||
const pathname = usePathname()
|
||||
const { data: scanData, isLoading } = useScan(parseInt(id))
|
||||
const t = useTranslations("scan.history")
|
||||
|
||||
// Get primary navigation active tab
|
||||
const getPrimaryTab = () => {
|
||||
if (pathname.includes("/overview")) return "overview"
|
||||
if (pathname.includes("/directories")) return "directories"
|
||||
if (pathname.includes("/screenshots")) return "screenshots"
|
||||
if (pathname.includes("/vulnerabilities")) return "vulnerabilities"
|
||||
// All asset pages fall under "assets"
|
||||
if (
|
||||
pathname.includes("/websites") ||
|
||||
pathname.includes("/subdomain") ||
|
||||
pathname.includes("/ip-addresses") ||
|
||||
pathname.includes("/endpoints")
|
||||
) {
|
||||
return "assets"
|
||||
}
|
||||
return "overview"
|
||||
}
|
||||
|
||||
// Get secondary navigation active tab (for assets)
|
||||
const getSecondaryTab = () => {
|
||||
if (pathname.includes("/websites")) return "websites"
|
||||
if (pathname.includes("/subdomain")) return "subdomain"
|
||||
if (pathname.includes("/ip-addresses")) return "ip-addresses"
|
||||
if (pathname.includes("/endpoints")) return "endpoints"
|
||||
return "websites"
|
||||
}
|
||||
|
||||
// Check if we should show secondary navigation
|
||||
const showSecondaryNav = getPrimaryTab() === "assets"
|
||||
|
||||
const basePath = `/scan/history/${id}`
|
||||
const primaryPaths = {
|
||||
overview: `${basePath}/overview/`,
|
||||
assets: `${basePath}/websites/`, // Default to websites when clicking assets
|
||||
directories: `${basePath}/directories/`,
|
||||
screenshots: `${basePath}/screenshots/`,
|
||||
vulnerabilities: `${basePath}/vulnerabilities/`,
|
||||
}
|
||||
|
||||
const secondaryPaths = {
|
||||
websites: `${basePath}/websites/`,
|
||||
subdomain: `${basePath}/subdomain/`,
|
||||
"ip-addresses": `${basePath}/ip-addresses/`,
|
||||
endpoints: `${basePath}/endpoints/`,
|
||||
}
|
||||
|
||||
// Get counts for each tab from scan data
|
||||
const stats = scanData?.cachedStats
|
||||
const counts = {
|
||||
subdomain: stats?.subdomainsCount || 0,
|
||||
endpoints: stats?.endpointsCount || 0,
|
||||
websites: stats?.websitesCount || 0,
|
||||
directories: stats?.directoriesCount || 0,
|
||||
screenshots: stats?.screenshotsCount || 0,
|
||||
vulnerabilities: stats?.vulnsTotal || 0,
|
||||
"ip-addresses": stats?.ipsCount || 0,
|
||||
}
|
||||
|
||||
// Calculate total assets count
|
||||
const totalAssets = counts.websites + counts.subdomain + counts["ip-addresses"] + counts.endpoints
|
||||
|
||||
// Loading state
|
||||
if (isLoading) {
|
||||
return (
|
||||
<div className="flex flex-col gap-4 py-4 md:gap-6 md:py-6">
|
||||
{/* Header skeleton */}
|
||||
<div className="flex items-center gap-2 px-4 lg:px-6">
|
||||
<Skeleton className="h-4 w-16" />
|
||||
<span className="text-muted-foreground">/</span>
|
||||
<Skeleton className="h-4 w-32" />
|
||||
</div>
|
||||
{/* Tabs skeleton */}
|
||||
<div className="flex gap-1 px-4 lg:px-6">
|
||||
<Skeleton className="h-9 w-20" />
|
||||
<Skeleton className="h-9 w-20" />
|
||||
<Skeleton className="h-9 w-24" />
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="flex flex-col gap-4 py-4 md:gap-6 md:py-6 h-full">
|
||||
{/* Header: Page label + Scan info */}
|
||||
<div className="flex items-center gap-2 text-sm px-4 lg:px-6">
|
||||
<span className="text-muted-foreground">{t("breadcrumb.scanHistory")}</span>
|
||||
<span className="text-muted-foreground">/</span>
|
||||
<span className="font-medium flex items-center gap-1.5">
|
||||
<Target className="h-4 w-4" />
|
||||
{(scanData?.target as any)?.name || t("taskId", { id })}
|
||||
</span>
|
||||
</div>
|
||||
|
||||
{/* Primary navigation */}
|
||||
<div className="px-4 lg:px-6">
|
||||
<Tabs value={getPrimaryTab()}>
|
||||
<TabsList>
|
||||
<TabsTrigger value="overview" asChild>
|
||||
<Link href={primaryPaths.overview} className="flex items-center gap-1.5">
|
||||
<LayoutDashboard className="h-4 w-4" />
|
||||
{t("tabs.overview")}
|
||||
</Link>
|
||||
</TabsTrigger>
|
||||
<TabsTrigger value="assets" asChild>
|
||||
<Link href={primaryPaths.assets} className="flex items-center gap-1.5">
|
||||
<Package className="h-4 w-4" />
|
||||
{t("tabs.assets")}
|
||||
{totalAssets > 0 && (
|
||||
<Badge variant="secondary" className="ml-1.5 h-5 min-w-5 rounded-full px-1.5 text-xs">
|
||||
{totalAssets}
|
||||
</Badge>
|
||||
)}
|
||||
</Link>
|
||||
</TabsTrigger>
|
||||
<TabsTrigger value="directories" asChild>
|
||||
<Link href={primaryPaths.directories} className="flex items-center gap-1.5">
|
||||
<FolderSearch className="h-4 w-4" />
|
||||
{t("tabs.directories")}
|
||||
{counts.directories > 0 && (
|
||||
<Badge variant="secondary" className="ml-1.5 h-5 min-w-5 rounded-full px-1.5 text-xs">
|
||||
{counts.directories}
|
||||
</Badge>
|
||||
)}
|
||||
</Link>
|
||||
</TabsTrigger>
|
||||
<TabsTrigger value="screenshots" asChild>
|
||||
<Link href={primaryPaths.screenshots} className="flex items-center gap-1.5">
|
||||
<Image className="h-4 w-4" />
|
||||
{t("tabs.screenshots")}
|
||||
{counts.screenshots > 0 && (
|
||||
<Badge variant="secondary" className="ml-1.5 h-5 min-w-5 rounded-full px-1.5 text-xs">
|
||||
{counts.screenshots}
|
||||
</Badge>
|
||||
)}
|
||||
</Link>
|
||||
</TabsTrigger>
|
||||
<TabsTrigger value="vulnerabilities" asChild>
|
||||
<Link href={primaryPaths.vulnerabilities} className="flex items-center gap-1.5">
|
||||
<ShieldAlert className="h-4 w-4" />
|
||||
{t("tabs.vulnerabilities")}
|
||||
{counts.vulnerabilities > 0 && (
|
||||
<Badge variant="secondary" className="ml-1.5 h-5 min-w-5 rounded-full px-1.5 text-xs">
|
||||
{counts.vulnerabilities}
|
||||
</Badge>
|
||||
)}
|
||||
</Link>
|
||||
</TabsTrigger>
|
||||
</TabsList>
|
||||
</Tabs>
|
||||
</div>
|
||||
|
||||
{/* Secondary navigation (only for assets) */}
|
||||
{showSecondaryNav && (
|
||||
<div className="flex items-center px-4 lg:px-6">
|
||||
<Tabs value={getSecondaryTab()} className="w-full">
|
||||
<TabsList variant="underline">
|
||||
<TabsTrigger value="websites" variant="underline" asChild>
|
||||
<Link href={secondaryPaths.websites} className="flex items-center gap-0.5">
|
||||
{t("tabs.websites")}
|
||||
{counts.websites > 0 && (
|
||||
<Badge variant="secondary" className="ml-1.5 h-5 min-w-5 rounded-full px-1.5 text-xs">
|
||||
{counts.websites}
|
||||
</Badge>
|
||||
)}
|
||||
</Link>
|
||||
</TabsTrigger>
|
||||
<TabsTrigger value="subdomain" variant="underline" asChild>
|
||||
<Link href={secondaryPaths.subdomain} className="flex items-center gap-0.5">
|
||||
{t("tabs.subdomains")}
|
||||
{counts.subdomain > 0 && (
|
||||
<Badge variant="secondary" className="ml-1.5 h-5 min-w-5 rounded-full px-1.5 text-xs">
|
||||
{counts.subdomain}
|
||||
</Badge>
|
||||
)}
|
||||
</Link>
|
||||
</TabsTrigger>
|
||||
<TabsTrigger value="ip-addresses" variant="underline" asChild>
|
||||
<Link href={secondaryPaths["ip-addresses"]} className="flex items-center gap-0.5">
|
||||
{t("tabs.ips")}
|
||||
{counts["ip-addresses"] > 0 && (
|
||||
<Badge variant="secondary" className="ml-1.5 h-5 min-w-5 rounded-full px-1.5 text-xs">
|
||||
{counts["ip-addresses"]}
|
||||
</Badge>
|
||||
)}
|
||||
</Link>
|
||||
</TabsTrigger>
|
||||
<TabsTrigger value="endpoints" variant="underline" asChild>
|
||||
<Link href={secondaryPaths.endpoints} className="flex items-center gap-0.5">
|
||||
{t("tabs.urls")}
|
||||
{counts.endpoints > 0 && (
|
||||
<Badge variant="secondary" className="ml-1.5 h-5 min-w-5 rounded-full px-1.5 text-xs">
|
||||
{counts.endpoints}
|
||||
</Badge>
|
||||
)}
|
||||
</Link>
|
||||
</TabsTrigger>
|
||||
</TabsList>
|
||||
</Tabs>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Sub-page content */}
|
||||
{children}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
19
frontend/app/[locale]/scan/history/[id]/overview/page.tsx
Normal file
19
frontend/app/[locale]/scan/history/[id]/overview/page.tsx
Normal file
@@ -0,0 +1,19 @@
|
||||
"use client"
|
||||
|
||||
import { useParams } from "next/navigation"
|
||||
import { ScanOverview } from "@/components/scan/history/scan-overview"
|
||||
|
||||
/**
|
||||
* Scan overview page
|
||||
* Displays scan statistics and summary information
|
||||
*/
|
||||
export default function ScanOverviewPage() {
|
||||
const { id } = useParams<{ id: string }>()
|
||||
const scanId = Number(id)
|
||||
|
||||
return (
|
||||
<div className="flex-1 flex flex-col min-h-0 px-4 lg:px-6">
|
||||
<ScanOverview scanId={scanId} />
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -8,7 +8,7 @@ export default function ScanHistoryDetailPage() {
|
||||
const router = useRouter()
|
||||
|
||||
useEffect(() => {
|
||||
router.replace(`/scan/history/${id}/subdomain/`)
|
||||
router.replace(`/scan/history/${id}/overview/`)
|
||||
}, [id, router])
|
||||
|
||||
return null
|
||||
15
frontend/app/[locale]/scan/history/[id]/screenshots/page.tsx
Normal file
15
frontend/app/[locale]/scan/history/[id]/screenshots/page.tsx
Normal file
@@ -0,0 +1,15 @@
|
||||
"use client"
|
||||
|
||||
import { useParams } from "next/navigation"
|
||||
import { ScreenshotsGallery } from "@/components/screenshots/screenshots-gallery"
|
||||
|
||||
export default function ScanScreenshotsPage() {
|
||||
const { id } = useParams<{ id: string }>()
|
||||
const scanId = Number(id)
|
||||
|
||||
return (
|
||||
<div className="px-4 lg:px-6">
|
||||
<ScreenshotsGallery scanId={scanId} />
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -8,7 +8,7 @@ export default function ScanHistoryVulnerabilitiesPage() {
|
||||
const { id } = useParams<{ id: string }>()
|
||||
|
||||
return (
|
||||
<div className="relative flex flex-col gap-4 overflow-auto px-4 lg:px-6">
|
||||
<div className="px-4 lg:px-6">
|
||||
<VulnerabilitiesDetailView scanId={Number(id)} />
|
||||
</div>
|
||||
)
|
||||
@@ -1,31 +1,34 @@
|
||||
"use client"
|
||||
|
||||
import { useTranslations } from "next-intl"
|
||||
import { IconRadar } from "@tabler/icons-react"
|
||||
import { ScanHistoryList } from "@/components/scan/history/scan-history-list"
|
||||
import { ScanHistoryStatCards } from "@/components/scan/history/scan-history-stat-cards"
|
||||
|
||||
/**
|
||||
* 扫描历史页面
|
||||
* 显示所有扫描任务的历史记录
|
||||
* Scan history page
|
||||
* Displays historical records of all scan tasks
|
||||
*/
|
||||
export default function ScanHistoryPage() {
|
||||
const t = useTranslations("scan.history")
|
||||
|
||||
return (
|
||||
<div className="@container/main flex flex-col gap-4 py-4 md:gap-6 md:py-6">
|
||||
{/* 页面标题 */}
|
||||
{/* Page title */}
|
||||
<div className="flex items-center gap-3 px-4 lg:px-6">
|
||||
<IconRadar className="size-8 text-primary" />
|
||||
<div>
|
||||
<h1 className="text-3xl font-bold">扫描历史</h1>
|
||||
<p className="text-muted-foreground">查看和管理所有扫描任务记录</p>
|
||||
<h1 className="text-3xl font-bold">{t("title")}</h1>
|
||||
<p className="text-muted-foreground">{t("description")}</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* 统计卡片 */}
|
||||
{/* Statistics cards */}
|
||||
<div className="px-4 lg:px-6">
|
||||
<ScanHistoryStatCards />
|
||||
</div>
|
||||
|
||||
{/* 扫描历史列表 */}
|
||||
{/* Scan history list */}
|
||||
<div className="px-4 lg:px-6">
|
||||
<ScanHistoryList />
|
||||
</div>
|
||||
@@ -1,6 +1,7 @@
|
||||
"use client"
|
||||
|
||||
import React from "react"
|
||||
import { useTranslations } from "next-intl"
|
||||
import { ScheduledScanDataTable } from "@/components/scan/scheduled/scheduled-scan-data-table"
|
||||
import { createScheduledScanColumns } from "@/components/scan/scheduled/scheduled-scan-columns"
|
||||
import { CreateScheduledScanDialog } from "@/components/scan/scheduled/create-scheduled-scan-dialog"
|
||||
@@ -24,8 +25,8 @@ import type { ScheduledScan } from "@/types/scheduled-scan.types"
|
||||
import { DataTableSkeleton } from "@/components/ui/data-table-skeleton"
|
||||
|
||||
/**
|
||||
* 定时扫描页面
|
||||
* 管理定时扫描任务配置
|
||||
* Scheduled scan page
|
||||
* Manage scheduled scan task configuration
|
||||
*/
|
||||
export default function ScheduledScanPage() {
|
||||
const [createDialogOpen, setCreateDialogOpen] = React.useState(false)
|
||||
@@ -34,11 +35,50 @@ export default function ScheduledScanPage() {
|
||||
const [deleteDialogOpen, setDeleteDialogOpen] = React.useState(false)
|
||||
const [deletingScheduledScan, setDeletingScheduledScan] = React.useState<ScheduledScan | null>(null)
|
||||
|
||||
// 分页状态
|
||||
// Internationalization
|
||||
const tColumns = useTranslations("columns")
|
||||
const tCommon = useTranslations("common")
|
||||
const tScan = useTranslations("scan")
|
||||
const tConfirm = useTranslations("common.confirm")
|
||||
|
||||
// Build translation object
|
||||
const translations = React.useMemo(() => ({
|
||||
columns: {
|
||||
taskName: tColumns("scheduledScan.taskName"),
|
||||
scanEngine: tColumns("scheduledScan.scanEngine"),
|
||||
cronExpression: tColumns("scheduledScan.cronExpression"),
|
||||
scope: tColumns("scheduledScan.scope"),
|
||||
status: tColumns("common.status"),
|
||||
nextRun: tColumns("scheduledScan.nextRun"),
|
||||
runCount: tColumns("scheduledScan.runCount"),
|
||||
lastRun: tColumns("scheduledScan.lastRun"),
|
||||
},
|
||||
actions: {
|
||||
editTask: tScan("editTask"),
|
||||
delete: tCommon("actions.delete"),
|
||||
openMenu: tCommon("actions.openMenu"),
|
||||
},
|
||||
status: {
|
||||
enabled: tCommon("status.enabled"),
|
||||
disabled: tCommon("status.disabled"),
|
||||
},
|
||||
cron: {
|
||||
everyMinute: tScan("cron.everyMinute"),
|
||||
everyNMinutes: tScan.raw("cron.everyNMinutes") as string,
|
||||
everyHour: tScan.raw("cron.everyHour") as string,
|
||||
everyNHours: tScan.raw("cron.everyNHours") as string,
|
||||
everyDay: tScan.raw("cron.everyDay") as string,
|
||||
everyWeek: tScan.raw("cron.everyWeek") as string,
|
||||
everyMonth: tScan.raw("cron.everyMonth") as string,
|
||||
weekdays: tScan.raw("cron.weekdays") as string[],
|
||||
},
|
||||
}), [tColumns, tCommon, tScan])
|
||||
|
||||
// Pagination state
|
||||
const [page, setPage] = React.useState(1)
|
||||
const [pageSize, setPageSize] = React.useState(10)
|
||||
|
||||
// 搜索状态
|
||||
// Search state
|
||||
const [searchQuery, setSearchQuery] = React.useState("")
|
||||
const [isSearching, setIsSearching] = React.useState(false)
|
||||
|
||||
@@ -48,10 +88,10 @@ export default function ScheduledScanPage() {
|
||||
setPage(1)
|
||||
}
|
||||
|
||||
// 使用实际 API
|
||||
// Use actual API
|
||||
const { data, isLoading, isFetching, refetch } = useScheduledScans({ page, pageSize, search: searchQuery || undefined })
|
||||
|
||||
// 当请求完成时重置搜索状态
|
||||
// Reset search state when request completes
|
||||
React.useEffect(() => {
|
||||
if (!isFetching && isSearching) {
|
||||
setIsSearching(false)
|
||||
@@ -64,7 +104,7 @@ export default function ScheduledScanPage() {
|
||||
const total = data?.total || 0
|
||||
const totalPages = data?.totalPages || 1
|
||||
|
||||
// 格式化日期
|
||||
// Format date
|
||||
const formatDate = React.useCallback((dateString: string) => {
|
||||
const date = new Date(dateString)
|
||||
return date.toLocaleString("zh-CN", {
|
||||
@@ -76,19 +116,19 @@ export default function ScheduledScanPage() {
|
||||
})
|
||||
}, [])
|
||||
|
||||
// 编辑任务
|
||||
// Edit task
|
||||
const handleEdit = React.useCallback((scan: ScheduledScan) => {
|
||||
setEditingScheduledScan(scan)
|
||||
setEditDialogOpen(true)
|
||||
}, [])
|
||||
|
||||
// 删除任务(打开确认弹窗)
|
||||
// Delete task (open confirmation dialog)
|
||||
const handleDelete = React.useCallback((scan: ScheduledScan) => {
|
||||
setDeletingScheduledScan(scan)
|
||||
setDeleteDialogOpen(true)
|
||||
}, [])
|
||||
|
||||
// 确认删除任务
|
||||
// Confirm delete task
|
||||
const confirmDelete = React.useCallback(() => {
|
||||
if (deletingScheduledScan) {
|
||||
deleteScheduledScan(deletingScheduledScan.id)
|
||||
@@ -97,28 +137,28 @@ export default function ScheduledScanPage() {
|
||||
}
|
||||
}, [deletingScheduledScan, deleteScheduledScan])
|
||||
|
||||
// 切换任务启用状态
|
||||
// Toggle task enabled status
|
||||
const handleToggleStatus = React.useCallback((scan: ScheduledScan, enabled: boolean) => {
|
||||
toggleScheduledScan({ id: scan.id, isEnabled: enabled })
|
||||
}, [toggleScheduledScan])
|
||||
|
||||
// 页码变化处理
|
||||
// Page change handler
|
||||
const handlePageChange = React.useCallback((newPage: number) => {
|
||||
setPage(newPage)
|
||||
}, [])
|
||||
|
||||
// 每页数量变化处理
|
||||
// Page size change handler
|
||||
const handlePageSizeChange = React.useCallback((newPageSize: number) => {
|
||||
setPageSize(newPageSize)
|
||||
setPage(1) // 重置到第一页
|
||||
setPage(1) // Reset to first page
|
||||
}, [])
|
||||
|
||||
// 添加新任务
|
||||
// Add new task
|
||||
const handleAddNew = React.useCallback(() => {
|
||||
setCreateDialogOpen(true)
|
||||
}, [])
|
||||
|
||||
// 创建列定义
|
||||
// Create column definition
|
||||
const columns = React.useMemo(
|
||||
() =>
|
||||
createScheduledScanColumns({
|
||||
@@ -126,8 +166,9 @@ export default function ScheduledScanPage() {
|
||||
handleEdit,
|
||||
handleDelete,
|
||||
handleToggleStatus,
|
||||
t: translations,
|
||||
}),
|
||||
[formatDate, handleEdit, handleDelete, handleToggleStatus]
|
||||
[formatDate, handleEdit, handleDelete, handleToggleStatus, translations]
|
||||
)
|
||||
|
||||
if (isLoading) {
|
||||
@@ -135,8 +176,8 @@ export default function ScheduledScanPage() {
|
||||
<div className="flex flex-col gap-4 py-4 md:gap-6 md:py-6">
|
||||
<div className="flex items-center justify-between px-4 lg:px-6">
|
||||
<div>
|
||||
<h1 className="text-3xl font-bold">定时扫描</h1>
|
||||
<p className="text-muted-foreground mt-1">配置和管理定时扫描任务</p>
|
||||
<h1 className="text-3xl font-bold">{tScan("scheduled.title")}</h1>
|
||||
<p className="text-muted-foreground mt-1">{tScan("scheduled.description")}</p>
|
||||
</div>
|
||||
</div>
|
||||
<DataTableSkeleton
|
||||
@@ -150,26 +191,25 @@ export default function ScheduledScanPage() {
|
||||
|
||||
return (
|
||||
<div className="flex flex-col gap-4 py-4 md:gap-6 md:py-6">
|
||||
{/* 页面标题 */}
|
||||
{/* Page title */}
|
||||
<div className="px-4 lg:px-6">
|
||||
<div>
|
||||
<h1 className="text-3xl font-bold">定时扫描</h1>
|
||||
<p className="text-muted-foreground mt-1">配置和管理定时扫描任务</p>
|
||||
<h1 className="text-3xl font-bold">{tScan("scheduled.title")}</h1>
|
||||
<p className="text-muted-foreground mt-1">{tScan("scheduled.description")}</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* 数据表格 */}
|
||||
{/* Data table */}
|
||||
<div className="px-4 lg:px-6">
|
||||
<ScheduledScanDataTable
|
||||
data={scheduledScans}
|
||||
columns={columns}
|
||||
onAddNew={handleAddNew}
|
||||
searchPlaceholder="搜索任务名称..."
|
||||
searchColumn="name"
|
||||
searchPlaceholder={tScan("scheduled.searchPlaceholder")}
|
||||
searchValue={searchQuery}
|
||||
onSearch={handleSearchChange}
|
||||
isSearching={isSearching}
|
||||
addButtonText="新建定时扫描"
|
||||
addButtonText={tScan("scheduled.createTitle")}
|
||||
page={page}
|
||||
pageSize={pageSize}
|
||||
total={total}
|
||||
@@ -179,14 +219,14 @@ export default function ScheduledScanPage() {
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* 新建定时扫描对话框 */}
|
||||
{/* Create scheduled scan dialog */}
|
||||
<CreateScheduledScanDialog
|
||||
open={createDialogOpen}
|
||||
onOpenChange={setCreateDialogOpen}
|
||||
onSuccess={() => refetch()}
|
||||
/>
|
||||
|
||||
{/* 编辑定时扫描对话框 */}
|
||||
{/* Edit scheduled scan dialog */}
|
||||
<EditScheduledScanDialog
|
||||
open={editDialogOpen}
|
||||
onOpenChange={setEditDialogOpen}
|
||||
@@ -194,19 +234,19 @@ export default function ScheduledScanPage() {
|
||||
onSuccess={() => refetch()}
|
||||
/>
|
||||
|
||||
{/* 删除确认弹窗 */}
|
||||
{/* Delete confirmation dialog */}
|
||||
<AlertDialog open={deleteDialogOpen} onOpenChange={setDeleteDialogOpen}>
|
||||
<AlertDialogContent>
|
||||
<AlertDialogHeader>
|
||||
<AlertDialogTitle>确认删除</AlertDialogTitle>
|
||||
<AlertDialogTitle>{tConfirm("deleteTitle")}</AlertDialogTitle>
|
||||
<AlertDialogDescription>
|
||||
确定要删除定时扫描任务 "{deletingScheduledScan?.name}" 吗?此操作无法撤销。
|
||||
{tConfirm("deleteScheduledScanMessage", { name: deletingScheduledScan?.name ?? "" })}
|
||||
</AlertDialogDescription>
|
||||
</AlertDialogHeader>
|
||||
<AlertDialogFooter>
|
||||
<AlertDialogCancel>取消</AlertDialogCancel>
|
||||
<AlertDialogCancel>{tCommon("actions.cancel")}</AlertDialogCancel>
|
||||
<AlertDialogAction onClick={confirmDelete} className="bg-destructive text-destructive-foreground hover:bg-destructive/90">
|
||||
删除
|
||||
{tCommon("actions.delete")}
|
||||
</AlertDialogAction>
|
||||
</AlertDialogFooter>
|
||||
</AlertDialogContent>
|
||||
5
frontend/app/[locale]/search/page.tsx
Normal file
5
frontend/app/[locale]/search/page.tsx
Normal file
@@ -0,0 +1,5 @@
|
||||
import { SearchPage } from "@/components/search"
|
||||
|
||||
export default function Search() {
|
||||
return <SearchPage />
|
||||
}
|
||||
306
frontend/app/[locale]/settings/api-keys/page.tsx
Normal file
306
frontend/app/[locale]/settings/api-keys/page.tsx
Normal file
@@ -0,0 +1,306 @@
|
||||
"use client"
|
||||
|
||||
import React, { useState, useEffect } from 'react'
|
||||
import { IconEye, IconEyeOff, IconWorldSearch, IconRadar2 } from '@tabler/icons-react'
|
||||
|
||||
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from '@/components/ui/card'
|
||||
import { Button } from '@/components/ui/button'
|
||||
import { Input } from '@/components/ui/input'
|
||||
import { Switch } from '@/components/ui/switch'
|
||||
import { Separator } from '@/components/ui/separator'
|
||||
import { Badge } from '@/components/ui/badge'
|
||||
import { Skeleton } from '@/components/ui/skeleton'
|
||||
import { useApiKeySettings, useUpdateApiKeySettings } from '@/hooks/use-api-key-settings'
|
||||
import type { ApiKeySettings } from '@/types/api-key-settings.types'
|
||||
|
||||
// 密码输入框组件(带显示/隐藏切换)
|
||||
function PasswordInput({ value, onChange, placeholder, disabled }: {
|
||||
value: string
|
||||
onChange: (value: string) => void
|
||||
placeholder?: string
|
||||
disabled?: boolean
|
||||
}) {
|
||||
const [show, setShow] = useState(false)
|
||||
return (
|
||||
<div className="relative">
|
||||
<Input
|
||||
type={show ? 'text' : 'password'}
|
||||
value={value}
|
||||
onChange={(e) => onChange(e.target.value)}
|
||||
placeholder={placeholder}
|
||||
disabled={disabled}
|
||||
className="pr-10"
|
||||
/>
|
||||
<button
|
||||
type="button"
|
||||
onClick={() => setShow(!show)}
|
||||
className="absolute right-3 top-1/2 -translate-y-1/2 text-muted-foreground hover:text-foreground"
|
||||
>
|
||||
{show ? <IconEyeOff className="h-4 w-4" /> : <IconEye className="h-4 w-4" />}
|
||||
</button>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// Provider 配置定义
|
||||
const PROVIDERS = [
|
||||
{
|
||||
key: 'fofa',
|
||||
name: 'FOFA',
|
||||
description: '网络空间测绘平台,提供全球互联网资产搜索',
|
||||
icon: IconWorldSearch,
|
||||
color: 'text-blue-500',
|
||||
bgColor: 'bg-blue-500/10',
|
||||
fields: [
|
||||
{ name: 'email', label: '邮箱', type: 'text', placeholder: 'your@email.com' },
|
||||
{ name: 'apiKey', label: 'API Key', type: 'password', placeholder: '输入 FOFA API Key' },
|
||||
],
|
||||
docUrl: 'https://fofa.info/api',
|
||||
},
|
||||
{
|
||||
key: 'hunter',
|
||||
name: 'Hunter (鹰图)',
|
||||
description: '奇安信威胁情报平台,提供网络空间资产测绘',
|
||||
icon: IconRadar2,
|
||||
color: 'text-orange-500',
|
||||
bgColor: 'bg-orange-500/10',
|
||||
fields: [
|
||||
{ name: 'apiKey', label: 'API Key', type: 'password', placeholder: '输入 Hunter API Key' },
|
||||
],
|
||||
docUrl: 'https://hunter.qianxin.com/',
|
||||
},
|
||||
{
|
||||
key: 'shodan',
|
||||
name: 'Shodan',
|
||||
description: '全球最大的互联网设备搜索引擎',
|
||||
icon: IconWorldSearch,
|
||||
color: 'text-red-500',
|
||||
bgColor: 'bg-red-500/10',
|
||||
fields: [
|
||||
{ name: 'apiKey', label: 'API Key', type: 'password', placeholder: '输入 Shodan API Key' },
|
||||
],
|
||||
docUrl: 'https://developer.shodan.io/',
|
||||
},
|
||||
{
|
||||
key: 'censys',
|
||||
name: 'Censys',
|
||||
description: '互联网资产搜索和监控平台',
|
||||
icon: IconWorldSearch,
|
||||
color: 'text-purple-500',
|
||||
bgColor: 'bg-purple-500/10',
|
||||
fields: [
|
||||
{ name: 'apiId', label: 'API ID', type: 'text', placeholder: '输入 Censys API ID' },
|
||||
{ name: 'apiSecret', label: 'API Secret', type: 'password', placeholder: '输入 Censys API Secret' },
|
||||
],
|
||||
docUrl: 'https://search.censys.io/api',
|
||||
},
|
||||
{
|
||||
key: 'zoomeye',
|
||||
name: 'ZoomEye (钟馗之眼)',
|
||||
description: '知道创宇网络空间搜索引擎',
|
||||
icon: IconWorldSearch,
|
||||
color: 'text-green-500',
|
||||
bgColor: 'bg-green-500/10',
|
||||
fields: [
|
||||
{ name: 'apiKey', label: 'API Key', type: 'password', placeholder: '输入 ZoomEye API Key' },
|
||||
],
|
||||
docUrl: 'https://www.zoomeye.org/doc',
|
||||
},
|
||||
{
|
||||
key: 'securitytrails',
|
||||
name: 'SecurityTrails',
|
||||
description: 'DNS 历史记录和子域名数据平台',
|
||||
icon: IconWorldSearch,
|
||||
color: 'text-cyan-500',
|
||||
bgColor: 'bg-cyan-500/10',
|
||||
fields: [
|
||||
{ name: 'apiKey', label: 'API Key', type: 'password', placeholder: '输入 SecurityTrails API Key' },
|
||||
],
|
||||
docUrl: 'https://securitytrails.com/corp/api',
|
||||
},
|
||||
{
|
||||
key: 'threatbook',
|
||||
name: 'ThreatBook (微步在线)',
|
||||
description: '威胁情报平台,提供域名和 IP 情报查询',
|
||||
icon: IconWorldSearch,
|
||||
color: 'text-indigo-500',
|
||||
bgColor: 'bg-indigo-500/10',
|
||||
fields: [
|
||||
{ name: 'apiKey', label: 'API Key', type: 'password', placeholder: '输入 ThreatBook API Key' },
|
||||
],
|
||||
docUrl: 'https://x.threatbook.com/api',
|
||||
},
|
||||
{
|
||||
key: 'quake',
|
||||
name: 'Quake (360)',
|
||||
description: '360 网络空间测绘系统',
|
||||
icon: IconWorldSearch,
|
||||
color: 'text-teal-500',
|
||||
bgColor: 'bg-teal-500/10',
|
||||
fields: [
|
||||
{ name: 'apiKey', label: 'API Key', type: 'password', placeholder: '输入 Quake API Key' },
|
||||
],
|
||||
docUrl: 'https://quake.360.net/quake/#/help',
|
||||
},
|
||||
]
|
||||
|
||||
// 默认配置
|
||||
const DEFAULT_SETTINGS: ApiKeySettings = {
|
||||
fofa: { enabled: false, email: '', apiKey: '' },
|
||||
hunter: { enabled: false, apiKey: '' },
|
||||
shodan: { enabled: false, apiKey: '' },
|
||||
censys: { enabled: false, apiId: '', apiSecret: '' },
|
||||
zoomeye: { enabled: false, apiKey: '' },
|
||||
securitytrails: { enabled: false, apiKey: '' },
|
||||
threatbook: { enabled: false, apiKey: '' },
|
||||
quake: { enabled: false, apiKey: '' },
|
||||
}
|
||||
|
||||
export default function ApiKeysSettingsPage() {
|
||||
const { data: settings, isLoading } = useApiKeySettings()
|
||||
const updateMutation = useUpdateApiKeySettings()
|
||||
|
||||
const [formData, setFormData] = useState<ApiKeySettings>(DEFAULT_SETTINGS)
|
||||
const [hasChanges, setHasChanges] = useState(false)
|
||||
|
||||
// 当数据加载完成后,更新表单数据
|
||||
useEffect(() => {
|
||||
if (settings) {
|
||||
setFormData({ ...DEFAULT_SETTINGS, ...settings })
|
||||
setHasChanges(false)
|
||||
}
|
||||
}, [settings])
|
||||
|
||||
const updateProvider = (providerKey: string, field: string, value: any) => {
|
||||
setFormData(prev => ({
|
||||
...prev,
|
||||
[providerKey]: {
|
||||
...prev[providerKey as keyof ApiKeySettings],
|
||||
[field]: value,
|
||||
}
|
||||
}))
|
||||
setHasChanges(true)
|
||||
}
|
||||
|
||||
const handleSave = async () => {
|
||||
updateMutation.mutate(formData)
|
||||
setHasChanges(false)
|
||||
}
|
||||
|
||||
const enabledCount = Object.values(formData).filter((p: any) => p?.enabled).length
|
||||
|
||||
if (isLoading) {
|
||||
return (
|
||||
<div className="p-4 md:p-6 space-y-6">
|
||||
<div>
|
||||
<Skeleton className="h-8 w-48" />
|
||||
<Skeleton className="h-4 w-96 mt-2" />
|
||||
</div>
|
||||
<div className="grid gap-4">
|
||||
{[1, 2, 3].map((i) => (
|
||||
<Skeleton key={i} className="h-24 w-full" />
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="p-4 md:p-6 space-y-6">
|
||||
{/* 页面标题 */}
|
||||
<div>
|
||||
<div className="flex items-center gap-2">
|
||||
<h1 className="text-2xl font-semibold">API 密钥配置</h1>
|
||||
{enabledCount > 0 && (
|
||||
<Badge variant="secondary">{enabledCount} 个已启用</Badge>
|
||||
)}
|
||||
</div>
|
||||
<p className="text-muted-foreground mt-1">
|
||||
配置第三方数据源的 API 密钥,用于增强子域名发现能力。启用后将在 subfinder 扫描时自动使用。
|
||||
</p>
|
||||
</div>
|
||||
|
||||
{/* Provider 卡片列表 */}
|
||||
<div className="grid gap-4">
|
||||
{PROVIDERS.map((provider) => {
|
||||
const data = formData[provider.key as keyof ApiKeySettings] || {}
|
||||
const isEnabled = (data as any)?.enabled || false
|
||||
|
||||
return (
|
||||
<Card key={provider.key}>
|
||||
<CardHeader className="pb-4">
|
||||
<div className="flex items-center justify-between">
|
||||
<div className="flex items-center gap-3">
|
||||
<div className={`flex h-10 w-10 items-center justify-center rounded-lg ${provider.bgColor}`}>
|
||||
<provider.icon className={`h-5 w-5 ${provider.color}`} />
|
||||
</div>
|
||||
<div>
|
||||
<div className="flex items-center gap-2">
|
||||
<CardTitle className="text-base">{provider.name}</CardTitle>
|
||||
{isEnabled && <Badge variant="outline" className="text-xs text-green-600">已启用</Badge>}
|
||||
</div>
|
||||
<CardDescription>{provider.description}</CardDescription>
|
||||
</div>
|
||||
</div>
|
||||
<Switch
|
||||
checked={isEnabled}
|
||||
onCheckedChange={(checked) => updateProvider(provider.key, 'enabled', checked)}
|
||||
/>
|
||||
</div>
|
||||
</CardHeader>
|
||||
|
||||
{/* 展开的配置表单 */}
|
||||
{isEnabled && (
|
||||
<CardContent className="pt-0">
|
||||
<Separator className="mb-4" />
|
||||
<div className="space-y-4">
|
||||
{provider.fields.map((field) => (
|
||||
<div key={field.name} className="space-y-2">
|
||||
<label className="text-sm font-medium">{field.label}</label>
|
||||
{field.type === 'password' ? (
|
||||
<PasswordInput
|
||||
value={(data as any)[field.name] || ''}
|
||||
onChange={(value) => updateProvider(provider.key, field.name, value)}
|
||||
placeholder={field.placeholder}
|
||||
/>
|
||||
) : (
|
||||
<Input
|
||||
type="text"
|
||||
value={(data as any)[field.name] || ''}
|
||||
onChange={(e) => updateProvider(provider.key, field.name, e.target.value)}
|
||||
placeholder={field.placeholder}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
))}
|
||||
<p className="text-xs text-muted-foreground">
|
||||
获取 API Key:
|
||||
<a
|
||||
href={provider.docUrl}
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
className="text-primary hover:underline ml-1"
|
||||
>
|
||||
{provider.docUrl}
|
||||
</a>
|
||||
</p>
|
||||
</div>
|
||||
</CardContent>
|
||||
)}
|
||||
</Card>
|
||||
)
|
||||
})}
|
||||
</div>
|
||||
|
||||
{/* 保存按钮 */}
|
||||
<div className="flex justify-end">
|
||||
<Button
|
||||
onClick={handleSave}
|
||||
disabled={updateMutation.isPending || !hasChanges}
|
||||
>
|
||||
{updateMutation.isPending ? '保存中...' : '保存配置'}
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
132
frontend/app/[locale]/settings/blacklist/page.tsx
Normal file
132
frontend/app/[locale]/settings/blacklist/page.tsx
Normal file
@@ -0,0 +1,132 @@
|
||||
"use client"
|
||||
|
||||
import React, { useState, useEffect } from "react"
|
||||
import { useTranslations } from "next-intl"
|
||||
import { AlertTriangle, Loader2, Ban } from "lucide-react"
|
||||
import { Button } from "@/components/ui/button"
|
||||
import { Textarea } from "@/components/ui/textarea"
|
||||
import { Skeleton } from "@/components/ui/skeleton"
|
||||
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card"
|
||||
import { useGlobalBlacklist, useUpdateGlobalBlacklist } from "@/hooks/use-global-blacklist"
|
||||
|
||||
/**
|
||||
* Global blacklist settings page
|
||||
*/
|
||||
export default function GlobalBlacklistPage() {
|
||||
const t = useTranslations("pages.settings.blacklist")
|
||||
|
||||
const [blacklistText, setBlacklistText] = useState("")
|
||||
const [hasChanges, setHasChanges] = useState(false)
|
||||
|
||||
const { data, isLoading, error } = useGlobalBlacklist()
|
||||
const updateBlacklist = useUpdateGlobalBlacklist()
|
||||
|
||||
// Initialize text when data loads
|
||||
useEffect(() => {
|
||||
if (data?.patterns) {
|
||||
setBlacklistText(data.patterns.join("\n"))
|
||||
setHasChanges(false)
|
||||
}
|
||||
}, [data])
|
||||
|
||||
// Handle text change
|
||||
const handleTextChange = (e: React.ChangeEvent<HTMLTextAreaElement>) => {
|
||||
setBlacklistText(e.target.value)
|
||||
setHasChanges(true)
|
||||
}
|
||||
|
||||
// Handle save
|
||||
const handleSave = () => {
|
||||
const patterns = blacklistText
|
||||
.split("\n")
|
||||
.map((line) => line.trim())
|
||||
.filter((line) => line.length > 0)
|
||||
|
||||
updateBlacklist.mutate(
|
||||
{ patterns },
|
||||
{
|
||||
onSuccess: () => {
|
||||
setHasChanges(false)
|
||||
},
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
if (isLoading) {
|
||||
return (
|
||||
<div className="flex flex-1 flex-col gap-4 p-4">
|
||||
<div className="space-y-2">
|
||||
<Skeleton className="h-8 w-48" />
|
||||
<Skeleton className="h-4 w-96" />
|
||||
</div>
|
||||
<Skeleton className="h-[400px] w-full" />
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
if (error) {
|
||||
return (
|
||||
<div className="flex flex-1 flex-col items-center justify-center py-12">
|
||||
<AlertTriangle className="h-10 w-10 text-destructive mb-4" />
|
||||
<p className="text-muted-foreground">{t("loadError")}</p>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="flex flex-1 flex-col gap-4 p-4">
|
||||
{/* Page header */}
|
||||
<div>
|
||||
<h1 className="text-2xl font-bold">{t("title")}</h1>
|
||||
<p className="text-muted-foreground">{t("description")}</p>
|
||||
</div>
|
||||
|
||||
{/* Blacklist card */}
|
||||
<Card>
|
||||
<CardHeader>
|
||||
<div className="flex items-center gap-2">
|
||||
<Ban className="h-5 w-5 text-muted-foreground" />
|
||||
<CardTitle>{t("card.title")}</CardTitle>
|
||||
</div>
|
||||
<CardDescription>{t("card.description")}</CardDescription>
|
||||
</CardHeader>
|
||||
<CardContent className="space-y-4">
|
||||
{/* Rules hint */}
|
||||
<div className="flex flex-wrap items-center gap-x-4 gap-y-2 text-sm text-muted-foreground">
|
||||
<span className="font-medium text-foreground">{t("rules.title")}:</span>
|
||||
<span><code className="bg-muted px-1.5 py-0.5 rounded text-xs">*.gov</code> {t("rules.domain")}</span>
|
||||
<span><code className="bg-muted px-1.5 py-0.5 rounded text-xs">*cdn*</code> {t("rules.keyword")}</span>
|
||||
<span><code className="bg-muted px-1.5 py-0.5 rounded text-xs">192.168.1.1</code> {t("rules.ip")}</span>
|
||||
<span><code className="bg-muted px-1.5 py-0.5 rounded text-xs">10.0.0.0/8</code> {t("rules.cidr")}</span>
|
||||
</div>
|
||||
|
||||
{/* Scope hint */}
|
||||
<div className="rounded-lg border bg-muted/50 p-3 text-sm">
|
||||
<p className="text-muted-foreground">{t("scopeHint")}</p>
|
||||
</div>
|
||||
|
||||
{/* Input */}
|
||||
<Textarea
|
||||
value={blacklistText}
|
||||
onChange={handleTextChange}
|
||||
placeholder={t("placeholder")}
|
||||
className="min-h-[320px] font-mono text-sm"
|
||||
/>
|
||||
|
||||
{/* Save button */}
|
||||
<div className="flex justify-end">
|
||||
<Button
|
||||
onClick={handleSave}
|
||||
disabled={!hasChanges || updateBlacklist.isPending}
|
||||
>
|
||||
{updateBlacklist.isPending && (
|
||||
<Loader2 className="mr-2 h-4 w-4 animate-spin" />
|
||||
)}
|
||||
{t("save")}
|
||||
</Button>
|
||||
</div>
|
||||
</CardContent>
|
||||
</Card>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
import React from 'react'
|
||||
import { useForm } from 'react-hook-form'
|
||||
import { useTranslations } from 'next-intl'
|
||||
import { zodResolver } from '@hookform/resolvers/zod'
|
||||
import * as z from 'zod'
|
||||
import { IconBrandDiscord, IconMail, IconBrandSlack, IconScan, IconShieldCheck, IconWorld, IconSettings } from '@tabler/icons-react'
|
||||
@@ -16,66 +17,82 @@ import { Separator } from '@/components/ui/separator'
|
||||
import { Badge } from '@/components/ui/badge'
|
||||
import { useNotificationSettings, useUpdateNotificationSettings } from '@/hooks/use-notification-settings'
|
||||
|
||||
const schema = z
|
||||
.object({
|
||||
discord: z.object({
|
||||
enabled: z.boolean(),
|
||||
webhookUrl: z.string().url('请输入有效的 Discord Webhook URL').or(z.literal('')),
|
||||
}),
|
||||
categories: z.object({
|
||||
scan: z.boolean(), // 扫描任务
|
||||
vulnerability: z.boolean(), // 漏洞发现
|
||||
asset: z.boolean(), // 资产发现
|
||||
system: z.boolean(), // 系统消息
|
||||
}),
|
||||
})
|
||||
.superRefine((val, ctx) => {
|
||||
if (val.discord.enabled) {
|
||||
if (!val.discord.webhookUrl || val.discord.webhookUrl.trim() === '') {
|
||||
ctx.addIssue({
|
||||
code: z.ZodIssueCode.custom,
|
||||
message: '启用 Discord 时必须填写 Webhook URL',
|
||||
path: ['discord', 'webhookUrl'],
|
||||
})
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
const NOTIFICATION_CATEGORIES = [
|
||||
{
|
||||
key: 'scan' as const,
|
||||
label: '扫描任务',
|
||||
description: '扫描启动、进度、完成、失败等通知',
|
||||
icon: IconScan,
|
||||
},
|
||||
{
|
||||
key: 'vulnerability' as const,
|
||||
label: '漏洞发现',
|
||||
description: '发现安全漏洞时通知',
|
||||
icon: IconShieldCheck,
|
||||
},
|
||||
{
|
||||
key: 'asset' as const,
|
||||
label: '资产发现',
|
||||
description: '发现新子域名、IP、端口等资产',
|
||||
icon: IconWorld,
|
||||
},
|
||||
{
|
||||
key: 'system' as const,
|
||||
label: '系统消息',
|
||||
description: '系统级通知和公告',
|
||||
icon: IconSettings,
|
||||
},
|
||||
]
|
||||
|
||||
export default function NotificationSettingsPage() {
|
||||
const t = useTranslations("settings.notifications")
|
||||
const { data, isLoading } = useNotificationSettings()
|
||||
const updateMutation = useUpdateNotificationSettings()
|
||||
|
||||
// Schema with translations
|
||||
const schema = z
|
||||
.object({
|
||||
discord: z.object({
|
||||
enabled: z.boolean(),
|
||||
webhookUrl: z.string().url(t("discord.urlInvalid")).or(z.literal('')),
|
||||
}),
|
||||
wecom: z.object({
|
||||
enabled: z.boolean(),
|
||||
webhookUrl: z.string().url(t("wecom.urlInvalid")).or(z.literal('')),
|
||||
}),
|
||||
categories: z.object({
|
||||
scan: z.boolean(),
|
||||
vulnerability: z.boolean(),
|
||||
asset: z.boolean(),
|
||||
system: z.boolean(),
|
||||
}),
|
||||
})
|
||||
.superRefine((val, ctx) => {
|
||||
if (val.discord.enabled) {
|
||||
if (!val.discord.webhookUrl || val.discord.webhookUrl.trim() === '') {
|
||||
ctx.addIssue({
|
||||
code: z.ZodIssueCode.custom,
|
||||
message: t("discord.requiredError"),
|
||||
path: ['discord', 'webhookUrl'],
|
||||
})
|
||||
}
|
||||
}
|
||||
if (val.wecom.enabled) {
|
||||
if (!val.wecom.webhookUrl || val.wecom.webhookUrl.trim() === '') {
|
||||
ctx.addIssue({
|
||||
code: z.ZodIssueCode.custom,
|
||||
message: t("wecom.requiredError"),
|
||||
path: ['wecom', 'webhookUrl'],
|
||||
})
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
const NOTIFICATION_CATEGORIES = [
|
||||
{
|
||||
key: 'scan' as const,
|
||||
label: t("categories.scan"),
|
||||
description: t("categories.scanDesc"),
|
||||
icon: IconScan,
|
||||
},
|
||||
{
|
||||
key: 'vulnerability' as const,
|
||||
label: t("categories.vulnerability"),
|
||||
description: t("categories.vulnerabilityDesc"),
|
||||
icon: IconShieldCheck,
|
||||
},
|
||||
{
|
||||
key: 'asset' as const,
|
||||
label: t("categories.asset"),
|
||||
description: t("categories.assetDesc"),
|
||||
icon: IconWorld,
|
||||
},
|
||||
{
|
||||
key: 'system' as const,
|
||||
label: t("categories.system"),
|
||||
description: t("categories.systemDesc"),
|
||||
icon: IconSettings,
|
||||
},
|
||||
]
|
||||
|
||||
const form = useForm<z.infer<typeof schema>>({
|
||||
resolver: zodResolver(schema),
|
||||
values: data ?? {
|
||||
discord: { enabled: false, webhookUrl: '' },
|
||||
wecom: { enabled: false, webhookUrl: '' },
|
||||
categories: {
|
||||
scan: true,
|
||||
vulnerability: true,
|
||||
@@ -90,25 +107,26 @@ export default function NotificationSettingsPage() {
|
||||
}
|
||||
|
||||
const discordEnabled = form.watch('discord.enabled')
|
||||
const wecomEnabled = form.watch('wecom.enabled')
|
||||
|
||||
return (
|
||||
<div className="p-4 md:p-6 space-y-6">
|
||||
<div>
|
||||
<h1 className="text-2xl font-semibold">通知设置</h1>
|
||||
<p className="text-muted-foreground mt-1">配置系统通知的推送渠道和接收偏好</p>
|
||||
<h1 className="text-2xl font-semibold">{t("pageTitle")}</h1>
|
||||
<p className="text-muted-foreground mt-1">{t("pageDesc")}</p>
|
||||
</div>
|
||||
|
||||
<Tabs defaultValue="channels" className="w-full">
|
||||
<TabsList>
|
||||
<TabsTrigger value="channels">推送渠道</TabsTrigger>
|
||||
<TabsTrigger value="preferences">通知偏好</TabsTrigger>
|
||||
<TabsTrigger value="channels">{t("tabs.channels")}</TabsTrigger>
|
||||
<TabsTrigger value="preferences">{t("tabs.preferences")}</TabsTrigger>
|
||||
</TabsList>
|
||||
|
||||
<Form {...form}>
|
||||
<form onSubmit={form.handleSubmit(onSubmit)}>
|
||||
{/* 推送渠道 Tab */}
|
||||
{/* Push channels tab */}
|
||||
<TabsContent value="channels" className="space-y-4 mt-4">
|
||||
{/* Discord 卡片 */}
|
||||
{/* Discord card */}
|
||||
<Card>
|
||||
<CardHeader className="pb-4">
|
||||
<div className="flex items-center justify-between">
|
||||
@@ -117,8 +135,8 @@ export default function NotificationSettingsPage() {
|
||||
<IconBrandDiscord className="h-5 w-5 text-[#5865F2]" />
|
||||
</div>
|
||||
<div>
|
||||
<CardTitle className="text-base">Discord</CardTitle>
|
||||
<CardDescription>将通知推送到你的 Discord 频道</CardDescription>
|
||||
<CardTitle className="text-base">{t("discord.title")}</CardTitle>
|
||||
<CardDescription>{t("discord.description")}</CardDescription>
|
||||
</div>
|
||||
</div>
|
||||
<FormField
|
||||
@@ -144,16 +162,16 @@ export default function NotificationSettingsPage() {
|
||||
name="discord.webhookUrl"
|
||||
render={({ field }) => (
|
||||
<FormItem>
|
||||
<FormLabel>Webhook URL</FormLabel>
|
||||
<FormLabel>{t("discord.webhookLabel")}</FormLabel>
|
||||
<FormControl>
|
||||
<Input
|
||||
placeholder="https://discord.com/api/webhooks/..."
|
||||
placeholder={t("discord.webhookPlaceholder")}
|
||||
{...field}
|
||||
disabled={isLoading || updateMutation.isPending}
|
||||
/>
|
||||
</FormControl>
|
||||
<FormDescription>
|
||||
在 Discord 频道设置中创建 Webhook 并粘贴地址
|
||||
{t("discord.webhookHelp")}
|
||||
</FormDescription>
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
@@ -163,7 +181,7 @@ export default function NotificationSettingsPage() {
|
||||
)}
|
||||
</Card>
|
||||
|
||||
{/* 邮件 - 即将支持 */}
|
||||
{/* Email - Coming soon */}
|
||||
<Card className="opacity-60">
|
||||
<CardHeader className="pb-4">
|
||||
<div className="flex items-center justify-between">
|
||||
@@ -173,10 +191,10 @@ export default function NotificationSettingsPage() {
|
||||
</div>
|
||||
<div>
|
||||
<div className="flex items-center gap-2">
|
||||
<CardTitle className="text-base">邮件</CardTitle>
|
||||
<Badge variant="secondary" className="text-xs">即将支持</Badge>
|
||||
<CardTitle className="text-base">{t("emailChannel.title")}</CardTitle>
|
||||
<Badge variant="secondary" className="text-xs">{t("emailChannel.comingSoon")}</Badge>
|
||||
</div>
|
||||
<CardDescription>通过邮件接收通知</CardDescription>
|
||||
<CardDescription>{t("emailChannel.description")}</CardDescription>
|
||||
</div>
|
||||
</div>
|
||||
<Switch disabled />
|
||||
@@ -184,34 +202,68 @@ export default function NotificationSettingsPage() {
|
||||
</CardHeader>
|
||||
</Card>
|
||||
|
||||
{/* 飞书/钉钉/企微 - 即将支持 */}
|
||||
<Card className="opacity-60">
|
||||
{/* 企业微信 */}
|
||||
<Card>
|
||||
<CardHeader className="pb-4">
|
||||
<div className="flex items-center justify-between">
|
||||
<div className="flex items-center gap-3">
|
||||
<div className="flex h-10 w-10 items-center justify-center rounded-lg bg-muted">
|
||||
<IconBrandSlack className="h-5 w-5 text-muted-foreground" />
|
||||
<div className="flex h-10 w-10 items-center justify-center rounded-lg bg-[#07C160]/10">
|
||||
<IconBrandSlack className="h-5 w-5 text-[#07C160]" />
|
||||
</div>
|
||||
<div>
|
||||
<div className="flex items-center gap-2">
|
||||
<CardTitle className="text-base">飞书 / 钉钉 / 企微</CardTitle>
|
||||
<Badge variant="secondary" className="text-xs">即将支持</Badge>
|
||||
</div>
|
||||
<CardDescription>推送到企业协作平台</CardDescription>
|
||||
<CardTitle className="text-base">{t("wecom.title")}</CardTitle>
|
||||
<CardDescription>{t("wecom.description")}</CardDescription>
|
||||
</div>
|
||||
</div>
|
||||
<Switch disabled />
|
||||
<FormField
|
||||
control={form.control}
|
||||
name="wecom.enabled"
|
||||
render={({ field }) => (
|
||||
<FormControl>
|
||||
<Switch
|
||||
checked={field.value}
|
||||
onCheckedChange={field.onChange}
|
||||
disabled={isLoading || updateMutation.isPending}
|
||||
/>
|
||||
</FormControl>
|
||||
)}
|
||||
/>
|
||||
</div>
|
||||
</CardHeader>
|
||||
{wecomEnabled && (
|
||||
<CardContent className="pt-0">
|
||||
<Separator className="mb-4" />
|
||||
<FormField
|
||||
control={form.control}
|
||||
name="wecom.webhookUrl"
|
||||
render={({ field }) => (
|
||||
<FormItem>
|
||||
<FormLabel>{t("wecom.webhookLabel")}</FormLabel>
|
||||
<FormControl>
|
||||
<Input
|
||||
placeholder={t("wecom.webhookPlaceholder")}
|
||||
{...field}
|
||||
disabled={isLoading || updateMutation.isPending}
|
||||
/>
|
||||
</FormControl>
|
||||
<FormDescription>
|
||||
{t("wecom.webhookHelp")}
|
||||
</FormDescription>
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
</CardContent>
|
||||
)}
|
||||
</Card>
|
||||
</TabsContent>
|
||||
|
||||
{/* 通知偏好 Tab */}
|
||||
{/* Notification preferences tab */}
|
||||
<TabsContent value="preferences" className="mt-4">
|
||||
<Card>
|
||||
<CardHeader>
|
||||
<CardTitle className="text-base">通知分类</CardTitle>
|
||||
<CardDescription>选择你想要接收的通知类型</CardDescription>
|
||||
<CardTitle className="text-base">{t("categories.title")}</CardTitle>
|
||||
<CardDescription>{t("categories.description")}</CardDescription>
|
||||
</CardHeader>
|
||||
<CardContent className="space-y-1">
|
||||
{NOTIFICATION_CATEGORIES.map((category) => (
|
||||
@@ -249,10 +301,10 @@ export default function NotificationSettingsPage() {
|
||||
</Card>
|
||||
</TabsContent>
|
||||
|
||||
{/* 保存按钮 */}
|
||||
{/* Save button */}
|
||||
<div className="flex justify-end mt-6">
|
||||
<Button type="submit" disabled={updateMutation.isPending || isLoading}>
|
||||
保存设置
|
||||
{t("saveSettings")}
|
||||
</Button>
|
||||
</div>
|
||||
</form>
|
||||
11
frontend/app/[locale]/settings/system-logs/page.tsx
Normal file
11
frontend/app/[locale]/settings/system-logs/page.tsx
Normal file
@@ -0,0 +1,11 @@
|
||||
"use client"
|
||||
|
||||
import { SystemLogsView } from "@/components/settings/system-logs"
|
||||
|
||||
export default function SystemLogsPage() {
|
||||
return (
|
||||
<div className="flex flex-1 flex-col p-4 h-full">
|
||||
<SystemLogsView />
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -1,15 +1,18 @@
|
||||
"use client"
|
||||
|
||||
import { WorkerList } from "@/components/settings/workers"
|
||||
import { useTranslations } from "next-intl"
|
||||
|
||||
export default function WorkersPage() {
|
||||
const t = useTranslations("pages.workers")
|
||||
|
||||
return (
|
||||
<div className="flex flex-1 flex-col gap-4 p-4">
|
||||
<div className="flex items-center justify-between">
|
||||
<div>
|
||||
<h1 className="text-2xl font-bold tracking-tight">扫描节点</h1>
|
||||
<h1 className="text-2xl font-bold tracking-tight">{t("title")}</h1>
|
||||
<p className="text-muted-foreground">
|
||||
管理分布式扫描节点,支持远程 VPS 自动部署
|
||||
{t("description")}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
@@ -4,16 +4,16 @@ import { useParams, useRouter } from "next/navigation"
|
||||
import { useEffect } from "react"
|
||||
|
||||
/**
|
||||
* 目标详情页面(兼容旧路由)
|
||||
* 自动重定向到域名页面
|
||||
* Target detail page (compatible with old routes)
|
||||
* Automatically redirects to overview page
|
||||
*/
|
||||
export default function TargetDetailsPage() {
|
||||
const { id } = useParams<{ id: string }>()
|
||||
const router = useRouter()
|
||||
|
||||
useEffect(() => {
|
||||
// 重定向到子域名页面
|
||||
router.replace(`/target/${id}/subdomain/`)
|
||||
// Redirect to overview page
|
||||
router.replace(`/target/${id}/overview/`)
|
||||
}, [id, router])
|
||||
|
||||
return null
|
||||
@@ -5,8 +5,8 @@ import { useParams } from "next/navigation"
|
||||
import { EndpointsDetailView } from "@/components/endpoints"
|
||||
|
||||
/**
|
||||
* 目标端点页面
|
||||
* 显示目标下的端点详情
|
||||
* Target endpoints page
|
||||
* Displays endpoint details under the target
|
||||
*/
|
||||
export default function TargetEndpointsPage() {
|
||||
const { id } = useParams<{ id: string }>()
|
||||
301
frontend/app/[locale]/target/[id]/layout.tsx
Normal file
301
frontend/app/[locale]/target/[id]/layout.tsx
Normal file
@@ -0,0 +1,301 @@
|
||||
"use client"
|
||||
|
||||
import React from "react"
|
||||
import { usePathname, useParams } from "next/navigation"
|
||||
import Link from "next/link"
|
||||
import { Target, LayoutDashboard, Package, FolderSearch, Image, ShieldAlert, Settings, HelpCircle } from "lucide-react"
|
||||
import { Skeleton } from "@/components/ui/skeleton"
|
||||
import { Tabs, TabsList, TabsTrigger } from "@/components/ui/tabs"
|
||||
import { Badge } from "@/components/ui/badge"
|
||||
import {
|
||||
Tooltip,
|
||||
TooltipContent,
|
||||
TooltipProvider,
|
||||
TooltipTrigger,
|
||||
} from "@/components/ui/tooltip"
|
||||
import { useTarget } from "@/hooks/use-targets"
|
||||
import { useTranslations } from "next-intl"
|
||||
|
||||
/**
|
||||
* Target detail layout
|
||||
* Two-level navigation: Overview / Assets / Vulnerabilities
|
||||
* Assets has secondary navigation for different asset types
|
||||
*/
|
||||
export default function TargetLayout({
|
||||
children,
|
||||
}: {
|
||||
children: React.ReactNode
|
||||
}) {
|
||||
const { id } = useParams<{ id: string }>()
|
||||
const pathname = usePathname()
|
||||
const t = useTranslations("pages.targetDetail")
|
||||
|
||||
// Use React Query to get target data
|
||||
const {
|
||||
data: target,
|
||||
isLoading,
|
||||
error
|
||||
} = useTarget(Number(id))
|
||||
|
||||
// Get primary navigation active tab
|
||||
const getPrimaryTab = () => {
|
||||
if (pathname.includes("/overview")) return "overview"
|
||||
if (pathname.includes("/directories")) return "directories"
|
||||
if (pathname.includes("/screenshots")) return "screenshots"
|
||||
if (pathname.includes("/vulnerabilities")) return "vulnerabilities"
|
||||
if (pathname.includes("/settings")) return "settings"
|
||||
// All asset pages fall under "assets"
|
||||
if (
|
||||
pathname.includes("/websites") ||
|
||||
pathname.includes("/subdomain") ||
|
||||
pathname.includes("/ip-addresses") ||
|
||||
pathname.includes("/endpoints")
|
||||
) {
|
||||
return "assets"
|
||||
}
|
||||
return "overview"
|
||||
}
|
||||
|
||||
// Get secondary navigation active tab (for assets)
|
||||
const getSecondaryTab = () => {
|
||||
if (pathname.includes("/websites")) return "websites"
|
||||
if (pathname.includes("/subdomain")) return "subdomain"
|
||||
if (pathname.includes("/ip-addresses")) return "ip-addresses"
|
||||
if (pathname.includes("/endpoints")) return "endpoints"
|
||||
return "websites"
|
||||
}
|
||||
|
||||
// Check if we should show secondary navigation
|
||||
const showSecondaryNav = getPrimaryTab() === "assets"
|
||||
|
||||
// Tab path mapping
|
||||
const basePath = `/target/${id}`
|
||||
const primaryPaths = {
|
||||
overview: `${basePath}/overview/`,
|
||||
assets: `${basePath}/websites/`, // Default to websites when clicking assets
|
||||
directories: `${basePath}/directories/`,
|
||||
screenshots: `${basePath}/screenshots/`,
|
||||
vulnerabilities: `${basePath}/vulnerabilities/`,
|
||||
settings: `${basePath}/settings/`,
|
||||
}
|
||||
|
||||
const secondaryPaths = {
|
||||
websites: `${basePath}/websites/`,
|
||||
subdomain: `${basePath}/subdomain/`,
|
||||
"ip-addresses": `${basePath}/ip-addresses/`,
|
||||
endpoints: `${basePath}/endpoints/`,
|
||||
}
|
||||
|
||||
// Get counts for each tab from target data
|
||||
const counts = {
|
||||
subdomain: (target as any)?.summary?.subdomains || 0,
|
||||
endpoints: (target as any)?.summary?.endpoints || 0,
|
||||
websites: (target as any)?.summary?.websites || 0,
|
||||
directories: (target as any)?.summary?.directories || 0,
|
||||
vulnerabilities: (target as any)?.summary?.vulnerabilities?.total || 0,
|
||||
"ip-addresses": (target as any)?.summary?.ips || 0,
|
||||
screenshots: (target as any)?.summary?.screenshots || 0,
|
||||
}
|
||||
|
||||
// Calculate total assets count
|
||||
const totalAssets = counts.websites + counts.subdomain + counts["ip-addresses"] + counts.endpoints
|
||||
|
||||
// Loading state
|
||||
if (isLoading) {
|
||||
return (
|
||||
<div className="flex flex-col gap-4 py-4 md:gap-6 md:py-6">
|
||||
{/* Header skeleton */}
|
||||
<div className="flex items-center gap-2 px-4 lg:px-6">
|
||||
<Skeleton className="h-4 w-16" />
|
||||
<span className="text-muted-foreground">/</span>
|
||||
<Skeleton className="h-4 w-32" />
|
||||
</div>
|
||||
{/* Tabs skeleton */}
|
||||
<div className="flex gap-1 px-4 lg:px-6">
|
||||
<Skeleton className="h-9 w-20" />
|
||||
<Skeleton className="h-9 w-20" />
|
||||
<Skeleton className="h-9 w-24" />
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// Error state
|
||||
if (error) {
|
||||
return (
|
||||
<div className="flex flex-col gap-4 py-4 md:gap-6 md:py-6">
|
||||
<div className="flex items-center justify-center py-12">
|
||||
<div className="text-center">
|
||||
<Target className="mx-auto text-destructive mb-4" />
|
||||
<h3 className="text-lg font-semibold mb-2">{t("error.title")}</h3>
|
||||
<p className="text-muted-foreground">
|
||||
{error.message || t("error.message")}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
if (!target) {
|
||||
return (
|
||||
<div className="flex flex-col gap-4 py-4 md:gap-6 md:py-6">
|
||||
<div className="flex items-center justify-center py-12">
|
||||
<div className="text-center">
|
||||
<Target className="mx-auto text-muted-foreground mb-4" />
|
||||
<h3 className="text-lg font-semibold mb-2">{t("notFound.title")}</h3>
|
||||
<p className="text-muted-foreground">
|
||||
{t("notFound.message", { id })}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="flex flex-col gap-4 py-4 md:gap-6 md:py-6">
|
||||
{/* Header: Page label + Target name */}
|
||||
<div className="flex items-center gap-2 text-sm px-4 lg:px-6">
|
||||
<span className="text-muted-foreground">{t("breadcrumb.targetDetail")}</span>
|
||||
<span className="text-muted-foreground">/</span>
|
||||
<span className="font-medium flex items-center gap-1.5">
|
||||
<Target className="h-4 w-4" />
|
||||
{target.name}
|
||||
</span>
|
||||
</div>
|
||||
|
||||
{/* Primary navigation */}
|
||||
<div className="flex items-center justify-between px-4 lg:px-6">
|
||||
<div className="flex items-center gap-3">
|
||||
<Tabs value={getPrimaryTab()}>
|
||||
<TabsList>
|
||||
<TabsTrigger value="overview" asChild>
|
||||
<Link href={primaryPaths.overview} className="flex items-center gap-1.5">
|
||||
<LayoutDashboard className="h-4 w-4" />
|
||||
{t("tabs.overview")}
|
||||
</Link>
|
||||
</TabsTrigger>
|
||||
<TabsTrigger value="assets" asChild>
|
||||
<Link href={primaryPaths.assets} className="flex items-center gap-1.5">
|
||||
<Package className="h-4 w-4" />
|
||||
{t("tabs.assets")}
|
||||
{totalAssets > 0 && (
|
||||
<Badge variant="secondary" className="ml-1.5 h-5 min-w-5 rounded-full px-1.5 text-xs">
|
||||
{totalAssets}
|
||||
</Badge>
|
||||
)}
|
||||
</Link>
|
||||
</TabsTrigger>
|
||||
<TabsTrigger value="directories" asChild>
|
||||
<Link href={primaryPaths.directories} className="flex items-center gap-1.5">
|
||||
<FolderSearch className="h-4 w-4" />
|
||||
{t("tabs.directories")}
|
||||
{counts.directories > 0 && (
|
||||
<Badge variant="secondary" className="ml-1.5 h-5 min-w-5 rounded-full px-1.5 text-xs">
|
||||
{counts.directories}
|
||||
</Badge>
|
||||
)}
|
||||
</Link>
|
||||
</TabsTrigger>
|
||||
<TabsTrigger value="screenshots" asChild>
|
||||
<Link href={primaryPaths.screenshots} className="flex items-center gap-1.5">
|
||||
<Image className="h-4 w-4" />
|
||||
{t("tabs.screenshots")}
|
||||
{counts.screenshots > 0 && (
|
||||
<Badge variant="secondary" className="ml-1.5 h-5 min-w-5 rounded-full px-1.5 text-xs">
|
||||
{counts.screenshots}
|
||||
</Badge>
|
||||
)}
|
||||
</Link>
|
||||
</TabsTrigger>
|
||||
<TabsTrigger value="vulnerabilities" asChild>
|
||||
<Link href={primaryPaths.vulnerabilities} className="flex items-center gap-1.5">
|
||||
<ShieldAlert className="h-4 w-4" />
|
||||
{t("tabs.vulnerabilities")}
|
||||
{counts.vulnerabilities > 0 && (
|
||||
<Badge variant="secondary" className="ml-1.5 h-5 min-w-5 rounded-full px-1.5 text-xs">
|
||||
{counts.vulnerabilities}
|
||||
</Badge>
|
||||
)}
|
||||
</Link>
|
||||
</TabsTrigger>
|
||||
<TabsTrigger value="settings" asChild>
|
||||
<Link href={primaryPaths.settings} className="flex items-center gap-1.5">
|
||||
<Settings className="h-4 w-4" />
|
||||
{t("tabs.settings")}
|
||||
</Link>
|
||||
</TabsTrigger>
|
||||
</TabsList>
|
||||
</Tabs>
|
||||
|
||||
{getPrimaryTab() === "directories" && (
|
||||
<TooltipProvider>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<HelpCircle className="h-4 w-4 text-muted-foreground cursor-help" />
|
||||
</TooltipTrigger>
|
||||
<TooltipContent side="right" className="max-w-sm">
|
||||
{t("directoriesHelp")}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</TooltipProvider>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Secondary navigation (only for assets) */}
|
||||
{showSecondaryNav && (
|
||||
<div className="flex items-center px-4 lg:px-6">
|
||||
<Tabs value={getSecondaryTab()} className="w-full">
|
||||
<TabsList variant="underline">
|
||||
<TabsTrigger value="websites" variant="underline" asChild>
|
||||
<Link href={secondaryPaths.websites} className="flex items-center gap-0.5">
|
||||
{t("tabs.websites")}
|
||||
{counts.websites > 0 && (
|
||||
<Badge variant="secondary" className="ml-1.5 h-5 min-w-5 rounded-full px-1.5 text-xs">
|
||||
{counts.websites}
|
||||
</Badge>
|
||||
)}
|
||||
</Link>
|
||||
</TabsTrigger>
|
||||
<TabsTrigger value="subdomain" variant="underline" asChild>
|
||||
<Link href={secondaryPaths.subdomain} className="flex items-center gap-0.5">
|
||||
{t("tabs.subdomains")}
|
||||
{counts.subdomain > 0 && (
|
||||
<Badge variant="secondary" className="ml-1.5 h-5 min-w-5 rounded-full px-1.5 text-xs">
|
||||
{counts.subdomain}
|
||||
</Badge>
|
||||
)}
|
||||
</Link>
|
||||
</TabsTrigger>
|
||||
<TabsTrigger value="ip-addresses" variant="underline" asChild>
|
||||
<Link href={secondaryPaths["ip-addresses"]} className="flex items-center gap-0.5">
|
||||
{t("tabs.ips")}
|
||||
{counts["ip-addresses"] > 0 && (
|
||||
<Badge variant="secondary" className="ml-1.5 h-5 min-w-5 rounded-full px-1.5 text-xs">
|
||||
{counts["ip-addresses"]}
|
||||
</Badge>
|
||||
)}
|
||||
</Link>
|
||||
</TabsTrigger>
|
||||
<TabsTrigger value="endpoints" variant="underline" asChild>
|
||||
<Link href={secondaryPaths.endpoints} className="flex items-center gap-0.5">
|
||||
{t("tabs.urls")}
|
||||
{counts.endpoints > 0 && (
|
||||
<Badge variant="secondary" className="ml-1.5 h-5 min-w-5 rounded-full px-1.5 text-xs">
|
||||
{counts.endpoints}
|
||||
</Badge>
|
||||
)}
|
||||
</Link>
|
||||
</TabsTrigger>
|
||||
</TabsList>
|
||||
</Tabs>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Sub-page content */}
|
||||
{children}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
19
frontend/app/[locale]/target/[id]/overview/page.tsx
Normal file
19
frontend/app/[locale]/target/[id]/overview/page.tsx
Normal file
@@ -0,0 +1,19 @@
|
||||
"use client"
|
||||
|
||||
import { useParams } from "next/navigation"
|
||||
import { TargetOverview } from "@/components/target/target-overview"
|
||||
|
||||
/**
|
||||
* Target overview page
|
||||
* Displays target statistics and summary information
|
||||
*/
|
||||
export default function TargetOverviewPage() {
|
||||
const { id } = useParams<{ id: string }>()
|
||||
const targetId = Number(id)
|
||||
|
||||
return (
|
||||
<div className="px-4 lg:px-6">
|
||||
<TargetOverview targetId={targetId} />
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -4,16 +4,16 @@ import { useParams, useRouter } from "next/navigation"
|
||||
import { useEffect } from "react"
|
||||
|
||||
/**
|
||||
* 目标详情默认页面
|
||||
* 自动重定向到域名页面
|
||||
* Target detail default page
|
||||
* Automatically redirects to overview page
|
||||
*/
|
||||
export default function TargetDetailPage() {
|
||||
const { id } = useParams<{ id: string }>()
|
||||
const router = useRouter()
|
||||
|
||||
useEffect(() => {
|
||||
// 重定向到子域名页面
|
||||
router.replace(`/target/${id}/subdomain/`)
|
||||
// Redirect to overview page
|
||||
router.replace(`/target/${id}/overview/`)
|
||||
}, [id, router])
|
||||
|
||||
return null
|
||||
15
frontend/app/[locale]/target/[id]/screenshots/page.tsx
Normal file
15
frontend/app/[locale]/target/[id]/screenshots/page.tsx
Normal file
@@ -0,0 +1,15 @@
|
||||
"use client"
|
||||
|
||||
import { useParams } from "next/navigation"
|
||||
import { ScreenshotsGallery } from "@/components/screenshots/screenshots-gallery"
|
||||
|
||||
export default function ScreenshotsPage() {
|
||||
const { id } = useParams<{ id: string }>()
|
||||
const targetId = Number(id)
|
||||
|
||||
return (
|
||||
<div className="px-4 lg:px-6">
|
||||
<ScreenshotsGallery targetId={targetId} />
|
||||
</div>
|
||||
)
|
||||
}
|
||||
19
frontend/app/[locale]/target/[id]/settings/page.tsx
Normal file
19
frontend/app/[locale]/target/[id]/settings/page.tsx
Normal file
@@ -0,0 +1,19 @@
|
||||
"use client"
|
||||
|
||||
import { useParams } from "next/navigation"
|
||||
import { TargetSettings } from "@/components/target/target-settings"
|
||||
|
||||
/**
|
||||
* Target settings page
|
||||
* Contains blacklist configuration and other settings
|
||||
*/
|
||||
export default function TargetSettingsPage() {
|
||||
const { id } = useParams<{ id: string }>()
|
||||
const targetId = Number(id)
|
||||
|
||||
return (
|
||||
<div className="px-4 lg:px-6">
|
||||
<TargetSettings targetId={targetId} />
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -5,14 +5,14 @@ import { useParams } from "next/navigation"
|
||||
import { VulnerabilitiesDetailView } from "@/components/vulnerabilities"
|
||||
|
||||
/**
|
||||
* 目标漏洞页面
|
||||
* 显示目标下的漏洞详情
|
||||
* Target vulnerabilities page
|
||||
* Displays vulnerability details under the target
|
||||
*/
|
||||
export default function TargetVulnerabilitiesPage() {
|
||||
const { id } = useParams<{ id: string }>()
|
||||
|
||||
return (
|
||||
<div className="relative flex flex-col gap-4 overflow-auto px-4 lg:px-6">
|
||||
<div className="px-4 lg:px-6">
|
||||
<VulnerabilitiesDetailView targetId={parseInt(id)} />
|
||||
</div>
|
||||
)
|
||||
@@ -1,23 +1,28 @@
|
||||
"use client"
|
||||
|
||||
import { AllTargetsDetailView } from "@/components/target/all-targets-detail-view"
|
||||
import { Target } from "lucide-react"
|
||||
import { useTranslations } from "next-intl"
|
||||
|
||||
export default function AllTargetsPage() {
|
||||
const t = useTranslations("pages.target")
|
||||
|
||||
return (
|
||||
<div className="flex flex-col gap-4 py-4 md:gap-6 md:py-6">
|
||||
{/* 页面头部 */}
|
||||
{/* Page header */}
|
||||
<div className="flex items-center justify-between px-4 lg:px-6">
|
||||
<div>
|
||||
<h2 className="text-2xl font-bold tracking-tight flex items-center gap-2">
|
||||
<Target />
|
||||
目标
|
||||
{t("title")}
|
||||
</h2>
|
||||
<p className="text-muted-foreground">
|
||||
管理系统中的所有目标信息
|
||||
{t("description")}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* 内容区域 */}
|
||||
{/* Content area */}
|
||||
<div className="px-4 lg:px-6">
|
||||
<AllTargetsDetailView />
|
||||
</div>
|
||||
8
frontend/app/[locale]/tools/config/custom/page.tsx
Normal file
8
frontend/app/[locale]/tools/config/custom/page.tsx
Normal file
@@ -0,0 +1,8 @@
|
||||
/**
|
||||
* Custom tools page
|
||||
* Display and manage custom scanning scripts and tools
|
||||
*/
|
||||
export default function CustomToolsPage() {
|
||||
// Tool configuration feature has been deprecated, this page is kept as placeholder to avoid broken historical links
|
||||
return null
|
||||
}
|
||||
8
frontend/app/[locale]/tools/config/opensource/page.tsx
Normal file
8
frontend/app/[locale]/tools/config/opensource/page.tsx
Normal file
@@ -0,0 +1,8 @@
|
||||
/**
|
||||
* Open source tools page
|
||||
* Display and manage open source scanning tools
|
||||
*/
|
||||
export default function OpensourceToolsPage() {
|
||||
// Tool configuration feature has been deprecated, this page is kept as placeholder to avoid broken historical links
|
||||
return null
|
||||
}
|
||||
10
frontend/app/[locale]/tools/config/page.tsx
Normal file
10
frontend/app/[locale]/tools/config/page.tsx
Normal file
@@ -0,0 +1,10 @@
|
||||
"use client"
|
||||
|
||||
/**
|
||||
* Tool configuration page
|
||||
* Display and manage scanning tool sets (open source tools and custom tools)
|
||||
*/
|
||||
export default function ToolConfigPage() {
|
||||
// Tool configuration feature has been deprecated, this page is kept as placeholder to avoid broken historical links
|
||||
return null
|
||||
}
|
||||
12
frontend/app/[locale]/tools/fingerprints/arl/page.tsx
Normal file
12
frontend/app/[locale]/tools/fingerprints/arl/page.tsx
Normal file
@@ -0,0 +1,12 @@
|
||||
"use client"
|
||||
|
||||
import React from "react"
|
||||
import { ARLFingerprintView } from "@/components/fingerprints"
|
||||
|
||||
export default function ARLFingerprintPage() {
|
||||
return (
|
||||
<div className="px-4 lg:px-6">
|
||||
<ARLFingerprintView />
|
||||
</div>
|
||||
)
|
||||
}
|
||||
12
frontend/app/[locale]/tools/fingerprints/ehole/page.tsx
Normal file
12
frontend/app/[locale]/tools/fingerprints/ehole/page.tsx
Normal file
@@ -0,0 +1,12 @@
|
||||
"use client"
|
||||
|
||||
import React from "react"
|
||||
import { EholeFingerprintView } from "@/components/fingerprints"
|
||||
|
||||
export default function EholeFingerprintPage() {
|
||||
return (
|
||||
<div className="px-4 lg:px-6">
|
||||
<EholeFingerprintView />
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
"use client"
|
||||
|
||||
import React from "react"
|
||||
import { FingerPrintHubFingerprintView } from "@/components/fingerprints"
|
||||
|
||||
export default function FingerPrintHubFingerprintPage() {
|
||||
return (
|
||||
<div className="px-4 lg:px-6">
|
||||
<FingerPrintHubFingerprintView />
|
||||
</div>
|
||||
)
|
||||
}
|
||||
12
frontend/app/[locale]/tools/fingerprints/fingers/page.tsx
Normal file
12
frontend/app/[locale]/tools/fingerprints/fingers/page.tsx
Normal file
@@ -0,0 +1,12 @@
|
||||
"use client"
|
||||
|
||||
import React from "react"
|
||||
import { FingersFingerprintView } from "@/components/fingerprints"
|
||||
|
||||
export default function FingersFingerprintPage() {
|
||||
return (
|
||||
<div className="px-4 lg:px-6">
|
||||
<FingersFingerprintView />
|
||||
</div>
|
||||
)
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user